diff --git a/CHANGELOG.md b/CHANGELOG.md index 990297e83..1e23ccd43 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [Unreleased]() +### Changed + +- BREAKING: rename Algo to Function ([#573](https://github.com/Substra/substra-backend/pull/573)) + ## [0.35.0](https://github.com/Substra/substra-backend/releases/tag/0.35.0) 2023-02-06 ### Added diff --git a/backend/api/events/sync.py b/backend/api/events/sync.py index 5f21a9a59..ce211202f 100644 --- a/backend/api/events/sync.py +++ b/backend/api/events/sync.py @@ -22,12 +22,12 @@ from api.models import DataManager from api.models import DataSample from api.models import Model -from api.serializers import AlgoSerializer from api.serializers import ChannelOrganizationSerializer from api.serializers import ComputePlanSerializer from api.serializers import ComputeTaskSerializer from api.serializers import DataManagerSerializer from api.serializers import DataSampleSerializer +from api.serializers import FunctionSerializer from api.serializers import ModelSerializer from api.serializers import PerformanceSerializer from orchestrator import client as orc_client @@ -66,35 +66,35 @@ def _create_organization(channel: str, data: dict) -> None: logger.debug("Organization already exists", organization_id=data["id"], channel=data["channel"]) -def _on_create_algo_event(event: dict) -> None: - """Process create algo event to update local database.""" - logger.debug("Syncing algo create", asset_key=event["asset_key"], event_id=event["id"]) - _create_algo(channel=event["channel"], data=event["algo"]) +def _on_create_function_event(event: dict) -> None: + """Process create function event to update local database.""" + logger.debug("Syncing function create", asset_key=event["asset_key"], event_id=event["id"]) + _create_function(channel=event["channel"], data=event["function"]) -def _create_algo(channel: str, data: dict) -> None: +def _create_function(channel: str, data: dict) -> None: data["channel"] = channel - serializer = AlgoSerializer(data=data) + serializer = FunctionSerializer(data=data) try: serializer.save_if_not_exists() except AlreadyExistsError: - logger.debug("Algo already exists", asset_key=data["key"]) + logger.debug("Function already exists", asset_key=data["key"]) -def _on_update_algo_event(event: dict) -> None: - """Process update algo event to update local database.""" - logger.debug("Syncing algo update", asset_key=event["asset_key"], event_id=event["id"]) - _update_algo(key=event["asset_key"], data=event["algo"]) +def _on_update_function_event(event: dict) -> None: + """Process update function event to update local database.""" + logger.debug("Syncing function update", asset_key=event["asset_key"], event_id=event["id"]) + _update_function(key=event["asset_key"], data=event["function"]) -def _update_algo(key: str, data: dict) -> None: - """Process update algo event to update local database.""" +def _update_function(key: str, data: dict) -> None: + """Process update function event to update local database.""" - from api.models.algo import Algo + from api.models.function import Function - algo = Algo.objects.get(key=key) - algo.name = data["name"] - algo.save() + function = Function.objects.get(key=key) + function.name = data["name"] + function.save() def _on_create_computeplan_event(event: dict) -> None: @@ -385,9 +385,9 @@ def _on_create_failure_report(event: dict) -> None: event_pb2.EVENT_ASSET_CREATED: 
_on_create_computeplan_event, event_pb2.EVENT_ASSET_UPDATED: _on_update_computeplan_event, }, - common_pb2.ASSET_ALGO: { - event_pb2.EVENT_ASSET_CREATED: _on_create_algo_event, - event_pb2.EVENT_ASSET_UPDATED: _on_update_algo_event, + common_pb2.ASSET_FUNCTION: { + event_pb2.EVENT_ASSET_CREATED: _on_create_function_event, + event_pb2.EVENT_ASSET_UPDATED: _on_update_function_event, }, common_pb2.ASSET_COMPUTE_TASK: { event_pb2.EVENT_ASSET_CREATED: _on_create_computetask_event, diff --git a/backend/api/management/commands/generate_fixtures.py b/backend/api/management/commands/generate_fixtures.py index aed1771f7..75f5cfc96 100644 --- a/backend/api/management/commands/generate_fixtures.py +++ b/backend/api/management/commands/generate_fixtures.py @@ -14,7 +14,7 @@ class Command(BaseCommand): def handle(self, *args, **options): logger.debug("Generate test data") - self.create_algos() + self.create_functions() self.create_data_manager() self.create_data_samples() self.create_empty_cp() @@ -29,56 +29,56 @@ def handle(self, *args, **options): self.create_aggregate_cp() self.create_composite_cp() - def create_algos(self): - logger.debug(" Create algos") - self.simple_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener", "model"]), - outputs=factory.build_algo_outputs(["model"]), + def create_functions(self): + logger.debug(" Create functions") + self.simple_function = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener", "model"]), + outputs=factory.build_function_outputs(["model"]), name="simple", ) - factory.create_algo_files( - key=self.simple_algo.key, - description=ContentFile("Simple algo"), + factory.create_function_files( + key=self.simple_function.key, + description=ContentFile("Simple function"), ) - self.aggregate_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["models"]), - outputs=factory.build_algo_outputs(["model"]), + self.aggregate_function = factory.create_function( + inputs=factory.build_function_inputs(["models"]), + outputs=factory.build_function_outputs(["model"]), name="aggregate", ) - factory.create_algo_files( - key=self.aggregate_algo.key, - description=ContentFile("Aggregate algo"), + factory.create_function_files( + key=self.aggregate_function.key, + description=ContentFile("Aggregate function"), ) - self.composite_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener", "local", "shared"]), - outputs=factory.build_algo_outputs(["local", "shared"]), + self.composite_function = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener", "local", "shared"]), + outputs=factory.build_function_outputs(["local", "shared"]), name="composite", ) - factory.create_algo_files( - key=self.composite_algo.key, - description=ContentFile("Composite algo"), + factory.create_function_files( + key=self.composite_function.key, + description=ContentFile("Composite function"), ) - self.predict_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener", "model", "shared"]), - outputs=factory.build_algo_outputs(["predictions"]), + self.predict_function = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener", "model", "shared"]), + outputs=factory.build_function_outputs(["predictions"]), name="predict", ) - factory.create_algo_files( - key=self.predict_algo.key, - description=ContentFile("Predict algo"), + factory.create_function_files( + key=self.predict_function.key, + 
description=ContentFile("Predict function"), ) - self.metric_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener", "predictions"]), - outputs=factory.build_algo_outputs(["performance"]), + self.metric_function = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener", "predictions"]), + outputs=factory.build_function_outputs(["performance"]), name="metric", ) - factory.create_algo_files( - key=self.metric_algo.key, - description=ContentFile("Metric algo"), + factory.create_function_files( + key=self.metric_function.key, + description=ContentFile("Metric function"), ) def create_data_manager(self): @@ -128,15 +128,15 @@ def create_basic_cp(self, cp_status): first_task_status = cp_status train_task = factory.create_computetask( cp, - self.simple_algo, + self.simple_function, inputs=factory.build_computetask_inputs( - self.simple_algo, + self.simple_function, { "opener": [self.data_manager.key], "datasamples": self.train_data_sample_keys, }, ), - outputs=factory.build_computetask_outputs(self.simple_algo), + outputs=factory.build_computetask_outputs(self.simple_function), data_manager=self.data_manager, data_samples=self.train_data_sample_keys, status=first_task_status, @@ -159,16 +159,16 @@ def create_basic_cp(self, cp_status): ) predict_task = factory.create_computetask( cp, - self.predict_algo, + self.predict_function, inputs=factory.build_computetask_inputs( - self.predict_algo, + self.predict_function, { "opener": [self.data_manager.key], "datasamples": self.predict_data_sample_keys, "model": [train_task.key], }, ), - outputs=factory.build_computetask_outputs(self.predict_algo), + outputs=factory.build_computetask_outputs(self.predict_function), data_manager=self.data_manager, data_samples=self.predict_data_sample_keys, status=task_status, @@ -182,16 +182,16 @@ def create_basic_cp(self, cp_status): test_task = factory.create_computetask( cp, - self.metric_algo, + self.metric_function, inputs=factory.build_computetask_inputs( - self.metric_algo, + self.metric_function, { "opener": [self.data_manager.key], "datasamples": self.test_data_sample_keys, "predictions": [predict_task.key], }, ), - outputs=factory.build_computetask_outputs(self.metric_algo), + outputs=factory.build_computetask_outputs(self.metric_function), data_manager=self.data_manager, data_samples=self.test_data_sample_keys, status=task_status, @@ -199,7 +199,7 @@ def create_basic_cp(self, cp_status): if task_status == ComputeTask.Status.STATUS_DONE: factory.create_performance( test_task, - self.metric_algo, + self.metric_function, identifier="performance", ) @@ -214,44 +214,44 @@ def create_aggregate_cp(self): ) train_task_1 = factory.create_computetask( cp, - self.simple_algo, + self.simple_function, inputs=factory.build_computetask_inputs( - self.simple_algo, + self.simple_function, { "opener": [self.data_manager.key], "datasamples": self.train_data_sample_keys, }, ), - outputs=factory.build_computetask_outputs(self.simple_algo), + outputs=factory.build_computetask_outputs(self.simple_function), data_manager=self.data_manager, data_samples=self.train_data_sample_keys, status=ComputeTask.Status.STATUS_TODO, ) train_task_2 = factory.create_computetask( cp, - self.simple_algo, + self.simple_function, inputs=factory.build_computetask_inputs( - self.simple_algo, + self.simple_function, { "opener": [self.data_manager.key], "datasamples": self.train_data_sample_keys, }, ), - outputs=factory.build_computetask_outputs(self.simple_algo), + 
outputs=factory.build_computetask_outputs(self.simple_function), data_manager=self.data_manager, data_samples=self.train_data_sample_keys, status=ComputeTask.Status.STATUS_TODO, ) factory.create_computetask( cp, - self.aggregate_algo, + self.aggregate_function, inputs=factory.build_computetask_inputs( - self.aggregate_algo, + self.aggregate_function, { "model": [train_task_1.key, train_task_2.key], }, ), - outputs=factory.build_computetask_outputs(self.aggregate_algo), + outputs=factory.build_computetask_outputs(self.aggregate_function), status=ComputeTask.Status.STATUS_TODO, ) return cp @@ -264,15 +264,15 @@ def create_composite_cp(self): ) composite_task = factory.create_computetask( cp, - self.composite_algo, + self.composite_function, inputs=factory.build_computetask_inputs( - self.composite_algo, + self.composite_function, { "opener": [self.data_manager.key], "datasamples": self.train_data_sample_keys, }, ), - outputs=factory.build_computetask_outputs(self.composite_algo), + outputs=factory.build_computetask_outputs(self.composite_function), data_manager=self.data_manager, data_samples=self.train_data_sample_keys, status=ComputeTask.Status.STATUS_DONE, diff --git a/backend/api/migrations/0047_rename_algo_to_function.py b/backend/api/migrations/0047_rename_algo_to_function.py new file mode 100644 index 000000000..bd8570c6a --- /dev/null +++ b/backend/api/migrations/0047_rename_algo_to_function.py @@ -0,0 +1,49 @@ +# Generated by Django 4.0.7 on 2023-02-02 17:12 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("api", "0046_remove_computeplan_failed_task_category"), + ] + + operations = [ + migrations.RenameModel( + old_name="Algo", + new_name="Function", + ), + migrations.RenameField( + model_name="function", + old_name="algorithm_address", + new_name="function_address", + ), + migrations.RenameField( + model_name="function", + old_name="algorithm_checksum", + new_name="function_checksum", + ), + migrations.RenameModel( + old_name="AlgoInput", + new_name="FunctionInput", + ), + migrations.RenameModel( + old_name="AlgoOutput", + new_name="FunctionOutput", + ), + migrations.RenameField( + model_name="functionoutput", + old_name="algo", + new_name="function", + ), + migrations.RenameField( + model_name="functioninput", + old_name="algo", + new_name="function", + ), + migrations.RenameField( + model_name="computetask", + old_name="algo", + new_name="function", + ), + ] diff --git a/backend/api/models/__init__.py b/backend/api/models/__init__.py index f601c5981..237db5529 100644 --- a/backend/api/models/__init__.py +++ b/backend/api/models/__init__.py @@ -1,6 +1,3 @@ -from .algo import Algo -from .algo import AlgoInput -from .algo import AlgoOutput from .computeplan import ComputePlan from .computetask import ComputeTask from .computetask import ComputeTaskInput @@ -10,6 +7,9 @@ from .datamanager import DataManager from .datasample import DataSample from .events import LastEvent +from .function import Function +from .function import FunctionInput +from .function import FunctionOutput from .model import Model from .organization import ChannelOrganization from .performance import Performance @@ -17,9 +17,9 @@ from .task_profiling import TaskProfiling __all__ = [ - "Algo", - "AlgoInput", - "AlgoOutput", + "Function", + "FunctionInput", + "FunctionOutput", "ComputePlan", "ComputeTask", "ComputeTaskOutput", diff --git a/backend/api/models/computetask.py b/backend/api/models/computetask.py index 6c5da4119..70fb81f0e 100644 --- 
a/backend/api/models/computetask.py +++ b/backend/api/models/computetask.py @@ -3,9 +3,9 @@ import orchestrator.computetask_pb2 as computetask_pb2 import orchestrator.failure_report_pb2 as failure_report_pb2 -from api.models.algo import AlgoInput -from api.models.algo import AlgoOutput from api.models.datasample import DataSample +from api.models.function import FunctionInput +from api.models.function import FunctionOutput from api.models.utils import AssetPermissionMixin from api.models.utils import URLValidatorWithOptionalTLD @@ -41,14 +41,14 @@ class Meta: class ComputeTaskInputAsset(models.Model): task_input = models.OneToOneField("ComputeTaskInput", on_delete=models.CASCADE, related_name="asset") - asset_kind = models.CharField(max_length=64, choices=AlgoInput.Kind.choices) + asset_kind = models.CharField(max_length=64, choices=FunctionInput.Kind.choices) asset_key = models.UUIDField() channel = models.CharField(max_length=100) class ComputeTaskOutputAsset(models.Model): task_output = models.ForeignKey("ComputeTaskOutput", on_delete=models.CASCADE, related_name="assets") - asset_kind = models.CharField(max_length=64, choices=AlgoOutput.Kind.choices) + asset_kind = models.CharField(max_length=64, choices=FunctionOutput.Kind.choices) asset_key = models.CharField(max_length=73) # performance have composite key: key1|key2 channel = models.CharField(max_length=100) @@ -70,7 +70,7 @@ class ErrorType(models.TextChoices): ERROR_TYPE_INTERNAL = failure_report_pb2.ErrorType.Name(failure_report_pb2.ERROR_TYPE_INTERNAL) key = models.UUIDField(primary_key=True) - algo = models.ForeignKey("Algo", on_delete=models.CASCADE, related_name="compute_tasks") + function = models.ForeignKey("Function", on_delete=models.CASCADE, related_name="compute_tasks") owner = models.CharField(max_length=100) compute_plan = models.ForeignKey("ComputePlan", on_delete=models.deletion.CASCADE, related_name="compute_tasks") rank = models.IntegerField() diff --git a/backend/api/models/algo.py b/backend/api/models/function.py similarity index 77% rename from backend/api/models/algo.py rename to backend/api/models/function.py index f53f5f37a..c6b094904 100644 --- a/backend/api/models/algo.py +++ b/backend/api/models/function.py @@ -6,13 +6,13 @@ from api.models.utils import URLValidatorWithOptionalTLD -class AlgoInput(models.Model): +class FunctionInput(models.Model): class Kind(models.TextChoices): ASSET_DATA_SAMPLE = common_pb2.AssetKind.Name(common_pb2.ASSET_DATA_SAMPLE) ASSET_DATA_MANAGER = common_pb2.AssetKind.Name(common_pb2.ASSET_DATA_MANAGER) ASSET_MODEL = common_pb2.AssetKind.Name(common_pb2.ASSET_MODEL) - algo = models.ForeignKey("Algo", on_delete=models.deletion.CASCADE, related_name="inputs") + function = models.ForeignKey("Function", on_delete=models.deletion.CASCADE, related_name="inputs") identifier = models.CharField(max_length=100) kind = models.CharField(max_length=64, choices=Kind.choices) optional = models.BooleanField(default=False) @@ -20,35 +20,35 @@ class Kind(models.TextChoices): channel = models.CharField(max_length=100) class Meta: - unique_together = (("algo", "identifier"),) + unique_together = (("function", "identifier"),) ordering = ["identifier"] # default order for relations serializations -class AlgoOutput(models.Model): +class FunctionOutput(models.Model): class Kind(models.TextChoices): ASSET_MODEL = common_pb2.AssetKind.Name(common_pb2.ASSET_MODEL) ASSET_PERFORMANCE = common_pb2.AssetKind.Name(common_pb2.ASSET_PERFORMANCE) - algo = models.ForeignKey("Algo", 
on_delete=models.deletion.CASCADE, related_name="outputs") + function = models.ForeignKey("Function", on_delete=models.deletion.CASCADE, related_name="outputs") identifier = models.CharField(max_length=100) kind = models.CharField(max_length=64, choices=Kind.choices) multiple = models.BooleanField(default=False) channel = models.CharField(max_length=100) class Meta: - unique_together = (("algo", "identifier"),) + unique_together = (("function", "identifier"),) ordering = ["identifier"] # default order for relations serializations -class Algo(models.Model, AssetPermissionMixin): - """Algo represent an algorithm and its associated metadata""" +class Function(models.Model, AssetPermissionMixin): + """Function represents a function and its associated metadata""" key = models.UUIDField(primary_key=True) name = models.CharField(max_length=100) description_address = models.URLField(validators=[URLValidatorWithOptionalTLD()]) description_checksum = models.CharField(max_length=64) - algorithm_address = models.URLField(validators=[URLValidatorWithOptionalTLD()]) - algorithm_checksum = models.CharField(max_length=64) + function_address = models.URLField(validators=[URLValidatorWithOptionalTLD()]) + function_checksum = models.CharField(max_length=64) permissions_download_public = models.BooleanField() permissions_download_authorized_ids = ArrayField(models.CharField(max_length=1024), size=100) permissions_process_public = models.BooleanField() diff --git a/backend/api/models/performance.py b/backend/api/models/performance.py index dba5cfd4a..41a7b64bb 100644 --- a/backend/api/models/performance.py +++ b/backend/api/models/performance.py @@ -3,7 +3,7 @@ class Performance(models.Model): compute_task = models.ForeignKey("ComputeTask", on_delete=models.deletion.DO_NOTHING, related_name="performances") - metric = models.ForeignKey("Algo", on_delete=models.deletion.DO_NOTHING, related_name="performances") + metric = models.ForeignKey("Function", on_delete=models.deletion.DO_NOTHING, related_name="performances") value = models.FloatField() creation_date = models.DateTimeField() channel = models.CharField(max_length=100) diff --git a/backend/api/serializers/__init__.py b/backend/api/serializers/__init__.py index 078e0ef8d..3f2f2d5e8 100644 --- a/backend/api/serializers/__init__.py +++ b/backend/api/serializers/__init__.py @@ -1,4 +1,3 @@ -from .algo import AlgoSerializer from .computeplan import ComputePlanSerializer from .computetask import ComputeTaskInputAssetSerializer from .computetask import ComputeTaskOutputAssetSerializer @@ -6,6 +5,7 @@ from .datamanager import DataManagerSerializer from .datamanager import DataManagerWithRelationsSerializer from .datasample import DataSampleSerializer +from .function import FunctionSerializer from .model import ModelSerializer from .organization import ChannelOrganizationSerializer from .performance import CPPerformanceSerializer @@ -14,7 +14,7 @@ from .task_profiling import TaskProfilingSerializer __all__ = [ - "AlgoSerializer", + "FunctionSerializer", "ComputePlanSerializer", "ComputeTaskSerializer", "ComputeTaskInputAssetSerializer", diff --git a/backend/api/serializers/computetask.py b/backend/api/serializers/computetask.py index e3ecb3d05..0555d2344 100644 --- a/backend/api/serializers/computetask.py +++ b/backend/api/serializers/computetask.py @@ -3,9 +3,6 @@ from rest_framework import serializers import orchestrator.failure_report_pb2 as failure_report_pb2 -from api.models import Algo -from api.models import AlgoInput -from api.models import AlgoOutput from 
api.models import ComputePlan from api.models import ComputeTask from api.models import ComputeTaskInput @@ -14,12 +11,15 @@ from api.models import ComputeTaskOutputAsset from api.models import DataManager from api.models import DataSample +from api.models import Function +from api.models import FunctionInput +from api.models import FunctionOutput from api.models import Model from api.models import Performance from api.models.computetask import TaskDataSamples -from api.serializers.algo import AlgoSerializer from api.serializers.datamanager import DataManagerSerializer from api.serializers.datasample import DataSampleSerializer +from api.serializers.function import FunctionSerializer from api.serializers.model import ModelSerializer from api.serializers.performance import PerformanceSerializer from api.serializers.utils import SafeSerializerMixin @@ -51,12 +51,12 @@ def to_representation(self, data): def get_asset(self, task_input): data = {} try: - if task_input.asset.asset_kind == AlgoInput.Kind.ASSET_DATA_MANAGER: + if task_input.asset.asset_kind == FunctionInput.Kind.ASSET_DATA_MANAGER: data_manager = DataManager.objects.get(key=task_input.asset.asset_key) data_manager_data = DataManagerSerializer(context=self.context, instance=data_manager).data data["addressable"] = data_manager_data["opener"] data["permissions"] = data_manager_data["permissions"] - elif task_input.asset.asset_kind == AlgoInput.Kind.ASSET_MODEL: + elif task_input.asset.asset_kind == FunctionInput.Kind.ASSET_MODEL: model = Model.objects.get(key=task_input.asset.asset_key) model_data = ModelSerializer(context=self.context, instance=model).data data["addressable"] = model_data["address"] @@ -82,10 +82,10 @@ class Meta: def get_value(self, task_output): data = [] for output_asset in task_output.assets.all(): - if output_asset.asset_kind == AlgoOutput.Kind.ASSET_MODEL: + if output_asset.asset_kind == FunctionOutput.Kind.ASSET_MODEL: model = Model.objects.get(key=output_asset.asset_key) data.append(ModelSerializer(context=self.context, instance=model).data) - elif output_asset.asset_kind == AlgoOutput.Kind.ASSET_PERFORMANCE: + elif output_asset.asset_kind == FunctionOutput.Kind.ASSET_PERFORMANCE: task_key, metric_key = output_asset.asset_key.split("|") perf = Performance.objects.get(compute_task__key=task_key, metric__key=metric_key) data.append(perf.value) @@ -117,13 +117,13 @@ def get_identifier(self, task_input_asset): return task_input_asset.task_input.identifier def get_asset(self, task_input_asset): - if task_input_asset.asset_kind == AlgoInput.Kind.ASSET_DATA_SAMPLE: + if task_input_asset.asset_kind == FunctionInput.Kind.ASSET_DATA_SAMPLE: data_sample = DataSample.objects.get(key=task_input_asset.asset_key) return DataSampleSerializer(context=self.context, instance=data_sample).data - elif task_input_asset.asset_kind == AlgoInput.Kind.ASSET_DATA_MANAGER: + elif task_input_asset.asset_kind == FunctionInput.Kind.ASSET_DATA_MANAGER: data_manager = DataManager.objects.get(key=task_input_asset.asset_key) return DataManagerSerializer(context=self.context, instance=data_manager).data - elif task_input_asset.asset_kind == AlgoInput.Kind.ASSET_MODEL: + elif task_input_asset.asset_kind == FunctionInput.Kind.ASSET_MODEL: model = Model.objects.get(key=task_input_asset.asset_key) return ModelSerializer(context=self.context, instance=model).data @@ -145,26 +145,26 @@ def get_identifier(self, task_output_asset): return task_output_asset.task_output.identifier def get_asset(self, task_output_asset): - if 
task_output_asset.asset_kind == AlgoOutput.Kind.ASSET_MODEL: + if task_output_asset.asset_kind == FunctionOutput.Kind.ASSET_MODEL: model = Model.objects.get(key=task_output_asset.asset_key) return ModelSerializer(context=self.context, instance=model).data - elif task_output_asset.asset_kind == AlgoOutput.Kind.ASSET_PERFORMANCE: + elif task_output_asset.asset_kind == FunctionOutput.Kind.ASSET_PERFORMANCE: task_key, metric_key = task_output_asset.asset_key.split("|") performance = Performance.objects.get(compute_task__key=task_key, metric__key=metric_key) return PerformanceSerializer(context=self.context, instance=performance).data -class AlgoField(serializers.Field): +class FunctionField(serializers.Field): def to_representation(self, data): - return AlgoSerializer(instance=data).data + return FunctionSerializer(instance=data).data def to_internal_value(self, data): - return Algo.objects.get(key=data["key"]) + return Function.objects.get(key=data["key"]) class ComputeTaskSerializer(serializers.ModelSerializer, SafeSerializerMixin): logs_permission = make_permission_serializer("logs_permission")(source="*") - algo = AlgoField() + function = FunctionField() # Need to set `pk_field` for `PrimaryKeyRelatedField` in order to correctly serialize `UUID` to `str` # See: https://stackoverflow.com/a/51636009 @@ -182,7 +182,7 @@ class ComputeTaskSerializer(serializers.ModelSerializer, SafeSerializerMixin): class Meta: model = ComputeTask fields = [ - "algo", + "function", "channel", "compute_plan_key", "creation_date", @@ -224,12 +224,12 @@ def _replace_storage_addresses(self, task): return task # replace in common relationships - if "algo" in task: - task["algo"]["description"]["storage_address"] = request.build_absolute_uri( - reverse("api:algo-description", args=[task["algo"]["key"]]) + if "function" in task: + task["function"]["description"]["storage_address"] = request.build_absolute_uri( + reverse("api:function-description", args=[task["function"]["key"]]) ) - task["algo"]["algorithm"]["storage_address"] = request.build_absolute_uri( - reverse("api:algo-file", args=[task["algo"]["key"]]) + task["function"]["function"]["storage_address"] = request.build_absolute_uri( + reverse("api:function-file", args=[task["function"]["key"]]) ) @transaction.atomic @@ -242,7 +242,9 @@ def create(self, validated_data): outputs = validated_data.pop("outputs") compute_task = super().create(validated_data) - input_kinds = {algo_input.identifier: algo_input.kind for algo_input in compute_task.algo.inputs.all()} + input_kinds = { + function_input.identifier: function_input.kind for function_input in compute_task.function.inputs.all() + } for order, data_sample in enumerate(data_samples): TaskDataSamples.objects.create(compute_task=compute_task, data_sample=data_sample, order=order) diff --git a/backend/api/serializers/algo.py b/backend/api/serializers/function.py similarity index 57% rename from backend/api/serializers/algo.py rename to backend/api/serializers/function.py index f517e418a..24d3844f2 100644 --- a/backend/api/serializers/algo.py +++ b/backend/api/serializers/function.py @@ -1,18 +1,18 @@ from django.urls import reverse from rest_framework import serializers -from api.models import Algo -from api.models import AlgoInput -from api.models import AlgoOutput +from api.models import Function +from api.models import FunctionInput +from api.models import FunctionOutput from api.serializers.utils import SafeSerializerMixin from api.serializers.utils import get_channel_choices from api.serializers.utils import 
make_addressable_serializer from api.serializers.utils import make_download_process_permission_serializer -class AlgoInputSerializer(serializers.ModelSerializer, SafeSerializerMixin): +class FunctionInputSerializer(serializers.ModelSerializer, SafeSerializerMixin): class Meta: - model = AlgoInput + model = FunctionInput fields = [ "identifier", "kind", @@ -21,9 +21,9 @@ class Meta: ] -class AlgoOutputSerializer(serializers.ModelSerializer, SafeSerializerMixin): +class FunctionOutputSerializer(serializers.ModelSerializer, SafeSerializerMixin): class Meta: - model = AlgoOutput + model = FunctionOutput fields = [ "identifier", "kind", @@ -31,18 +31,18 @@ class Meta: ] -class AlgoSerializer(serializers.ModelSerializer, SafeSerializerMixin): - algorithm = make_addressable_serializer("algorithm")(source="*") +class FunctionSerializer(serializers.ModelSerializer, SafeSerializerMixin): + function = make_addressable_serializer("function")(source="*") channel = serializers.ChoiceField(choices=get_channel_choices(), write_only=True) description = make_addressable_serializer("description")(source="*") permissions = make_download_process_permission_serializer()(source="*") - inputs = AlgoInputSerializer(many=True) - outputs = AlgoOutputSerializer(many=True) + inputs = FunctionInputSerializer(many=True) + outputs = FunctionOutputSerializer(many=True) class Meta: - model = Algo + model = Function fields = [ - "algorithm", + "function", "channel", "creation_date", "description", @@ -60,10 +60,10 @@ def to_representation(self, instance): request = self.context.get("request") if request: res["description"]["storage_address"] = request.build_absolute_uri( - reverse("api:algo-description", args=[res["key"]]) + reverse("api:function-description", args=[res["key"]]) ) - res["algorithm"]["storage_address"] = request.build_absolute_uri( - reverse("api:algo-file", args=[res["key"]]) + res["function"]["storage_address"] = request.build_absolute_uri( + reverse("api:function-file", args=[res["key"]]) ) # from list to dict, to align with the orchestrator format res["inputs"] = {_input.pop("identifier"): _input for _input in res["inputs"]} @@ -80,24 +80,24 @@ def to_internal_value(self, data): def create(self, validated_data): inputs = validated_data.pop("inputs") outputs = validated_data.pop("outputs") - algo = super().create(validated_data) + function = super().create(validated_data) - algo_inputs = AlgoInputSerializer(data=inputs, many=True) - algo_inputs.is_valid(raise_exception=True) - for algo_input in algo_inputs.validated_data: - AlgoInput.objects.create( - channel=algo.channel, - algo=algo, - **algo_input, + function_inputs = FunctionInputSerializer(data=inputs, many=True) + function_inputs.is_valid(raise_exception=True) + for function_input in function_inputs.validated_data: + FunctionInput.objects.create( + channel=function.channel, + function=function, + **function_input, ) - algo_outputs = AlgoOutputSerializer(data=outputs, many=True) - algo_outputs.is_valid(raise_exception=True) - for algo_output in algo_outputs.validated_data: - AlgoOutput.objects.create( - channel=algo.channel, - algo=algo, - **algo_output, + function_outputs = FunctionOutputSerializer(data=outputs, many=True) + function_outputs.is_valid(raise_exception=True) + for function_output in function_outputs.validated_data: + FunctionOutput.objects.create( + channel=function.channel, + function=function, + **function_output, ) - return algo + return function diff --git a/backend/api/serializers/performance.py 
b/backend/api/serializers/performance.py index 958a13425..b667aeab1 100644 --- a/backend/api/serializers/performance.py +++ b/backend/api/serializers/performance.py @@ -1,8 +1,8 @@ from rest_framework import serializers -from api.models import Algo -from api.models import AlgoOutput from api.models import ComputeTask +from api.models import Function +from api.models import FunctionOutput from api.models import Performance from api.serializers.utils import SafeSerializerMixin from orchestrator import common_pb2 @@ -20,7 +20,7 @@ class PerformanceSerializer(serializers.ModelSerializer, SafeSerializerMixin): ) metric_key = serializers.PrimaryKeyRelatedField( - queryset=Algo.objects.all(), source="metric", pk_field=serializers.UUIDField(format="hex_verbose") + queryset=Function.objects.all(), source="metric", pk_field=serializers.UUIDField(format="hex_verbose") ) performance_value = serializers.FloatField(source="value") @@ -39,23 +39,25 @@ class _PerformanceMetricSerializer(serializers.ModelSerializer): output_identifier = serializers.SerializerMethodField() class Meta: - model = Algo + model = Function fields = ["key", "name", "output_identifier"] def get_output_identifier(self, obj): try: - performance_output = AlgoOutput.objects.get( - algo_id=obj.key, kind=common_pb2.AssetKind.Name(common_pb2.ASSET_PERFORMANCE) + performance_output = FunctionOutput.objects.get( + function_id=obj.key, kind=common_pb2.AssetKind.Name(common_pb2.ASSET_PERFORMANCE) + ) + except (FunctionOutput.MultipleObjectsReturned, FunctionOutput.DoesNotExist) as e: + raise Exception( + f"Couldn't associate an output identifier to performance for function '{obj.key}', error : {e}" ) - except (AlgoOutput.MultipleObjectsReturned, AlgoOutput.DoesNotExist) as e: - raise Exception(f"Couldn't associate an output identifier to performance for algo '{obj.key}', error : {e}") return performance_output.identifier class _PerformanceComputeTaskSerializer(serializers.ModelSerializer): data_manager_key = serializers.UUIDField(format="hex_verbose", source="data_manager_id") - algo_key = serializers.UUIDField(format="hex_verbose", source="algo_id") + function_key = serializers.UUIDField(format="hex_verbose", source="function_id") round_idx = serializers.SerializerMethodField() class Meta: @@ -63,7 +65,7 @@ class Meta: fields = [ "key", "data_manager_key", - "algo_key", + "function_key", "rank", "round_idx", "data_samples", diff --git a/backend/api/tests/asset_factory.py b/backend/api/tests/asset_factory.py index 922b83f08..610ca49e4 100644 --- a/backend/api/tests/asset_factory.py +++ b/backend/api/tests/asset_factory.py @@ -3,9 +3,9 @@ Basic example: ->>> algo = create_algo( -... inputs=factory.build_algo_inputs(["datasamples", "opener", "model"]), -... outputs=factory.build_algo_outputs(["model"]), +>>> function = create_function( +... inputs=factory.build_function_inputs(["datasamples", "opener", "model"]), +... outputs=factory.build_function_outputs(["model"]), ... ) >>> data_manager = create_datamanager() >>> data_sample = create_datasample([data_manager]) @@ -13,24 +13,24 @@ >>> train_task = create_computetask( ... compute_plan, -... algo, +... function, ... inputs=factory.build_computetask_inputs( -... algo, +... function, ... { ... "opener": [data_manager.key], ... "datasamples": [data_sample.key], ... }, ... ), -... outputs=factory.build_computetask_outputs(algo), +... outputs=factory.build_computetask_outputs(function), ... data_manager=data_manager, ... data_samples=[data_sample.key], ... 
status=ComputeTask.Status.STATUS_DONE, ... ) >>> model = create_model(train_task, identifier="model") ->>> metric = create_algo( -... inputs=factory.build_algo_inputs(["datasamples", "opener", "model"]), -... outputs=factory.build_algo_outputs(["performance"]), +>>> metric = create_function( +... inputs=factory.build_function_inputs(["datasamples", "opener", "model"]), +... outputs=factory.build_function_outputs(["performance"]), ... ) >>> test_task = create_computetask( ... compute_plan, @@ -52,11 +52,10 @@ Customized example: ->>> algo_data = create_algo_files() ->>> algo = create_algo( -... key=algo_data.key, +>>> function_data = create_function_files() +>>> function = create_function( +... key=function_data.key, ... name="Random forest", -... category=AlgoCategory.simple, ... metadata={"foo": "bar"}, ... owner="MyOrg2MSP", ... channel="yourchannel", @@ -70,9 +70,6 @@ from django.core import files from django.utils import timezone -from api.models import Algo -from api.models import AlgoInput -from api.models import AlgoOutput from api.models import ComputePlan from api.models import ComputeTask from api.models import ComputeTaskInput @@ -81,15 +78,18 @@ from api.models import ComputeTaskOutputAsset from api.models import DataManager from api.models import DataSample +from api.models import Function +from api.models import FunctionInput +from api.models import FunctionOutput from api.models import Model from api.models import Performance from api.models import ProfilingStep from api.models import TaskProfiling from api.models.computetask import TaskDataSamples -from substrapp.models import Algo as AlgoFiles from substrapp.models import ComputeTaskFailureReport as ComputeTaskLogs from substrapp.models import DataManager as DataManagerFiles from substrapp.models import DataSample as DataSampleFiles +from substrapp.models import Function as FunctionFiles from substrapp.models import Model as ModelFiles from substrapp.utils import get_hash @@ -102,63 +102,63 @@ # Inputs and outputs values belongs to the business logic and are handled at the substra SDK level. # We use them here only to have realistic test data, but the API should remained agnostic from them.
-ALGO_INPUTS = { - "datasamples": dict(kind=AlgoInput.Kind.ASSET_DATA_SAMPLE, multiple=True, optional=False), - "opener": dict(kind=AlgoInput.Kind.ASSET_DATA_MANAGER, multiple=False, optional=False), - "model": dict(kind=AlgoInput.Kind.ASSET_MODEL, multiple=False, optional=True), - "models": dict(kind=AlgoInput.Kind.ASSET_MODEL, multiple=True, optional=True), - "local": dict(kind=AlgoInput.Kind.ASSET_MODEL, multiple=False, optional=True), - "shared": dict(kind=AlgoInput.Kind.ASSET_MODEL, multiple=False, optional=True), - "predictions": dict(kind=AlgoInput.Kind.ASSET_MODEL, multiple=False, optional=False), +FUNCTION_INPUTS = { + "datasamples": dict(kind=FunctionInput.Kind.ASSET_DATA_SAMPLE, multiple=True, optional=False), + "opener": dict(kind=FunctionInput.Kind.ASSET_DATA_MANAGER, multiple=False, optional=False), + "model": dict(kind=FunctionInput.Kind.ASSET_MODEL, multiple=False, optional=True), + "models": dict(kind=FunctionInput.Kind.ASSET_MODEL, multiple=True, optional=True), + "local": dict(kind=FunctionInput.Kind.ASSET_MODEL, multiple=False, optional=True), + "shared": dict(kind=FunctionInput.Kind.ASSET_MODEL, multiple=False, optional=True), + "predictions": dict(kind=FunctionInput.Kind.ASSET_MODEL, multiple=False, optional=False), } -ALGO_OUTPUTS = { - "model": dict(kind=AlgoOutput.Kind.ASSET_MODEL, multiple=False), - "local": dict(kind=AlgoOutput.Kind.ASSET_MODEL, multiple=False), - "shared": dict(kind=AlgoOutput.Kind.ASSET_MODEL, multiple=False), - "predictions": dict(kind=AlgoOutput.Kind.ASSET_MODEL, multiple=False), - "performance": dict(kind=AlgoOutput.Kind.ASSET_PERFORMANCE, multiple=False), +FUNCTION_OUTPUTS = { + "model": dict(kind=FunctionOutput.Kind.ASSET_MODEL, multiple=False), + "local": dict(kind=FunctionOutput.Kind.ASSET_MODEL, multiple=False), + "shared": dict(kind=FunctionOutput.Kind.ASSET_MODEL, multiple=False), + "predictions": dict(kind=FunctionOutput.Kind.ASSET_MODEL, multiple=False), + "performance": dict(kind=FunctionOutput.Kind.ASSET_PERFORMANCE, multiple=False), } -def build_algo_inputs(identifiers: list[str]) -> list[AlgoInput]: - return [AlgoInput(identifier=identifier, **ALGO_INPUTS[identifier]) for identifier in identifiers] +def build_function_inputs(identifiers: list[str]) -> list[FunctionInput]: + return [FunctionInput(identifier=identifier, **FUNCTION_INPUTS[identifier]) for identifier in identifiers] -def build_algo_outputs(identifiers: list[str]) -> list[AlgoOutput]: - return [AlgoOutput(identifier=identifier, **ALGO_OUTPUTS[identifier]) for identifier in identifiers] +def build_function_outputs(identifiers: list[str]) -> list[FunctionOutput]: + return [FunctionOutput(identifier=identifier, **FUNCTION_OUTPUTS[identifier]) for identifier in identifiers] def build_computetask_inputs( - algo: Algo, + function: Function, keys: dict[str : list[uuid.UUID]], ) -> list[ComputeTaskInput]: task_inputs = [] - for algo_input in algo.inputs.all(): - for key in keys.get(algo_input.identifier, []): - task_input = ComputeTaskInput(identifier=algo_input.identifier) - if algo_input.kind in (AlgoInput.Kind.ASSET_DATA_MANAGER, AlgoInput.Kind.ASSET_DATA_SAMPLE): + for function_input in function.inputs.all(): + for key in keys.get(function_input.identifier, []): + task_input = ComputeTaskInput(identifier=function_input.identifier) + if function_input.kind in (FunctionInput.Kind.ASSET_DATA_MANAGER, FunctionInput.Kind.ASSET_DATA_SAMPLE): task_input.asset_key = key else: # we assume that all other assets are produced by parent tasks task_input.parent_task_key_id = key - 
task_input.parent_task_output_identifier = algo_input.identifier + task_input.parent_task_output_identifier = function_input.identifier task_inputs.append(task_input) return task_inputs def build_computetask_outputs( - algo: Algo, + function: Function, owner: str = DEFAULT_OWNER, public: bool = False, ) -> list[ComputeTaskOutput]: return [ ComputeTaskOutput( - identifier=algo_output.identifier, + identifier=function_output.identifier, permissions_download_public=public, permissions_download_authorized_ids=[owner], permissions_process_public=public, permissions_process_authorized_ids=[owner], ) - for algo_output in algo.outputs.all() + for function_output in function.outputs.all() ] @@ -218,26 +218,26 @@ def get_computeplan_dates(status: int, creation_date: datetime.datetime) -> tupl return start_date, end_date -def create_algo( - inputs: list[AlgoInput] = None, - outputs: list[AlgoInput] = None, +def create_function( + inputs: list[FunctionInput] = None, + outputs: list[FunctionInput] = None, key: uuid.UUID = None, - name: str = "algo", + name: str = "function", metadata: dict = None, owner: str = DEFAULT_OWNER, channel: str = DEFAULT_CHANNEL, public: bool = False, -) -> Algo: +) -> Function: if key is None: key = uuid.uuid4() - algo = Algo.objects.create( + function = Function.objects.create( key=key, name=name, metadata=metadata or {}, - algorithm_address=get_storage_address("algo", key, "file"), - algorithm_checksum=DUMMY_CHECKSUM, - description_address=get_storage_address("algo", key, "description"), + function_address=get_storage_address("function", key, "file"), + function_checksum=DUMMY_CHECKSUM, + description_address=get_storage_address("function", key, "description"), description_checksum=DUMMY_CHECKSUM, creation_date=timezone.now(), owner=owner, @@ -246,17 +246,17 @@ def create_algo( ) if inputs: - for algo_input in inputs: - algo_input.algo = algo - algo_input.channel = channel - algo_input.save() + for function_input in inputs: + function_input.function = function + function_input.channel = channel + function_input.save() if outputs: - for algo_output in outputs: - algo_output.algo = algo - algo_output.channel = channel - algo_output.save() + for function_output in outputs: + function_output.function = function + function_output.channel = channel + function_output.save() - return algo + return function def create_datamanager( @@ -337,7 +337,7 @@ def create_computeplan( def create_computetask( compute_plan: ComputePlan, - algo: Algo, + function: Function, inputs: list[ComputeTaskInput] = None, outputs: list[ComputeTaskOutput] = None, data_manager: DataManager = None, @@ -359,7 +359,7 @@ def create_computetask( key = uuid.uuid4() compute_task = ComputeTask.objects.create( compute_plan=compute_plan, - algo=algo, + function=function, data_manager=data_manager, key=key, status=status, @@ -384,7 +384,9 @@ def create_computetask( compute_task.refresh_from_db() if inputs: - input_kinds = {algo_input.identifier: algo_input.kind for algo_input in compute_task.algo.inputs.all()} + input_kinds = { + function_input.identifier: function_input.kind for function_input in compute_task.function.inputs.all() + } for position, task_input in enumerate(inputs): task_input.task = compute_task task_input.channel = channel @@ -429,7 +431,7 @@ def create_model( ) ComputeTaskOutputAsset.objects.create( task_output=compute_task.outputs.get(identifier=identifier), - asset_kind=AlgoOutput.Kind.ASSET_MODEL, + asset_kind=FunctionOutput.Kind.ASSET_MODEL, asset_key=model.key, channel=channel, ) @@ -439,7 +441,7 
@@ def create_model( ): ComputeTaskInputAsset.objects.create( task_input=task_input, - asset_kind=AlgoOutput.Kind.ASSET_MODEL, + asset_kind=FunctionOutput.Kind.ASSET_MODEL, asset_key=model.key, channel=channel, ) @@ -448,7 +450,7 @@ def create_model( def create_performance( compute_task: ComputeTask, - metric: Algo, + metric: Function, identifier: str = "performance", value: float = 1.0, channel: str = DEFAULT_CHANNEL, @@ -462,18 +464,18 @@ def create_performance( ) ComputeTaskOutputAsset.objects.create( task_output=compute_task.outputs.get(identifier=identifier), - asset_kind=AlgoOutput.Kind.ASSET_PERFORMANCE, + asset_kind=FunctionOutput.Kind.ASSET_PERFORMANCE, asset_key=f"{compute_task.key}|{metric.key}", channel=channel, ) return performance -def create_algo_files( +def create_function_files( key: uuid.UUID = None, file: files.File = None, description: files.File = None, -) -> AlgoFiles: +) -> FunctionFiles: if key is None: key = uuid.uuid4() if file is None: @@ -481,13 +483,13 @@ def create_algo_files( if description is None: description = files.base.ContentFile("dummy content") - algo_files = AlgoFiles.objects.create( + function_files = FunctionFiles.objects.create( key=key, checksum=get_hash(file), ) - algo_files.file.save("algo", file) - algo_files.description.save("description", description) - return algo_files + function_files.file.save("function", file) + function_files.description.save("description", description) + return function_files def create_datamanager_files( diff --git a/backend/api/tests/models/test_models_computeplan.py b/backend/api/tests/models/test_models_computeplan.py index 3452991a8..699e5a077 100644 --- a/backend/api/tests/models/test_models_computeplan.py +++ b/backend/api/tests/models/test_models_computeplan.py @@ -17,9 +17,9 @@ ), ) def test_update_dates_single_task(status, has_start_date, has_end_date): - algo = factory.create_algo() + function = factory.create_function() compute_plan = factory.create_computeplan() - compute_task = factory.create_computetask(compute_plan, algo, status=status) + compute_task = factory.create_computetask(compute_plan, function, status=status) # validate inputs if has_start_date: assert compute_task.start_date is not None @@ -52,10 +52,10 @@ def test_update_dates_single_task(status, has_start_date, has_end_date): ), ) def test_update_dates_ended_cp_with_ongoing_task(status, has_start_date, has_end_date): - algo = factory.create_algo() + function = factory.create_function() compute_plan = factory.create_computeplan() - factory.create_computetask(compute_plan, algo, status=status) - factory.create_computetask(compute_plan, algo, status=ComputeTask.Status.STATUS_WAITING) + factory.create_computetask(compute_plan, function, status=status) + factory.create_computetask(compute_plan, function, status=ComputeTask.Status.STATUS_WAITING) compute_plan.update_dates() # validate outputs diff --git a/backend/api/tests/serializers/test_model.py b/backend/api/tests/serializers/test_model.py index 5417047f2..15100c675 100644 --- a/backend/api/tests/serializers/test_model.py +++ b/backend/api/tests/serializers/test_model.py @@ -37,9 +37,9 @@ def test_disabled_model_address(self): def test_sync_disabled_model(self): """Disabled models should be valid without address""" - algo = factory.create_algo() + function = factory.create_function() compute_plan = factory.create_computeplan() - compute_task = factory.create_computetask(compute_plan, algo) + compute_task = factory.create_computetask(compute_plan, function) data = { "key": str(uuid4()), diff 
--git a/backend/api/tests/views/test_utils.py b/backend/api/tests/views/test_utils.py index 1968069dc..487b9afd2 100644 --- a/backend/api/tests/views/test_utils.py +++ b/backend/api/tests/views/test_utils.py @@ -18,9 +18,9 @@ from api.views.utils import validate_metadata from organization.authentication import OrganizationUser from organization.models import OutgoingOrganization -from substrapp.models import Algo as AlgoFiles -from substrapp.tests.common import get_description_algo -from substrapp.tests.common import get_sample_algo +from substrapp.models import Function as FunctionFiles +from substrapp.tests.common import get_description_function +from substrapp.tests.common import get_sample_function MEDIA_ROOT = tempfile.mkdtemp() @@ -33,12 +33,12 @@ def setUp(self): if not os.path.exists(MEDIA_ROOT): os.makedirs(MEDIA_ROOT) - self.algo_file, self.algo_filename = get_sample_algo() - self.algo_file.seek(0) - self.algo_content = self.algo_file.read() - self.algo_description_file, self.algo_description_filename = get_description_algo() - self.algo_key = uuid.uuid4() - self.algo_url = reverse("api:algo-file", kwargs={"pk": self.algo_key}) + self.function_file, self.function_filename = get_sample_function() + self.function_file.seek(0) + self.function_content = self.function_file.read() + self.function_description_file, self.function_description_filename = get_description_function() + self.function_key = uuid.uuid4() + self.function_url = reverse("api:function-file", kwargs={"pk": self.function_key}) self.extra = { "HTTP_SUBSTRA_CHANNEL_NAME": "mychannel", "HTTP_ACCEPT": "application/json;version=0.0", @@ -49,32 +49,36 @@ def tearDown(self): def test_download_file_local_allowed(self): """Asset is local (owner is local-organization) and local-organization in authorized ids.""" - AlgoFiles.objects.create(key=self.algo_key, file=self.algo_file, description=self.algo_description_file) - metadata = factory.create_algo(key=self.algo_key, public=False, owner="local-organization") + FunctionFiles.objects.create( + key=self.function_key, file=self.function_file, description=self.function_description_file + ) + metadata = factory.create_function(key=self.function_key, public=False, owner="local-organization") self.assertIn("local-organization", metadata.permissions_process_authorized_ids) with mock.patch("api.views.utils.get_owner", return_value="local-organization"): - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.headers["Content-Disposition"], f'attachment; filename="{self.algo_filename}"') - self.assertEqual(response.getvalue(), self.algo_content) + self.assertEqual(response.headers["Content-Disposition"], f'attachment; filename="{self.function_filename}"') + self.assertEqual(response.getvalue(), self.function_content) def test_download_file_local_denied(self): """Asset is local (owner is local-organization) and local-organization NOT in authorized ids.""" - AlgoFiles.objects.create(key=self.algo_key, file=self.algo_file, description=self.algo_description_file) - metadata = factory.create_algo(key=self.algo_key, public=False, owner="local-organization") + FunctionFiles.objects.create( + key=self.function_key, file=self.function_file, description=self.function_description_file + ) + metadata = factory.create_function(key=self.function_key, public=False, owner="local-organization") metadata.permissions_process_authorized_ids = [] 
metadata.save() with mock.patch("api.views.utils.get_owner", return_value="local-organization"): - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_download_file_remote_allowed(self): """Asset is remote (owner is remote-organization) and local-organization in authorized ids.""" - metadata = factory.create_algo(key=self.algo_key, public=True, owner="remote-organization") + metadata = factory.create_function(key=self.function_key, public=True, owner="remote-organization") metadata.permissions_process_authorized_ids = ["remote-organization", "local-organization"] metadata.save() OutgoingOrganization.objects.create(organization_id="remote-organization", secret="s3cr37") @@ -84,24 +88,24 @@ def test_download_file_remote_allowed(self): ), responses.RequestsMock() as mocked_responses: mocked_responses.add( responses.GET, - metadata.algorithm_address, - body=self.algo_content, + metadata.function_address, + body=self.function_content, content_type="text/plain; charset=utf-8", ) - response = self.client.get(self.algo_url, **self.extra) - mocked_responses.assert_call_count(metadata.algorithm_address, 1) + response = self.client.get(self.function_url, **self.extra) + mocked_responses.assert_call_count(metadata.function_address, 1) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.getvalue(), self.algo_content) + self.assertEqual(response.getvalue(), self.function_content) def test_download_file_remote_denied(self): """Asset is remote (owner is remote-organization) and local-organization NOT in authorized ids.""" - metadata = factory.create_algo(key=self.algo_key, public=False, owner="remote-organization") + metadata = factory.create_function(key=self.function_key, public=False, owner="remote-organization") metadata.permissions_process_authorized_ids = ["remote-organization"] metadata.save() self.client.force_authenticate(user=OrganizationUser(username="local-organization")) - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) diff --git a/backend/api/tests/views/test_views_authentication.py b/backend/api/tests/views/test_views_authentication.py index 91d88d73c..cf01d4785 100644 --- a/backend/api/tests/views/test_views_authentication.py +++ b/backend/api/tests/views/test_views_authentication.py @@ -13,9 +13,9 @@ from api.tests.common import generate_basic_auth_header from organization.models import IncomingOrganization from organization.models import OutgoingOrganization -from substrapp.models import Algo as AlgoFiles -from substrapp.tests.common import get_description_algo -from substrapp.tests.common import get_sample_algo +from substrapp.models import Function as FunctionFiles +from substrapp.tests.common import get_description_function +from substrapp.tests.common import get_sample_function MEDIA_ROOT = tempfile.mkdtemp() @@ -30,14 +30,16 @@ def setUp(self): os.makedirs(MEDIA_ROOT) self.extra = {"HTTP_SUBSTRA_CHANNEL_NAME": "mychannel", "HTTP_ACCEPT": "application/json;version=0.0"} - # create algo instance which file download is protected - self.algo_file, self.algo_filename = get_sample_algo() - self.algo_description_file, self.algo_description_filename = get_description_algo() - self.algo = AlgoFiles.objects.create(file=self.algo_file, description=self.algo_description_file) - metadata 
= factory.create_algo(key=self.algo.key, public=True, owner="foo") - metadata.algorithm_address = "http://fake_address.com" + # create function instance whose file download is protected + self.function_file, self.function_filename = get_sample_function() + self.function_description_file, self.function_description_filename = get_description_function() + self.function = FunctionFiles.objects.create( + file=self.function_file, description=self.function_description_file + ) + metadata = factory.create_function(key=self.function.key, public=True, owner="foo") + metadata.function_address = "http://fake_address.com" metadata.save() - self.algo_url = reverse("api:algo-file", kwargs={"pk": self.algo.key}) + self.function_url = reverse("api:function-file", kwargs={"pk": self.function.key}) def tearDown(self): shutil.rmtree(MEDIA_ROOT, ignore_errors=True) @@ -57,7 +59,7 @@ def setUpTestData(cls): cls.user = user def test_authentication_fail(self): - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(status.HTTP_401_UNAUTHORIZED, response.status_code) @@ -69,7 +71,7 @@ def test_authentication_internal(self): self.client.credentials(HTTP_AUTHORIZATION=authorization_header) with mock.patch("api.views.utils.get_owner", return_value="foo"): - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(status.HTTP_200_OK, response.status_code) @@ -77,7 +79,7 @@ def test_authentication_with_bad_settings_credentials_fail(self): authorization_header = generate_basic_auth_header("unauthorized_username", "unauthorized_password") self.client.credentials(HTTP_AUTHORIZATION=authorization_header) - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(status.HTTP_401_UNAUTHORIZED, response.status_code) @@ -87,7 +89,7 @@ def test_authentication_with_organization(self): self.client.credentials(HTTP_AUTHORIZATION=authorization_header) with mock.patch("api.views.utils.get_owner", return_value="foo"): - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(status.HTTP_200_OK, response.status_code) @@ -100,7 +102,7 @@ def test_authentication_with_organization_fail(self): for header in bad_authorization_headers: self.client.credentials(HTTP_AUTHORIZATION=header) - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(status.HTTP_401_UNAUTHORIZED, response.status_code) @@ -128,7 +130,7 @@ def test_obtain_token(self): self.client.credentials(HTTP_AUTHORIZATION=invalid_auth_token_header) with mock.patch("api.views.utils.get_owner", return_value="foo"): - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(status.HTTP_401_UNAUTHORIZED, response.status_code) @@ -136,7 +138,7 @@ def test_obtain_token(self): self.client.credentials(HTTP_AUTHORIZATION=valid_auth_token_header) with mock.patch("api.views.utils.get_owner", return_value="foo"): - response = self.client.get(self.algo_url, **self.extra) + response = self.client.get(self.function_url, **self.extra) self.assertEqual(status.HTTP_200_OK, response.status_code) diff --git a/backend/api/tests/views/test_views_compute_plan_graph.py b/backend/api/tests/views/test_views_compute_plan_graph.py index 
9821869a3..50b0698ea 100644 --- a/backend/api/tests/views/test_views_compute_plan_graph.py +++ b/backend/api/tests/views/test_views_compute_plan_graph.py @@ -38,11 +38,11 @@ def test_empty_graph(self): def test_too_many_tasks(self): compute_plan = factory.create_computeplan() - algo = factory.create_algo() + function = factory.create_function() for _ in range(MAX_TASKS_DISPLAYED + 1): factory.create_computetask( compute_plan, - algo=algo, + function=function, ) url = reverse(self.base_url, args=[compute_plan.key]) response = self.client.get(url, **self.extra) @@ -51,77 +51,77 @@ def test_too_many_tasks(self): def test_cp_graph(self): compute_plan = factory.create_computeplan() - algo_train = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener"]), - outputs=factory.build_algo_outputs(["model"]), + function_train = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener"]), + outputs=factory.build_function_outputs(["model"]), ) - algo_predict = factory.create_algo( - inputs=factory.build_algo_inputs(["model"]), - outputs=factory.build_algo_outputs(["predictions"]), + function_predict = factory.create_function( + inputs=factory.build_function_inputs(["model"]), + outputs=factory.build_function_outputs(["predictions"]), ) - algo_test = factory.create_algo( - inputs=factory.build_algo_inputs(["predictions"]), - outputs=factory.build_algo_outputs(["performance"]), + function_test = factory.create_function( + inputs=factory.build_function_inputs(["predictions"]), + outputs=factory.build_function_outputs(["performance"]), ) - algo_aggregate = factory.create_algo( - inputs=factory.build_algo_inputs(["model"]), - outputs=factory.build_algo_outputs(["model"]), + function_aggregate = factory.create_function( + inputs=factory.build_function_inputs(["model"]), + outputs=factory.build_function_outputs(["model"]), ) train_task = factory.create_computetask( compute_plan, rank=1, - algo=algo_train, - outputs=factory.build_computetask_outputs(algo_train), + function=function_train, + outputs=factory.build_computetask_outputs(function_train), ) predict_task = factory.create_computetask( compute_plan, rank=2, - algo=algo_predict, + function=function_predict, inputs=factory.build_computetask_inputs( - algo_predict, + function_predict, { "model": [train_task.key], }, ), - outputs=factory.build_computetask_outputs(algo_predict), + outputs=factory.build_computetask_outputs(function_predict), ) test_task = factory.create_computetask( compute_plan, rank=3, - algo=algo_test, + function=function_test, inputs=factory.build_computetask_inputs( - algo_test, + function_test, { "predictions": [predict_task.key], }, ), - outputs=factory.build_computetask_outputs(algo_test), + outputs=factory.build_computetask_outputs(function_test), ) composite_task = factory.create_computetask( compute_plan, rank=10, - algo=algo_train, - outputs=factory.build_computetask_outputs(algo_train), + function=function_train, + outputs=factory.build_computetask_outputs(function_train), ) aggregate_task = factory.create_computetask( compute_plan, rank=11, - algo=algo_aggregate, + function=function_aggregate, inputs=factory.build_computetask_inputs( - algo_aggregate, + function_aggregate, { "model": [composite_task.key, train_task.key], }, ), - outputs=factory.build_computetask_outputs(algo_aggregate), + outputs=factory.build_computetask_outputs(function_aggregate), ) expected_results = { diff --git a/backend/api/tests/views/test_views_computeplan.py 
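Condensed from `test_cp_graph` above, the wiring pattern with the renamed factory helpers: a downstream task consumes an upstream output by mapping an input identifier to the producing task's key. A sketch using the same asset factory:

```python
from api.tests import asset_factory as factory

compute_plan = factory.create_computeplan()

function_train = factory.create_function(
    inputs=factory.build_function_inputs(["datasamples", "opener"]),
    outputs=factory.build_function_outputs(["model"]),
)
train_task = factory.create_computetask(
    compute_plan,
    function=function_train,  # keyword renamed from algo=
    outputs=factory.build_computetask_outputs(function_train),
)

function_predict = factory.create_function(
    inputs=factory.build_function_inputs(["model"]),
    outputs=factory.build_function_outputs(["predictions"]),
)
# The "model" input identifier points at the producing task's key.
predict_task = factory.create_computetask(
    compute_plan,
    function=function_predict,
    inputs=factory.build_computetask_inputs(function_predict, {"model": [train_task.key]}),
    outputs=factory.build_computetask_outputs(function_predict),
)
```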
b/backend/api/tests/views/test_views_computeplan.py index ebec9354c..efcbc89d7 100644 --- a/backend/api/tests/views/test_views_computeplan.py +++ b/backend/api/tests/views/test_views_computeplan.py @@ -67,25 +67,25 @@ def setUp(self): self.url = reverse("api:compute_plan-list") - algo = factory.create_algo() + function = factory.create_function() todo_cp = factory.create_computeplan(name="To do", status=ComputePlan.Status.PLAN_STATUS_TODO) - factory.create_computetask(todo_cp, algo, status=ComputeTask.Status.STATUS_TODO) + factory.create_computetask(todo_cp, function, status=ComputeTask.Status.STATUS_TODO) doing_cp = factory.create_computeplan(name="Doing", status=ComputePlan.Status.PLAN_STATUS_DOING) - factory.create_computetask(doing_cp, algo, status=ComputeTask.Status.STATUS_DOING) + factory.create_computetask(doing_cp, function, status=ComputeTask.Status.STATUS_DOING) self.now = doing_cp.start_date + datetime.timedelta(hours=1) done_cp = factory.create_computeplan(name="Done", status=ComputePlan.Status.PLAN_STATUS_DONE) - factory.create_computetask(done_cp, algo, status=ComputeTask.Status.STATUS_DONE) + factory.create_computetask(done_cp, function, status=ComputeTask.Status.STATUS_DONE) failed_cp = factory.create_computeplan(name="Failed", status=ComputePlan.Status.PLAN_STATUS_FAILED) - failed_task = factory.create_computetask(failed_cp, algo, status=ComputeTask.Status.STATUS_FAILED) + failed_task = factory.create_computetask(failed_cp, function, status=ComputeTask.Status.STATUS_FAILED) failed_cp.failed_task_key = str(failed_task.key) failed_cp.save() canceled_cp = factory.create_computeplan(name="Canceled", status=ComputePlan.Status.PLAN_STATUS_CANCELED) - factory.create_computetask(canceled_cp, algo, status=ComputeTask.Status.STATUS_CANCELED) + factory.create_computetask(canceled_cp, function, status=ComputeTask.Status.STATUS_CANCELED) empty_cp = factory.create_computeplan(name="Empty", status=ComputePlan.Status.PLAN_STATUS_EMPTY) @@ -476,13 +476,13 @@ def test_computeplan_list_filter_by_status_in(self, p_statuses): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_compute_plan_list_cross_assets_filters(self): - """Filter computeplan on other asset key such as algo_key, dataset_key and data_sample_key""" - algo = factory.create_algo() + """Filter computeplan on other asset key such as function_key, dataset_key and data_sample_key""" + function = factory.create_function() data_manager = factory.create_datamanager() data_sample = factory.create_datasample([data_manager]) compute_plan = factory.create_computeplan(name="cp", status=ComputePlan.Status.PLAN_STATUS_TODO) - factory.create_computetask(compute_plan, algo, data_manager=data_manager, data_samples=[data_sample.key]) + factory.create_computetask(compute_plan, function, data_manager=data_manager, data_samples=[data_sample.key]) expected_cp = { "key": str(compute_plan.key), "tag": "", @@ -505,8 +505,8 @@ def test_compute_plan_list_cross_assets_filters(self): "duration": 0, # because start_date is None } - # filter on algo_key - params = urlencode({"algo_key": algo.key}) + # filter on function_key + params = urlencode({"function_key": function.key}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual(response.json().get("results"), [expected_cp]) diff --git a/backend/api/tests/views/test_views_computetask.py b/backend/api/tests/views/test_views_computetask.py index 33c675bcb..08c3c8a53 100644 --- a/backend/api/tests/views/test_views_computetask.py +++ 
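The cross-asset filter above is driven purely by the query string; a hedged sketch of the call, assuming the authenticated test client and channel headers from `setUp`:

```python
from urllib.parse import urlencode

from django.urls import reverse


def compute_plans_for_function(client, extra, function_key):
    url = reverse("api:compute_plan-list")
    params = urlencode({"function_key": function_key})  # was algo_key
    return client.get(f"{url}?{params}", **extra).json()["results"]
```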
b/backend/api/tests/views/test_views_computetask.py @@ -13,9 +13,9 @@ from rest_framework.test import APITestCase from api.models import ComputeTask -from api.serializers import AlgoSerializer from api.serializers import DataManagerSerializer from api.serializers import DataSampleSerializer +from api.serializers import FunctionSerializer from api.serializers import ModelSerializer from api.tests import asset_factory as factory from api.tests.common import AuthenticatedClient @@ -38,10 +38,10 @@ def setUp(self): self.previous_level = self.logger.getEffectiveLevel() self.logger.setLevel(logging.ERROR) - self.simple_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener", "model"]), - outputs=factory.build_algo_outputs(["model"]), - name="simple algo", + self.simple_function = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener", "model"]), + outputs=factory.build_function_outputs(["model"]), + name="simple function", ) self.data_manager = factory.create_datamanager() self.data_sample = factory.create_datasample([self.data_manager]) @@ -66,9 +66,9 @@ def setUp(self): ) self.compute_tasks[_status] = factory.create_computetask( self.compute_plan, - self.simple_algo, - inputs=factory.build_computetask_inputs(self.simple_algo, input_keys), - outputs=factory.build_computetask_outputs(self.simple_algo), + self.simple_function, + inputs=factory.build_computetask_inputs(self.simple_function, input_keys), + outputs=factory.build_computetask_outputs(self.simple_function), data_manager=self.data_manager, data_samples=[self.data_sample.key], status=_status, @@ -78,7 +78,7 @@ def setUp(self): self.model = factory.create_model(self.done_task, identifier="model") # we don't explicitly serialize relationships as this test module is focused on computetask - self.simple_algo_data = AlgoSerializer(instance=self.simple_algo).data + self.simple_function_data = FunctionSerializer(instance=self.simple_function).data self.data_manager_data = DataManagerSerializer(instance=self.data_manager).data self.data_sample_data = DataSampleSerializer(instance=self.data_sample).data self.data_sample_data["data_manager_keys"] = [str(key) for key in self.data_sample_data["data_manager_keys"]] @@ -185,7 +185,7 @@ def mock_register_compute_task(orc_request): for in_data in orc_request["tasks"]: out_data = { "key": in_data["key"], - "algo_key": in_data["algo_key"], + "function_key": in_data["function_key"], "compute_plan_key": in_data["compute_plan_key"], "rank": 0, "status": "STATUS_WAITING", @@ -218,7 +218,7 @@ def mock_register_compute_task(orc_request): { "compute_plan_key": self.compute_plan.key, "key": train_task_key, - "algo_key": self.simple_algo.key, + "function_key": self.simple_function.key, "inputs": [self.datasamples_input, self.opener_input, self.model_input], "outputs": { "model": { @@ -233,7 +233,7 @@ def mock_register_compute_task(orc_request): expected_response = [ { "key": train_task_key, - "algo": self.simple_algo_data, + "function": self.simple_function_data, "compute_plan_key": str(self.compute_plan.key), "creation_date": "2021-11-04T13:54:09.882662Z", "end_date": None, @@ -289,7 +289,7 @@ def setUp(self): self.expected_results = [ { "key": str(todo_task.key), - "algo": self.simple_algo_data, + "function": self.simple_function_data, "owner": "MyOrg1MSP", "compute_plan_key": str(self.compute_plan.key), "metadata": {}, @@ -311,7 +311,7 @@ def setUp(self): }, { "key": str(waiting_task.key), - "algo": self.simple_algo_data, + "function": 
self.simple_function_data, "owner": "MyOrg1MSP", "compute_plan_key": str(self.compute_plan.key), "metadata": {}, @@ -333,7 +333,7 @@ def setUp(self): }, { "key": str(doing_task.key), - "algo": self.simple_algo_data, + "function": self.simple_function_data, "owner": "MyOrg1MSP", "compute_plan_key": str(self.compute_plan.key), "metadata": {}, @@ -355,7 +355,7 @@ def setUp(self): }, { "key": str(done_task.key), - "algo": self.simple_algo_data, + "function": self.simple_function_data, "owner": "MyOrg1MSP", "compute_plan_key": str(self.compute_plan.key), "metadata": {}, @@ -377,7 +377,7 @@ def setUp(self): }, { "key": str(failed_task.key), - "algo": self.simple_algo_data, + "function": self.simple_function_data, "owner": "MyOrg1MSP", "compute_plan_key": str(self.compute_plan.key), "metadata": {}, @@ -399,7 +399,7 @@ def setUp(self): }, { "key": str(canceled_task.key), - "algo": self.simple_algo_data, + "function": self.simple_function_data, "owner": "MyOrg1MSP", "compute_plan_key": str(self.compute_plan.key), "metadata": {}, @@ -597,11 +597,11 @@ def test_task_cp_list_success(self): ) def test_task_list_cross_assets_filters(self): - """Filter task on other asset key such as compute_plan_key, algo_key dataset_key and data_sample_key""" + """Filter task on other asset key such as compute_plan_key, function_key dataset_key and data_sample_key""" # filter on asset keys params_list = [ urlencode({"compute_plan_key": self.compute_plan.key}), - urlencode({"algo_key": self.simple_algo.key}), + urlencode({"function_key": self.simple_function.key}), urlencode({"dataset_key": self.data_manager.key}), urlencode({"data_sample_key": self.data_sample.key}), ] @@ -616,7 +616,7 @@ def test_task_list_cross_assets_filters(self): self.assertEqual(response.json().get("results"), self.expected_results) # filter on wrong key - params = urlencode({"algo_key": self.data_manager.key}) + params = urlencode({"function_key": self.data_manager.key}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual(len(response.json().get("results")), 0) diff --git a/backend/api/tests/views/test_views_computetask_logs.py b/backend/api/tests/views/test_views_computetask_logs.py index 03b79f770..99280ce05 100644 --- a/backend/api/tests/views/test_views_computetask_logs.py +++ b/backend/api/tests/views/test_views_computetask_logs.py @@ -20,7 +20,7 @@ def compute_task_failure_report() -> tuple[ComputeTask, ComputeTaskFailureReport]: compute_task = factory.create_computetask( factory.create_computeplan(), - factory.create_algo(), + factory.create_function(), public=False, owner=conf.settings.LEDGER_MSP_ID, ) diff --git a/backend/api/tests/views/test_views_datamanager.py b/backend/api/tests/views/test_views_datamanager.py index 6fc9ecdc7..56fc92047 100644 --- a/backend/api/tests/views/test_views_datamanager.py +++ b/backend/api/tests/views/test_views_datamanager.py @@ -51,11 +51,11 @@ def setUp(self): # only for retrieve view self.data_sample_keys = [str(data_sample_1.key), str(data_sample_2.key)] - self.algo = factory.create_algo() + self.function = factory.create_function() self.compute_plan = factory.create_computeplan() self.data_sample_1_key_uuid = data_sample_1.key factory.create_computetask( - self.compute_plan, self.algo, data_manager=data_manager_1, data_samples=[data_sample_1.key] + self.compute_plan, self.function, data_manager=data_manager_1, data_samples=[data_sample_1.key] ) data_manager_2 = factory.create_datamanager() @@ -246,14 +246,14 @@ def test_datamanager_list_filter_in(self): ) def 
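The registration mock above simply echoes each submitted task back with orchestrator-side defaults; condensed, the pattern reads as follows (field names from this diff, defaults as shown in the test):

```python
def mock_register_compute_task(orc_request):
    """Echo each submitted task back as the orchestrator would store it."""
    tasks = []
    for in_data in orc_request["tasks"]:
        tasks.append(
            {
                "key": in_data["key"],
                "function_key": in_data["function_key"],  # was algo_key
                "compute_plan_key": in_data["compute_plan_key"],
                "rank": 0,
                "status": "STATUS_WAITING",
            }
        )
    return tasks
```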
test_datamanager_list_cross_assets_filters(self): - """Filter datamanager on other asset key such as compute_plan_key, algo_key and data_sample_key""" + """Filter datamanager on other asset key such as compute_plan_key, function_key and data_sample_key""" # filter on compute_plan_key params = urlencode({"compute_plan_key": self.compute_plan.key}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual(response.json().get("results"), self.expected_results[:1]) - # filter on algo_key - params = urlencode({"algo_key": self.algo.key}) + # filter on function_key + params = urlencode({"function_key": self.function.key}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual(response.json().get("results"), self.expected_results[:1]) @@ -495,14 +495,14 @@ def test_datamanager_retrieve(self): def test_datamanager_retrieve_with_tasks(self): """Ensure the ordering association table does not create duplicate.""" compute_plan = factory.create_computeplan() - algo = factory.create_algo() + function = factory.create_function() data_manager = factory.create_datamanager() data_sample = factory.create_datasample([data_manager]) # Creating compute tasks will insert ordering objects `TaskDataSamples` for _ in range(3): factory.create_computetask( compute_plan, - algo, + function, data_manager=data_manager, data_samples=[data_sample.key], ) diff --git a/backend/api/tests/views/test_views_datasample.py b/backend/api/tests/views/test_views_datasample.py index 133dd68f9..507430eb2 100644 --- a/backend/api/tests/views/test_views_datasample.py +++ b/backend/api/tests/views/test_views_datasample.py @@ -73,11 +73,11 @@ def setUp(self): train_data_sample_2 = factory.create_datasample([data_manager]) test_data_sample = factory.create_datasample([data_manager]) - self.algo = factory.create_algo() + self.function = factory.create_function() self.compute_plan = factory.create_computeplan() self.data_manager_key_uuid = data_manager.key factory.create_computetask( - self.compute_plan, self.algo, data_manager=data_manager, data_samples=[train_data_sample_1.key] + self.compute_plan, self.function, data_manager=data_manager, data_samples=[train_data_sample_1.key] ) self.expected_results = [ @@ -175,14 +175,14 @@ def test_datasample_list_filter_in(self): ) def test_datasample_list_cross_assets_filters(self): - """Filter datasample on other asset key such as compute_plan_key, algo_key and dataset_key""" + """Filter datasample on other asset key such as compute_plan_key, function_key and dataset_key""" # filter on compute_plan_key params = urlencode({"compute_plan_key": self.compute_plan.key}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual(response.json().get("results"), self.expected_results) - # filter on algo_key - params = urlencode({"algo_key": self.algo.key}) + # filter on function_key + params = urlencode({"function_key": self.function.key}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual(response.json().get("results"), self.expected_results[:1]) diff --git a/backend/api/tests/views/test_views_algo.py b/backend/api/tests/views/test_views_function.py similarity index 63% rename from backend/api/tests/views/test_views_algo.py rename to backend/api/tests/views/test_views_function.py index ff999040a..42ff37720 100644 --- a/backend/api/tests/views/test_views_algo.py +++ b/backend/api/tests/views/test_views_function.py @@ -12,27 +12,27 @@ from rest_framework import status from rest_framework.test import 
APITestCase -from api.models import Algo +from api.models import Function from api.tests import asset_factory as factory from api.tests.common import AuthenticatedClient from api.tests.common import internal_server_error_on_exception from orchestrator.client import OrchestratorClient from orchestrator.error import OrcError from orchestrator.error import StatusCode -from substrapp.tests.common import AlgoCategory +from substrapp.tests.common import FunctionCategory from substrapp.utils import compute_hash MEDIA_ROOT = tempfile.mkdtemp() DIR_PATH = os.path.dirname(os.path.realpath(__file__)) -FIXTURE_PATH = os.path.join(DIR_PATH, "../../../../fixtures/chunantes/algos/algo0") +FIXTURE_PATH = os.path.join(DIR_PATH, "../../../../fixtures/chunantes/functions/function0") @override_settings( MEDIA_ROOT=MEDIA_ROOT, LEDGER_CHANNELS={"mychannel": {"chaincode": {"name": "mycc"}, "model_export_enabled": True}}, ) -class AlgoViewTests(APITestCase): +class FunctionViewTests(APITestCase): client_class = AuthenticatedClient def setUp(self): @@ -42,39 +42,39 @@ def setUp(self): self.logger = logging.getLogger("django.request") self.previous_level = self.logger.getEffectiveLevel() self.logger.setLevel(logging.ERROR) - self.url = reverse("api:algo-list") + self.url = reverse("api:function-list") - simple_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener", "model"]), - outputs=factory.build_algo_outputs(["model"]), - name="simple algo", + simple_function = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener", "model"]), + outputs=factory.build_function_outputs(["model"]), + name="simple function", ) - aggregate_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["models"]), - outputs=factory.build_algo_outputs(["model"]), + aggregate_function = factory.create_function( + inputs=factory.build_function_inputs(["models"]), + outputs=factory.build_function_outputs(["model"]), name="aggregate", ) - composite_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener", "local", "shared"]), - outputs=factory.build_algo_outputs(["local", "shared"]), + composite_function = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener", "local", "shared"]), + outputs=factory.build_function_outputs(["local", "shared"]), name="composite", ) - predict_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener", "model", "shared"]), - outputs=factory.build_algo_outputs(["predictions"]), + predict_function = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener", "model", "shared"]), + outputs=factory.build_function_outputs(["predictions"]), name="predict", ) - metric_algo = factory.create_algo( - inputs=factory.build_algo_inputs(["datasamples", "opener", "predictions"]), - outputs=factory.build_algo_outputs(["performance"]), + metric_function = factory.create_function( + inputs=factory.build_function_inputs(["datasamples", "opener", "predictions"]), + outputs=factory.build_function_outputs(["performance"]), name="metric", ) - self.algos = [simple_algo, aggregate_algo, composite_algo, predict_algo] - self.expected_algos = [ + self.functions = [simple_function, aggregate_function, composite_function, predict_function] + self.expected_functions = [ { - "key": str(simple_algo.key), - "name": "simple algo", + "key": str(simple_function.key), + "name": "simple function", "owner": "MyOrg1MSP", "permissions": { "process": { @@ -87,14 
+87,14 @@ def setUp(self): }, }, "metadata": {}, - "creation_date": simple_algo.creation_date.isoformat().replace("+00:00", "Z"), + "creation_date": simple_function.creation_date.isoformat().replace("+00:00", "Z"), "description": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{simple_algo.key}/description/", + "storage_address": f"http://testserver/function/{simple_function.key}/description/", }, - "algorithm": { + "function": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{simple_algo.key}/file/", + "storage_address": f"http://testserver/function/{simple_function.key}/file/", }, "inputs": { "datasamples": {"kind": "ASSET_DATA_SAMPLE", "optional": False, "multiple": True}, @@ -106,7 +106,7 @@ def setUp(self): }, }, { - "key": str(aggregate_algo.key), + "key": str(aggregate_function.key), "name": "aggregate", "owner": "MyOrg1MSP", "permissions": { @@ -120,14 +120,14 @@ def setUp(self): }, }, "metadata": {}, - "creation_date": aggregate_algo.creation_date.isoformat().replace("+00:00", "Z"), + "creation_date": aggregate_function.creation_date.isoformat().replace("+00:00", "Z"), "description": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{aggregate_algo.key}/description/", + "storage_address": f"http://testserver/function/{aggregate_function.key}/description/", }, - "algorithm": { + "function": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{aggregate_algo.key}/file/", + "storage_address": f"http://testserver/function/{aggregate_function.key}/file/", }, "inputs": { "models": {"kind": "ASSET_MODEL", "optional": True, "multiple": True}, @@ -137,7 +137,7 @@ def setUp(self): }, }, { - "key": str(composite_algo.key), + "key": str(composite_function.key), "name": "composite", "owner": "MyOrg1MSP", "permissions": { @@ -151,14 +151,14 @@ def setUp(self): }, }, "metadata": {}, - "creation_date": composite_algo.creation_date.isoformat().replace("+00:00", "Z"), + "creation_date": composite_function.creation_date.isoformat().replace("+00:00", "Z"), "description": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{composite_algo.key}/description/", + "storage_address": f"http://testserver/function/{composite_function.key}/description/", }, - "algorithm": { + "function": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{composite_algo.key}/file/", + "storage_address": f"http://testserver/function/{composite_function.key}/file/", }, "inputs": { "datasamples": {"kind": "ASSET_DATA_SAMPLE", "optional": False, "multiple": True}, @@ -172,7 +172,7 @@ def setUp(self): }, }, { - "key": str(predict_algo.key), + "key": str(predict_function.key), "name": "predict", "owner": "MyOrg1MSP", "permissions": { @@ -186,14 +186,14 @@ def setUp(self): }, }, "metadata": {}, - "creation_date": predict_algo.creation_date.isoformat().replace("+00:00", "Z"), + "creation_date": predict_function.creation_date.isoformat().replace("+00:00", "Z"), "description": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{predict_algo.key}/description/", + "storage_address": f"http://testserver/function/{predict_function.key}/description/", }, - "algorithm": { + "function": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{predict_algo.key}/file/", + "storage_address": f"http://testserver/function/{predict_function.key}/file/", }, "inputs": { "datasamples": {"kind": "ASSET_DATA_SAMPLE", "optional": False, 
"multiple": True}, @@ -206,7 +206,7 @@ def setUp(self): }, }, { - "key": str(metric_algo.key), + "key": str(metric_function.key), "name": "metric", "owner": "MyOrg1MSP", "metadata": {}, @@ -220,14 +220,14 @@ def setUp(self): "authorized_ids": ["MyOrg1MSP"], }, }, - "creation_date": metric_algo.creation_date.isoformat().replace("+00:00", "Z"), + "creation_date": metric_function.creation_date.isoformat().replace("+00:00", "Z"), "description": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{metric_algo.key}/description/", + "storage_address": f"http://testserver/function/{metric_function.key}/description/", }, - "algorithm": { + "function": { "checksum": "dummy-checksum", - "storage_address": f"http://testserver/algo/{metric_algo.key}/file/", + "storage_address": f"http://testserver/function/{metric_function.key}/file/", }, "inputs": { "datasamples": {"kind": "ASSET_DATA_SAMPLE", "optional": False, "multiple": True}, @@ -244,129 +244,129 @@ def tearDown(self): shutil.rmtree(MEDIA_ROOT, ignore_errors=True) self.logger.setLevel(self.previous_level) - def test_algo_list_empty(self): - Algo.objects.all().delete() + def test_function_list_empty(self): + Function.objects.all().delete() response = self.client.get(self.url, **self.extra) self.assertEqual(response.json(), {"count": 0, "next": None, "previous": None, "results": []}) - def test_algo_list_success(self): + def test_function_list_success(self): response = self.client.get(self.url, **self.extra) self.assertEqual( response.json(), { - "count": len(self.expected_algos), + "count": len(self.expected_functions), "next": None, "previous": None, - "results": self.expected_algos, + "results": self.expected_functions, }, ) - def test_algo_list_wrong_channel(self): + def test_function_list_wrong_channel(self): extra = {"HTTP_SUBSTRA_CHANNEL_NAME": "yourchannel", "HTTP_ACCEPT": "application/json;version=0.0"} response = self.client.get(self.url, **extra) self.assertEqual(response.json(), {"count": 0, "next": None, "previous": None, "results": []}) @internal_server_error_on_exception() - @mock.patch("api.views.algo.AlgoViewSet.list", side_effect=Exception("Unexpected error")) - def test_algo_list_fail(self, _): + @mock.patch("api.views.function.FunctionViewSet.list", side_effect=Exception("Unexpected error")) + def test_function_list_fail(self, _): response = self.client.get(self.url, **self.extra) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - def test_algo_list_storage_addresses_update(self): - for algo in Algo.objects.all(): - algo.description_address.replace("http://testserver", "http://remotetestserver") - algo.algorithm_address.replace("http://testserver", "http://remotetestserver") - algo.save() + def test_function_list_storage_addresses_update(self): + for function in Function.objects.all(): + function.description_address.replace("http://testserver", "http://remotetestserver") + function.function_address.replace("http://testserver", "http://remotetestserver") + function.save() response = self.client.get(self.url, **self.extra) - self.assertEqual(response.data["count"], len(self.expected_algos)) - for result, algo in zip(response.data["results"], self.expected_algos): - for field in ("description", "algorithm"): - self.assertEqual(result[field]["storage_address"], algo[field]["storage_address"]) - - def test_algo_list_filter(self): - """Filter algo on key.""" - key = self.expected_algos[0]["key"] + self.assertEqual(response.data["count"], len(self.expected_functions)) + for result, 
function in zip(response.data["results"], self.expected_functions): + for field in ("description", "function"): + self.assertEqual(result[field]["storage_address"], function[field]["storage_address"]) + + def test_function_list_filter(self): + """Filter function on key.""" + key = self.expected_functions[0]["key"] params = urlencode({"key": key}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual( - response.json(), {"count": 1, "next": None, "previous": None, "results": self.expected_algos[:1]} + response.json(), {"count": 1, "next": None, "previous": None, "results": self.expected_functions[:1]} ) - def test_algo_list_filter_and(self): - """Filter algo on key and owner.""" - key, owner = self.expected_algos[0]["key"], self.expected_algos[0]["owner"] + def test_function_list_filter_and(self): + """Filter function on key and owner.""" + key, owner = self.expected_functions[0]["key"], self.expected_functions[0]["owner"] params = urlencode({"key": key, "owner": owner}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual( - response.json(), {"count": 1, "next": None, "previous": None, "results": self.expected_algos[:1]} + response.json(), {"count": 1, "next": None, "previous": None, "results": self.expected_functions[:1]} ) - def test_algo_list_filter_in(self): - """Filter algo in key_0, key_1.""" - key_0 = self.expected_algos[0]["key"] - key_1 = self.expected_algos[1]["key"] + def test_function_list_filter_in(self): + """Filter function in key_0, key_1.""" + key_0 = self.expected_functions[0]["key"] + key_1 = self.expected_functions[1]["key"] params = urlencode({"key": ",".join([key_0, key_1])}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual( - response.json(), {"count": 2, "next": None, "previous": None, "results": self.expected_algos[:2]} + response.json(), {"count": 2, "next": None, "previous": None, "results": self.expected_functions[:2]} ) - def test_algo_match(self): - """Match algo on part of the name.""" - params = urlencode({"match": "le al"}) + def test_function_match(self): + """Match function on part of the name.""" + params = urlencode({"match": "le fu"}) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual( - response.json(), {"count": 1, "next": None, "previous": None, "results": self.expected_algos[:1]} + response.json(), {"count": 1, "next": None, "previous": None, "results": self.expected_functions[:1]} ) - def test_algo_match_and_filter(self): - """Match algo with filter.""" + def test_function_match_and_filter(self): + """Match function with filter.""" params = urlencode( { - "key": self.expected_algos[0]["key"], - "match": "le al", + "key": self.expected_functions[0]["key"], + "match": "le fu", } ) response = self.client.get(f"{self.url}?{params}", **self.extra) self.assertEqual( - response.json(), {"count": 1, "next": None, "previous": None, "results": self.expected_algos[:1]} + response.json(), {"count": 1, "next": None, "previous": None, "results": self.expected_functions[:1]} ) - def test_algo_list_cross_assets_filters(self): - """Filter algos on other asset key such as compute_plan_key, dataset_key and data_sample_key""" + def test_function_list_cross_assets_filters(self): + """Filter functions on other asset key such as compute_plan_key, dataset_key and data_sample_key""" compute_plan = factory.create_computeplan() data_manager = factory.create_datamanager() data_sample = factory.create_datasample([data_manager]) factory.create_computetask( - 
compute_plan, self.algos[0], data_manager=data_manager, data_samples=[data_sample.key] + compute_plan, self.functions[0], data_manager=data_manager, data_samples=[data_sample.key] ) - factory.create_computetask(compute_plan, self.algos[1]) + factory.create_computetask(compute_plan, self.functions[1]) # filter on compute_plan_key params = urlencode({"compute_plan_key": compute_plan.key}) response = self.client.get(f"{self.url}?{params}", **self.extra) - self.assertEqual(response.json().get("results"), self.expected_algos[:2]) + self.assertEqual(response.json().get("results"), self.expected_functions[:2]) # filter on dataset_key params = urlencode({"dataset_key": data_manager.key}) response = self.client.get(f"{self.url}?{params}", **self.extra) - self.assertEqual(response.json().get("results"), self.expected_algos[:1]) + self.assertEqual(response.json().get("results"), self.expected_functions[:1]) # filter on data_sample_key params = urlencode({"data_sample_key": data_sample.key}) response = self.client.get(f"{self.url}?{params}", **self.extra) - self.assertEqual(response.json().get("results"), self.expected_algos[:1]) + self.assertEqual(response.json().get("results"), self.expected_functions[:1]) - def test_algo_list_ordering(self): + def test_function_list_ordering(self): params = urlencode({"ordering": "creation_date"}) response = self.client.get(f"{self.url}?{params}", **self.extra) - self.assertEqual(response.json().get("results"), self.expected_algos), + self.assertEqual(response.json().get("results"), self.expected_functions), params = urlencode({"ordering": "-creation_date"}) response = self.client.get(f"{self.url}?{params}", **self.extra) - self.assertEqual(response.json().get("results"), self.expected_algos[::-1]), + self.assertEqual(response.json().get("results"), self.expected_functions[::-1]), @parameterized.expand( [ @@ -375,77 +375,77 @@ def test_algo_list_ordering(self): ("page_size_3_page_1", 3, 1), ] ) - def test_algo_list_pagination_success(self, _, page_size, page): + def test_function_list_pagination_success(self, _, page_size, page): params = urlencode({"page_size": page_size, "page": page}) response = self.client.get(f"{self.url}?{params}", **self.extra) r = response.json() - self.assertEqual(r["count"], len(self.expected_algos)) + self.assertEqual(r["count"], len(self.expected_functions)) offset = (page - 1) * page_size - self.assertEqual(r["results"], self.expected_algos[offset : offset + page_size]) + self.assertEqual(r["results"], self.expected_functions[offset : offset + page_size]) - def test_algo_cp_list_success(self): - """List algos for a specific compute plan (CPAlgoViewSet).""" + def test_function_cp_list_success(self): + """List functions for a specific compute plan (CPFunctionViewSet).""" compute_plan = factory.create_computeplan() - factory.create_computetask(compute_plan, self.algos[0]) - factory.create_computetask(compute_plan, self.algos[1]) + factory.create_computetask(compute_plan, self.functions[0]) + factory.create_computetask(compute_plan, self.functions[1]) - url = reverse("api:compute_plan_algo-list", args=[compute_plan.key]) + url = reverse("api:compute_plan_function-list", args=[compute_plan.key]) response = self.client.get(url, **self.extra) self.assertEqual( response.json(), { - "count": len(self.expected_algos[:2]), + "count": len(self.expected_functions[:2]), "next": None, "previous": None, - "results": self.expected_algos[:2], + "results": self.expected_functions[:2], }, ) - def test_algo_list_can_process(self): - public_algo = 
Algo.objects.get(key=self.expected_algos[0]["key"]) - public_algo.permissions_process_public = True - public_algo.save() - self.expected_algos[0]["permissions"]["process"]["public"] = True + def test_function_list_can_process(self): + public_function = Function.objects.get(key=self.expected_functions[0]["key"]) + public_function.permissions_process_public = True + public_function.save() + self.expected_functions[0]["permissions"]["process"]["public"] = True - shared_algo = Algo.objects.get(key=self.expected_algos[1]["key"]) - shared_algo.permissions_process_authorized_ids = ["MyOrg1MSP", "MyOrg2MSP"] - shared_algo.save() - self.expected_algos[1]["permissions"]["process"]["authorized_ids"] = ["MyOrg1MSP", "MyOrg2MSP"] + shared_function = Function.objects.get(key=self.expected_functions[1]["key"]) + shared_function.permissions_process_authorized_ids = ["MyOrg1MSP", "MyOrg2MSP"] + shared_function.save() + self.expected_functions[1]["permissions"]["process"]["authorized_ids"] = ["MyOrg1MSP", "MyOrg2MSP"] params = urlencode({"can_process": "MyOrg1MSP"}) response = self.client.get(f"{self.url}?{params}", **self.extra) - self.assertEqual(response.json().get("results"), self.expected_algos), + self.assertEqual(response.json().get("results"), self.expected_functions), params = urlencode({"can_process": "MyOrg2MSP"}) response = self.client.get(f"{self.url}?{params}", **self.extra) - self.assertEqual(response.json().get("results"), self.expected_algos[:2]), + self.assertEqual(response.json().get("results"), self.expected_functions[:2]), params = urlencode({"can_process": "MyOrg3MSP"}) response = self.client.get(f"{self.url}?{params}", **self.extra) - self.assertEqual(response.json().get("results"), [self.expected_algos[0]]), + self.assertEqual(response.json().get("results"), [self.expected_functions[0]]), params = urlencode({"can_process": "MyOrg1MSP,MyOrg2MSP"}) response = self.client.get(f"{self.url}?{params}", **self.extra) - self.assertEqual(response.json().get("results"), self.expected_algos[:2]), + self.assertEqual(response.json().get("results"), self.expected_functions[:2]), @parameterized.expand( [ (category, filename) for category in [ - AlgoCategory.simple, - AlgoCategory.aggregate, - AlgoCategory.composite, - AlgoCategory.metric, - AlgoCategory.predict, + FunctionCategory.simple, + FunctionCategory.aggregate, + FunctionCategory.composite, + FunctionCategory.metric, + FunctionCategory.predict, ] for filename in [ - "algo.tar.gz", - "algo.zip", + "function.tar.gz", + "function.zip", ] ] ) - def test_algo_create(self, category, filename): + def test_function_create(self, category, filename): def mock_orc_response(data): """Build orchestrator register response from request data.""" return { @@ -459,12 +459,12 @@ def mock_orc_response(data): "metadata": {}, "creation_date": "2021-11-04T13:54:09.882662Z", "description": data["description"], - "algorithm": data["algorithm"], + "function": data["function"], "inputs": data["inputs"], "outputs": data["outputs"], } - algorithm_path = os.path.join(FIXTURE_PATH, filename) + function_path = os.path.join(FIXTURE_PATH, filename) description_path = os.path.join(FIXTURE_PATH, "description.md") data = { "json": json.dumps( @@ -485,23 +485,23 @@ def mock_orc_response(data): }, } ), - "file": open(algorithm_path, "rb"), + "file": open(function_path, "rb"), "description": open(description_path, "rb"), } - with mock.patch.object(OrchestratorClient, "register_algo", side_effect=mock_orc_response): + with mock.patch.object(OrchestratorClient, "register_function", 
side_effect=mock_orc_response): response = self.client.post(self.url, data=data, format="multipart", **self.extra) self.assertIsNotNone(response.data["key"]) self.assertEqual(response.status_code, status.HTTP_201_CREATED) # asset created in local db - self.assertEqual(Algo.objects.count(), len(self.expected_algos) + 1) + self.assertEqual(Function.objects.count(), len(self.expected_functions) + 1) data["file"].close() data["description"].close() @override_settings(DATA_UPLOAD_MAX_SIZE=150) def test_file_size_limit(self): - algorithm_path = os.path.join(FIXTURE_PATH, "algo.tar.gz") + function_path = os.path.join(FIXTURE_PATH, "function.tar.gz") description_path = os.path.join(FIXTURE_PATH, "description.md") data = { @@ -523,7 +523,7 @@ def test_file_size_limit(self): }, } ), - "file": open(algorithm_path, "rb"), + "file": open(function_path, "rb"), "description": open(description_path, "rb"), } @@ -535,7 +535,7 @@ def test_file_size_limit(self): data["description"].close() data["file"].close() - def test_algo_create_fail_rollback(self): + def test_function_create_fail_rollback(self): class MockOrcError(OrcError): def __init__(self) -> None: pass @@ -543,7 +543,7 @@ def __init__(self) -> None: code = StatusCode.ALREADY_EXISTS details = "already exists" - algorithm_path = os.path.join(FIXTURE_PATH, "algo.tar.gz") + function_path = os.path.join(FIXTURE_PATH, "function.tar.gz") description_path = os.path.join(FIXTURE_PATH, "description.md") data = { "json": json.dumps( @@ -564,84 +564,86 @@ def __init__(self) -> None: }, } ), - "file": open(algorithm_path, "rb"), + "file": open(function_path, "rb"), "description": open(description_path, "rb"), } - with mock.patch.object(OrchestratorClient, "register_algo", side_effect=MockOrcError()): + with mock.patch.object(OrchestratorClient, "register_function", side_effect=MockOrcError()): response = self.client.post(self.url, data=data, format="multipart", **self.extra) # asset not created in local db - self.assertEqual(Algo.objects.count(), len(self.expected_algos)) + self.assertEqual(Function.objects.count(), len(self.expected_functions)) # orc error code should be propagated self.assertEqual(response.status_code, status.HTTP_409_CONFLICT) @internal_server_error_on_exception() - @mock.patch("api.views.algo.AlgoViewSet.create", side_effect=Exception("Unexpected error")) - def test_algo_create_fail(self, _): + @mock.patch("api.views.function.FunctionViewSet.create", side_effect=Exception("Unexpected error")) + def test_function_create_fail(self, _): response = self.client.post(self.url, data={}, format="json") self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - def test_algo_retrieve(self): - url = reverse("api:algo-detail", args=[self.expected_algos[0]["key"]]) + def test_function_retrieve(self): + url = reverse("api:function-detail", args=[self.expected_functions[0]["key"]]) response = self.client.get(url, **self.extra) - self.assertEqual(response.json(), self.expected_algos[0]) + self.assertEqual(response.json(), self.expected_functions[0]) - def test_algo_retrieve_wrong_channel(self): - url = reverse("api:algo-detail", args=[self.expected_algos[0]["key"]]) + def test_function_retrieve_wrong_channel(self): + url = reverse("api:function-detail", args=[self.expected_functions[0]["key"]]) extra = {"HTTP_SUBSTRA_CHANNEL_NAME": "yourchannel", "HTTP_ACCEPT": "application/json;version=0.0"} response = self.client.get(url, **extra) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - def 
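The rollback test above hinges on the orchestrator error being raised mid-create; a condensed sketch of that pattern, assuming the `client`, `url`, `data`, and `extra` fixtures from the surrounding tests:

```python
from unittest import mock

from orchestrator.client import OrchestratorClient
from orchestrator.error import OrcError
from orchestrator.error import StatusCode


class MockOrcError(OrcError):
    def __init__(self) -> None:  # bypass the parent constructor, as in the test
        pass

    code = StatusCode.ALREADY_EXISTS
    details = "already exists"


def post_function_with_orc_conflict(client, url, data, extra):
    # If registration fails in the orchestrator, the create view deletes
    # the files saved in step 1, so no local Function row survives, and
    # the orchestrator status code is mapped onto the HTTP response (409).
    with mock.patch.object(OrchestratorClient, "register_function", side_effect=MockOrcError()):
        return client.post(url, data=data, format="multipart", **extra)
```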
test_algo_retrieve_storage_addresses_update(self): - algo = Algo.objects.get(key=self.expected_algos[0]["key"]) - algo.description_address.replace("http://testserver", "http://remotetestserver") - algo.algorithm_address.replace("http://testserver", "http://remotetestserver") - algo.save() + def test_function_retrieve_storage_addresses_update(self): + function = Function.objects.get(key=self.expected_functions[0]["key"]) + function.description_address.replace("http://testserver", "http://remotetestserver") + function.function_address.replace("http://testserver", "http://remotetestserver") + function.save() - url = reverse("api:algo-detail", args=[self.expected_algos[0]["key"]]) + url = reverse("api:function-detail", args=[self.expected_functions[0]["key"]]) response = self.client.get(url, **self.extra) - for field in ("description", "algorithm"): - self.assertEqual(response.data[field]["storage_address"], self.expected_algos[0][field]["storage_address"]) + for field in ("description", "function"): + self.assertEqual( + response.data[field]["storage_address"], self.expected_functions[0][field]["storage_address"] + ) @internal_server_error_on_exception() - @mock.patch("api.views.algo.AlgoViewSet.retrieve", side_effect=Exception("Unexpected error")) - def test_algo_retrieve_fail(self, _): - url = reverse("api:algo-detail", args=[self.expected_algos[0]["key"]]) + @mock.patch("api.views.function.FunctionViewSet.retrieve", side_effect=Exception("Unexpected error")) + def test_function_retrieve_fail(self, _): + url = reverse("api:function-detail", args=[self.expected_functions[0]["key"]]) response = self.client.get(url, **self.extra) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) - def test_algo_download_file(self): - algo_files = factory.create_algo_files() - algo = factory.create_algo(key=algo_files.key) - url = reverse("api:algo-file", args=[algo.key]) - with mock.patch("api.views.utils.get_owner", return_value=algo.owner): + def test_function_download_file(self): + function_files = factory.create_function_files() + function = factory.create_function(key=function_files.key) + url = reverse("api:function-file", args=[function.key]) + with mock.patch("api.views.utils.get_owner", return_value=function.owner): response = self.client.get(url, **self.extra) content = response.getvalue() self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(content, algo_files.file.read()) - self.assertEqual(compute_hash(content), algo_files.checksum) - - def test_algo_download_description(self): - algo_files = factory.create_algo_files() - algo = factory.create_algo(key=algo_files.key) - url = reverse("api:algo-description", args=[algo.key]) - with mock.patch("api.views.utils.get_owner", return_value=algo.owner): + self.assertEqual(content, function_files.file.read()) + self.assertEqual(compute_hash(content), function_files.checksum) + + def test_function_download_description(self): + function_files = factory.create_function_files() + function = factory.create_function(key=function_files.key) + url = reverse("api:function-description", args=[function.key]) + with mock.patch("api.views.utils.get_owner", return_value=function.owner): response = self.client.get(url, **self.extra) content = response.getvalue() self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(content, algo_files.description.read()) + self.assertEqual(content, function_files.description.read()) - def test_algo_update(self): - algo = self.expected_algos[0] + def 
test_function_update(self): + function = self.expected_functions[0] data = { - "key": algo["key"], + "key": function["key"], "name": "Bar", } - url = reverse("api:algo-detail", args=[algo["key"]]) - algo["name"] = data["name"] + url = reverse("api:function-detail", args=[function["key"]]) + function["name"] = data["name"] - with mock.patch.object(OrchestratorClient, "update_algo", side_effect=algo): + with mock.patch.object(OrchestratorClient, "update_function", side_effect=function): response = self.client.put(url, data=data, format="json", **self.extra) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -649,6 +651,6 @@ def test_algo_update(self): error = OrcError() error.code = StatusCode.INTERNAL - with mock.patch.object(OrchestratorClient, "update_algo", side_effect=error): + with mock.patch.object(OrchestratorClient, "update_function", side_effect=error): response = self.client.put(url, data=data, format="json", **self.extra) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR) diff --git a/backend/api/tests/views/test_views_model.py b/backend/api/tests/views/test_views_model.py index e8309c6b6..56910f167 100644 --- a/backend/api/tests/views/test_views_model.py +++ b/backend/api/tests/views/test_views_model.py @@ -48,24 +48,24 @@ def setUp(self): compute_plan = factory.create_computeplan() - simple_algo = factory.create_algo( - outputs=factory.build_algo_outputs(["model"]), + simple_function = factory.create_function( + outputs=factory.build_function_outputs(["model"]), ) self.train_task = factory.create_computetask( compute_plan, - simple_algo, - outputs=factory.build_computetask_outputs(simple_algo), + simple_function, + outputs=factory.build_computetask_outputs(simple_function), ) simple_model_1 = factory.create_model(self.train_task, identifier="model") simple_model_2 = factory.create_model(self.train_task, identifier="model") - composite_algo = factory.create_algo( - outputs=factory.build_algo_outputs(["local", "shared"]), + composite_function = factory.create_function( + outputs=factory.build_function_outputs(["local", "shared"]), ) composite_task = factory.create_computetask( compute_plan, - composite_algo, - outputs=factory.build_computetask_outputs(composite_algo), + composite_function, + outputs=factory.build_computetask_outputs(composite_function), ) local_model = factory.create_model(composite_task, identifier="local") diff --git a/backend/api/tests/views/test_views_performance.py b/backend/api/tests/views/test_views_performance.py index b2d026203..0d8345e92 100644 --- a/backend/api/tests/views/test_views_performance.py +++ b/backend/api/tests/views/test_views_performance.py @@ -35,8 +35,8 @@ def setUp(self): self.extra = {"HTTP_SUBSTRA_CHANNEL_NAME": "mychannel", "HTTP_ACCEPT": "application/json;version=0.0"} self.url = reverse("api:compute_plan_perf-list", args=[self.compute_plan.key]) - self.metric = factory.create_algo( - outputs=factory.build_algo_outputs(["performance"]), + self.metric = factory.create_function( + outputs=factory.build_function_outputs(["performance"]), ) self.compute_tasks = [ factory.create_computetask( @@ -69,7 +69,7 @@ def setUp(self): "compute_task": { "key": str(self.compute_tasks[0].key), "data_manager_key": str(self.data_manager.key), - "algo_key": str(self.metric.key), + "function_key": str(self.metric.key), "rank": 1, "round_idx": 1, "data_samples": [str(self.data_sample.key)], @@ -86,7 +86,7 @@ def setUp(self): "compute_task": { "key": str(self.compute_tasks[1].key), "data_manager_key": 
str(self.data_manager.key), - "algo_key": str(self.metric.key), + "function_key": str(self.metric.key), "rank": 2, "round_idx": 1, "data_samples": [str(self.data_sample.key)], @@ -103,7 +103,7 @@ def setUp(self): "compute_task": { "key": str(self.compute_tasks[2].key), "data_manager_key": str(self.data_manager.key), - "algo_key": str(self.metric.key), + "function_key": str(self.metric.key), "rank": 3, "round_idx": 1, "data_samples": [str(self.data_sample.key)], @@ -173,8 +173,8 @@ def setUp(self): self.export_url = reverse("api:performance-export") self.metrics = [ - factory.create_algo( - outputs=factory.build_algo_outputs(["performance"]), + factory.create_function( + outputs=factory.build_function_outputs(["performance"]), ) for _ in range(3) ] diff --git a/backend/api/tests/views/test_views_task_profiling.py b/backend/api/tests/views/test_views_task_profiling.py index 58dba2406..51ad86618 100644 --- a/backend/api/tests/views/test_views_task_profiling.py +++ b/backend/api/tests/views/test_views_task_profiling.py @@ -23,10 +23,10 @@ def setUp(self) -> None: self.extra = {"HTTP_SUBSTRA_CHANNEL_NAME": CHANNEL, "HTTP_ACCEPT": "application/json;version=0.0"} self.url = reverse("api:task_profiling-list") - algo = factory.create_algo() + function = factory.create_function() compute_plan = factory.create_computeplan() - self.train_task = factory.create_computetask(compute_plan=compute_plan, algo=algo) + self.train_task = factory.create_computetask(compute_plan=compute_plan, function=function) factory.create_computetask_profiling(compute_task=self.train_task) @@ -58,9 +58,9 @@ def test_task_profiling_retrieve_success(self): self.assertEqual(response.json(), self.expected_results[0]) def test_task_profiling_create_bad_client(self): - algo = factory.create_algo() + function = factory.create_function() cp = factory.create_computeplan() - task = factory.create_computetask(compute_plan=cp, algo=algo) + task = factory.create_computetask(compute_plan=cp, function=function) response = self.client.post(self.url, {"compute_task_key": str(task.key), "channel": CHANNEL}, **self.extra) self.assertEqual(response.status_code, 403) @@ -78,9 +78,9 @@ def setUp(self) -> None: self.url = reverse("api:task_profiling-list") def test_task_profiling_create_success(self): - algo = factory.create_algo() + function = factory.create_function() cp = factory.create_computeplan() - task = factory.create_computetask(compute_plan=cp, algo=algo) + task = factory.create_computetask(compute_plan=cp, function=function) response = self.client.post(self.url, {"compute_task_key": str(task.key)}, **self.extra) self.assertEqual(response.status_code, 201) @@ -106,9 +106,9 @@ def test_task_profiling_create_success(self): ) def test_already_exist_task_profiling(self): - algo = factory.create_algo() + function = factory.create_function() cp = factory.create_computeplan() - task = factory.create_computetask(compute_plan=cp, algo=algo) + task = factory.create_computetask(compute_plan=cp, function=function) response = self.client.post(self.url, {"compute_task_key": str(task.key)}, **self.extra) self.assertEqual(response.status_code, 201) @@ -129,9 +129,9 @@ def setUp(self) -> None: self.url = reverse("api:task_profiling-list") def test_task_profiling_create_fail_other_backend(self): - algo = factory.create_algo() + function = factory.create_function() cp = factory.create_computeplan() - task = factory.create_computetask(compute_plan=cp, algo=algo) + task = factory.create_computetask(compute_plan=cp, function=function) response = 
self.client.post(self.url, {"compute_task_key": str(task.key)}, **self.extra) self.assertEqual(response.status_code, 403) diff --git a/backend/api/urls.py b/backend/api/urls.py index e2dea3848..6826dff8c 100644 --- a/backend/api/urls.py +++ b/backend/api/urls.py @@ -18,8 +18,8 @@ router.register(r"data_sample", views.DataSampleViewSet, basename="data_sample") router.register(r"data_manager", views.DataManagerViewSet, basename="data_manager") router.register(r"data_manager", views.DataManagerPermissionViewSet, basename="data_manager") -router.register(r"algo", views.AlgoViewSet, basename="algo") -router.register(r"algo", views.AlgoPermissionViewSet, basename="algo") +router.register(r"function", views.FunctionViewSet, basename="function") +router.register(r"function", views.FunctionPermissionViewSet, basename="function") router.register(r"task", views.ComputeTaskViewSet, basename="task") router.register(r"compute_plan", views.ComputePlanViewSet, basename="compute_plan") router.register(r"compute_plan_metadata", views.ComputePlanMetadataViewSet, basename="compute_plan_metadata") @@ -33,7 +33,7 @@ compute_plan_router = routers.NestedDefaultRouter(router, r"compute_plan", lookup="compute_plan") compute_plan_router.register(r"task", views.CPTaskViewSet, basename=f"{CP_BASENAME_PREFIX}task") -compute_plan_router.register(r"algos", views.CPAlgoViewSet, basename=f"{CP_BASENAME_PREFIX}algo") +compute_plan_router.register(r"functions", views.CPFunctionViewSet, basename=f"{CP_BASENAME_PREFIX}function") compute_plan_router.register(r"perf", views.CPPerformanceViewSet, basename=f"{CP_BASENAME_PREFIX}perf") diff --git a/backend/api/views/__init__.py b/backend/api/views/__init__.py index 499ba005c..ab83a34e5 100644 --- a/backend/api/views/__init__.py +++ b/backend/api/views/__init__.py @@ -1,6 +1,3 @@ -from .algo import AlgoPermissionViewSet -from .algo import AlgoViewSet -from .algo import CPAlgoViewSet from .compute_plan_graph import get_cp_graph from .computeplan import ComputePlanViewSet from .computetask import ComputeTaskViewSet @@ -9,6 +6,9 @@ from .datamanager import DataManagerPermissionViewSet from .datamanager import DataManagerViewSet from .datasample import DataSampleViewSet +from .function import CPFunctionViewSet +from .function import FunctionPermissionViewSet +from .function import FunctionViewSet from .metadata import ComputePlanMetadataViewSet from .model import ModelPermissionViewSet from .model import ModelViewSet @@ -24,12 +24,12 @@ "DataManagerPermissionViewSet", "ModelViewSet", "ModelPermissionViewSet", - "AlgoViewSet", - "AlgoPermissionViewSet", + "FunctionViewSet", + "FunctionPermissionViewSet", "ComputeTaskViewSet", "ComputePlanViewSet", "CPTaskViewSet", - "CPAlgoViewSet", + "CPFunctionViewSet", "NewsFeedViewSet", "ComputeTaskLogsViewSet", "CPPerformanceViewSet", diff --git a/backend/api/views/compute_plan_graph.py b/backend/api/views/compute_plan_graph.py index a2b84e09e..45c219805 100644 --- a/backend/api/views/compute_plan_graph.py +++ b/backend/api/views/compute_plan_graph.py @@ -39,7 +39,9 @@ def get_cp_graph(request, compute_plan_pk): ComputeTask.objects.filter(key=OuterRef("pk")) .annotate( outputs_specs=JSONBAgg( - JsonbBuildObj(Value("identifier"), F("algo__outputs__identifier"), Value("kind"), "algo__outputs__kind") + JsonbBuildObj( + Value("identifier"), F("function__outputs__identifier"), Value("kind"), "function__outputs__kind" + ) ), ) .values("outputs_specs") @@ -49,7 +51,9 @@ def get_cp_graph(request, compute_plan_pk): 
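With the router entries above, both the flat and the nested routes resolve under the renamed names; a sketch assuming the standard DRF router URL layout (the compute-plan key is a placeholder):

```python
from django.urls import reverse

cp_key = "00000000-0000-0000-0000-000000000000"  # placeholder
reverse("api:function-list")                              # /function/, was api:algo-list
reverse("api:compute_plan_function-list", args=[cp_key])  # /compute_plan/<key>/functions/
```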
ComputeTask.objects.filter(key=OuterRef("pk")) .annotate( inputs_specs=JSONBAgg( - JsonbBuildObj(Value("identifier"), F("algo__inputs__identifier"), Value("kind"), "algo__inputs__kind") + JsonbBuildObj( + Value("identifier"), F("function__inputs__identifier"), Value("kind"), "function__inputs__kind" + ) ), ) .values("inputs_specs") diff --git a/backend/api/views/computeplan.py b/backend/api/views/computeplan.py index 5e55583dd..2e8bdb679 100644 --- a/backend/api/views/computeplan.py +++ b/backend/api/views/computeplan.py @@ -123,7 +123,7 @@ class ComputePlanFilter(FilterSet): field_name="status", choices=ComputePlan.Status.choices, ) - algo_key = CharFilter(field_name="compute_tasks__algo__key", distinct=True, label="algo_key") + function_key = CharFilter(field_name="compute_tasks__function__key", distinct=True, label="function_key") dataset_key = CharFilter(field_name="compute_tasks__data_manager__key", distinct=True, label="dataset_key") data_sample_key = CharInFilter( field_name="compute_tasks__data_samples__key", distinct=True, label="data_sample_key" diff --git a/backend/api/views/computetask.py b/backend/api/views/computetask.py index 0fa3006ee..625ca1606 100644 --- a/backend/api/views/computetask.py +++ b/backend/api/views/computetask.py @@ -23,8 +23,8 @@ from api.models import ComputeTask from api.models import ComputeTaskInputAsset from api.models import ComputeTaskOutputAsset -from api.models.algo import AlgoInput -from api.models.algo import AlgoOutput +from api.models.function import FunctionInput +from api.models.function import FunctionOutput from api.serializers import ComputeTaskInputAssetSerializer from api.serializers import ComputeTaskOutputAssetSerializer from api.serializers import ComputeTaskSerializer @@ -50,7 +50,7 @@ def _register_in_orchestrator(tasks_data, channel_name): for task_data in tasks_data: orc_task = { "key": task_data["key"], - "algo_key": task_data.get("algo_key"), + "function_key": task_data.get("function_key"), "compute_plan_key": task_data["compute_plan_key"], "inputs": task_data.get("inputs", []), "outputs": task_data.get("outputs", {}), @@ -152,7 +152,7 @@ class ComputeTaskFilter(FilterSet): choices=ComputeTask.Status.choices, ) compute_plan_key = CharInFilter(field_name="compute_plan__key") - algo_key = CharFilter(field_name="algo__key", distinct=True, label="algo_key") + function_key = CharFilter(field_name="function__key", distinct=True, label="function_key") dataset_key = CharFilter(field_name="data_manager__key", distinct=True, label="dataset_key") data_sample_key = CharInFilter(field_name="data_samples__key", distinct=True, label="data_sample_key") duration = RangeFilter(label="duration") @@ -189,7 +189,7 @@ class Meta: class InputAssetFilter(FilterSet): - kind = ChoiceInFilter(field_name="asset_kind", choices=AlgoInput.Kind.choices) + kind = ChoiceInFilter(field_name="asset_kind", choices=FunctionInput.Kind.choices) class Meta: model = ComputeTaskInputAsset @@ -197,7 +197,7 @@ class Meta: class OutputAssetFilter(FilterSet): - kind = ChoiceInFilter(field_name="asset_kind", choices=AlgoOutput.Kind.choices) + kind = ChoiceInFilter(field_name="asset_kind", choices=FunctionOutput.Kind.choices) class Meta: model = ComputeTaskOutputAsset @@ -264,7 +264,7 @@ def output_assets(self, request, pk): def get_queryset(self): return ( ComputeTask.objects.filter(channel=get_channel_name(self.request)) - .select_related("algo") + .select_related("function") .annotate( # Using 0 as default value instead of None for ordering purpose, as default # Postgres 
behavior considers null as greater than any other value. diff --git a/backend/api/views/datamanager.py b/backend/api/views/datamanager.py index a5934f05f..82b3de3ff 100644 --- a/backend/api/views/datamanager.py +++ b/backend/api/views/datamanager.py @@ -122,7 +122,8 @@ def create(request, get_success_headers): else: data = api_serializer.data - # Returns algo metadata from local database (and algo data) to ensure consistency between GET and CREATE views + # Returns data manager metadata from local database (and data manager data) + # to ensure consistency between GET and CREATE views data.update(serializer.data) # Return ApiResponse @@ -135,7 +136,7 @@ class DataManagerFilter(FilterSet): compute_plan_key = CharInFilter( field_name="compute_tasks__compute_plan__key", distinct=True, label="compute_plan_key" ) - algo_key = CharFilter(field_name="compute_tasks__algo__key", distinct=True, label="algo_key") + function_key = CharFilter(field_name="compute_tasks__function__key", distinct=True, label="function_key") data_sample_key = CharInFilter( field_name="compute_tasks__data_samples__key", distinct=True, label="data_sample_key" ) @@ -191,7 +192,7 @@ def update(self, request, *args, **kwargs): datamanager = self.get_object() name = request.data.get("name") - orc_algo = { + orc_data_manager = { "key": str(datamanager.key), "name": name, } @@ -199,7 +200,7 @@ # send update to orchestrator # the modification in local db will be done upon corresponding event reception with get_orchestrator_client(get_channel_name(request)) as client: - client.update_datamanager(orc_algo) + client.update_datamanager(orc_data_manager) return ApiResponse({}, status=status.HTTP_200_OK) diff --git a/backend/api/views/datasample.py b/backend/api/views/datasample.py index 5305f705d..1590f3a4d 100644 --- a/backend/api/views/datasample.py +++ b/backend/api/views/datasample.py @@ -284,7 +284,7 @@ class DataSampleFilter(FilterSet): compute_plan_key = CharInFilter( field_name="data_managers__compute_tasks__compute_plan__key", distinct=True, label="compute_plan_key" ) - algo_key = CharFilter(field_name="compute_tasks__algo__key", distinct=True, label="algo_key") + function_key = CharFilter(field_name="compute_tasks__function__key", distinct=True, label="function_key") dataset_key = CharFilter(field_name="compute_tasks__data_manager__key", distinct=True, label="dataset_key") class Meta: diff --git a/backend/api/views/algo.py b/backend/api/views/function.py similarity index 76% rename from backend/api/views/algo.py rename to backend/api/views/function.py index b8e8ce15f..1a9bb92a6 100644 --- a/backend/api/views/algo.py +++ b/backend/api/views/function.py @@ -14,8 +14,8 @@ from rest_framework.viewsets import GenericViewSet from api.errors import AlreadyExistsError -from api.models import Algo -from api.serializers import AlgoSerializer +from api.models import Function +from api.serializers import FunctionSerializer from api.views.filters_utils import CharInFilter from api.views.filters_utils import MatchFilter from api.views.filters_utils import ProcessPermissionFilter @@ -26,30 +26,30 @@ from api.views.utils import validate_key from api.views.utils import validate_metadata from libs.pagination import DefaultPageNumberPagination -from substrapp.models import Algo as AlgoFiles +from substrapp.models import Function as FunctionFiles from substrapp.orchestrator import get_orchestrator_client -from substrapp.serializers import AlgoSerializer as AlgoFilesSerializer +from substrapp.serializers import 
FunctionSerializer as FunctionFilesSerializer from substrapp.utils import get_hash logger = structlog.get_logger(__name__) def _register_in_orchestrator(request, basename, instance): - """Register algo in orchestrator.""" + """Register function in orchestrator.""" current_site = settings.DEFAULT_DOMAIN permissions = request.data.get("permissions", {}) - orc_algo = { + orc_function = { "key": str(instance.key), "name": request.data.get("name"), "description": { "checksum": get_hash(instance.description), - "storage_address": current_site + reverse("api:algo-description", args=[instance.key]), + "storage_address": current_site + reverse("api:function-description", args=[instance.key]), }, - "algorithm": { + "function": { "checksum": instance.checksum, - "storage_address": current_site + reverse("api:algo-file", args=[instance.key]), + "storage_address": current_site + reverse("api:function-file", args=[instance.key]), }, "new_permissions": { "public": permissions.get("public"), @@ -61,11 +61,11 @@ def _register_in_orchestrator(request, basename, instance): } with get_orchestrator_client(get_channel_name(request)) as client: - return client.register_algo(orc_algo) + return client.register_function(orc_function) def create(request, basename, get_success_headers): - """Create a new algo. + """Create a new function. The workflow is composed of several steps: - Save files in local database to get the addresses. @@ -79,7 +79,7 @@ def create(request, basename, get_success_headers): except Exception as e: raise ValidationExceptionError(e.args, "(not computed)", status.HTTP_400_BAD_REQUEST) - serializer = AlgoFilesSerializer( + serializer = FunctionFilesSerializer( data={"file": file, "description": request.data.get("description"), "checksum": checksum} ) @@ -99,20 +99,21 @@ def create(request, basename, get_success_headers): # Step3: save metadata in local database api_data["channel"] = get_channel_name(request) - api_serializer = AlgoSerializer(data=api_data) + api_serializer = FunctionSerializer(data=api_data) try: api_serializer.save_if_not_exists() except AlreadyExistsError: # May happen if the events app already processed the event pushed by the orchestrator - algo = Algo.objects.get(key=api_data["key"]) - data = AlgoSerializer(algo).data + function = Function.objects.get(key=api_data["key"]) + data = FunctionSerializer(function).data except Exception: instance.delete() # warning: post delete signals are not executed by django rollback raise else: data = api_serializer.data - # Returns algo metadata from local database (and algo data) to ensure consistency between GET and CREATE views + # Returns function metadata from local database (and function data) + # to ensure consistency between GET and CREATE views data.update(serializer.data) # Return ApiResponse @@ -120,7 +121,7 @@ def create(request, basename, get_success_headers): return ApiResponse(data, status=status.HTTP_201_CREATED, headers=headers) -class AlgoFilter(FilterSet): +class FunctionFilter(FilterSet): creation_date = DateTimeFromToRangeFilter() compute_plan_key = CharInFilter(field_name="compute_tasks__compute_plan__key", label="compute_plan_key") @@ -130,7 +131,7 @@ class AlgoFilter(FilterSet): ) class Meta: - model = Algo + model = Function fields = { "owner": ["exact"], "key": ["exact"], @@ -152,42 +153,42 @@ class Meta: } -class AlgoViewSetConfig: - serializer_class = AlgoSerializer +class FunctionViewSetConfig: + serializer_class = FunctionSerializer filter_backends = (OrderingFilter, MatchFilter, DjangoFilterBackend, 
ProcessPermissionFilter) ordering_fields = ["creation_date", "key", "name", "owner"] ordering = ["creation_date", "key"] pagination_class = DefaultPageNumberPagination - filterset_class = AlgoFilter + filterset_class = FunctionFilter def get_queryset(self): - return Algo.objects.filter(channel=get_channel_name(self.request)) + return Function.objects.filter(channel=get_channel_name(self.request)) -class AlgoViewSet( - AlgoViewSetConfig, mixins.RetrieveModelMixin, mixins.ListModelMixin, mixins.CreateModelMixin, GenericViewSet +class FunctionViewSet( + FunctionViewSetConfig, mixins.RetrieveModelMixin, mixins.ListModelMixin, mixins.CreateModelMixin, GenericViewSet ): def create(self, request, *args, **kwargs): return create(request, self.basename, lambda data: self.get_success_headers(data)) def update(self, request, *args, **kwargs): - algo = self.get_object() + function = self.get_object() name = request.data.get("name") - orc_algo = { - "key": str(algo.key), + orc_function = { + "key": str(function.key), "name": name, } # send update to orchestrator # the modification in local db will be done upon corresponding event reception with get_orchestrator_client(get_channel_name(request)) as client: - client.update_algo(orc_algo) + client.update_function(orc_function) return ApiResponse({}, status=status.HTTP_200_OK) -class CPAlgoViewSet(AlgoViewSetConfig, mixins.ListModelMixin, GenericViewSet): +class CPFunctionViewSet(FunctionViewSetConfig, mixins.ListModelMixin, GenericViewSet): def get_queryset(self): compute_plan_key = self.kwargs.get("compute_plan_pk") validate_key(compute_plan_key) @@ -195,13 +196,13 @@ def get_queryset(self): return queryset.filter(compute_tasks__compute_plan__key=compute_plan_key).distinct() -class AlgoPermissionViewSet(PermissionMixin, GenericViewSet): - queryset = AlgoFiles.objects.all() - serializer_class = AlgoFilesSerializer +class FunctionPermissionViewSet(PermissionMixin, GenericViewSet): + queryset = FunctionFiles.objects.all() + serializer_class = FunctionFilesSerializer @action(detail=True) def file(self, request, *args, **kwargs): - return self.download_file(request, Algo, "file", "algorithm_address") + return self.download_file(request, Function, "file", "function_address") # actions cannot be named "description" # https://github.com/encode/django-rest-framework/issues/6490 @@ -209,4 +210,4 @@ def file(self, request, *args, **kwargs): # https://www.django-rest-framework.org/api-guide/viewsets/#introspecting-viewset-actions @action(detail=True, url_path="description", url_name="description") def description_(self, request, *args, **kwargs): - return self.download_file(request, Algo, "description", "description_address") + return self.download_file(request, Function, "description", "description_address") diff --git a/backend/backend/settings/common.py b/backend/backend/settings/common.py index 99b1b8de9..8d0d76c75 100644 --- a/backend/backend/settings/common.py +++ b/backend/backend/settings/common.py @@ -175,7 +175,7 @@ def build_broker_url(user: str, password: str, host: str, port: str) -> str: DATASAMPLE_STORAGE = FileSystemStorage() MODEL_STORAGE = FileSystemStorage() -ALGO_STORAGE = FileSystemStorage() +FUNCTION_STORAGE = FileSystemStorage() DATAMANAGER_STORAGE = FileSystemStorage() METRICS_STORAGE = FileSystemStorage() COMPUTE_TASK_LOGS_STORAGE = FileSystemStorage() diff --git a/backend/backend/settings/dev.py b/backend/backend/settings/dev.py index 476eefcf5..c25db8402 100644 --- a/backend/backend/settings/dev.py +++ 
b/backend/backend/settings/dev.py @@ -40,8 +40,8 @@ MEDIA_ROOT = os.environ.get("MEDIA_ROOT", os.path.join(PROJECT_ROOT, f"medias/{ORG_NAME}")) SERVERMEDIAS_ROOT = os.environ.get("SERVERMEDIAS_ROOT", os.path.join(PROJECT_ROOT, f"servermedias/{ORG_NAME}")) -ALGO_BUCKET_NAME = "substra-algo" -ALGO_STORAGE = MinioStorage(ALGO_BUCKET_NAME) +FUNCTION_BUCKET_NAME = "substra-function" +FUNCTION_STORAGE = MinioStorage(FUNCTION_BUCKET_NAME) DATAMANAGER_BUCKET_NAME = "substra-datamanager" DATAMANAGER_STORAGE = MinioStorage(DATAMANAGER_BUCKET_NAME) diff --git a/backend/backend/settings/prod.py b/backend/backend/settings/prod.py index 4695cfb07..5c8d23d30 100644 --- a/backend/backend/settings/prod.py +++ b/backend/backend/settings/prod.py @@ -43,8 +43,8 @@ MEDIA_ROOT = os.environ.get("MEDIA_ROOT", f"/substra/medias/{ORG_NAME}") SERVERMEDIAS_ROOT = os.environ.get("SERVERMEDIAS_ROOT", f"/substra/servermedias/{ORG_NAME}") -ALGO_BUCKET_NAME = "substra-algo" -ALGO_STORAGE = MinioStorage(ALGO_BUCKET_NAME) +FUNCTION_BUCKET_NAME = "substra-function" +FUNCTION_STORAGE = MinioStorage(FUNCTION_BUCKET_NAME) DATAMANAGER_BUCKET_NAME = "substra-datamanager" DATAMANAGER_STORAGE = MinioStorage(DATAMANAGER_BUCKET_NAME) diff --git a/backend/orchestrator/__init__.py b/backend/orchestrator/__init__.py index a8aa0682b..1440dcc80 100644 --- a/backend/orchestrator/__init__.py +++ b/backend/orchestrator/__init__.py @@ -1,9 +1,6 @@ from .client import OrcError from .client import OrchestratorClient as Client from .resources import Address -from .resources import Algo -from .resources import AlgoInput -from .resources import AlgoOutput from .resources import AssetKind from .resources import ComputePlan from .resources import ComputeTask @@ -13,6 +10,9 @@ from .resources import ComputeTaskStatus from .resources import DataManager from .resources import DataSample +from .resources import Function +from .resources import FunctionInput +from .resources import FunctionOutput from .resources import InvalidInputAsset from .resources import Model from .resources import Permission @@ -34,8 +34,8 @@ "InvalidInputAsset", "Client", "ComputePlan", - "Algo", + "Function", "OrcError", - "AlgoInput", - "AlgoOutput", + "FunctionInput", + "FunctionOutput", ) diff --git a/backend/orchestrator/algo_pb2.py b/backend/orchestrator/algo_pb2.py deleted file mode 100644 index fbfbc4eca..000000000 --- a/backend/orchestrator/algo_pb2.py +++ /dev/null @@ -1,208 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: algo.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from . 
import common_pb2 as common__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\nalgo.proto\x12\x0corchestrator\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x0c\x63ommon.proto\"V\n\tAlgoInput\x12%\n\x04kind\x18\x01 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12\x10\n\x08multiple\x18\x02 \x01(\x08\x12\x10\n\x08optional\x18\x03 \x01(\x08\"E\n\nAlgoOutput\x12%\n\x04kind\x18\x01 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12\x10\n\x08multiple\x18\x02 \x01(\x08\"\xda\x04\n\x04\x41lgo\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12.\n\x0b\x64\x65scription\x18\x04 \x01(\x0b\x32\x19.orchestrator.Addressable\x12,\n\talgorithm\x18\x05 \x01(\x0b\x32\x19.orchestrator.Addressable\x12.\n\x0bpermissions\x18\x06 \x01(\x0b\x32\x19.orchestrator.Permissions\x12\r\n\x05owner\x18\x07 \x01(\t\x12\x31\n\rcreation_date\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08metadata\x18\x10 \x03(\x0b\x32 .orchestrator.Algo.MetadataEntry\x12.\n\x06inputs\x18\x11 \x03(\x0b\x32\x1e.orchestrator.Algo.InputsEntry\x12\x30\n\x07outputs\x18\x12 \x03(\x0b\x32\x1f.orchestrator.Algo.OutputsEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x46\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.orchestrator.AlgoInput:\x02\x38\x01\x1aH\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\'\n\x05value\x18\x02 \x01(\x0b\x32\x18.orchestrator.AlgoOutput:\x02\x38\x01J\x04\x08\x03\x10\x04R\x08\x63\x61tegory\"\xab\x04\n\x07NewAlgo\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12.\n\x0b\x64\x65scription\x18\x04 \x01(\x0b\x32\x19.orchestrator.Addressable\x12,\n\talgorithm\x18\x05 \x01(\x0b\x32\x19.orchestrator.Addressable\x12\x35\n\x0fnew_permissions\x18\x06 \x01(\x0b\x32\x1c.orchestrator.NewPermissions\x12\x35\n\x08metadata\x18\x11 \x03(\x0b\x32#.orchestrator.NewAlgo.MetadataEntry\x12\x31\n\x06inputs\x18\x12 \x03(\x0b\x32!.orchestrator.NewAlgo.InputsEntry\x12\x33\n\x07outputs\x18\x13 \x03(\x0b\x32\".orchestrator.NewAlgo.OutputsEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x46\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.orchestrator.AlgoInput:\x02\x38\x01\x1aH\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\'\n\x05value\x18\x02 \x01(\x0b\x32\x18.orchestrator.AlgoOutput:\x02\x38\x01J\x04\x08\x03\x10\x04R\x08\x63\x61tegory\"\x1b\n\x0cGetAlgoParam\x12\x0b\n\x03key\x18\x01 \x01(\t\"P\n\x12QueryAlgosResponse\x12!\n\x05\x41lgos\x18\x01 \x03(\x0b\x32\x12.orchestrator.Algo\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"+\n\x0f\x41lgoQueryFilter\x12\x18\n\x10\x63ompute_plan_key\x18\x02 \x01(\t\"g\n\x0fQueryAlgosParam\x12\x12\n\npage_token\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\r\x12-\n\x06\x66ilter\x18\x03 \x01(\x0b\x32\x1d.orchestrator.AlgoQueryFilter\",\n\x0fUpdateAlgoParam\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x14\n\x12UpdateAlgoResponse2\xa1\x02\n\x0b\x41lgoService\x12\x39\n\x0cRegisterAlgo\x12\x15.orchestrator.NewAlgo\x1a\x12.orchestrator.Algo\x12\x39\n\x07GetAlgo\x12\x1a.orchestrator.GetAlgoParam\x1a\x12.orchestrator.Algo\x12M\n\nQueryAlgos\x12\x1d.orchestrator.QueryAlgosParam\x1a .orchestrator.QueryAlgosResponse\x12M\n\nUpdateAlgo\x12\x1d.orchestrator.UpdateAlgoParam\x1a .orchestrator.UpdateAlgoResponseB+Z)github.com/substra/orchestrator/lib/assetb\x06proto3') - - - -_ALGOINPUT = 
DESCRIPTOR.message_types_by_name['AlgoInput'] -_ALGOOUTPUT = DESCRIPTOR.message_types_by_name['AlgoOutput'] -_ALGO = DESCRIPTOR.message_types_by_name['Algo'] -_ALGO_METADATAENTRY = _ALGO.nested_types_by_name['MetadataEntry'] -_ALGO_INPUTSENTRY = _ALGO.nested_types_by_name['InputsEntry'] -_ALGO_OUTPUTSENTRY = _ALGO.nested_types_by_name['OutputsEntry'] -_NEWALGO = DESCRIPTOR.message_types_by_name['NewAlgo'] -_NEWALGO_METADATAENTRY = _NEWALGO.nested_types_by_name['MetadataEntry'] -_NEWALGO_INPUTSENTRY = _NEWALGO.nested_types_by_name['InputsEntry'] -_NEWALGO_OUTPUTSENTRY = _NEWALGO.nested_types_by_name['OutputsEntry'] -_GETALGOPARAM = DESCRIPTOR.message_types_by_name['GetAlgoParam'] -_QUERYALGOSRESPONSE = DESCRIPTOR.message_types_by_name['QueryAlgosResponse'] -_ALGOQUERYFILTER = DESCRIPTOR.message_types_by_name['AlgoQueryFilter'] -_QUERYALGOSPARAM = DESCRIPTOR.message_types_by_name['QueryAlgosParam'] -_UPDATEALGOPARAM = DESCRIPTOR.message_types_by_name['UpdateAlgoParam'] -_UPDATEALGORESPONSE = DESCRIPTOR.message_types_by_name['UpdateAlgoResponse'] -AlgoInput = _reflection.GeneratedProtocolMessageType('AlgoInput', (_message.Message,), { - 'DESCRIPTOR' : _ALGOINPUT, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.AlgoInput) - }) -_sym_db.RegisterMessage(AlgoInput) - -AlgoOutput = _reflection.GeneratedProtocolMessageType('AlgoOutput', (_message.Message,), { - 'DESCRIPTOR' : _ALGOOUTPUT, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.AlgoOutput) - }) -_sym_db.RegisterMessage(AlgoOutput) - -Algo = _reflection.GeneratedProtocolMessageType('Algo', (_message.Message,), { - - 'MetadataEntry' : _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), { - 'DESCRIPTOR' : _ALGO_METADATAENTRY, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.Algo.MetadataEntry) - }) - , - - 'InputsEntry' : _reflection.GeneratedProtocolMessageType('InputsEntry', (_message.Message,), { - 'DESCRIPTOR' : _ALGO_INPUTSENTRY, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.Algo.InputsEntry) - }) - , - - 'OutputsEntry' : _reflection.GeneratedProtocolMessageType('OutputsEntry', (_message.Message,), { - 'DESCRIPTOR' : _ALGO_OUTPUTSENTRY, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.Algo.OutputsEntry) - }) - , - 'DESCRIPTOR' : _ALGO, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.Algo) - }) -_sym_db.RegisterMessage(Algo) -_sym_db.RegisterMessage(Algo.MetadataEntry) -_sym_db.RegisterMessage(Algo.InputsEntry) -_sym_db.RegisterMessage(Algo.OutputsEntry) - -NewAlgo = _reflection.GeneratedProtocolMessageType('NewAlgo', (_message.Message,), { - - 'MetadataEntry' : _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), { - 'DESCRIPTOR' : _NEWALGO_METADATAENTRY, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.NewAlgo.MetadataEntry) - }) - , - - 'InputsEntry' : _reflection.GeneratedProtocolMessageType('InputsEntry', (_message.Message,), { - 'DESCRIPTOR' : _NEWALGO_INPUTSENTRY, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.NewAlgo.InputsEntry) - }) - , - - 'OutputsEntry' : _reflection.GeneratedProtocolMessageType('OutputsEntry', (_message.Message,), { - 'DESCRIPTOR' : _NEWALGO_OUTPUTSENTRY, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.NewAlgo.OutputsEntry) - }) - , - 'DESCRIPTOR' : 
_NEWALGO, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.NewAlgo) - }) -_sym_db.RegisterMessage(NewAlgo) -_sym_db.RegisterMessage(NewAlgo.MetadataEntry) -_sym_db.RegisterMessage(NewAlgo.InputsEntry) -_sym_db.RegisterMessage(NewAlgo.OutputsEntry) - -GetAlgoParam = _reflection.GeneratedProtocolMessageType('GetAlgoParam', (_message.Message,), { - 'DESCRIPTOR' : _GETALGOPARAM, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.GetAlgoParam) - }) -_sym_db.RegisterMessage(GetAlgoParam) - -QueryAlgosResponse = _reflection.GeneratedProtocolMessageType('QueryAlgosResponse', (_message.Message,), { - 'DESCRIPTOR' : _QUERYALGOSRESPONSE, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.QueryAlgosResponse) - }) -_sym_db.RegisterMessage(QueryAlgosResponse) - -AlgoQueryFilter = _reflection.GeneratedProtocolMessageType('AlgoQueryFilter', (_message.Message,), { - 'DESCRIPTOR' : _ALGOQUERYFILTER, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.AlgoQueryFilter) - }) -_sym_db.RegisterMessage(AlgoQueryFilter) - -QueryAlgosParam = _reflection.GeneratedProtocolMessageType('QueryAlgosParam', (_message.Message,), { - 'DESCRIPTOR' : _QUERYALGOSPARAM, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.QueryAlgosParam) - }) -_sym_db.RegisterMessage(QueryAlgosParam) - -UpdateAlgoParam = _reflection.GeneratedProtocolMessageType('UpdateAlgoParam', (_message.Message,), { - 'DESCRIPTOR' : _UPDATEALGOPARAM, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.UpdateAlgoParam) - }) -_sym_db.RegisterMessage(UpdateAlgoParam) - -UpdateAlgoResponse = _reflection.GeneratedProtocolMessageType('UpdateAlgoResponse', (_message.Message,), { - 'DESCRIPTOR' : _UPDATEALGORESPONSE, - '__module__' : 'algo_pb2' - # @@protoc_insertion_point(class_scope:orchestrator.UpdateAlgoResponse) - }) -_sym_db.RegisterMessage(UpdateAlgoResponse) - -_ALGOSERVICE = DESCRIPTOR.services_by_name['AlgoService'] -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - DESCRIPTOR._serialized_options = b'Z)github.com/substra/orchestrator/lib/asset' - _ALGO_METADATAENTRY._options = None - _ALGO_METADATAENTRY._serialized_options = b'8\001' - _ALGO_INPUTSENTRY._options = None - _ALGO_INPUTSENTRY._serialized_options = b'8\001' - _ALGO_OUTPUTSENTRY._options = None - _ALGO_OUTPUTSENTRY._serialized_options = b'8\001' - _NEWALGO_METADATAENTRY._options = None - _NEWALGO_METADATAENTRY._serialized_options = b'8\001' - _NEWALGO_INPUTSENTRY._options = None - _NEWALGO_INPUTSENTRY._serialized_options = b'8\001' - _NEWALGO_OUTPUTSENTRY._options = None - _NEWALGO_OUTPUTSENTRY._serialized_options = b'8\001' - _ALGOINPUT._serialized_start=75 - _ALGOINPUT._serialized_end=161 - _ALGOOUTPUT._serialized_start=163 - _ALGOOUTPUT._serialized_end=232 - _ALGO._serialized_start=235 - _ALGO._serialized_end=837 - _ALGO_METADATAENTRY._serialized_start=628 - _ALGO_METADATAENTRY._serialized_end=675 - _ALGO_INPUTSENTRY._serialized_start=677 - _ALGO_INPUTSENTRY._serialized_end=747 - _ALGO_OUTPUTSENTRY._serialized_start=749 - _ALGO_OUTPUTSENTRY._serialized_end=821 - _NEWALGO._serialized_start=840 - _NEWALGO._serialized_end=1395 - _NEWALGO_METADATAENTRY._serialized_start=628 - _NEWALGO_METADATAENTRY._serialized_end=675 - _NEWALGO_INPUTSENTRY._serialized_start=677 - _NEWALGO_INPUTSENTRY._serialized_end=747 - _NEWALGO_OUTPUTSENTRY._serialized_start=749 - 
_NEWALGO_OUTPUTSENTRY._serialized_end=821 - _GETALGOPARAM._serialized_start=1397 - _GETALGOPARAM._serialized_end=1424 - _QUERYALGOSRESPONSE._serialized_start=1426 - _QUERYALGOSRESPONSE._serialized_end=1506 - _ALGOQUERYFILTER._serialized_start=1508 - _ALGOQUERYFILTER._serialized_end=1551 - _QUERYALGOSPARAM._serialized_start=1553 - _QUERYALGOSPARAM._serialized_end=1656 - _UPDATEALGOPARAM._serialized_start=1658 - _UPDATEALGOPARAM._serialized_end=1702 - _UPDATEALGORESPONSE._serialized_start=1704 - _UPDATEALGORESPONSE._serialized_end=1724 - _ALGOSERVICE._serialized_start=1727 - _ALGOSERVICE._serialized_end=2016 -# @@protoc_insertion_point(module_scope) diff --git a/backend/orchestrator/algo_pb2_grpc.pyi b/backend/orchestrator/algo_pb2_grpc.pyi deleted file mode 100644 index a86cad132..000000000 --- a/backend/orchestrator/algo_pb2_grpc.pyi +++ /dev/null @@ -1,54 +0,0 @@ -""" -@generated by mypy-protobuf. Do not edit manually! -isort:skip_file -""" -import abc -import algo_pb2 -import grpc - -class AlgoServiceStub: - def __init__(self, channel: grpc.Channel) -> None: ... - RegisterAlgo: grpc.UnaryUnaryMultiCallable[ - algo_pb2.NewAlgo, - algo_pb2.Algo, - ] - GetAlgo: grpc.UnaryUnaryMultiCallable[ - algo_pb2.GetAlgoParam, - algo_pb2.Algo, - ] - QueryAlgos: grpc.UnaryUnaryMultiCallable[ - algo_pb2.QueryAlgosParam, - algo_pb2.QueryAlgosResponse, - ] - UpdateAlgo: grpc.UnaryUnaryMultiCallable[ - algo_pb2.UpdateAlgoParam, - algo_pb2.UpdateAlgoResponse, - ] - -class AlgoServiceServicer(metaclass=abc.ABCMeta): - @abc.abstractmethod - def RegisterAlgo( - self, - request: algo_pb2.NewAlgo, - context: grpc.ServicerContext, - ) -> algo_pb2.Algo: ... - @abc.abstractmethod - def GetAlgo( - self, - request: algo_pb2.GetAlgoParam, - context: grpc.ServicerContext, - ) -> algo_pb2.Algo: ... - @abc.abstractmethod - def QueryAlgos( - self, - request: algo_pb2.QueryAlgosParam, - context: grpc.ServicerContext, - ) -> algo_pb2.QueryAlgosResponse: ... - @abc.abstractmethod - def UpdateAlgo( - self, - request: algo_pb2.UpdateAlgoParam, - context: grpc.ServicerContext, - ) -> algo_pb2.UpdateAlgoResponse: ... - -def add_AlgoServiceServicer_to_server(servicer: AlgoServiceServicer, server: grpc.Server) -> None: ... 
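The two deleted modules above are generated code: rather than renaming them in place, this change drops the algo_pb2 / algo_pb2_grpc stubs so they can be regenerated from the orchestrator's function.proto as function_pb2 / function_pb2_grpc, which the client.py diff below wires in. The rename stays wire-compatible, since ASSET_FUNCTION keeps enum value 5 (previously ASSET_ALGO) and function_key keeps the field numbers algo_key used; only the Python and JSON names change. A minimal caller-side sketch of the renamed client methods follows (illustrative only: client construction is elided, the key is a placeholder, and attribute access on resources.Function is an assumption, not something shown in this changeset):

# Hedged sketch of the algo -> function rename from a caller's perspective,
# using only methods visible in the client.py diff below.
from orchestrator import Client, Function

def migrate_caller(client: Client) -> None:
    # query_algo(key) -> query_function(key): fetch a single function by key.
    function: Function = client.query_function(key="00000000-0000-0000-0000-000000000000")

    # query_algos(...) -> query_functions(...): still a paginated generator,
    # now iterating the response's Functions field instead of Algos.
    for fn in client.query_functions(compute_plan_key=None):
        print(fn.key, fn.name)  # assumes Function exposes key and name

    # update_algo(args) -> update_function(args): same {key, name} payload.
    client.update_function({"key": function.key, "name": "renamed-function"})

    # register_algo(args) -> register_function(args): the "algorithm"
    # addressable becomes "function", and inputs/outputs are built from
    # FunctionInput / FunctionOutput instead of AlgoInput / AlgoOutput.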
diff --git a/backend/orchestrator/client.py b/backend/orchestrator/client.py index 864e98a13..bbaf33d60 100644 --- a/backend/orchestrator/client.py +++ b/backend/orchestrator/client.py @@ -8,18 +8,17 @@ from django.conf import settings from google.protobuf.json_format import MessageToDict -import orchestrator.algo_pb2 as algo_pb2 import orchestrator.computeplan_pb2 as computeplan_pb2 import orchestrator.computetask_pb2 as computetask_pb2 import orchestrator.datamanager_pb2 as datamanager_pb2 import orchestrator.datasample_pb2 as datasample_pb2 import orchestrator.event_pb2 as event_pb2 import orchestrator.failure_report_pb2 as failure_report_pb2 +import orchestrator.function_pb2 as function_pb2 import orchestrator.info_pb2 as info_pb2 import orchestrator.model_pb2 as model_pb2 import orchestrator.organization_pb2 as organization_pb2 import orchestrator.performance_pb2 as performance_pb2 -from orchestrator.algo_pb2_grpc import AlgoServiceStub from orchestrator.computeplan_pb2_grpc import ComputePlanServiceStub from orchestrator.computetask_pb2_grpc import ComputeTaskServiceStub from orchestrator.datamanager_pb2_grpc import DataManagerServiceStub @@ -28,15 +27,16 @@ from orchestrator.error import OrcError from orchestrator.event_pb2_grpc import EventServiceStub from orchestrator.failure_report_pb2_grpc import FailureReportServiceStub +from orchestrator.function_pb2_grpc import FunctionServiceStub from orchestrator.info_pb2_grpc import InfoServiceStub from orchestrator.model_pb2_grpc import ModelServiceStub from orchestrator.organization_pb2_grpc import OrganizationServiceStub from orchestrator.performance_pb2_grpc import PerformanceServiceStub from orchestrator.resources import TAG_KEY -from orchestrator.resources import Algo from orchestrator.resources import ComputePlan from orchestrator.resources import ComputeTask from orchestrator.resources import ComputeTaskInputAsset +from orchestrator.resources import Function from orchestrator.resources import OrchestratorVersion logger = structlog.get_logger(__name__) @@ -162,7 +162,7 @@ def __init__( self.grpc_channel = grpc.secure_channel(target, creds, opts) self._organization_client = OrganizationServiceStub(self.grpc_channel) - self._algo_client = AlgoServiceStub(self.grpc_channel) + self._function_client = FunctionServiceStub(self.grpc_channel) self._datasample_client = DataSampleServiceStub(self.grpc_channel) self._datamanager_client = DataManagerServiceStub(self.grpc_channel) self._dataset_client = DatasetServiceStub(self.grpc_channel) @@ -194,38 +194,40 @@ def register_organization(self, args: dict): MessageToDict(data, **CONVERT_SETTINGS) @grpc_retry - def register_algo(self, args): - args["inputs"] = {identifier: algo_pb2.AlgoInput(**_input) for identifier, _input in args["inputs"].items()} + def register_function(self, args): + args["inputs"] = { + identifier: function_pb2.FunctionInput(**_input) for identifier, _input in args["inputs"].items() + } args["outputs"] = { - identifier: algo_pb2.AlgoOutput(**_output) for identifier, _output in args["outputs"].items() + identifier: function_pb2.FunctionOutput(**_output) for identifier, _output in args["outputs"].items() } - data = self._algo_client.RegisterAlgo(algo_pb2.NewAlgo(**args), metadata=self._metadata) + data = self._function_client.RegisterFunction(function_pb2.NewFunction(**args), metadata=self._metadata) return MessageToDict(data, **CONVERT_SETTINGS) @grpc_retry - def update_algo(self, args): - data = self._algo_client.UpdateAlgo(algo_pb2.UpdateAlgoParam(**args), 
metadata=self._metadata) + def update_function(self, args): + data = self._function_client.UpdateFunction(function_pb2.UpdateFunctionParam(**args), metadata=self._metadata) return MessageToDict(data, **CONVERT_SETTINGS) @grpc_retry - def query_algo(self, key) -> Algo: - data = self._algo_client.GetAlgo(algo_pb2.GetAlgoParam(key=key), metadata=self._metadata) - return Algo.from_grpc(data) + def query_function(self, key) -> Function: + data = self._function_client.GetFunction(function_pb2.GetFunctionParam(key=key), metadata=self._metadata) + return Function.from_grpc(data) @grpc_retry - def query_algos(self, compute_plan_key=None) -> Generator[Algo, None, None]: - algo_filter = algo_pb2.AlgoQueryFilter(compute_plan_key=compute_plan_key) + def query_functions(self, compute_plan_key=None) -> Generator[Function, None, None]: + function_filter = function_pb2.FunctionQueryFilter(compute_plan_key=compute_plan_key) page_token = "" # nosec while True: - data = self._algo_client.QueryAlgos( - algo_pb2.QueryAlgosParam(filter=algo_filter, page_token=page_token), + data = self._function_client.QueryFunctions( + function_pb2.QueryFunctionsParam(filter=function_filter, page_token=page_token), metadata=self._metadata, ) - for algo in data.Algos: - yield Algo.from_grpc(algo) + for function in data.Functions: + yield Function.from_grpc(function) page_token = data.next_page_token - if page_token == "" or len(data.Algos) == 0: # nosec + if page_token == "" or len(data.Functions) == 0: # nosec break @grpc_retry diff --git a/backend/orchestrator/common_pb2.py b/backend/orchestrator/common_pb2.py index e4c5bf065..0fd770953 100644 --- a/backend/orchestrator/common_pb2.py +++ b/backend/orchestrator/common_pb2.py @@ -15,7 +15,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x63ommon.proto\x12\x0corchestrator\"8\n\x0b\x41\x64\x64ressable\x12\x10\n\x08\x63hecksum\x18\x01 \x01(\t\x12\x17\n\x0fstorage_address\x18\x02 \x01(\t\"d\n\x0bPermissions\x12)\n\x07process\x18\x01 \x01(\x0b\x32\x18.orchestrator.Permission\x12*\n\x08\x64ownload\x18\x02 \x01(\x0b\x32\x18.orchestrator.Permission\"4\n\nPermission\x12\x0e\n\x06public\x18\x01 \x01(\x08\x12\x16\n\x0e\x61uthorized_ids\x18\x02 \x03(\t\"8\n\x0eNewPermissions\x12\x0e\n\x06public\x18\x01 \x01(\x08\x12\x16\n\x0e\x61uthorized_ids\x18\x02 \x03(\t*\xa0\x02\n\tAssetKind\x12\x11\n\rASSET_UNKNOWN\x10\x00\x12\x16\n\x12\x41SSET_ORGANIZATION\x10\x01\x12\x15\n\x11\x41SSET_DATA_SAMPLE\x10\x03\x12\x16\n\x12\x41SSET_DATA_MANAGER\x10\x04\x12\x0e\n\nASSET_ALGO\x10\x05\x12\x16\n\x12\x41SSET_COMPUTE_TASK\x10\x06\x12\x16\n\x12\x41SSET_COMPUTE_PLAN\x10\x07\x12\x0f\n\x0b\x41SSET_MODEL\x10\x08\x12\x15\n\x11\x41SSET_PERFORMANCE\x10\t\x12\x18\n\x14\x41SSET_FAILURE_REPORT\x10\n\x12#\n\x1f\x41SSET_COMPUTE_TASK_OUTPUT_ASSET\x10\x0b\"\x04\x08\x02\x10\x02*\x0c\x41SSET_METRIC*;\n\tSortOrder\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\x42+Z)github.com/substra/orchestrator/lib/assetb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x63ommon.proto\x12\x0corchestrator\"8\n\x0b\x41\x64\x64ressable\x12\x10\n\x08\x63hecksum\x18\x01 \x01(\t\x12\x17\n\x0fstorage_address\x18\x02 \x01(\t\"d\n\x0bPermissions\x12)\n\x07process\x18\x01 \x01(\x0b\x32\x18.orchestrator.Permission\x12*\n\x08\x64ownload\x18\x02 \x01(\x0b\x32\x18.orchestrator.Permission\"4\n\nPermission\x12\x0e\n\x06public\x18\x01 \x01(\x08\x12\x16\n\x0e\x61uthorized_ids\x18\x02 \x03(\t\"8\n\x0eNewPermissions\x12\x0e\n\x06public\x18\x01 
\x01(\x08\x12\x16\n\x0e\x61uthorized_ids\x18\x02 \x03(\t*\xa4\x02\n\tAssetKind\x12\x11\n\rASSET_UNKNOWN\x10\x00\x12\x16\n\x12\x41SSET_ORGANIZATION\x10\x01\x12\x15\n\x11\x41SSET_DATA_SAMPLE\x10\x03\x12\x16\n\x12\x41SSET_DATA_MANAGER\x10\x04\x12\x12\n\x0e\x41SSET_FUNCTION\x10\x05\x12\x16\n\x12\x41SSET_COMPUTE_TASK\x10\x06\x12\x16\n\x12\x41SSET_COMPUTE_PLAN\x10\x07\x12\x0f\n\x0b\x41SSET_MODEL\x10\x08\x12\x15\n\x11\x41SSET_PERFORMANCE\x10\t\x12\x18\n\x14\x41SSET_FAILURE_REPORT\x10\n\x12#\n\x1f\x41SSET_COMPUTE_TASK_OUTPUT_ASSET\x10\x0b\"\x04\x08\x02\x10\x02*\x0c\x41SSET_METRIC*;\n\tSortOrder\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\x42+Z)github.com/substra/orchestrator/lib/assetb\x06proto3') _ASSETKIND = DESCRIPTOR.enum_types_by_name['AssetKind'] AssetKind = enum_type_wrapper.EnumTypeWrapper(_ASSETKIND) @@ -25,7 +25,7 @@ ASSET_ORGANIZATION = 1 ASSET_DATA_SAMPLE = 3 ASSET_DATA_MANAGER = 4 -ASSET_ALGO = 5 +ASSET_FUNCTION = 5 ASSET_COMPUTE_TASK = 6 ASSET_COMPUTE_PLAN = 7 ASSET_MODEL = 8 @@ -74,9 +74,9 @@ DESCRIPTOR._options = None DESCRIPTOR._serialized_options = b'Z)github.com/substra/orchestrator/lib/asset' _ASSETKIND._serialized_start=303 - _ASSETKIND._serialized_end=591 - _SORTORDER._serialized_start=593 - _SORTORDER._serialized_end=652 + _ASSETKIND._serialized_end=595 + _SORTORDER._serialized_start=597 + _SORTORDER._serialized_end=656 _ADDRESSABLE._serialized_start=30 _ADDRESSABLE._serialized_end=86 _PERMISSIONS._serialized_start=88 diff --git a/backend/orchestrator/common_pb2.pyi b/backend/orchestrator/common_pb2.pyi index a87a3cbb6..6f51fb88f 100644 --- a/backend/orchestrator/common_pb2.pyi +++ b/backend/orchestrator/common_pb2.pyi @@ -28,7 +28,7 @@ class _AssetKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._Enum ASSET_ORGANIZATION: _AssetKind.ValueType # 1 ASSET_DATA_SAMPLE: _AssetKind.ValueType # 3 ASSET_DATA_MANAGER: _AssetKind.ValueType # 4 - ASSET_ALGO: _AssetKind.ValueType # 5 + ASSET_FUNCTION: _AssetKind.ValueType # 5 ASSET_COMPUTE_TASK: _AssetKind.ValueType # 6 ASSET_COMPUTE_PLAN: _AssetKind.ValueType # 7 ASSET_MODEL: _AssetKind.ValueType # 8 @@ -42,7 +42,7 @@ ASSET_UNKNOWN: AssetKind.ValueType # 0 ASSET_ORGANIZATION: AssetKind.ValueType # 1 ASSET_DATA_SAMPLE: AssetKind.ValueType # 3 ASSET_DATA_MANAGER: AssetKind.ValueType # 4 -ASSET_ALGO: AssetKind.ValueType # 5 +ASSET_FUNCTION: AssetKind.ValueType # 5 ASSET_COMPUTE_TASK: AssetKind.ValueType # 6 ASSET_COMPUTE_PLAN: AssetKind.ValueType # 7 ASSET_MODEL: AssetKind.ValueType # 8 diff --git a/backend/orchestrator/computetask.py b/backend/orchestrator/computetask.py index a7703e28a..f78b4ccd2 100644 --- a/backend/orchestrator/computetask.py +++ b/backend/orchestrator/computetask.py @@ -5,7 +5,7 @@ def orc_to_api(data: dict) -> dict: """Convert a compute task from the orchestrator format to the api format""" res = copy.deepcopy(data) - res["algo"] = {"key": res.pop("algo_key")} + res["function"] = {"key": res.pop("function_key")} res["inputs"] = [_input_to_api(input) for input in res["inputs"]] res["outputs"] = [{"identifier": identifier, **output} for identifier, output in res["outputs"].items()] return res diff --git a/backend/orchestrator/computetask_pb2.py b/backend/orchestrator/computetask_pb2.py index f82c76c44..505c8d9e1 100644 --- a/backend/orchestrator/computetask_pb2.py +++ b/backend/orchestrator/computetask_pb2.py @@ -20,7 +20,7 @@ from . 
import common_pb2 as common__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x63omputetask.proto\x12\x0corchestrator\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x0bmodel.proto\x1a\x11\x64\x61tamanager.proto\x1a\x10\x64\x61tasample.proto\x1a\x0c\x63ommon.proto\"I\n\x13ParentTaskOutputRef\x12\x17\n\x0fparent_task_key\x18\x01 \x01(\t\x12\x19\n\x11output_identifier\x18\x02 \x01(\t\"\x83\x01\n\x10\x43omputeTaskInput\x12\x12\n\nidentifier\x18\x01 \x01(\t\x12\x13\n\tasset_key\x18\x02 \x01(\tH\x00\x12?\n\x12parent_task_output\x18\x03 \x01(\x0b\x32!.orchestrator.ParentTaskOutputRefH\x00\x42\x05\n\x03ref\"V\n\x11\x43omputeTaskOutput\x12.\n\x0bpermissions\x18\x01 \x01(\x0b\x32\x19.orchestrator.Permissions\x12\x11\n\ttransient\x18\x02 \x01(\x08\"\\\n\x14NewComputeTaskOutput\x12\x31\n\x0bpermissions\x18\x01 \x01(\x0b\x32\x1c.orchestrator.NewPermissions\x12\x11\n\ttransient\x18\x02 \x01(\x08\"\xb4\x05\n\x0b\x43omputeTask\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05owner\x18\x04 \x01(\t\x12\x18\n\x10\x63ompute_plan_key\x18\x05 \x01(\t\x12\x0c\n\x04rank\x18\x07 \x01(\x05\x12/\n\x06status\x18\x08 \x01(\x0e\x32\x1f.orchestrator.ComputeTaskStatus\x12\x0e\n\x06worker\x18\t \x01(\t\x12\x31\n\rcreation_date\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x0flogs_permission\x18\x0b \x01(\x0b\x32\x18.orchestrator.Permission\x12\x39\n\x08metadata\x18\x10 \x03(\x0b\x32\'.orchestrator.ComputeTask.MetadataEntry\x12.\n\x06inputs\x18\x11 \x03(\x0b\x32\x1e.orchestrator.ComputeTaskInput\x12\x37\n\x07outputs\x18\x13 \x03(\x0b\x32&.orchestrator.ComputeTask.OutputsEntry\x12\x10\n\x08\x61lgo_key\x18\x14 \x01(\t\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aO\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.orchestrator.ComputeTaskOutput:\x02\x38\x01J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\x06\x10\x07J\x04\x08\x0c\x10\rJ\x04\x08\r\x10\x0eJ\x04\x08\x0e\x10\x0fJ\x04\x08\x0f\x10\x10J\x04\x08\x12\x10\x13R\x08\x63\x61tegoryR\x04\x61lgoR\x04\x64\x61taR\x04testR\x05trainR\tcompositeR\taggregateR\x10parent_task_keysR\x07predict\"\x80\x04\n\x0eNewComputeTask\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x10\n\x08\x61lgo_key\x18\x03 \x01(\t\x12\x18\n\x10\x63ompute_plan_key\x18\x04 \x01(\t\x12\x0e\n\x06worker\x18\x06 \x01(\t\x12<\n\x08metadata\x18\x10 \x03(\x0b\x32*.orchestrator.NewComputeTask.MetadataEntry\x12.\n\x06inputs\x18\x11 \x03(\x0b\x32\x1e.orchestrator.ComputeTaskInput\x12:\n\x07outputs\x18\x13 \x03(\x0b\x32).orchestrator.NewComputeTask.OutputsEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aR\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x31\n\x05value\x18\x02 \x01(\x0b\x32\".orchestrator.NewComputeTaskOutput:\x02\x38\x01J\x04\x08\x02\x10\x03J\x04\x08\x05\x10\x06J\x04\x08\x0c\x10\rJ\x04\x08\r\x10\x0eJ\x04\x08\x0e\x10\x0fJ\x04\x08\x0f\x10\x10J\x04\x08\x12\x10\x13R\x08\x63\x61tegoryR\x10parent_task_keysR\x04\x64\x61taR\x04testR\x05trainR\tcompositeR\taggregateR\x07predict\"A\n\x12RegisterTasksParam\x12+\n\x05tasks\x18\x01 \x03(\x0b\x32\x1c.orchestrator.NewComputeTask\"A\n\x15RegisterTasksResponse\x12(\n\x05tasks\x18\x01 \x03(\x0b\x32\x19.orchestrator.ComputeTask\"\x8e\x01\n\x0fTaskQueryFilter\x12\x0e\n\x06worker\x18\x01 \x01(\t\x12/\n\x06status\x18\x02 \x01(\x0e\x32\x1f.orchestrator.ComputeTaskStatus\x12\x18\n\x10\x63ompute_plan_key\x18\x04 \x01(\t\x12\x10\n\x08\x61lgo_key\x18\x05 
\x01(\tJ\x04\x08\x03\x10\x04R\x08\x63\x61tegory\"g\n\x0fQueryTasksParam\x12\x12\n\npage_token\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\r\x12-\n\x06\x66ilter\x18\x03 \x01(\x0b\x32\x1d.orchestrator.TaskQueryFilter\"W\n\x12QueryTasksResponse\x12(\n\x05tasks\x18\x01 \x03(\x0b\x32\x19.orchestrator.ComputeTask\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"\x1b\n\x0cGetTaskParam\x12\x0b\n\x03key\x18\x01 \x01(\t\"\x9a\x01\n\x16\x43omputeTaskOutputAsset\x12\x18\n\x10\x63ompute_task_key\x18\x01 \x01(\t\x12&\n\x1e\x63ompute_task_output_identifier\x18\x02 \x01(\t\x12+\n\nasset_kind\x18\x03 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12\x11\n\tasset_key\x18\x04 \x01(\t\"n\n\x14\x41pplyTaskActionParam\x12\x18\n\x10\x63ompute_task_key\x18\x01 \x01(\t\x12/\n\x06\x61\x63tion\x18\x02 \x01(\x0e\x32\x1f.orchestrator.ComputeTaskAction\x12\x0b\n\x03log\x18\x03 \x01(\t\"\x19\n\x17\x41pplyTaskActionResponse\"\xbe\x01\n\x15\x43omputeTaskInputAsset\x12\x12\n\nidentifier\x18\x01 \x01(\t\x12$\n\x05model\x18\x02 \x01(\x0b\x32\x13.orchestrator.ModelH\x00\x12\x31\n\x0c\x64\x61ta_manager\x18\x03 \x01(\x0b\x32\x19.orchestrator.DataManagerH\x00\x12/\n\x0b\x64\x61ta_sample\x18\x04 \x01(\x0b\x32\x18.orchestrator.DataSampleH\x00\x42\x07\n\x05\x61sset\"3\n\x17GetTaskInputAssetsParam\x12\x18\n\x10\x63ompute_task_key\x18\x01 \x01(\t\"Q\n\x1aGetTaskInputAssetsResponse\x12\x33\n\x06\x61ssets\x18\x01 \x03(\x0b\x32#.orchestrator.ComputeTaskInputAsset\"B\n\x12\x44isableOutputParam\x12\x18\n\x10\x63ompute_task_key\x18\x01 \x01(\t\x12\x12\n\nidentifier\x18\x02 \x01(\t\"\x17\n\x15\x44isableOutputResponse*\x97\x01\n\x11\x43omputeTaskStatus\x12\x12\n\x0eSTATUS_UNKNOWN\x10\x00\x12\x12\n\x0eSTATUS_WAITING\x10\x01\x12\x0f\n\x0bSTATUS_TODO\x10\x02\x12\x10\n\x0cSTATUS_DOING\x10\x03\x12\x0f\n\x0bSTATUS_DONE\x10\x04\x12\x13\n\x0fSTATUS_CANCELED\x10\x05\x12\x11\n\rSTATUS_FAILED\x10\x06*\x8b\x01\n\x11\x43omputeTaskAction\x12\x17\n\x13TASK_ACTION_UNKNOWN\x10\x00\x12\x15\n\x11TASK_ACTION_DOING\x10\x01\x12\x18\n\x14TASK_ACTION_CANCELED\x10\x02\x12\x16\n\x12TASK_ACTION_FAILED\x10\x03\x12\x14\n\x10TASK_ACTION_DONE\x10\x04\x32\x9a\x04\n\x12\x43omputeTaskService\x12V\n\rRegisterTasks\x12 .orchestrator.RegisterTasksParam\x1a#.orchestrator.RegisterTasksResponse\x12M\n\nQueryTasks\x12\x1d.orchestrator.QueryTasksParam\x1a .orchestrator.QueryTasksResponse\x12@\n\x07GetTask\x12\x1a.orchestrator.GetTaskParam\x1a\x19.orchestrator.ComputeTask\x12\\\n\x0f\x41pplyTaskAction\x12\".orchestrator.ApplyTaskActionParam\x1a%.orchestrator.ApplyTaskActionResponse\x12\x65\n\x12GetTaskInputAssets\x12%.orchestrator.GetTaskInputAssetsParam\x1a(.orchestrator.GetTaskInputAssetsResponse\x12V\n\rDisableOutput\x12 .orchestrator.DisableOutputParam\x1a#.orchestrator.DisableOutputResponseB+Z)github.com/substra/orchestrator/lib/assetb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x63omputetask.proto\x12\x0corchestrator\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x0bmodel.proto\x1a\x11\x64\x61tamanager.proto\x1a\x10\x64\x61tasample.proto\x1a\x0c\x63ommon.proto\"I\n\x13ParentTaskOutputRef\x12\x17\n\x0fparent_task_key\x18\x01 \x01(\t\x12\x19\n\x11output_identifier\x18\x02 \x01(\t\"\x83\x01\n\x10\x43omputeTaskInput\x12\x12\n\nidentifier\x18\x01 \x01(\t\x12\x13\n\tasset_key\x18\x02 \x01(\tH\x00\x12?\n\x12parent_task_output\x18\x03 \x01(\x0b\x32!.orchestrator.ParentTaskOutputRefH\x00\x42\x05\n\x03ref\"V\n\x11\x43omputeTaskOutput\x12.\n\x0bpermissions\x18\x01 \x01(\x0b\x32\x19.orchestrator.Permissions\x12\x11\n\ttransient\x18\x02 
\x01(\x08\"\\\n\x14NewComputeTaskOutput\x12\x31\n\x0bpermissions\x18\x01 \x01(\x0b\x32\x1c.orchestrator.NewPermissions\x12\x11\n\ttransient\x18\x02 \x01(\x08\"\xbc\x05\n\x0b\x43omputeTask\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05owner\x18\x04 \x01(\t\x12\x18\n\x10\x63ompute_plan_key\x18\x05 \x01(\t\x12\x0c\n\x04rank\x18\x07 \x01(\x05\x12/\n\x06status\x18\x08 \x01(\x0e\x32\x1f.orchestrator.ComputeTaskStatus\x12\x0e\n\x06worker\x18\t \x01(\t\x12\x31\n\rcreation_date\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x0flogs_permission\x18\x0b \x01(\x0b\x32\x18.orchestrator.Permission\x12\x39\n\x08metadata\x18\x10 \x03(\x0b\x32\'.orchestrator.ComputeTask.MetadataEntry\x12.\n\x06inputs\x18\x11 \x03(\x0b\x32\x1e.orchestrator.ComputeTaskInput\x12\x37\n\x07outputs\x18\x13 \x03(\x0b\x32&.orchestrator.ComputeTask.OutputsEntry\x12\x14\n\x0c\x66unction_key\x18\x14 \x01(\t\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aO\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 \x01(\x0b\x32\x1f.orchestrator.ComputeTaskOutput:\x02\x38\x01J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\x06\x10\x07J\x04\x08\x0c\x10\rJ\x04\x08\r\x10\x0eJ\x04\x08\x0e\x10\x0fJ\x04\x08\x0f\x10\x10J\x04\x08\x12\x10\x13R\x08\x63\x61tegoryR\x08\x66unctionR\x04\x64\x61taR\x04testR\x05trainR\tcompositeR\taggregateR\x10parent_task_keysR\x07predict\"\x84\x04\n\x0eNewComputeTask\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x14\n\x0c\x66unction_key\x18\x03 \x01(\t\x12\x18\n\x10\x63ompute_plan_key\x18\x04 \x01(\t\x12\x0e\n\x06worker\x18\x06 \x01(\t\x12<\n\x08metadata\x18\x10 \x03(\x0b\x32*.orchestrator.NewComputeTask.MetadataEntry\x12.\n\x06inputs\x18\x11 \x03(\x0b\x32\x1e.orchestrator.ComputeTaskInput\x12:\n\x07outputs\x18\x13 \x03(\x0b\x32).orchestrator.NewComputeTask.OutputsEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aR\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x31\n\x05value\x18\x02 \x01(\x0b\x32\".orchestrator.NewComputeTaskOutput:\x02\x38\x01J\x04\x08\x02\x10\x03J\x04\x08\x05\x10\x06J\x04\x08\x0c\x10\rJ\x04\x08\r\x10\x0eJ\x04\x08\x0e\x10\x0fJ\x04\x08\x0f\x10\x10J\x04\x08\x12\x10\x13R\x08\x63\x61tegoryR\x10parent_task_keysR\x04\x64\x61taR\x04testR\x05trainR\tcompositeR\taggregateR\x07predict\"A\n\x12RegisterTasksParam\x12+\n\x05tasks\x18\x01 \x03(\x0b\x32\x1c.orchestrator.NewComputeTask\"A\n\x15RegisterTasksResponse\x12(\n\x05tasks\x18\x01 \x03(\x0b\x32\x19.orchestrator.ComputeTask\"\x92\x01\n\x0fTaskQueryFilter\x12\x0e\n\x06worker\x18\x01 \x01(\t\x12/\n\x06status\x18\x02 \x01(\x0e\x32\x1f.orchestrator.ComputeTaskStatus\x12\x18\n\x10\x63ompute_plan_key\x18\x04 \x01(\t\x12\x14\n\x0c\x66unction_key\x18\x05 \x01(\tJ\x04\x08\x03\x10\x04R\x08\x63\x61tegory\"g\n\x0fQueryTasksParam\x12\x12\n\npage_token\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\r\x12-\n\x06\x66ilter\x18\x03 \x01(\x0b\x32\x1d.orchestrator.TaskQueryFilter\"W\n\x12QueryTasksResponse\x12(\n\x05tasks\x18\x01 \x03(\x0b\x32\x19.orchestrator.ComputeTask\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"\x1b\n\x0cGetTaskParam\x12\x0b\n\x03key\x18\x01 \x01(\t\"\x9a\x01\n\x16\x43omputeTaskOutputAsset\x12\x18\n\x10\x63ompute_task_key\x18\x01 \x01(\t\x12&\n\x1e\x63ompute_task_output_identifier\x18\x02 \x01(\t\x12+\n\nasset_kind\x18\x03 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12\x11\n\tasset_key\x18\x04 \x01(\t\"n\n\x14\x41pplyTaskActionParam\x12\x18\n\x10\x63ompute_task_key\x18\x01 
\x01(\t\x12/\n\x06\x61\x63tion\x18\x02 \x01(\x0e\x32\x1f.orchestrator.ComputeTaskAction\x12\x0b\n\x03log\x18\x03 \x01(\t\"\x19\n\x17\x41pplyTaskActionResponse\"\xbe\x01\n\x15\x43omputeTaskInputAsset\x12\x12\n\nidentifier\x18\x01 \x01(\t\x12$\n\x05model\x18\x02 \x01(\x0b\x32\x13.orchestrator.ModelH\x00\x12\x31\n\x0c\x64\x61ta_manager\x18\x03 \x01(\x0b\x32\x19.orchestrator.DataManagerH\x00\x12/\n\x0b\x64\x61ta_sample\x18\x04 \x01(\x0b\x32\x18.orchestrator.DataSampleH\x00\x42\x07\n\x05\x61sset\"3\n\x17GetTaskInputAssetsParam\x12\x18\n\x10\x63ompute_task_key\x18\x01 \x01(\t\"Q\n\x1aGetTaskInputAssetsResponse\x12\x33\n\x06\x61ssets\x18\x01 \x03(\x0b\x32#.orchestrator.ComputeTaskInputAsset\"B\n\x12\x44isableOutputParam\x12\x18\n\x10\x63ompute_task_key\x18\x01 \x01(\t\x12\x12\n\nidentifier\x18\x02 \x01(\t\"\x17\n\x15\x44isableOutputResponse*\x97\x01\n\x11\x43omputeTaskStatus\x12\x12\n\x0eSTATUS_UNKNOWN\x10\x00\x12\x12\n\x0eSTATUS_WAITING\x10\x01\x12\x0f\n\x0bSTATUS_TODO\x10\x02\x12\x10\n\x0cSTATUS_DOING\x10\x03\x12\x0f\n\x0bSTATUS_DONE\x10\x04\x12\x13\n\x0fSTATUS_CANCELED\x10\x05\x12\x11\n\rSTATUS_FAILED\x10\x06*\x8b\x01\n\x11\x43omputeTaskAction\x12\x17\n\x13TASK_ACTION_UNKNOWN\x10\x00\x12\x15\n\x11TASK_ACTION_DOING\x10\x01\x12\x18\n\x14TASK_ACTION_CANCELED\x10\x02\x12\x16\n\x12TASK_ACTION_FAILED\x10\x03\x12\x14\n\x10TASK_ACTION_DONE\x10\x04\x32\x9a\x04\n\x12\x43omputeTaskService\x12V\n\rRegisterTasks\x12 .orchestrator.RegisterTasksParam\x1a#.orchestrator.RegisterTasksResponse\x12M\n\nQueryTasks\x12\x1d.orchestrator.QueryTasksParam\x1a .orchestrator.QueryTasksResponse\x12@\n\x07GetTask\x12\x1a.orchestrator.GetTaskParam\x1a\x19.orchestrator.ComputeTask\x12\\\n\x0f\x41pplyTaskAction\x12\".orchestrator.ApplyTaskActionParam\x1a%.orchestrator.ApplyTaskActionResponse\x12\x65\n\x12GetTaskInputAssets\x12%.orchestrator.GetTaskInputAssetsParam\x1a(.orchestrator.GetTaskInputAssetsResponse\x12V\n\rDisableOutput\x12 .orchestrator.DisableOutputParam\x1a#.orchestrator.DisableOutputResponseB+Z)github.com/substra/orchestrator/lib/assetb\x06proto3') _COMPUTETASKSTATUS = DESCRIPTOR.enum_types_by_name['ComputeTaskStatus'] ComputeTaskStatus = enum_type_wrapper.EnumTypeWrapper(_COMPUTETASKSTATUS) @@ -249,10 +249,10 @@ _NEWCOMPUTETASK_METADATAENTRY._serialized_options = b'8\001' _NEWCOMPUTETASK_OUTPUTSENTRY._options = None _NEWCOMPUTETASK_OUTPUTSENTRY._serialized_options = b'8\001' - _COMPUTETASKSTATUS._serialized_start=2954 - _COMPUTETASKSTATUS._serialized_end=3105 - _COMPUTETASKACTION._serialized_start=3108 - _COMPUTETASKACTION._serialized_end=3247 + _COMPUTETASKSTATUS._serialized_start=2970 + _COMPUTETASKSTATUS._serialized_end=3121 + _COMPUTETASKACTION._serialized_start=3124 + _COMPUTETASKACTION._serialized_end=3263 _PARENTTASKOUTPUTREF._serialized_start=132 _PARENTTASKOUTPUTREF._serialized_end=205 _COMPUTETASKINPUT._serialized_start=208 @@ -262,45 +262,45 @@ _NEWCOMPUTETASKOUTPUT._serialized_start=429 _NEWCOMPUTETASKOUTPUT._serialized_end=521 _COMPUTETASK._serialized_start=524 - _COMPUTETASK._serialized_end=1216 - _COMPUTETASK_METADATAENTRY._serialized_start=956 - _COMPUTETASK_METADATAENTRY._serialized_end=1003 - _COMPUTETASK_OUTPUTSENTRY._serialized_start=1005 - _COMPUTETASK_OUTPUTSENTRY._serialized_end=1084 - _NEWCOMPUTETASK._serialized_start=1219 - _NEWCOMPUTETASK._serialized_end=1731 - _NEWCOMPUTETASK_METADATAENTRY._serialized_start=956 - _NEWCOMPUTETASK_METADATAENTRY._serialized_end=1003 - _NEWCOMPUTETASK_OUTPUTSENTRY._serialized_start=1529 - _NEWCOMPUTETASK_OUTPUTSENTRY._serialized_end=1611 - 
_REGISTERTASKSPARAM._serialized_start=1733 - _REGISTERTASKSPARAM._serialized_end=1798 - _REGISTERTASKSRESPONSE._serialized_start=1800 - _REGISTERTASKSRESPONSE._serialized_end=1865 - _TASKQUERYFILTER._serialized_start=1868 - _TASKQUERYFILTER._serialized_end=2010 - _QUERYTASKSPARAM._serialized_start=2012 - _QUERYTASKSPARAM._serialized_end=2115 - _QUERYTASKSRESPONSE._serialized_start=2117 - _QUERYTASKSRESPONSE._serialized_end=2204 - _GETTASKPARAM._serialized_start=2206 - _GETTASKPARAM._serialized_end=2233 - _COMPUTETASKOUTPUTASSET._serialized_start=2236 - _COMPUTETASKOUTPUTASSET._serialized_end=2390 - _APPLYTASKACTIONPARAM._serialized_start=2392 - _APPLYTASKACTIONPARAM._serialized_end=2502 - _APPLYTASKACTIONRESPONSE._serialized_start=2504 - _APPLYTASKACTIONRESPONSE._serialized_end=2529 - _COMPUTETASKINPUTASSET._serialized_start=2532 - _COMPUTETASKINPUTASSET._serialized_end=2722 - _GETTASKINPUTASSETSPARAM._serialized_start=2724 - _GETTASKINPUTASSETSPARAM._serialized_end=2775 - _GETTASKINPUTASSETSRESPONSE._serialized_start=2777 - _GETTASKINPUTASSETSRESPONSE._serialized_end=2858 - _DISABLEOUTPUTPARAM._serialized_start=2860 - _DISABLEOUTPUTPARAM._serialized_end=2926 - _DISABLEOUTPUTRESPONSE._serialized_start=2928 - _DISABLEOUTPUTRESPONSE._serialized_end=2951 - _COMPUTETASKSERVICE._serialized_start=3250 - _COMPUTETASKSERVICE._serialized_end=3788 + _COMPUTETASK._serialized_end=1224 + _COMPUTETASK_METADATAENTRY._serialized_start=960 + _COMPUTETASK_METADATAENTRY._serialized_end=1007 + _COMPUTETASK_OUTPUTSENTRY._serialized_start=1009 + _COMPUTETASK_OUTPUTSENTRY._serialized_end=1088 + _NEWCOMPUTETASK._serialized_start=1227 + _NEWCOMPUTETASK._serialized_end=1743 + _NEWCOMPUTETASK_METADATAENTRY._serialized_start=960 + _NEWCOMPUTETASK_METADATAENTRY._serialized_end=1007 + _NEWCOMPUTETASK_OUTPUTSENTRY._serialized_start=1541 + _NEWCOMPUTETASK_OUTPUTSENTRY._serialized_end=1623 + _REGISTERTASKSPARAM._serialized_start=1745 + _REGISTERTASKSPARAM._serialized_end=1810 + _REGISTERTASKSRESPONSE._serialized_start=1812 + _REGISTERTASKSRESPONSE._serialized_end=1877 + _TASKQUERYFILTER._serialized_start=1880 + _TASKQUERYFILTER._serialized_end=2026 + _QUERYTASKSPARAM._serialized_start=2028 + _QUERYTASKSPARAM._serialized_end=2131 + _QUERYTASKSRESPONSE._serialized_start=2133 + _QUERYTASKSRESPONSE._serialized_end=2220 + _GETTASKPARAM._serialized_start=2222 + _GETTASKPARAM._serialized_end=2249 + _COMPUTETASKOUTPUTASSET._serialized_start=2252 + _COMPUTETASKOUTPUTASSET._serialized_end=2406 + _APPLYTASKACTIONPARAM._serialized_start=2408 + _APPLYTASKACTIONPARAM._serialized_end=2518 + _APPLYTASKACTIONRESPONSE._serialized_start=2520 + _APPLYTASKACTIONRESPONSE._serialized_end=2545 + _COMPUTETASKINPUTASSET._serialized_start=2548 + _COMPUTETASKINPUTASSET._serialized_end=2738 + _GETTASKINPUTASSETSPARAM._serialized_start=2740 + _GETTASKINPUTASSETSPARAM._serialized_end=2791 + _GETTASKINPUTASSETSRESPONSE._serialized_start=2793 + _GETTASKINPUTASSETSRESPONSE._serialized_end=2874 + _DISABLEOUTPUTPARAM._serialized_start=2876 + _DISABLEOUTPUTPARAM._serialized_end=2942 + _DISABLEOUTPUTRESPONSE._serialized_start=2944 + _DISABLEOUTPUTRESPONSE._serialized_end=2967 + _COMPUTETASKSERVICE._serialized_start=3266 + _COMPUTETASKSERVICE._serialized_end=3804 # @@protoc_insertion_point(module_scope) diff --git a/backend/orchestrator/computetask_pb2.pyi b/backend/orchestrator/computetask_pb2.pyi index 123aad8cd..3659385d3 100644 --- a/backend/orchestrator/computetask_pb2.pyi +++ b/backend/orchestrator/computetask_pb2.pyi @@ -197,7 +197,7 @@ class 
ComputeTask(google.protobuf.message.Message): METADATA_FIELD_NUMBER: builtins.int INPUTS_FIELD_NUMBER: builtins.int OUTPUTS_FIELD_NUMBER: builtins.int - ALGO_KEY_FIELD_NUMBER: builtins.int + FUNCTION_KEY_FIELD_NUMBER: builtins.int key: builtins.str owner: builtins.str compute_plan_key: builtins.str @@ -215,7 +215,7 @@ class ComputeTask(google.protobuf.message.Message): def inputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ComputeTaskInput]: ... @property def outputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___ComputeTaskOutput]: ... - algo_key: builtins.str + function_key: builtins.str def __init__( self, *, @@ -230,10 +230,10 @@ class ComputeTask(google.protobuf.message.Message): metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., inputs: collections.abc.Iterable[global___ComputeTaskInput] | None = ..., outputs: collections.abc.Mapping[builtins.str, global___ComputeTaskOutput] | None = ..., - algo_key: builtins.str = ..., + function_key: builtins.str = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["creation_date", b"creation_date", "logs_permission", b"logs_permission"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["algo_key", b"algo_key", "compute_plan_key", b"compute_plan_key", "creation_date", b"creation_date", "inputs", b"inputs", "key", b"key", "logs_permission", b"logs_permission", "metadata", b"metadata", "outputs", b"outputs", "owner", b"owner", "rank", b"rank", "status", b"status", "worker", b"worker"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["compute_plan_key", b"compute_plan_key", "creation_date", b"creation_date", "function_key", b"function_key", "inputs", b"inputs", "key", b"key", "logs_permission", b"logs_permission", "metadata", b"metadata", "outputs", b"outputs", "owner", b"owner", "rank", b"rank", "status", b"status", "worker", b"worker"]) -> None: ... global___ComputeTask = ComputeTask @@ -273,14 +273,14 @@ class NewComputeTask(google.protobuf.message.Message): def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... KEY_FIELD_NUMBER: builtins.int - ALGO_KEY_FIELD_NUMBER: builtins.int + FUNCTION_KEY_FIELD_NUMBER: builtins.int COMPUTE_PLAN_KEY_FIELD_NUMBER: builtins.int WORKER_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int INPUTS_FIELD_NUMBER: builtins.int OUTPUTS_FIELD_NUMBER: builtins.int key: builtins.str - algo_key: builtins.str + function_key: builtins.str compute_plan_key: builtins.str worker: builtins.str @property @@ -293,14 +293,14 @@ class NewComputeTask(google.protobuf.message.Message): self, *, key: builtins.str = ..., - algo_key: builtins.str = ..., + function_key: builtins.str = ..., compute_plan_key: builtins.str = ..., worker: builtins.str = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., inputs: collections.abc.Iterable[global___ComputeTaskInput] | None = ..., outputs: collections.abc.Mapping[builtins.str, global___NewComputeTaskOutput] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["algo_key", b"algo_key", "compute_plan_key", b"compute_plan_key", "inputs", b"inputs", "key", b"key", "metadata", b"metadata", "outputs", b"outputs", "worker", b"worker"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["compute_plan_key", b"compute_plan_key", "function_key", b"function_key", "inputs", b"inputs", "key", b"key", "metadata", b"metadata", "outputs", b"outputs", "worker", b"worker"]) -> None: ... global___NewComputeTask = NewComputeTask @@ -340,20 +340,20 @@ class TaskQueryFilter(google.protobuf.message.Message): WORKER_FIELD_NUMBER: builtins.int STATUS_FIELD_NUMBER: builtins.int COMPUTE_PLAN_KEY_FIELD_NUMBER: builtins.int - ALGO_KEY_FIELD_NUMBER: builtins.int + FUNCTION_KEY_FIELD_NUMBER: builtins.int worker: builtins.str status: global___ComputeTaskStatus.ValueType compute_plan_key: builtins.str - algo_key: builtins.str + function_key: builtins.str def __init__( self, *, worker: builtins.str = ..., status: global___ComputeTaskStatus.ValueType = ..., compute_plan_key: builtins.str = ..., - algo_key: builtins.str = ..., + function_key: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["algo_key", b"algo_key", "compute_plan_key", b"compute_plan_key", "status", b"status", "worker", b"worker"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["compute_plan_key", b"compute_plan_key", "function_key", b"function_key", "status", b"status", "worker", b"worker"]) -> None: ... global___TaskQueryFilter = TaskQueryFilter diff --git a/backend/orchestrator/datasample_pb2.pyi b/backend/orchestrator/datasample_pb2.pyi index b20a3d684..f363c1ba2 100644 --- a/backend/orchestrator/datasample_pb2.pyi +++ b/backend/orchestrator/datasample_pb2.pyi @@ -18,8 +18,8 @@ else: DESCRIPTOR: google.protobuf.descriptor.FileDescriptor class DataSample(google.protobuf.message.Message): - """DataSample represent a data sample that will be processed by an - algorithm to produce or test a model. + """DataSample represents a data sample that will be processed by a + function to produce or test a model. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor diff --git a/backend/orchestrator/event_pb2.py b/backend/orchestrator/event_pb2.py index 07b821d86..caf45cd34 100644 --- a/backend/orchestrator/event_pb2.py +++ b/backend/orchestrator/event_pb2.py @@ -13,7 +13,7 @@ _sym_db = _symbol_database.Default() -from . import algo_pb2 as algo__pb2 +from . import function_pb2 as function__pb2 from . import common_pb2 as common__pb2 from . import computeplan_pb2 as computeplan__pb2 from .
import computetask_pb2 as computetask__pb2 @@ -26,7 +26,7 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x0corchestrator\x1a\nalgo.proto\x1a\x0c\x63ommon.proto\x1a\x11\x63omputeplan.proto\x1a\x11\x63omputetask.proto\x1a\x11\x64\x61tamanager.proto\x1a\x10\x64\x61tasample.proto\x1a\x14\x66\x61ilure_report.proto\x1a\x0bmodel.proto\x1a\x12organization.proto\x1a\x11performance.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xab\x06\n\x05\x45vent\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tasset_key\x18\x02 \x01(\t\x12+\n\nasset_kind\x18\x03 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12+\n\nevent_kind\x18\x04 \x01(\x0e\x32\x17.orchestrator.EventKind\x12\x0f\n\x07\x63hannel\x18\x05 \x01(\t\x12-\n\ttimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\"\n\x04\x61lgo\x18\x07 \x01(\x0b\x32\x12.orchestrator.AlgoH\x00\x12\x31\n\x0c\x63ompute_plan\x18\x08 \x01(\x0b\x32\x19.orchestrator.ComputePlanH\x00\x12\x31\n\x0c\x63ompute_task\x18\t \x01(\x0b\x32\x19.orchestrator.ComputeTaskH\x00\x12\x31\n\x0c\x64\x61ta_manager\x18\n \x01(\x0b\x32\x19.orchestrator.DataManagerH\x00\x12/\n\x0b\x64\x61ta_sample\x18\x0b \x01(\x0b\x32\x18.orchestrator.DataSampleH\x00\x12\x35\n\x0e\x66\x61ilure_report\x18\x0c \x01(\x0b\x32\x1b.orchestrator.FailureReportH\x00\x12$\n\x05model\x18\r \x01(\x0b\x32\x13.orchestrator.ModelH\x00\x12\x32\n\x0corganization\x18\x0e \x01(\x0b\x32\x1a.orchestrator.OrganizationH\x00\x12\x30\n\x0bperformance\x18\x0f \x01(\x0b\x32\x19.orchestrator.PerformanceH\x00\x12I\n\x19\x63ompute_task_output_asset\x18\x10 \x01(\x0b\x32$.orchestrator.ComputeTaskOutputAssetH\x00\x12\x33\n\x08metadata\x18\x12 \x03(\x0b\x32!.orchestrator.Event.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x07\n\x05\x61sset\"\x90\x01\n\x10QueryEventsParam\x12\x12\n\npage_token\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\r\x12.\n\x06\x66ilter\x18\x03 \x01(\x0b\x32\x1e.orchestrator.EventQueryFilter\x12%\n\x04sort\x18\x04 \x01(\x0e\x32\x17.orchestrator.SortOrder\"\xc4\x02\n\x10\x45ventQueryFilter\x12\x11\n\tasset_key\x18\x01 \x01(\t\x12+\n\nasset_kind\x18\x02 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12+\n\nevent_kind\x18\x03 \x01(\x0e\x32\x17.orchestrator.EventKind\x12>\n\x08metadata\x18\x04 \x03(\x0b\x32,.orchestrator.EventQueryFilter.MetadataEntry\x12)\n\x05start\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\x03\x65nd\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x13QueryEventsResponse\x12#\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x13.orchestrator.Event\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"0\n\x16SubscribeToEventsParam\x12\x16\n\x0estart_event_id\x18\x01 \x01(\t*j\n\tEventKind\x12\x11\n\rEVENT_UNKNOWN\x10\x00\x12\x17\n\x13\x45VENT_ASSET_CREATED\x10\x01\x12\x17\n\x13\x45VENT_ASSET_UPDATED\x10\x02\x12\x18\n\x14\x45VENT_ASSET_DISABLED\x10\x03\x32\xb2\x01\n\x0c\x45ventService\x12P\n\x0bQueryEvents\x12\x1e.orchestrator.QueryEventsParam\x1a!.orchestrator.QueryEventsResponse\x12P\n\x11SubscribeToEvents\x12$.orchestrator.SubscribeToEventsParam\x1a\x13.orchestrator.Event0\x01\x42+Z)github.com/substra/orchestrator/lib/assetb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x65vent.proto\x12\x0corchestrator\x1a\x0e\x66unction.proto\x1a\x0c\x63ommon.proto\x1a\x11\x63omputeplan.proto\x1a\x11\x63omputetask.proto\x1a\x11\x64\x61tamanager.proto\x1a\x10\x64\x61tasample.proto\x1a\x14\x66\x61ilure_report.proto\x1a\x0bmodel.proto\x1a\x12organization.proto\x1a\x11performance.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb3\x06\n\x05\x45vent\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tasset_key\x18\x02 \x01(\t\x12+\n\nasset_kind\x18\x03 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12+\n\nevent_kind\x18\x04 \x01(\x0e\x32\x17.orchestrator.EventKind\x12\x0f\n\x07\x63hannel\x18\x05 \x01(\t\x12-\n\ttimestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12*\n\x08\x66unction\x18\x07 \x01(\x0b\x32\x16.orchestrator.FunctionH\x00\x12\x31\n\x0c\x63ompute_plan\x18\x08 \x01(\x0b\x32\x19.orchestrator.ComputePlanH\x00\x12\x31\n\x0c\x63ompute_task\x18\t \x01(\x0b\x32\x19.orchestrator.ComputeTaskH\x00\x12\x31\n\x0c\x64\x61ta_manager\x18\n \x01(\x0b\x32\x19.orchestrator.DataManagerH\x00\x12/\n\x0b\x64\x61ta_sample\x18\x0b \x01(\x0b\x32\x18.orchestrator.DataSampleH\x00\x12\x35\n\x0e\x66\x61ilure_report\x18\x0c \x01(\x0b\x32\x1b.orchestrator.FailureReportH\x00\x12$\n\x05model\x18\r \x01(\x0b\x32\x13.orchestrator.ModelH\x00\x12\x32\n\x0corganization\x18\x0e \x01(\x0b\x32\x1a.orchestrator.OrganizationH\x00\x12\x30\n\x0bperformance\x18\x0f \x01(\x0b\x32\x19.orchestrator.PerformanceH\x00\x12I\n\x19\x63ompute_task_output_asset\x18\x10 \x01(\x0b\x32$.orchestrator.ComputeTaskOutputAssetH\x00\x12\x33\n\x08metadata\x18\x12 \x03(\x0b\x32!.orchestrator.Event.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x07\n\x05\x61sset\"\x90\x01\n\x10QueryEventsParam\x12\x12\n\npage_token\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\r\x12.\n\x06\x66ilter\x18\x03 \x01(\x0b\x32\x1e.orchestrator.EventQueryFilter\x12%\n\x04sort\x18\x04 \x01(\x0e\x32\x17.orchestrator.SortOrder\"\xc4\x02\n\x10\x45ventQueryFilter\x12\x11\n\tasset_key\x18\x01 \x01(\t\x12+\n\nasset_kind\x18\x02 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12+\n\nevent_kind\x18\x03 \x01(\x0e\x32\x17.orchestrator.EventKind\x12>\n\x08metadata\x18\x04 \x03(\x0b\x32,.orchestrator.EventQueryFilter.MetadataEntry\x12)\n\x05start\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\x03\x65nd\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x13QueryEventsResponse\x12#\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x13.orchestrator.Event\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"0\n\x16SubscribeToEventsParam\x12\x16\n\x0estart_event_id\x18\x01 \x01(\t*j\n\tEventKind\x12\x11\n\rEVENT_UNKNOWN\x10\x00\x12\x17\n\x13\x45VENT_ASSET_CREATED\x10\x01\x12\x17\n\x13\x45VENT_ASSET_UPDATED\x10\x02\x12\x18\n\x14\x45VENT_ASSET_DISABLED\x10\x03\x32\xb2\x01\n\x0c\x45ventService\x12P\n\x0bQueryEvents\x12\x1e.orchestrator.QueryEventsParam\x1a!.orchestrator.QueryEventsResponse\x12P\n\x11SubscribeToEvents\x12$.orchestrator.SubscribeToEventsParam\x1a\x13.orchestrator.Event0\x01\x42+Z)github.com/substra/orchestrator/lib/assetb\x06proto3') _EVENTKIND = DESCRIPTOR.enum_types_by_name['EventKind'] EventKind = enum_type_wrapper.EnumTypeWrapper(_EVENTKIND) @@ -103,22 +103,22 @@ _EVENT_METADATAENTRY._serialized_options = b'8\001' _EVENTQUERYFILTER_METADATAENTRY._options = None _EVENTQUERYFILTER_METADATAENTRY._serialized_options = b'8\001' - 
_EVENTKIND._serialized_start=1660 - _EVENTKIND._serialized_end=1766 - _EVENT._serialized_start=238 - _EVENT._serialized_end=1049 - _EVENT_METADATAENTRY._serialized_start=993 - _EVENT_METADATAENTRY._serialized_end=1040 - _QUERYEVENTSPARAM._serialized_start=1052 - _QUERYEVENTSPARAM._serialized_end=1196 - _EVENTQUERYFILTER._serialized_start=1199 - _EVENTQUERYFILTER._serialized_end=1523 - _EVENTQUERYFILTER_METADATAENTRY._serialized_start=993 - _EVENTQUERYFILTER_METADATAENTRY._serialized_end=1040 - _QUERYEVENTSRESPONSE._serialized_start=1525 - _QUERYEVENTSRESPONSE._serialized_end=1608 - _SUBSCRIBETOEVENTSPARAM._serialized_start=1610 - _SUBSCRIBETOEVENTSPARAM._serialized_end=1658 - _EVENTSERVICE._serialized_start=1769 - _EVENTSERVICE._serialized_end=1947 + _EVENTKIND._serialized_start=1672 + _EVENTKIND._serialized_end=1778 + _EVENT._serialized_start=242 + _EVENT._serialized_end=1061 + _EVENT_METADATAENTRY._serialized_start=1005 + _EVENT_METADATAENTRY._serialized_end=1052 + _QUERYEVENTSPARAM._serialized_start=1064 + _QUERYEVENTSPARAM._serialized_end=1208 + _EVENTQUERYFILTER._serialized_start=1211 + _EVENTQUERYFILTER._serialized_end=1535 + _EVENTQUERYFILTER_METADATAENTRY._serialized_start=1005 + _EVENTQUERYFILTER_METADATAENTRY._serialized_end=1052 + _QUERYEVENTSRESPONSE._serialized_start=1537 + _QUERYEVENTSRESPONSE._serialized_end=1620 + _SUBSCRIBETOEVENTSPARAM._serialized_start=1622 + _SUBSCRIBETOEVENTSPARAM._serialized_end=1670 + _EVENTSERVICE._serialized_start=1781 + _EVENTSERVICE._serialized_end=1959 # @@protoc_insertion_point(module_scope) diff --git a/backend/orchestrator/event_pb2.pyi b/backend/orchestrator/event_pb2.pyi index 1de0c1f79..58eea8760 100644 --- a/backend/orchestrator/event_pb2.pyi +++ b/backend/orchestrator/event_pb2.pyi @@ -2,7 +2,6 @@ @generated by mypy-protobuf. Do not edit manually! isort:skip_file """ -import algo_pb2 import builtins import collections.abc import common_pb2 @@ -11,6 +10,7 @@ import computetask_pb2 import datamanager_pb2 import datasample_pb2 import failure_report_pb2 +import function_pb2 import google.protobuf.descriptor import google.protobuf.internal.containers import google.protobuf.internal.enum_type_wrapper @@ -76,7 +76,7 @@ class Event(google.protobuf.message.Message): EVENT_KIND_FIELD_NUMBER: builtins.int CHANNEL_FIELD_NUMBER: builtins.int TIMESTAMP_FIELD_NUMBER: builtins.int - ALGO_FIELD_NUMBER: builtins.int + FUNCTION_FIELD_NUMBER: builtins.int COMPUTE_PLAN_FIELD_NUMBER: builtins.int COMPUTE_TASK_FIELD_NUMBER: builtins.int DATA_MANAGER_FIELD_NUMBER: builtins.int @@ -95,7 +95,7 @@ class Event(google.protobuf.message.Message): @property def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp: ... @property - def algo(self) -> algo_pb2.Algo: ... + def function(self) -> function_pb2.Function: ... @property def compute_plan(self) -> computeplan_pb2.ComputePlan: ... 
@property @@ -125,7 +125,7 @@ class Event(google.protobuf.message.Message): event_kind: global___EventKind.ValueType = ..., channel: builtins.str = ..., timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ..., - algo: algo_pb2.Algo | None = ..., + function: function_pb2.Function | None = ..., compute_plan: computeplan_pb2.ComputePlan | None = ..., compute_task: computetask_pb2.ComputeTask | None = ..., data_manager: datamanager_pb2.DataManager | None = ..., @@ -137,9 +137,9 @@ class Event(google.protobuf.message.Message): compute_task_output_asset: computetask_pb2.ComputeTaskOutputAsset | None = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["algo", b"algo", "asset", b"asset", "compute_plan", b"compute_plan", "compute_task", b"compute_task", "compute_task_output_asset", b"compute_task_output_asset", "data_manager", b"data_manager", "data_sample", b"data_sample", "failure_report", b"failure_report", "model", b"model", "organization", b"organization", "performance", b"performance", "timestamp", b"timestamp"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["algo", b"algo", "asset", b"asset", "asset_key", b"asset_key", "asset_kind", b"asset_kind", "channel", b"channel", "compute_plan", b"compute_plan", "compute_task", b"compute_task", "compute_task_output_asset", b"compute_task_output_asset", "data_manager", b"data_manager", "data_sample", b"data_sample", "event_kind", b"event_kind", "failure_report", b"failure_report", "id", b"id", "metadata", b"metadata", "model", b"model", "organization", b"organization", "performance", b"performance", "timestamp", b"timestamp"]) -> None: ... - def WhichOneof(self, oneof_group: typing_extensions.Literal["asset", b"asset"]) -> typing_extensions.Literal["algo", "compute_plan", "compute_task", "data_manager", "data_sample", "failure_report", "model", "organization", "performance", "compute_task_output_asset"] | None: ... + def HasField(self, field_name: typing_extensions.Literal["asset", b"asset", "compute_plan", b"compute_plan", "compute_task", b"compute_task", "compute_task_output_asset", b"compute_task_output_asset", "data_manager", b"data_manager", "data_sample", b"data_sample", "failure_report", b"failure_report", "function", b"function", "model", b"model", "organization", b"organization", "performance", b"performance", "timestamp", b"timestamp"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["asset", b"asset", "asset_key", b"asset_key", "asset_kind", b"asset_kind", "channel", b"channel", "compute_plan", b"compute_plan", "compute_task", b"compute_task", "compute_task_output_asset", b"compute_task_output_asset", "data_manager", b"data_manager", "data_sample", b"data_sample", "event_kind", b"event_kind", "failure_report", b"failure_report", "function", b"function", "id", b"id", "metadata", b"metadata", "model", b"model", "organization", b"organization", "performance", b"performance", "timestamp", b"timestamp"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["asset", b"asset"]) -> typing_extensions.Literal["function", "compute_plan", "compute_task", "data_manager", "data_sample", "failure_report", "model", "organization", "performance", "compute_task_output_asset"] | None: ... 
global___Event = Event diff --git a/backend/orchestrator/failure_report_pb2.pyi b/backend/orchestrator/failure_report_pb2.pyi index 93ffb5dc3..3410d46ee 100644 --- a/backend/orchestrator/failure_report_pb2.pyi +++ b/backend/orchestrator/failure_report_pb2.pyi @@ -30,7 +30,7 @@ class _ErrorTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._Enum It is likely to be caused by the user's code. """ ERROR_TYPE_EXECUTION: _ErrorType.ValueType # 2 - """An ERROR_TYPE_EXECUTION is an error occurring during the execution of the algo/metric container. + """An ERROR_TYPE_EXECUTION is an error occurring during the execution of the function/metric container. It is likely to be caused by the user's code. """ ERROR_TYPE_INTERNAL: _ErrorType.ValueType # 3 @@ -47,7 +47,7 @@ ERROR_TYPE_BUILD: ErrorType.ValueType # 1 It is likely to be caused by the user's code. """ ERROR_TYPE_EXECUTION: ErrorType.ValueType # 2 -"""An ERROR_TYPE_EXECUTION is an error occurring during the execution of the algo/metric container. +"""An ERROR_TYPE_EXECUTION is an error occurring during the execution of the function/metric container. It is likely to be caused by the user's code. """ ERROR_TYPE_INTERNAL: ErrorType.ValueType # 3 diff --git a/backend/orchestrator/function_pb2.py b/backend/orchestrator/function_pb2.py new file mode 100644 index 000000000..b031c89d5 --- /dev/null +++ b/backend/orchestrator/function_pb2.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: function.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from . 
import common_pb2 as common__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0e\x66unction.proto\x12\x0corchestrator\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x0c\x63ommon.proto\"Z\n\rFunctionInput\x12%\n\x04kind\x18\x01 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12\x10\n\x08multiple\x18\x02 \x01(\x08\x12\x10\n\x08optional\x18\x03 \x01(\x08\"I\n\x0e\x46unctionOutput\x12%\n\x04kind\x18\x01 \x01(\x0e\x32\x17.orchestrator.AssetKind\x12\x10\n\x08multiple\x18\x02 \x01(\x08\"\xf1\x04\n\x08\x46unction\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12.\n\x0b\x64\x65scription\x18\x04 \x01(\x0b\x32\x19.orchestrator.Addressable\x12+\n\x08\x66unction\x18\x05 \x01(\x0b\x32\x19.orchestrator.Addressable\x12.\n\x0bpermissions\x18\x06 \x01(\x0b\x32\x19.orchestrator.Permissions\x12\r\n\x05owner\x18\x07 \x01(\t\x12\x31\n\rcreation_date\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x08metadata\x18\x10 \x03(\x0b\x32$.orchestrator.Function.MetadataEntry\x12\x32\n\x06inputs\x18\x11 \x03(\x0b\x32\".orchestrator.Function.InputsEntry\x12\x34\n\x07outputs\x18\x12 \x03(\x0b\x32#.orchestrator.Function.OutputsEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aJ\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.orchestrator.FunctionInput:\x02\x38\x01\x1aL\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.orchestrator.FunctionOutput:\x02\x38\x01J\x04\x08\x03\x10\x04R\x08\x63\x61tegory\"\xc2\x04\n\x0bNewFunction\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12.\n\x0b\x64\x65scription\x18\x04 \x01(\x0b\x32\x19.orchestrator.Addressable\x12+\n\x08\x66unction\x18\x05 \x01(\x0b\x32\x19.orchestrator.Addressable\x12\x35\n\x0fnew_permissions\x18\x06 \x01(\x0b\x32\x1c.orchestrator.NewPermissions\x12\x39\n\x08metadata\x18\x11 \x03(\x0b\x32\'.orchestrator.NewFunction.MetadataEntry\x12\x35\n\x06inputs\x18\x12 \x03(\x0b\x32%.orchestrator.NewFunction.InputsEntry\x12\x37\n\x07outputs\x18\x13 \x03(\x0b\x32&.orchestrator.NewFunction.OutputsEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aJ\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.orchestrator.FunctionInput:\x02\x38\x01\x1aL\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.orchestrator.FunctionOutput:\x02\x38\x01J\x04\x08\x03\x10\x04R\x08\x63\x61tegory\"\x1f\n\x10GetFunctionParam\x12\x0b\n\x03key\x18\x01 \x01(\t\"\\\n\x16QueryFunctionsResponse\x12)\n\tFunctions\x18\x01 \x03(\x0b\x32\x16.orchestrator.Function\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"/\n\x13\x46unctionQueryFilter\x12\x18\n\x10\x63ompute_plan_key\x18\x02 \x01(\t\"o\n\x13QueryFunctionsParam\x12\x12\n\npage_token\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\r\x12\x31\n\x06\x66ilter\x18\x03 \x01(\x0b\x32!.orchestrator.FunctionQueryFilter\"0\n\x13UpdateFunctionParam\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 
\x01(\t\"\x18\n\x16UpdateFunctionResponse2\xd5\x02\n\x0f\x46unctionService\x12\x45\n\x10RegisterFunction\x12\x19.orchestrator.NewFunction\x1a\x16.orchestrator.Function\x12\x45\n\x0bGetFunction\x12\x1e.orchestrator.GetFunctionParam\x1a\x16.orchestrator.Function\x12Y\n\x0eQueryFunctions\x12!.orchestrator.QueryFunctionsParam\x1a$.orchestrator.QueryFunctionsResponse\x12Y\n\x0eUpdateFunction\x12!.orchestrator.UpdateFunctionParam\x1a$.orchestrator.UpdateFunctionResponseB+Z)github.com/substra/orchestrator/lib/assetb\x06proto3') + + + +_FUNCTIONINPUT = DESCRIPTOR.message_types_by_name['FunctionInput'] +_FUNCTIONOUTPUT = DESCRIPTOR.message_types_by_name['FunctionOutput'] +_FUNCTION = DESCRIPTOR.message_types_by_name['Function'] +_FUNCTION_METADATAENTRY = _FUNCTION.nested_types_by_name['MetadataEntry'] +_FUNCTION_INPUTSENTRY = _FUNCTION.nested_types_by_name['InputsEntry'] +_FUNCTION_OUTPUTSENTRY = _FUNCTION.nested_types_by_name['OutputsEntry'] +_NEWFUNCTION = DESCRIPTOR.message_types_by_name['NewFunction'] +_NEWFUNCTION_METADATAENTRY = _NEWFUNCTION.nested_types_by_name['MetadataEntry'] +_NEWFUNCTION_INPUTSENTRY = _NEWFUNCTION.nested_types_by_name['InputsEntry'] +_NEWFUNCTION_OUTPUTSENTRY = _NEWFUNCTION.nested_types_by_name['OutputsEntry'] +_GETFUNCTIONPARAM = DESCRIPTOR.message_types_by_name['GetFunctionParam'] +_QUERYFUNCTIONSRESPONSE = DESCRIPTOR.message_types_by_name['QueryFunctionsResponse'] +_FUNCTIONQUERYFILTER = DESCRIPTOR.message_types_by_name['FunctionQueryFilter'] +_QUERYFUNCTIONSPARAM = DESCRIPTOR.message_types_by_name['QueryFunctionsParam'] +_UPDATEFUNCTIONPARAM = DESCRIPTOR.message_types_by_name['UpdateFunctionParam'] +_UPDATEFUNCTIONRESPONSE = DESCRIPTOR.message_types_by_name['UpdateFunctionResponse'] +FunctionInput = _reflection.GeneratedProtocolMessageType('FunctionInput', (_message.Message,), { + 'DESCRIPTOR' : _FUNCTIONINPUT, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.FunctionInput) + }) +_sym_db.RegisterMessage(FunctionInput) + +FunctionOutput = _reflection.GeneratedProtocolMessageType('FunctionOutput', (_message.Message,), { + 'DESCRIPTOR' : _FUNCTIONOUTPUT, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.FunctionOutput) + }) +_sym_db.RegisterMessage(FunctionOutput) + +Function = _reflection.GeneratedProtocolMessageType('Function', (_message.Message,), { + + 'MetadataEntry' : _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), { + 'DESCRIPTOR' : _FUNCTION_METADATAENTRY, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.Function.MetadataEntry) + }) + , + + 'InputsEntry' : _reflection.GeneratedProtocolMessageType('InputsEntry', (_message.Message,), { + 'DESCRIPTOR' : _FUNCTION_INPUTSENTRY, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.Function.InputsEntry) + }) + , + + 'OutputsEntry' : _reflection.GeneratedProtocolMessageType('OutputsEntry', (_message.Message,), { + 'DESCRIPTOR' : _FUNCTION_OUTPUTSENTRY, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.Function.OutputsEntry) + }) + , + 'DESCRIPTOR' : _FUNCTION, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.Function) + }) +_sym_db.RegisterMessage(Function) +_sym_db.RegisterMessage(Function.MetadataEntry) +_sym_db.RegisterMessage(Function.InputsEntry) +_sym_db.RegisterMessage(Function.OutputsEntry) + +NewFunction = _reflection.GeneratedProtocolMessageType('NewFunction', 
(_message.Message,), { + + 'MetadataEntry' : _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), { + 'DESCRIPTOR' : _NEWFUNCTION_METADATAENTRY, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.NewFunction.MetadataEntry) + }) + , + + 'InputsEntry' : _reflection.GeneratedProtocolMessageType('InputsEntry', (_message.Message,), { + 'DESCRIPTOR' : _NEWFUNCTION_INPUTSENTRY, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.NewFunction.InputsEntry) + }) + , + + 'OutputsEntry' : _reflection.GeneratedProtocolMessageType('OutputsEntry', (_message.Message,), { + 'DESCRIPTOR' : _NEWFUNCTION_OUTPUTSENTRY, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.NewFunction.OutputsEntry) + }) + , + 'DESCRIPTOR' : _NEWFUNCTION, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.NewFunction) + }) +_sym_db.RegisterMessage(NewFunction) +_sym_db.RegisterMessage(NewFunction.MetadataEntry) +_sym_db.RegisterMessage(NewFunction.InputsEntry) +_sym_db.RegisterMessage(NewFunction.OutputsEntry) + +GetFunctionParam = _reflection.GeneratedProtocolMessageType('GetFunctionParam', (_message.Message,), { + 'DESCRIPTOR' : _GETFUNCTIONPARAM, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.GetFunctionParam) + }) +_sym_db.RegisterMessage(GetFunctionParam) + +QueryFunctionsResponse = _reflection.GeneratedProtocolMessageType('QueryFunctionsResponse', (_message.Message,), { + 'DESCRIPTOR' : _QUERYFUNCTIONSRESPONSE, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.QueryFunctionsResponse) + }) +_sym_db.RegisterMessage(QueryFunctionsResponse) + +FunctionQueryFilter = _reflection.GeneratedProtocolMessageType('FunctionQueryFilter', (_message.Message,), { + 'DESCRIPTOR' : _FUNCTIONQUERYFILTER, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.FunctionQueryFilter) + }) +_sym_db.RegisterMessage(FunctionQueryFilter) + +QueryFunctionsParam = _reflection.GeneratedProtocolMessageType('QueryFunctionsParam', (_message.Message,), { + 'DESCRIPTOR' : _QUERYFUNCTIONSPARAM, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.QueryFunctionsParam) + }) +_sym_db.RegisterMessage(QueryFunctionsParam) + +UpdateFunctionParam = _reflection.GeneratedProtocolMessageType('UpdateFunctionParam', (_message.Message,), { + 'DESCRIPTOR' : _UPDATEFUNCTIONPARAM, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.UpdateFunctionParam) + }) +_sym_db.RegisterMessage(UpdateFunctionParam) + +UpdateFunctionResponse = _reflection.GeneratedProtocolMessageType('UpdateFunctionResponse', (_message.Message,), { + 'DESCRIPTOR' : _UPDATEFUNCTIONRESPONSE, + '__module__' : 'function_pb2' + # @@protoc_insertion_point(class_scope:orchestrator.UpdateFunctionResponse) + }) +_sym_db.RegisterMessage(UpdateFunctionResponse) + +_FUNCTIONSERVICE = DESCRIPTOR.services_by_name['FunctionService'] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'Z)github.com/substra/orchestrator/lib/asset' + _FUNCTION_METADATAENTRY._options = None + _FUNCTION_METADATAENTRY._serialized_options = b'8\001' + _FUNCTION_INPUTSENTRY._options = None + _FUNCTION_INPUTSENTRY._serialized_options = b'8\001' + _FUNCTION_OUTPUTSENTRY._options = None + _FUNCTION_OUTPUTSENTRY._serialized_options = b'8\001' + 
_NEWFUNCTION_METADATAENTRY._options = None + _NEWFUNCTION_METADATAENTRY._serialized_options = b'8\001' + _NEWFUNCTION_INPUTSENTRY._options = None + _NEWFUNCTION_INPUTSENTRY._serialized_options = b'8\001' + _NEWFUNCTION_OUTPUTSENTRY._options = None + _NEWFUNCTION_OUTPUTSENTRY._serialized_options = b'8\001' + _FUNCTIONINPUT._serialized_start=79 + _FUNCTIONINPUT._serialized_end=169 + _FUNCTIONOUTPUT._serialized_start=171 + _FUNCTIONOUTPUT._serialized_end=244 + _FUNCTION._serialized_start=247 + _FUNCTION._serialized_end=872 + _FUNCTION_METADATAENTRY._serialized_start=655 + _FUNCTION_METADATAENTRY._serialized_end=702 + _FUNCTION_INPUTSENTRY._serialized_start=704 + _FUNCTION_INPUTSENTRY._serialized_end=778 + _FUNCTION_OUTPUTSENTRY._serialized_start=780 + _FUNCTION_OUTPUTSENTRY._serialized_end=856 + _NEWFUNCTION._serialized_start=875 + _NEWFUNCTION._serialized_end=1453 + _NEWFUNCTION_METADATAENTRY._serialized_start=655 + _NEWFUNCTION_METADATAENTRY._serialized_end=702 + _NEWFUNCTION_INPUTSENTRY._serialized_start=704 + _NEWFUNCTION_INPUTSENTRY._serialized_end=778 + _NEWFUNCTION_OUTPUTSENTRY._serialized_start=780 + _NEWFUNCTION_OUTPUTSENTRY._serialized_end=856 + _GETFUNCTIONPARAM._serialized_start=1455 + _GETFUNCTIONPARAM._serialized_end=1486 + _QUERYFUNCTIONSRESPONSE._serialized_start=1488 + _QUERYFUNCTIONSRESPONSE._serialized_end=1580 + _FUNCTIONQUERYFILTER._serialized_start=1582 + _FUNCTIONQUERYFILTER._serialized_end=1629 + _QUERYFUNCTIONSPARAM._serialized_start=1631 + _QUERYFUNCTIONSPARAM._serialized_end=1742 + _UPDATEFUNCTIONPARAM._serialized_start=1744 + _UPDATEFUNCTIONPARAM._serialized_end=1792 + _UPDATEFUNCTIONRESPONSE._serialized_start=1794 + _UPDATEFUNCTIONRESPONSE._serialized_end=1818 + _FUNCTIONSERVICE._serialized_start=1821 + _FUNCTIONSERVICE._serialized_end=2162 +# @@protoc_insertion_point(module_scope) diff --git a/backend/orchestrator/algo_pb2.pyi b/backend/orchestrator/function_pb2.pyi similarity index 73% rename from backend/orchestrator/algo_pb2.pyi rename to backend/orchestrator/function_pb2.pyi index b1bce6cfc..9a2efa0a6 100644 --- a/backend/orchestrator/algo_pb2.pyi +++ b/backend/orchestrator/function_pb2.pyi @@ -18,7 +18,7 @@ else: DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -class AlgoInput(google.protobuf.message.Message): +class FunctionInput(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KIND_FIELD_NUMBER: builtins.int @@ -36,9 +36,9 @@ class AlgoInput(google.protobuf.message.Message): ) -> None: ... def ClearField(self, field_name: typing_extensions.Literal["kind", b"kind", "multiple", b"multiple", "optional", b"optional"]) -> None: ... -global___AlgoInput = AlgoInput +global___FunctionInput = FunctionInput -class AlgoOutput(google.protobuf.message.Message): +class FunctionOutput(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KIND_FIELD_NUMBER: builtins.int @@ -53,10 +53,10 @@ class AlgoOutput(google.protobuf.message.Message): ) -> None: ... def ClearField(self, field_name: typing_extensions.Literal["kind", b"kind", "multiple", b"multiple"]) -> None: ... -global___AlgoOutput = AlgoOutput +global___FunctionOutput = FunctionOutput -class Algo(google.protobuf.message.Message): - """Algo represents the algorithm code which will be used +class Function(google.protobuf.message.Message): + """Function represents the code which will be used to produce or test a model. 
""" @@ -84,12 +84,12 @@ class Algo(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str @property - def value(self) -> global___AlgoInput: ... + def value(self) -> global___FunctionInput: ... def __init__( self, *, key: builtins.str = ..., - value: global___AlgoInput | None = ..., + value: global___FunctionInput | None = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -101,12 +101,12 @@ class Algo(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str @property - def value(self) -> global___AlgoOutput: ... + def value(self) -> global___FunctionOutput: ... def __init__( self, *, key: builtins.str = ..., - value: global___AlgoOutput | None = ..., + value: global___FunctionOutput | None = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -114,7 +114,7 @@ class Algo(google.protobuf.message.Message): KEY_FIELD_NUMBER: builtins.int NAME_FIELD_NUMBER: builtins.int DESCRIPTION_FIELD_NUMBER: builtins.int - ALGORITHM_FIELD_NUMBER: builtins.int + FUNCTION_FIELD_NUMBER: builtins.int PERMISSIONS_FIELD_NUMBER: builtins.int OWNER_FIELD_NUMBER: builtins.int CREATION_DATE_FIELD_NUMBER: builtins.int @@ -126,7 +126,7 @@ class Algo(google.protobuf.message.Message): @property def description(self) -> common_pb2.Addressable: ... @property - def algorithm(self) -> common_pb2.Addressable: ... + def function(self) -> common_pb2.Addressable: ... @property def permissions(self) -> common_pb2.Permissions: ... owner: builtins.str @@ -135,31 +135,31 @@ class Algo(google.protobuf.message.Message): @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... @property - def inputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___AlgoInput]: ... + def inputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FunctionInput]: ... @property - def outputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___AlgoOutput]: ... + def outputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FunctionOutput]: ... def __init__( self, *, key: builtins.str = ..., name: builtins.str = ..., description: common_pb2.Addressable | None = ..., - algorithm: common_pb2.Addressable | None = ..., + function: common_pb2.Addressable | None = ..., permissions: common_pb2.Permissions | None = ..., owner: builtins.str = ..., creation_date: google.protobuf.timestamp_pb2.Timestamp | None = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., - inputs: collections.abc.Mapping[builtins.str, global___AlgoInput] | None = ..., - outputs: collections.abc.Mapping[builtins.str, global___AlgoOutput] | None = ..., + inputs: collections.abc.Mapping[builtins.str, global___FunctionInput] | None = ..., + outputs: collections.abc.Mapping[builtins.str, global___FunctionOutput] | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "creation_date", b"creation_date", "description", b"description", "permissions", b"permissions"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "creation_date", b"creation_date", "description", b"description", "inputs", b"inputs", "key", b"key", "metadata", b"metadata", "name", b"name", "outputs", b"outputs", "owner", b"owner", "permissions", b"permissions"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["creation_date", b"creation_date", "description", b"description", "function", b"function", "permissions", b"permissions"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["creation_date", b"creation_date", "description", b"description", "function", b"function", "inputs", b"inputs", "key", b"key", "metadata", b"metadata", "name", b"name", "outputs", b"outputs", "owner", b"owner", "permissions", b"permissions"]) -> None: ... -global___Algo = Algo +global___Function = Function -class NewAlgo(google.protobuf.message.Message): - """NewAlgo is used to register an Algo. - It will be processed into an Algo. +class NewFunction(google.protobuf.message.Message): + """NewFunction is used to register a Function. + It will be processed into a Function. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -186,12 +186,12 @@ class NewAlgo(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str @property - def value(self) -> global___AlgoInput: ... + def value(self) -> global___FunctionInput: ... def __init__( self, *, key: builtins.str = ..., - value: global___AlgoInput | None = ..., + value: global___FunctionInput | None = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -203,12 +203,12 @@ class NewAlgo(google.protobuf.message.Message): VALUE_FIELD_NUMBER: builtins.int key: builtins.str @property - def value(self) -> global___AlgoOutput: ... + def value(self) -> global___FunctionOutput: ... def __init__( self, *, key: builtins.str = ..., - value: global___AlgoOutput | None = ..., + value: global___FunctionOutput | None = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... @@ -216,7 +216,7 @@ class NewAlgo(google.protobuf.message.Message): KEY_FIELD_NUMBER: builtins.int NAME_FIELD_NUMBER: builtins.int DESCRIPTION_FIELD_NUMBER: builtins.int - ALGORITHM_FIELD_NUMBER: builtins.int + FUNCTION_FIELD_NUMBER: builtins.int NEW_PERMISSIONS_FIELD_NUMBER: builtins.int METADATA_FIELD_NUMBER: builtins.int INPUTS_FIELD_NUMBER: builtins.int @@ -226,33 +226,33 @@ class NewAlgo(google.protobuf.message.Message): @property def description(self) -> common_pb2.Addressable: ... @property - def algorithm(self) -> common_pb2.Addressable: ... + def function(self) -> common_pb2.Addressable: ... @property def new_permissions(self) -> common_pb2.NewPermissions: ... @property def metadata(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... @property - def inputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___AlgoInput]: ... + def inputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FunctionInput]: ... @property - def outputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___AlgoOutput]: ...
+ def outputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FunctionOutput]: ... def __init__( self, *, key: builtins.str = ..., name: builtins.str = ..., description: common_pb2.Addressable | None = ..., - algorithm: common_pb2.Addressable | None = ..., + function: common_pb2.Addressable | None = ..., new_permissions: common_pb2.NewPermissions | None = ..., metadata: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., - inputs: collections.abc.Mapping[builtins.str, global___AlgoInput] | None = ..., - outputs: collections.abc.Mapping[builtins.str, global___AlgoOutput] | None = ..., + inputs: collections.abc.Mapping[builtins.str, global___FunctionInput] | None = ..., + outputs: collections.abc.Mapping[builtins.str, global___FunctionOutput] | None = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "description", b"description", "new_permissions", b"new_permissions"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "description", b"description", "inputs", b"inputs", "key", b"key", "metadata", b"metadata", "name", b"name", "new_permissions", b"new_permissions", "outputs", b"outputs"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["description", b"description", "function", b"function", "new_permissions", b"new_permissions"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "function", b"function", "inputs", b"inputs", "key", b"key", "metadata", b"metadata", "name", b"name", "new_permissions", b"new_permissions", "outputs", b"outputs"]) -> None: ... -global___NewAlgo = NewAlgo +global___NewFunction = NewFunction -class GetAlgoParam(google.protobuf.message.Message): +class GetFunctionParam(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KEY_FIELD_NUMBER: builtins.int @@ -264,27 +264,27 @@ class GetAlgoParam(google.protobuf.message.Message): ) -> None: ... def ClearField(self, field_name: typing_extensions.Literal["key", b"key"]) -> None: ... -global___GetAlgoParam = GetAlgoParam +global___GetFunctionParam = GetFunctionParam -class QueryAlgosResponse(google.protobuf.message.Message): +class QueryFunctionsResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor - ALGOS_FIELD_NUMBER: builtins.int + FUNCTIONS_FIELD_NUMBER: builtins.int NEXT_PAGE_TOKEN_FIELD_NUMBER: builtins.int @property - def Algos(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Algo]: ... + def Functions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Function]: ... next_page_token: builtins.str def __init__( self, *, - Algos: collections.abc.Iterable[global___Algo] | None = ..., + Functions: collections.abc.Iterable[global___Function] | None = ..., next_page_token: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal["Algos", b"Algos", "next_page_token", b"next_page_token"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["Functions", b"Functions", "next_page_token", b"next_page_token"]) -> None: ... 
-global___QueryAlgosResponse = QueryAlgosResponse +global___QueryFunctionsResponse = QueryFunctionsResponse -class AlgoQueryFilter(google.protobuf.message.Message): +class FunctionQueryFilter(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor COMPUTE_PLAN_KEY_FIELD_NUMBER: builtins.int @@ -296,9 +296,9 @@ class AlgoQueryFilter(google.protobuf.message.Message): ) -> None: ... def ClearField(self, field_name: typing_extensions.Literal["compute_plan_key", b"compute_plan_key"]) -> None: ... -global___AlgoQueryFilter = AlgoQueryFilter +global___FunctionQueryFilter = FunctionQueryFilter -class QueryAlgosParam(google.protobuf.message.Message): +class QueryFunctionsParam(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor PAGE_TOKEN_FIELD_NUMBER: builtins.int @@ -307,20 +307,20 @@ class QueryAlgosParam(google.protobuf.message.Message): page_token: builtins.str page_size: builtins.int @property - def filter(self) -> global___AlgoQueryFilter: ... + def filter(self) -> global___FunctionQueryFilter: ... def __init__( self, *, page_token: builtins.str = ..., page_size: builtins.int = ..., - filter: global___AlgoQueryFilter | None = ..., + filter: global___FunctionQueryFilter | None = ..., ) -> None: ... def HasField(self, field_name: typing_extensions.Literal["filter", b"filter"]) -> builtins.bool: ... def ClearField(self, field_name: typing_extensions.Literal["filter", b"filter", "page_size", b"page_size", "page_token", b"page_token"]) -> None: ... -global___QueryAlgosParam = QueryAlgosParam +global___QueryFunctionsParam = QueryFunctionsParam -class UpdateAlgoParam(google.protobuf.message.Message): +class UpdateFunctionParam(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor KEY_FIELD_NUMBER: builtins.int @@ -335,13 +335,13 @@ class UpdateAlgoParam(google.protobuf.message.Message): ) -> None: ... def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "name", b"name"]) -> None: ... -global___UpdateAlgoParam = UpdateAlgoParam +global___UpdateFunctionParam = UpdateFunctionParam -class UpdateAlgoResponse(google.protobuf.message.Message): +class UpdateFunctionResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor def __init__( self, ) -> None: ... -global___UpdateAlgoResponse = UpdateAlgoResponse +global___UpdateFunctionResponse = UpdateFunctionResponse diff --git a/backend/orchestrator/algo_pb2_grpc.py b/backend/orchestrator/function_pb2_grpc.py similarity index 51% rename from backend/orchestrator/algo_pb2_grpc.py rename to backend/orchestrator/function_pb2_grpc.py index 001bf10df..17b7046f6 100644 --- a/backend/orchestrator/algo_pb2_grpc.py +++ b/backend/orchestrator/function_pb2_grpc.py @@ -2,10 +2,10 @@ """Client and server classes corresponding to protobuf-defined services.""" import grpc -from . import algo_pb2 as algo__pb2 +from . import function_pb2 as function__pb2 -class AlgoServiceStub(object): +class FunctionServiceStub(object): """Missing associated documentation comment in .proto file.""" def __init__(self, channel): @@ -14,90 +14,90 @@ def __init__(self, channel): Args: channel: A grpc.Channel. 
""" - self.RegisterAlgo = channel.unary_unary( - '/orchestrator.AlgoService/RegisterAlgo', - request_serializer=algo__pb2.NewAlgo.SerializeToString, - response_deserializer=algo__pb2.Algo.FromString, + self.RegisterFunction = channel.unary_unary( + '/orchestrator.FunctionService/RegisterFunction', + request_serializer=function__pb2.NewFunction.SerializeToString, + response_deserializer=function__pb2.Function.FromString, ) - self.GetAlgo = channel.unary_unary( - '/orchestrator.AlgoService/GetAlgo', - request_serializer=algo__pb2.GetAlgoParam.SerializeToString, - response_deserializer=algo__pb2.Algo.FromString, + self.GetFunction = channel.unary_unary( + '/orchestrator.FunctionService/GetFunction', + request_serializer=function__pb2.GetFunctionParam.SerializeToString, + response_deserializer=function__pb2.Function.FromString, ) - self.QueryAlgos = channel.unary_unary( - '/orchestrator.AlgoService/QueryAlgos', - request_serializer=algo__pb2.QueryAlgosParam.SerializeToString, - response_deserializer=algo__pb2.QueryAlgosResponse.FromString, + self.QueryFunctions = channel.unary_unary( + '/orchestrator.FunctionService/QueryFunctions', + request_serializer=function__pb2.QueryFunctionsParam.SerializeToString, + response_deserializer=function__pb2.QueryFunctionsResponse.FromString, ) - self.UpdateAlgo = channel.unary_unary( - '/orchestrator.AlgoService/UpdateAlgo', - request_serializer=algo__pb2.UpdateAlgoParam.SerializeToString, - response_deserializer=algo__pb2.UpdateAlgoResponse.FromString, + self.UpdateFunction = channel.unary_unary( + '/orchestrator.FunctionService/UpdateFunction', + request_serializer=function__pb2.UpdateFunctionParam.SerializeToString, + response_deserializer=function__pb2.UpdateFunctionResponse.FromString, ) -class AlgoServiceServicer(object): +class FunctionServiceServicer(object): """Missing associated documentation comment in .proto file.""" - def RegisterAlgo(self, request, context): + def RegisterFunction(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def GetAlgo(self, request, context): + def GetFunction(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def QueryAlgos(self, request, context): + def QueryFunctions(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') - def UpdateAlgo(self, request, context): + def UpdateFunction(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') -def add_AlgoServiceServicer_to_server(servicer, server): +def add_FunctionServiceServicer_to_server(servicer, server): rpc_method_handlers = { - 'RegisterAlgo': grpc.unary_unary_rpc_method_handler( - servicer.RegisterAlgo, - request_deserializer=algo__pb2.NewAlgo.FromString, - response_serializer=algo__pb2.Algo.SerializeToString, + 'RegisterFunction': grpc.unary_unary_rpc_method_handler( + servicer.RegisterFunction, + 
request_deserializer=function__pb2.NewFunction.FromString, + response_serializer=function__pb2.Function.SerializeToString, ), - 'GetAlgo': grpc.unary_unary_rpc_method_handler( - servicer.GetAlgo, - request_deserializer=algo__pb2.GetAlgoParam.FromString, - response_serializer=algo__pb2.Algo.SerializeToString, + 'GetFunction': grpc.unary_unary_rpc_method_handler( + servicer.GetFunction, + request_deserializer=function__pb2.GetFunctionParam.FromString, + response_serializer=function__pb2.Function.SerializeToString, ), - 'QueryAlgos': grpc.unary_unary_rpc_method_handler( - servicer.QueryAlgos, - request_deserializer=algo__pb2.QueryAlgosParam.FromString, - response_serializer=algo__pb2.QueryAlgosResponse.SerializeToString, + 'QueryFunctions': grpc.unary_unary_rpc_method_handler( + servicer.QueryFunctions, + request_deserializer=function__pb2.QueryFunctionsParam.FromString, + response_serializer=function__pb2.QueryFunctionsResponse.SerializeToString, ), - 'UpdateAlgo': grpc.unary_unary_rpc_method_handler( - servicer.UpdateAlgo, - request_deserializer=algo__pb2.UpdateAlgoParam.FromString, - response_serializer=algo__pb2.UpdateAlgoResponse.SerializeToString, + 'UpdateFunction': grpc.unary_unary_rpc_method_handler( + servicer.UpdateFunction, + request_deserializer=function__pb2.UpdateFunctionParam.FromString, + response_serializer=function__pb2.UpdateFunctionResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( - 'orchestrator.AlgoService', rpc_method_handlers) + 'orchestrator.FunctionService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. -class AlgoService(object): +class FunctionService(object): """Missing associated documentation comment in .proto file.""" @staticmethod - def RegisterAlgo(request, + def RegisterFunction(request, target, options=(), channel_credentials=None, @@ -107,14 +107,14 @@ def RegisterAlgo(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary(request, target, '/orchestrator.AlgoService/RegisterAlgo', - algo__pb2.NewAlgo.SerializeToString, - algo__pb2.Algo.FromString, + return grpc.experimental.unary_unary(request, target, '/orchestrator.FunctionService/RegisterFunction', + function__pb2.NewFunction.SerializeToString, + function__pb2.Function.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def GetAlgo(request, + def GetFunction(request, target, options=(), channel_credentials=None, @@ -124,14 +124,14 @@ def GetAlgo(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary(request, target, '/orchestrator.AlgoService/GetAlgo', - algo__pb2.GetAlgoParam.SerializeToString, - algo__pb2.Algo.FromString, + return grpc.experimental.unary_unary(request, target, '/orchestrator.FunctionService/GetFunction', + function__pb2.GetFunctionParam.SerializeToString, + function__pb2.Function.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def QueryAlgos(request, + def QueryFunctions(request, target, options=(), channel_credentials=None, @@ -141,14 +141,14 @@ def QueryAlgos(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary(request, target, '/orchestrator.AlgoService/QueryAlgos', - algo__pb2.QueryAlgosParam.SerializeToString, - algo__pb2.QueryAlgosResponse.FromString, + return 
grpc.experimental.unary_unary(request, target, '/orchestrator.FunctionService/QueryFunctions', + function__pb2.QueryFunctionsParam.SerializeToString, + function__pb2.QueryFunctionsResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def UpdateAlgo(request, + def UpdateFunction(request, target, options=(), channel_credentials=None, @@ -158,8 +158,8 @@ def UpdateAlgo(request, wait_for_ready=None, timeout=None, metadata=None): - return grpc.experimental.unary_unary(request, target, '/orchestrator.AlgoService/UpdateAlgo', - algo__pb2.UpdateAlgoParam.SerializeToString, - algo__pb2.UpdateAlgoResponse.FromString, + return grpc.experimental.unary_unary(request, target, '/orchestrator.FunctionService/UpdateFunction', + function__pb2.UpdateFunctionParam.SerializeToString, + function__pb2.UpdateFunctionResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/backend/orchestrator/function_pb2_grpc.pyi b/backend/orchestrator/function_pb2_grpc.pyi new file mode 100644 index 000000000..66b7356a1 --- /dev/null +++ b/backend/orchestrator/function_pb2_grpc.pyi @@ -0,0 +1,54 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +""" +import abc +import function_pb2 +import grpc + +class FunctionServiceStub: + def __init__(self, channel: grpc.Channel) -> None: ... + RegisterFunction: grpc.UnaryUnaryMultiCallable[ + function_pb2.NewFunction, + function_pb2.Function, + ] + GetFunction: grpc.UnaryUnaryMultiCallable[ + function_pb2.GetFunctionParam, + function_pb2.Function, + ] + QueryFunctions: grpc.UnaryUnaryMultiCallable[ + function_pb2.QueryFunctionsParam, + function_pb2.QueryFunctionsResponse, + ] + UpdateFunction: grpc.UnaryUnaryMultiCallable[ + function_pb2.UpdateFunctionParam, + function_pb2.UpdateFunctionResponse, + ] + +class FunctionServiceServicer(metaclass=abc.ABCMeta): + @abc.abstractmethod + def RegisterFunction( + self, + request: function_pb2.NewFunction, + context: grpc.ServicerContext, + ) -> function_pb2.Function: ... + @abc.abstractmethod + def GetFunction( + self, + request: function_pb2.GetFunctionParam, + context: grpc.ServicerContext, + ) -> function_pb2.Function: ... + @abc.abstractmethod + def QueryFunctions( + self, + request: function_pb2.QueryFunctionsParam, + context: grpc.ServicerContext, + ) -> function_pb2.QueryFunctionsResponse: ... + @abc.abstractmethod + def UpdateFunction( + self, + request: function_pb2.UpdateFunctionParam, + context: grpc.ServicerContext, + ) -> function_pb2.UpdateFunctionResponse: ... + +def add_FunctionServiceServicer_to_server(servicer: FunctionServiceServicer, server: grpc.Server) -> None: ... 
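A minimal usage sketch of the renamed FunctionService through these regenerated stubs; the channel target and the keys below are illustrative assumptions, not values taken from this changeset:

import grpc

from orchestrator import function_pb2
from orchestrator import function_pb2_grpc

# Assumed orchestrator address; real deployments configure credentialed channels.
channel = grpc.insecure_channel("localhost:9000")
stub = function_pb2_grpc.FunctionServiceStub(channel)

# GetFunction replaces the former GetAlgo RPC, and its request message is renamed accordingly.
function = stub.GetFunction(function_pb2.GetFunctionParam(key="11111111-2222-3333-4444-555555555555"))
print(function.name)

# QueryFunctions supersedes QueryAlgos; the response keeps its capitalized repeated
# field, now named "Functions" instead of "Algos".
response = stub.QueryFunctions(function_pb2.QueryFunctionsParam(page_size=10))
print(len(response.Functions), response.next_page_token)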
diff --git a/backend/orchestrator/mock.py b/backend/orchestrator/mock.py index b06cb8584..b0995298e 100644 --- a/backend/orchestrator/mock.py +++ b/backend/orchestrator/mock.py @@ -1,8 +1,6 @@ import factory from .resources import Address -from .resources import Algo -from .resources import AlgoInput from .resources import AssetKind from .resources import ComputePlan from .resources import ComputeTask @@ -11,6 +9,8 @@ from .resources import ComputeTaskStatus from .resources import DataManager from .resources import DataSample +from .resources import Function +from .resources import FunctionInput from .resources import Model from .resources import Permission from .resources import Permissions @@ -33,7 +33,7 @@ class Meta: key = factory.Faker("uuid4") owner = "OrgA" compute_plan_key = factory.Faker("uuid4") - algo_key = factory.Faker("uuid4") + function_key = factory.Faker("uuid4") rank = 0 status = ComputeTaskStatus.STATUS_TODO worker = "OrgA" @@ -76,22 +76,22 @@ class Meta: opener = factory.SubFactory(AddressFactory) -class AlgoInputFactory(factory.Factory): +class FunctionInputFactory(factory.Factory): class Meta: - model = AlgoInput + model = FunctionInput kind = AssetKind.ASSET_MODEL multiple = False optional = True -class AlgoFactory(factory.Factory): +class FunctionFactory(factory.Factory): class Meta: - model = Algo + model = Function key = factory.Faker("uuid4") owner = "OrgA" - algorithm = factory.SubFactory(AddressFactory) + function_address = factory.SubFactory(AddressFactory) inputs = {} outputs = {} diff --git a/backend/orchestrator/resources.py b/backend/orchestrator/resources.py index a068f9444..5e92e7d0d 100644 --- a/backend/orchestrator/resources.py +++ b/backend/orchestrator/resources.py @@ -7,12 +7,12 @@ import pydantic -from orchestrator import algo_pb2 from orchestrator import common_pb2 from orchestrator import computeplan_pb2 from orchestrator import computetask_pb2 from orchestrator import datamanager_pb2 from orchestrator import datasample_pb2 +from orchestrator import function_pb2 from orchestrator import info_pb2 from orchestrator import model_pb2 @@ -38,7 +38,7 @@ class AssetKind(AutoNameEnum): ASSET_ORGANIZATION = enum.auto() ASSET_DATA_SAMPLE = enum.auto() ASSET_DATA_MANAGER = enum.auto() - ASSET_ALGO = enum.auto() + ASSET_FUNCTION = enum.auto() ASSET_COMPUTE_TASK = enum.auto() ASSET_COMPUTEPLAN = enum.auto() ASSET_MODEL = enum.auto() @@ -119,40 +119,40 @@ def from_grpc(cls, m: datamanager_pb2.DataManager) -> DataManager: return cls(key=m.key, opener=Address.from_grpc(m.opener)) -class AlgoInput(pydantic.BaseModel): +class FunctionInput(pydantic.BaseModel): kind: AssetKind multiple: bool optional: bool @classmethod - def from_grpc(cls, i: algo_pb2.AlgoInput) -> AlgoInput: + def from_grpc(cls, i: function_pb2.FunctionInput) -> FunctionInput: return cls(kind=AssetKind.from_grpc(i.kind), multiple=i.multiple, optional=i.optional) -class AlgoOutput(pydantic.BaseModel): +class FunctionOutput(pydantic.BaseModel): kind: AssetKind multiple: bool @classmethod - def from_grpc(cls, o: algo_pb2.AlgoOutput) -> AlgoOutput: + def from_grpc(cls, o: function_pb2.FunctionOutput) -> FunctionOutput: return cls(kind=AssetKind.from_grpc(o.kind), multiple=o.multiple) -class Algo(pydantic.BaseModel): +class Function(pydantic.BaseModel): key: str owner: str - algorithm: Address - inputs: dict[str, AlgoInput] - outputs: dict[str, AlgoOutput] + function_address: Address + inputs: dict[str, FunctionInput] + outputs: dict[str, FunctionOutput] @classmethod - def from_grpc(cls, a: 
algo_pb2.Algo) -> Algo: + def from_grpc(cls, a: function_pb2.Function) -> Function: return cls( key=a.key, owner=a.owner, - algorithm=Address.from_grpc(a.algorithm), - inputs={k: AlgoInput.from_grpc(i) for k, i in a.inputs.items()}, - outputs={k: AlgoOutput.from_grpc(o) for k, o in a.outputs.items()}, + function_address=Address.from_grpc(a.function), + inputs={k: FunctionInput.from_grpc(i) for k, i in a.inputs.items()}, + outputs={k: FunctionOutput.from_grpc(o) for k, o in a.outputs.items()}, ) @@ -230,7 +230,7 @@ class ComputeTask(_Base): # This property is only temporary and will disappear soon owner: str compute_plan_key: str - algo_key: str + function_key: str rank: int status: ComputeTaskStatus worker: str @@ -247,7 +247,7 @@ def from_grpc(cls, t: computetask_pb2.ComputeTask) -> ComputeTask: key=t.key, owner=t.owner, compute_plan_key=t.compute_plan_key, - algo_key=t.algo_key, + function_key=t.function_key, rank=t.rank, status=ComputeTaskStatus.from_grpc(t.status), worker=t.worker, diff --git a/backend/substrapp/apps.py b/backend/substrapp/apps.py index bff6d3275..ae6807f94 100644 --- a/backend/substrapp/apps.py +++ b/backend/substrapp/apps.py @@ -7,14 +7,14 @@ class SubstrappConfig(AppConfig): def ready(self): # registering signals with the model's string label - from substrapp.models import Algo from substrapp.models import DataManager + from substrapp.models import Function from substrapp.models import Model - from .signals.algo.post_delete import algo_post_delete from .signals.datamanager.post_delete import datamanager_post_delete + from .signals.function.post_delete import function_post_delete from .signals.model.post_delete import model_post_delete - post_delete.connect(algo_post_delete, sender=Algo) + post_delete.connect(function_post_delete, sender=Function) post_delete.connect(datamanager_post_delete, sender=DataManager) post_delete.connect(model_post_delete, sender=Model) diff --git a/backend/substrapp/compute_tasks/command.py b/backend/substrapp/compute_tasks/command.py index faa646255..05760b3f1 100644 --- a/backend/substrapp/compute_tasks/command.py +++ b/backend/substrapp/compute_tasks/command.py @@ -24,7 +24,7 @@ class Filenames: def get_exec_command(ctx: Context) -> list[str]: - entrypoint = ImageEntrypoint.objects.get(algo_checksum=ctx.algo.algorithm.checksum) + entrypoint = ImageEntrypoint.objects.get(function_checksum=ctx.function.function_address.checksum) command = entrypoint.entrypoint_json @@ -55,7 +55,7 @@ def get_exec_command_args(ctx: Context) -> list[str]: TaskResource( id=input.identifier, value=os.path.join(in_models_dir, input.model.key), - multiple=ctx.algo.inputs[input.identifier].multiple, + multiple=ctx.function.inputs[input.identifier].multiple, ) for input in ctx.input_assets if input.kind == orchestrator.AssetKind.ASSET_MODEL @@ -66,7 +66,7 @@ def get_exec_command_args(ctx: Context) -> list[str]: TaskResource( id=input.identifier, value=os.path.join(openers_dir, input.data_manager.key, Filenames.Opener), - multiple=ctx.algo.inputs[input.identifier].multiple, + multiple=ctx.function.inputs[input.identifier].multiple, ) for input in ctx.input_assets if input.kind == orchestrator.AssetKind.ASSET_DATA_MANAGER @@ -77,7 +77,7 @@ def get_exec_command_args(ctx: Context) -> list[str]: TaskResource( id=input.identifier, value=os.path.join(datasamples_dir, input.data_sample.key), - multiple=ctx.algo.inputs[input.identifier].multiple, + multiple=ctx.function.inputs[input.identifier].multiple, ) for input in ctx.input_assets if input.kind == 
orchestrator.AssetKind.ASSET_DATA_SAMPLE diff --git a/backend/substrapp/compute_tasks/compute_pod.py b/backend/substrapp/compute_tasks/compute_pod.py index b1baa88e6..bc12bea02 100644 --- a/backend/substrapp/compute_tasks/compute_pod.py +++ b/backend/substrapp/compute_tasks/compute_pod.py @@ -18,7 +18,7 @@ class Label: PodType = "substra.ai/pod-type" PodName = "substra.ai/pod-name" ComputePlanKey = "substra.ai/compute-plan-key" - AlgoKey = "substra.ai/algo-key" + FunctionKey = "substra.ai/function-key" RandomToken = "substra.ai/random-token" # Values @@ -30,14 +30,14 @@ class ComputePod: def __init__( self, compute_plan_key: str, - algo_key: str, + function_key: str, ): self.compute_plan_key = compute_plan_key - self.algo_key = algo_key + self.function_key = function_key @property def name(self) -> str: - return f"substra-{self.compute_plan_key[:8]}-compute-{self.algo_key[:8]}" + return f"substra-{self.compute_plan_key[:8]}-compute-{self.function_key}" @staticmethod def get_compute_plan_label_selector(compute_plan_key: str) -> str: @@ -50,7 +50,7 @@ def get_compute_plan_label_selector(compute_plan_key: str) -> str: @property def label_selector(self) -> str: - return ",".join({f"{k}={self.labels[k]}" for k in [Label.ComputePlanKey, Label.AlgoKey]}) + return ",".join({f"{k}={self.labels[k]}" for k in [Label.ComputePlanKey, Label.FunctionKey]}) @property def labels(self) -> dict: @@ -59,7 +59,7 @@ def labels(self) -> dict: Label.PodType: Label.PodType_ComputeTask, Label.Component: Label.Component_Compute, Label.ComputePlanKey: self.compute_plan_key, - Label.AlgoKey: self.algo_key, + Label.FunctionKey: self.function_key, } diff --git a/backend/substrapp/compute_tasks/context.py b/backend/substrapp/compute_tasks/context.py index f557e3b7f..bd8821565 100644 --- a/backend/substrapp/compute_tasks/context.py +++ b/backend/substrapp/compute_tasks/context.py @@ -47,7 +47,7 @@ class Context: _compute_plan: orchestrator.ComputePlan _input_assets: list[orchestrator.ComputeTaskInputAsset] _directories: Directories - _algo: orchestrator.Algo + _function: orchestrator.Function _has_chainkeys: bool _outputs: list[OutputResource] @@ -57,7 +57,7 @@ def __init__( task: orchestrator.ComputeTask, compute_plan: orchestrator.ComputePlan, input_assets: list[orchestrator.ComputeTaskInputAsset], - algo: orchestrator.Algo, + function: orchestrator.Function, directories: Directories, has_chainkeys: bool, ): @@ -67,8 +67,8 @@ def __init__( self._input_assets = input_assets self._directories = directories self._has_chainkeys = has_chainkeys - self._algo = algo - self._outputs = self._get_output_resources(task, algo) + self._function = function + self._outputs = self._get_output_resources(task, function) @classmethod def from_task(cls, channel_name: str, task: orchestrator.ComputeTask): @@ -78,7 +78,7 @@ def from_task(cls, channel_name: str, task: orchestrator.ComputeTask): with get_orchestrator_client(channel_name) as client: compute_plan = client.query_compute_plan(compute_plan_key) input_assets = client.get_task_input_assets(task.key) - algo = client.query_algo(task.algo_key) + function = client.query_function(task.function_key) logger.debug("retrieved input assets from orchestrator", input_assets=input_assets) @@ -86,7 +86,7 @@ def from_task(cls, channel_name: str, task: orchestrator.ComputeTask): has_chainkeys = settings.TASK["CHAINKEYS_ENABLED"] and bool(compute_plan.tag) - return cls(channel_name, task, compute_plan, input_assets, algo, directories, has_chainkeys) + return cls(channel_name, task, compute_plan, 
input_assets, function, directories, has_chainkeys) @property def channel_name(self) -> str: @@ -118,8 +118,8 @@ def input_models(self) -> list[orchestrator.Model]: return [input.model for input in self._input_assets if input.kind == orchestrator.AssetKind.ASSET_MODEL] @property - def algo(self) -> orchestrator.Algo: - return self._algo + def function(self) -> orchestrator.Function: + return self._function @property def compute_plan(self) -> orchestrator.ComputePlan: @@ -148,8 +148,8 @@ def data_sample_keys(self) -> list[str]: def outputs(self) -> list[OutputResource]: return self._outputs - def get_compute_pod(self, algo_key: str) -> ComputePod: - return ComputePod(self.compute_plan_key, algo_key) + def get_compute_pod(self, function_key: str) -> ComputePod: + return ComputePod(self.compute_plan_key, function_key) def _get_output_path(self, kind: orchestrator.AssetKind, identifier: str) -> str: if kind == orchestrator.AssetKind.ASSET_MODEL: @@ -159,19 +159,21 @@ def _get_output_path(self, kind: orchestrator.AssetKind, identifier: str) -> str raise UnsupportedOutputAsset(f"{identifier} output has an unsupported kind {kind}") - def _get_output_resources(self, task: orchestrator.ComputeTask, algo: orchestrator.Algo) -> list[OutputResource]: - """return the list of OutputResource built from task outputs and algo output definitions""" + def _get_output_resources( + self, task: orchestrator.ComputeTask, function: orchestrator.Function + ) -> list[OutputResource]: + """return the list of OutputResource built from task outputs and function output definitions""" outputs = [] for identifier in task.outputs: - algo_out = algo.outputs[identifier] + function_out = function.outputs[identifier] outputs.append( OutputResource( identifier=identifier, - kind=algo_out.kind, - multiple=algo_out.multiple, - rel_path=self._get_output_path(algo_out.kind, identifier), + kind=function_out.kind, + multiple=function_out.multiple, + rel_path=self._get_output_path(function_out.kind, identifier), ) ) diff --git a/backend/substrapp/compute_tasks/datastore.py b/backend/substrapp/compute_tasks/datastore.py index 27a48884a..bb059c4a9 100644 --- a/backend/substrapp/compute_tasks/datastore.py +++ b/backend/substrapp/compute_tasks/datastore.py @@ -15,8 +15,8 @@ def _get_from_address(self, organization: str, address: orchestrator.Address) -> channel=self.channel, organization_id=organization, url=address.uri, checksum=address.checksum ) - def get_algo(self, algo: orchestrator.Algo) -> bytes: - return self._get_from_address(algo.owner, algo.algorithm) + def get_function(self, function: orchestrator.Function) -> bytes: + return self._get_from_address(function.owner, function.function_address) def delete_model(self, model_key: str) -> None: from substrapp.models import Model diff --git a/backend/substrapp/compute_tasks/errors.py b/backend/substrapp/compute_tasks/errors.py index 5caae1c42..ae5235b6a 100644 --- a/backend/substrapp/compute_tasks/errors.py +++ b/backend/substrapp/compute_tasks/errors.py @@ -25,7 +25,7 @@ class ComputeTaskErrorType(enum.Enum): Values: * BUILD_ERROR: error occurring during the build of the container image. Likely to be caused by the user's code. - * EXECUTION_ERROR: error occurring during the execution of the algo/metric container. + * EXECUTION_ERROR: error occurring during the execution of the function/metric container. Likely to be caused by the user's code. * INTERNAL_ERROR: any other error that does not fall into the previous categories. Likely to be caused by a fault in the system. 
It would require the action of a system administrator. diff --git a/backend/substrapp/compute_tasks/execute.py b/backend/substrapp/compute_tasks/execute.py index 4bd4ef635..2e0c7c250 100644 --- a/backend/substrapp/compute_tasks/execute.py +++ b/backend/substrapp/compute_tasks/execute.py @@ -1,5 +1,5 @@ """ -This file contains the algo execution part of a compute task. +This file contains the function execution part of a compute task. In these functions, we: - Determine the command to execute (train/test/eval) @@ -41,7 +41,7 @@ @timeit def execute_compute_task(ctx: Context) -> None: channel_name = ctx.channel_name - container_image_tag = utils.container_image_tag_from_algo(ctx.algo) + container_image_tag = utils.container_image_tag_from_function(ctx.function) compute_pod = ctx.get_compute_pod(container_image_tag) pod_name = compute_pod.name diff --git a/backend/substrapp/compute_tasks/image_builder.py b/backend/substrapp/compute_tasks/image_builder.py index a70e39d0f..d563f9cce 100644 --- a/backend/substrapp/compute_tasks/image_builder.py +++ b/backend/substrapp/compute_tasks/image_builder.py @@ -41,25 +41,25 @@ HOSTNAME = settings.HOSTNAME -def build_image_if_missing(datastore: Datastore, algo: orchestrator.Algo) -> None: +def build_image_if_missing(datastore: Datastore, function: orchestrator.Function) -> None: """ Build the container image and the ImageEntryPoint entry if they don't exist already """ - container_image_tag = utils.container_image_tag_from_algo(algo) + container_image_tag = utils.container_image_tag_from_function(function) with lock_resource("image-build", container_image_tag, ttl=MAX_IMAGE_BUILD_TIME, timeout=MAX_IMAGE_BUILD_TIME): if container_image_exists(container_image_tag): logger.info("Reusing existing image", image=container_image_tag) else: - asset_content = datastore.get_algo(algo) - _build_algo_image(asset_content, algo) + asset_content = datastore.get_function(function) + _build_function_image(asset_content, function) -def _build_algo_image(asset: bytes, algo: orchestrator.Algo) -> None: +def _build_function_image(asset: bytes, function: orchestrator.Function) -> None: """ - Build an algo's container image. + Build a function's container image. Perform multiple steps: - 1. Download the algo using the provided asset storage_address/owner. Verify its checksum and uncompress the data + 1. Download the function using the provided asset storage_address/owner. Verify its checksum and uncompress the data to a temporary folder. 2. Extract the ENTRYPOINT from the Dockerfile. 3. Build the container image using Kaniko. @@ -76,20 +76,22 @@ def _build_algo_image(asset: bytes, algo: orchestrator.Algo) -> None: entrypoint = _get_entrypoint_from_dockerfile(tmp_dir) # Build image - _build_container_image(tmp_dir, utils.container_image_tag_from_algo(algo)) + _build_container_image(tmp_dir, utils.container_image_tag_from_function(function)) # Save entrypoint to DB if the image build was successful - ImageEntrypoint.objects.get_or_create(algo_checksum=algo.algorithm.checksum, entrypoint_json=entrypoint) + ImageEntrypoint.objects.get_or_create( + function_checksum=function.function_address.checksum, entrypoint_json=entrypoint + ) def _get_entrypoint_from_dockerfile(dockerfile_dir: str) -> list[str]: """ Get entrypoint from ENTRYPOINT in the Dockerfile. - This is necessary because the user algo can have arbitrary names, ie; "myalgo.py". + This is necessary because the user function can have arbitrary names, i.e. "myfunction.py".
Example: - ENTRYPOINT ["python3", "myalgo.py"] + ENTRYPOINT ["python3", "myfunction.py"] """ dockerfile_path = f"{dockerfile_dir}/Dockerfile" @@ -103,7 +105,8 @@ def _get_entrypoint_from_dockerfile(dockerfile_dir: str) -> list[str]: if not isinstance(res, list): raise compute_task_errors.BuildError( - "Invalid ENTRYPOINT in algo/metric Dockerfile. You must use the exec form in your Dockerfile. " + "Invalid ENTRYPOINT in function/metric Dockerfile. " + "You must use the exec form in your Dockerfile. " "See https://docs.docker.com/engine/reference/builder/#entrypoint" ) return res diff --git a/backend/substrapp/compute_tasks/outputs.py b/backend/substrapp/compute_tasks/outputs.py index 10ba025eb..73b785b7d 100644 --- a/backend/substrapp/compute_tasks/outputs.py +++ b/backend/substrapp/compute_tasks/outputs.py @@ -44,7 +44,7 @@ def _save_performance(self, output: context.OutputResource): perf = _get_perf(perf_path) performance_obj = { "compute_task_key": self._ctx.task.key, - "metric_key": self._ctx.algo.key, + "metric_key": self._ctx.function.key, "performance_value": perf, "compute_task_output_identifier": output.identifier, } diff --git a/backend/substrapp/compute_tasks/utils.py b/backend/substrapp/compute_tasks/utils.py index 43fdb147f..fcfd66892 100644 --- a/backend/substrapp/compute_tasks/utils.py +++ b/backend/substrapp/compute_tasks/utils.py @@ -1,13 +1,13 @@ import orchestrator -def container_image_tag_from_algo(algo: orchestrator.Algo) -> str: - """builds the container image tag from the algo checksum +def container_image_tag_from_function(function: orchestrator.Function) -> str: + """builds the container image tag from the function checksum Args: - algo (orchestrator.Algo): an algo retrieved from the orchestrator + function (orchestrator.Function): a function retrieved from the orchestrator Returns: str: the container image tag """ - return f"algo-{algo.algorithm.checksum[:16]}" + return f"function-{function.function_address.checksum[:16]}" diff --git a/backend/substrapp/docker_registry.py b/backend/substrapp/docker_registry.py index 28a22ef39..84697ea7d 100644 --- a/backend/substrapp/docker_registry.py +++ b/backend/substrapp/docker_registry.py @@ -138,7 +138,7 @@ def get_container_images() -> list[dict]: return None -def fetch_old_algo_image_names(max_duration: int) -> list[str]: +def fetch_old_function_image_names(max_duration: int) -> list[str]: logger.info("Fetch old image names", max_duration=f"{max_duration}s") images = get_container_images() diff --git a/backend/substrapp/migrations/0001_initial.py b/backend/substrapp/migrations/0001_initial.py index cc6a8b141..20b431109 100644 --- a/backend/substrapp/migrations/0001_initial.py +++ b/backend/substrapp/migrations/0001_initial.py @@ -5,8 +5,8 @@ from django.db import migrations from django.db import models -import substrapp.models.algo import substrapp.models.datamanager +import substrapp.models.function import substrapp.models.model @@ -25,8 +25,8 @@ class Migration(migrations.Migration): name="Algo", fields=[ ("key", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), - ("file", models.FileField(max_length=500, upload_to=substrapp.models.algo.upload_to)), - ("description", models.FileField(max_length=500, upload_to=substrapp.models.algo.upload_to)), + ("file", models.FileField(max_length=500, upload_to=substrapp.models.function.upload_to)), + ("description", models.FileField(max_length=500, upload_to=substrapp.models.function.upload_to)), ("validated",
models.BooleanField(default=False)), ("checksum", models.CharField(blank=True, max_length=64)), ], diff --git a/backend/substrapp/migrations/0006_create_compute_task_failure_model.py b/backend/substrapp/migrations/0006_create_compute_task_failure_model.py index 4b52e797b..69303144e 100644 --- a/backend/substrapp/migrations/0006_create_compute_task_failure_model.py +++ b/backend/substrapp/migrations/0006_create_compute_task_failure_model.py @@ -4,7 +4,7 @@ from django.db import migrations from django.db import models -import substrapp.models.algo +import substrapp.models.function class Migration(migrations.Migration): diff --git a/backend/substrapp/migrations/0007_alter_algo_description_alter_algo_file_and_more.py b/backend/substrapp/migrations/0007_alter_algo_description_alter_algo_file_and_more.py index f8396c1e8..318bce1e4 100644 --- a/backend/substrapp/migrations/0007_alter_algo_description_alter_algo_file_and_more.py +++ b/backend/substrapp/migrations/0007_alter_algo_description_alter_algo_file_and_more.py @@ -4,8 +4,8 @@ from django.db import migrations from django.db import models -import substrapp.models.algo import substrapp.models.datamanager +import substrapp.models.function def metric_upload_to(instance, filename) -> str: @@ -25,7 +25,7 @@ class Migration(migrations.Migration): field=models.FileField( max_length=500, storage=django.core.files.storage.FileSystemStorage(), - upload_to=substrapp.models.algo.upload_to, + upload_to=substrapp.models.function.upload_to, ), ), migrations.AlterField( @@ -34,7 +34,7 @@ class Migration(migrations.Migration): field=models.FileField( max_length=500, storage=django.core.files.storage.FileSystemStorage(), - upload_to=substrapp.models.algo.upload_to, + upload_to=substrapp.models.function.upload_to, ), ), migrations.AlterField( diff --git a/backend/substrapp/migrations/0012_alter_algo_description_alter_algo_file_and_more.py b/backend/substrapp/migrations/0012_alter_algo_description_alter_algo_file_and_more.py index 53df84b53..d2b4f62f0 100644 --- a/backend/substrapp/migrations/0012_alter_algo_description_alter_algo_file_and_more.py +++ b/backend/substrapp/migrations/0012_alter_algo_description_alter_algo_file_and_more.py @@ -3,9 +3,9 @@ from django.db import migrations from django.db import models -import substrapp.models.algo import substrapp.models.compute_task_failure_report import substrapp.models.datamanager +import substrapp.models.function import substrapp.storages.minio @@ -22,7 +22,7 @@ class Migration(migrations.Migration): field=models.FileField( max_length=500, storage=substrapp.storages.minio.MinioStorage("substra-algo"), - upload_to=substrapp.models.algo.upload_to, + upload_to=substrapp.models.function.upload_to, ), ), migrations.AlterField( @@ -31,7 +31,7 @@ class Migration(migrations.Migration): field=models.FileField( max_length=500, storage=substrapp.storages.minio.MinioStorage("substra-algo"), - upload_to=substrapp.models.algo.upload_to, + upload_to=substrapp.models.function.upload_to, ), ), migrations.AlterField( diff --git a/backend/substrapp/migrations/0013_alter_algo_description_alter_algo_file_and_more.py b/backend/substrapp/migrations/0013_alter_algo_description_alter_algo_file_and_more.py index cf5a6281e..164e18f2c 100644 --- a/backend/substrapp/migrations/0013_alter_algo_description_alter_algo_file_and_more.py +++ b/backend/substrapp/migrations/0013_alter_algo_description_alter_algo_file_and_more.py @@ -4,9 +4,9 @@ from django.db import migrations from django.db import models -import substrapp.models.algo import 
substrapp.models.compute_task_failure_report import substrapp.models.datamanager +import substrapp.models.function class Migration(migrations.Migration): @@ -22,7 +22,7 @@ class Migration(migrations.Migration): field=models.FileField( max_length=500, storage=django.core.files.storage.FileSystemStorage(), - upload_to=substrapp.models.algo.upload_to, + upload_to=substrapp.models.function.upload_to, ), ), migrations.AlterField( @@ -31,7 +31,7 @@ class Migration(migrations.Migration): field=models.FileField( max_length=500, storage=django.core.files.storage.FileSystemStorage(), - upload_to=substrapp.models.algo.upload_to, + upload_to=substrapp.models.function.upload_to, ), ), migrations.AlterField( diff --git a/backend/substrapp/migrations/0014_rename_algo_to_function.py b/backend/substrapp/migrations/0014_rename_algo_to_function.py new file mode 100644 index 000000000..1ad6846a6 --- /dev/null +++ b/backend/substrapp/migrations/0014_rename_algo_to_function.py @@ -0,0 +1,22 @@ +# Generated by Django 4.0.7 on 2023-02-02 16:53 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ("substrapp", "0013_alter_algo_description_alter_algo_file_and_more"), + ] + + operations = [ + migrations.RenameModel( + old_name="Algo", + new_name="Function", + ), + migrations.RenameField( + model_name="imageentrypoint", + old_name="algo_checksum", + new_name="function_checksum", + ), + ] diff --git a/backend/substrapp/models/__init__.py b/backend/substrapp/models/__init__.py index f803d6015..7ca4e9815 100644 --- a/backend/substrapp/models/__init__.py +++ b/backend/substrapp/models/__init__.py @@ -1,8 +1,8 @@ -from .algo import Algo from .compute_task_failure_report import ComputeTaskFailureReport from .computeplan_worker_mapping import ComputePlanWorkerMapping from .datamanager import DataManager from .datasample import DataSample +from .function import Function from .image_entrypoint import ImageEntrypoint from .model import Model from .worker_last_event import WorkerLastEvent @@ -10,7 +10,7 @@ __all__ = [ "DataSample", "DataManager", - "Algo", + "Function", "Model", "ComputePlanWorkerMapping", "ImageEntrypoint", diff --git a/backend/substrapp/models/algo.py b/backend/substrapp/models/function.py similarity index 72% rename from backend/substrapp/models/algo.py rename to backend/substrapp/models/function.py index f2a4ad89a..3eb3eed12 100644 --- a/backend/substrapp/models/algo.py +++ b/backend/substrapp/models/function.py @@ -7,18 +7,18 @@ def upload_to(instance, filename) -> str: - return f"algos/{instance.key}/{filename}" + return f"functions/{instance.key}/{filename}" -class Algo(models.Model): +class Function(models.Model): """Storage Data table""" key = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) file = models.FileField( - storage=settings.ALGO_STORAGE, max_length=500, upload_to=upload_to + storage=settings.FUNCTION_STORAGE, max_length=500, upload_to=upload_to ) # path max length to 500 instead of default 100 description = models.FileField( - storage=settings.ALGO_STORAGE, upload_to=upload_to, max_length=500 + storage=settings.FUNCTION_STORAGE, upload_to=upload_to, max_length=500 ) # path max length to 500 instead of default 100 checksum = models.CharField(max_length=64, blank=True) @@ -29,4 +29,4 @@ def save(self, *args, **kwargs) -> None: super().save(*args, **kwargs) def __str__(self) -> str: - return f"Algo with key {self.key}" + return f"Function with key {self.key}" diff --git a/backend/substrapp/models/image_entrypoint.py 
b/backend/substrapp/models/image_entrypoint.py index f59cc12e4..6c57831b9 100644 --- a/backend/substrapp/models/image_entrypoint.py +++ b/backend/substrapp/models/image_entrypoint.py @@ -2,8 +2,8 @@ class ImageEntrypoint(models.Model): - """The container image entrypoint of an Algo or a Metric""" + """The container image entrypoint of a Function or a Metric""" - algo_checksum = models.CharField(primary_key=True, editable=False, max_length=256, default="invalid") + function_checksum = models.CharField(primary_key=True, editable=False, max_length=256, default="invalid") entrypoint_json = models.JSONField() creation_date = models.DateTimeField(auto_now_add=True) diff --git a/backend/substrapp/serializers/__init__.py b/backend/substrapp/serializers/__init__.py index 5f4de7e68..b6084f901 100644 --- a/backend/substrapp/serializers/__init__.py +++ b/backend/substrapp/serializers/__init__.py @@ -1,9 +1,9 @@ -from .algo import AlgoSerializer from .datamanager import DataManagerSerializer from .datasample import DataSampleSerializer +from .function import FunctionSerializer __all__ = [ "DataSampleSerializer", "DataManagerSerializer", - "AlgoSerializer", + "FunctionSerializer", ] diff --git a/backend/substrapp/serializers/algo.py b/backend/substrapp/serializers/function.py similarity index 74% rename from backend/substrapp/serializers/algo.py rename to backend/substrapp/serializers/function.py index 486a44fdd..4c6bd633c 100644 --- a/backend/substrapp/serializers/algo.py +++ b/backend/substrapp/serializers/function.py @@ -1,14 +1,14 @@ from rest_framework import serializers from libs.serializers import DynamicFieldsModelSerializer -from substrapp.models import Algo +from substrapp.models import Function from substrapp.serializers.utils import FileSizeValidator from substrapp.serializers.utils import FileValidator -class AlgoSerializer(DynamicFieldsModelSerializer): +class FunctionSerializer(DynamicFieldsModelSerializer): file = serializers.FileField(validators=[FileValidator(), FileSizeValidator()]) class Meta: - model = Algo + model = Function fields = "__all__" diff --git a/backend/substrapp/signals/algo/__init__.py b/backend/substrapp/signals/function/__init__.py similarity index 100% rename from backend/substrapp/signals/algo/__init__.py rename to backend/substrapp/signals/function/__init__.py diff --git a/backend/substrapp/signals/algo/post_delete.py b/backend/substrapp/signals/function/post_delete.py similarity index 75% rename from backend/substrapp/signals/algo/post_delete.py rename to backend/substrapp/signals/function/post_delete.py index 0b6bac1e2..8c8b1f73f 100644 --- a/backend/substrapp/signals/algo/post_delete.py +++ b/backend/substrapp/signals/function/post_delete.py @@ -1,4 +1,4 @@ -def algo_post_delete(sender, instance, **kwargs): +def function_post_delete(sender, instance, **kwargs): # delete folder from MinioStorage instance.file.storage.delete(str(instance.file.name)) instance.description.storage.delete(str(instance.description.name)) diff --git a/backend/substrapp/tasks/tasks_compute_plan.py b/backend/substrapp/tasks/tasks_compute_plan.py index a2f445542..8b2a5b5b3 100644 --- a/backend/substrapp/tasks/tasks_compute_plan.py +++ b/backend/substrapp/tasks/tasks_compute_plan.py @@ -57,7 +57,7 @@ def _teardown_compute_plan_resources(orc_client: orchestrator.Client, compute_pl return _teardown_pods_and_dirs(compute_plan_key) - _delete_compute_plan_algos_images(orc_client.query_algos(compute_plan_key)) + _delete_compute_plan_functions_images(orc_client.query_functions(compute_plan_key))
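Taken together with the image-builder and command changes above, the renamed ImageEntrypoint model acts as a cache keyed by the function archive checksum: written once after a successful image build, read back when the exec command is assembled. A minimal round-trip sketch under that reading (the checksum literal and entrypoint value are illustrative; a configured Django context is assumed):

    # Hedged sketch of the ImageEntrypoint round trip after the rename.
    from substrapp.models import ImageEntrypoint

    checksum = "0123abcd" * 8  # stands in for function.function_address.checksum

    # Build side (see image_builder.py above): store the parsed ENTRYPOINT.
    ImageEntrypoint.objects.get_or_create(
        function_checksum=checksum,
        entrypoint_json=["python3", "myfunction.py"],
    )

    # Exec side (see command.py above): look it up by the same checksum.
    entrypoint = ImageEntrypoint.objects.get(function_checksum=checksum).entrypoint_json
    assert entrypoint == ["python3", "myfunction.py"]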
def _teardown_pods_and_dirs(compute_plan_key: str) -> None: @@ -66,6 +66,6 @@ def _teardown_pods_and_dirs(compute_plan_key: str) -> None: teardown_compute_plan_dir(Directories(compute_plan_key)) -def _delete_compute_plan_algos_images(algos: typing.Iterable[orchestrator.Algo]) -> None: - for algo in algos: - delete_container_image_safe(utils.container_image_tag_from_algo(algo)) +def _delete_compute_plan_functions_images(functions: typing.Iterable[orchestrator.Function]) -> None: + for function in functions: + delete_container_image_safe(utils.container_image_tag_from_function(function)) diff --git a/backend/substrapp/tasks/tasks_compute_task.py b/backend/substrapp/tasks/tasks_compute_task.py index 7ae6ecc55..e8180fcc9 100644 --- a/backend/substrapp/tasks/tasks_compute_task.py +++ b/backend/substrapp/tasks/tasks_compute_task.py @@ -245,7 +245,7 @@ def _run( # start build_image timer timer.start() - build_image_if_missing(datastore, ctx.algo) + build_image_if_missing(datastore, ctx.function) # stop build_image timer _create_task_profiling_step(channel_name, task.key, ComputeTaskSteps.BUILD_IMAGE, timer.stop()) diff --git a/backend/substrapp/tasks/tasks_docker_registry.py b/backend/substrapp/tasks/tasks_docker_registry.py index b82e710d6..e084d464f 100644 --- a/backend/substrapp/tasks/tasks_docker_registry.py +++ b/backend/substrapp/tasks/tasks_docker_registry.py @@ -1,14 +1,14 @@ from backend.celery import app from substrapp.docker_registry import delete_container_image_safe -from substrapp.docker_registry import fetch_old_algo_image_names +from substrapp.docker_registry import fetch_old_function_image_names from substrapp.docker_registry import run_garbage_collector @app.task(ignore_result=True) def clean_old_images_task(max_duration: int) -> None: - algo_image_names = fetch_old_algo_image_names(max_duration) - for algo_image_name in algo_image_names: - delete_container_image_safe(algo_image_name) + function_image_names = fetch_old_function_image_names(max_duration) + for function_image_name in function_image_names: + delete_container_image_safe(function_image_name) @app.task(ignore_result=True) diff --git a/backend/substrapp/tests/common.py b/backend/substrapp/tests/common.py index c29c5c340..36000c340 100644 --- a/backend/substrapp/tests/common.py +++ b/backend/substrapp/tests/common.py @@ -7,13 +7,13 @@ from django.core.files.uploadedfile import InMemoryUploadedFile from google.protobuf.json_format import MessageToDict -from orchestrator.algo_pb2 import AlgoInput -from orchestrator.algo_pb2 import AlgoOutput from orchestrator.client import CONVERT_SETTINGS from orchestrator.common_pb2 import ASSET_DATA_MANAGER from orchestrator.common_pb2 import ASSET_DATA_SAMPLE from orchestrator.common_pb2 import ASSET_MODEL from orchestrator.common_pb2 import ASSET_PERFORMANCE +from orchestrator.function_pb2 import FunctionInput +from orchestrator.function_pb2 import FunctionOutput @dataclass @@ -27,81 +27,81 @@ class InputIdentifiers: SHARED = "shared" -class AlgoCategory(str, Enum): - simple = "ALGO_SIMPLE" - composite = "ALGO_COMPOSITE" - aggregate = "ALGO_AGGREGATE" - metric = "ALGO_METRIC" - predict = "ALGO_PREDICT" - predict_composite = "ALGO_PREDICT_COMPOSITE" +class FunctionCategory(str, Enum): + simple = "FUNCTION_SIMPLE" + composite = "FUNCTION_COMPOSITE" + aggregate = "FUNCTION_AGGREGATE" + metric = "FUNCTION_METRIC" + predict = "FUNCTION_PREDICT" + predict_composite = "FUNCTION_PREDICT_COMPOSITE" -# Algo inputs, protobuf format -ALGO_INPUTS_PER_CATEGORY = { - AlgoCategory.simple: { - 
InputIdentifiers.DATASAMPLES: AlgoInput(kind=ASSET_DATA_SAMPLE, multiple=True, optional=False), - InputIdentifiers.MODEL: AlgoInput(kind=ASSET_MODEL, multiple=False, optional=True), - InputIdentifiers.OPENER: AlgoInput(kind=ASSET_DATA_MANAGER, multiple=False, optional=False), +# Function inputs, protobuf format +FUNCTION_INPUTS_PER_CATEGORY = { + FunctionCategory.simple: { + InputIdentifiers.DATASAMPLES: FunctionInput(kind=ASSET_DATA_SAMPLE, multiple=True, optional=False), + InputIdentifiers.MODEL: FunctionInput(kind=ASSET_MODEL, multiple=False, optional=True), + InputIdentifiers.OPENER: FunctionInput(kind=ASSET_DATA_MANAGER, multiple=False, optional=False), }, - AlgoCategory.aggregate: { - InputIdentifiers.MODEL: AlgoInput(kind=ASSET_MODEL, multiple=True, optional=False), + FunctionCategory.aggregate: { + InputIdentifiers.MODEL: FunctionInput(kind=ASSET_MODEL, multiple=True, optional=False), }, - AlgoCategory.composite: { - InputIdentifiers.DATASAMPLES: AlgoInput(kind=ASSET_DATA_SAMPLE, multiple=True, optional=False), - InputIdentifiers.LOCAL: AlgoInput(kind=ASSET_MODEL, multiple=False, optional=True), - InputIdentifiers.OPENER: AlgoInput(kind=ASSET_DATA_MANAGER, multiple=False, optional=False), - InputIdentifiers.SHARED: AlgoInput(kind=ASSET_MODEL, multiple=False, optional=True), + FunctionCategory.composite: { + InputIdentifiers.DATASAMPLES: FunctionInput(kind=ASSET_DATA_SAMPLE, multiple=True, optional=False), + InputIdentifiers.LOCAL: FunctionInput(kind=ASSET_MODEL, multiple=False, optional=True), + InputIdentifiers.OPENER: FunctionInput(kind=ASSET_DATA_MANAGER, multiple=False, optional=False), + InputIdentifiers.SHARED: FunctionInput(kind=ASSET_MODEL, multiple=False, optional=True), }, - AlgoCategory.metric: { - InputIdentifiers.DATASAMPLES: AlgoInput(kind=ASSET_DATA_SAMPLE, multiple=True, optional=False), - InputIdentifiers.OPENER: AlgoInput(kind=ASSET_DATA_MANAGER, multiple=False, optional=False), - InputIdentifiers.PREDICTIONS: AlgoInput(kind=ASSET_MODEL, multiple=False, optional=False), + FunctionCategory.metric: { + InputIdentifiers.DATASAMPLES: FunctionInput(kind=ASSET_DATA_SAMPLE, multiple=True, optional=False), + InputIdentifiers.OPENER: FunctionInput(kind=ASSET_DATA_MANAGER, multiple=False, optional=False), + InputIdentifiers.PREDICTIONS: FunctionInput(kind=ASSET_MODEL, multiple=False, optional=False), }, - AlgoCategory.predict: { - InputIdentifiers.DATASAMPLES: AlgoInput(kind=ASSET_DATA_SAMPLE, multiple=True, optional=False), - InputIdentifiers.OPENER: AlgoInput(kind=ASSET_DATA_MANAGER, multiple=False, optional=False), - InputIdentifiers.MODEL: AlgoInput(kind=ASSET_MODEL, multiple=False, optional=False), - InputIdentifiers.SHARED: AlgoInput(kind=ASSET_MODEL, multiple=False, optional=True), + FunctionCategory.predict: { + InputIdentifiers.DATASAMPLES: FunctionInput(kind=ASSET_DATA_SAMPLE, multiple=True, optional=False), + InputIdentifiers.OPENER: FunctionInput(kind=ASSET_DATA_MANAGER, multiple=False, optional=False), + InputIdentifiers.MODEL: FunctionInput(kind=ASSET_MODEL, multiple=False, optional=False), + InputIdentifiers.SHARED: FunctionInput(kind=ASSET_MODEL, multiple=False, optional=True), }, } -# Algo outputs, protobuf format -ALGO_OUTPUTS_PER_CATEGORY = { - AlgoCategory.simple: { - InputIdentifiers.MODEL: AlgoOutput(kind=ASSET_MODEL, multiple=False), +# Function outputs, protobuf format +FUNCTION_OUTPUTS_PER_CATEGORY = { + FunctionCategory.simple: { + InputIdentifiers.MODEL: FunctionOutput(kind=ASSET_MODEL, multiple=False), }, - AlgoCategory.aggregate: { - 
InputIdentifiers.MODEL: AlgoOutput(kind=ASSET_MODEL, multiple=False), + FunctionCategory.aggregate: { + InputIdentifiers.MODEL: FunctionOutput(kind=ASSET_MODEL, multiple=False), }, - AlgoCategory.composite: { - InputIdentifiers.LOCAL: AlgoOutput(kind=ASSET_MODEL, multiple=False), - InputIdentifiers.SHARED: AlgoOutput(kind=ASSET_MODEL, multiple=False), + FunctionCategory.composite: { + InputIdentifiers.LOCAL: FunctionOutput(kind=ASSET_MODEL, multiple=False), + InputIdentifiers.SHARED: FunctionOutput(kind=ASSET_MODEL, multiple=False), }, - AlgoCategory.metric: { - InputIdentifiers.PERFORMANCE: AlgoOutput(kind=ASSET_PERFORMANCE, multiple=False), + FunctionCategory.metric: { + InputIdentifiers.PERFORMANCE: FunctionOutput(kind=ASSET_PERFORMANCE, multiple=False), }, - AlgoCategory.predict: { - InputIdentifiers.PREDICTIONS: AlgoOutput(kind=ASSET_MODEL, multiple=False), + FunctionCategory.predict: { + InputIdentifiers.PREDICTIONS: FunctionOutput(kind=ASSET_MODEL, multiple=False), }, } -# Algo inputs, dictionary format -ALGO_INPUTS_PER_CATEGORY_DICT: dict[str, dict] = { +# Function inputs, dictionary format +FUNCTION_INPUTS_PER_CATEGORY_DICT: dict[str, dict] = { category: { identifier: MessageToDict(input_proto, **CONVERT_SETTINGS) for identifier, input_proto in inputs_by_identifier.items() } - for category, inputs_by_identifier in ALGO_INPUTS_PER_CATEGORY.items() + for category, inputs_by_identifier in FUNCTION_INPUTS_PER_CATEGORY.items() } -# Algo outputs, dictionary format -ALGO_OUTPUTS_PER_CATEGORY_DICT: dict[str, dict] = { +# Function outputs, dictionary format +FUNCTION_OUTPUTS_PER_CATEGORY_DICT: dict[str, dict] = { category: { identifier: MessageToDict(output_proto, **CONVERT_SETTINGS) for identifier, output_proto in outputs_by_identifier.items() } - for category, outputs_by_identifier in ALGO_OUTPUTS_PER_CATEGORY.items() + for category, outputs_by_identifier in FUNCTION_OUTPUTS_PER_CATEGORY.items() } @@ -210,11 +210,13 @@ def get_sample_tar_data_sample(): return file, file_filename -def get_sample_algo(): +def get_sample_function(): dir_path = os.path.dirname(os.path.realpath(__file__)) file_filename = "file.tar.gz" f = BytesIO() - with open(os.path.join(dir_path, "../../../fixtures/chunantes/algos/algo3/algo.tar.gz"), "rb") as tar_file: + with open( + os.path.join(dir_path, "../../../fixtures/chunantes/functions/function3/function.tar.gz"), "rb" + ) as tar_file: flength = f.write(tar_file.read()) file = InMemoryUploadedFile(f, None, file_filename, "application/tar+gzip", flength, None) @@ -223,11 +225,11 @@ def get_sample_algo(): return file, file_filename -def get_sample_algo_zip(): +def get_sample_function_zip(): dir_path = os.path.dirname(os.path.realpath(__file__)) file_filename = "file.zip" f = BytesIO() - with open(os.path.join(dir_path, "../../../fixtures/chunantes/algos/algo0/algo.zip"), "rb") as tar_file: + with open(os.path.join(dir_path, "../../../fixtures/chunantes/functions/function0/function.zip"), "rb") as tar_file: flength = f.write(tar_file.read()) file = InMemoryUploadedFile(f, None, file_filename, "application/zip", flength, None) @@ -236,11 +238,13 @@ def get_sample_algo_zip(): return file, file_filename -def get_description_algo(): +def get_description_function(): dir_path = os.path.dirname(os.path.realpath(__file__)) file_filename = "file.md" f = BytesIO() - with open(os.path.join(dir_path, "../../../fixtures/chunantes/algos/algo3/description.md"), "rb") as desc_file: + with open( + os.path.join(dir_path, 
"../../../fixtures/chunantes/functions/function3/description.md"), "rb" + ) as desc_file: flength = f.write(desc_file.read()) file = InMemoryUploadedFile(f, None, file_filename, "application/text", flength, None) @@ -270,12 +274,12 @@ def get_sample_model(): } -def get_sample_algo_metadata(): +def get_sample_function_metadata(): return { "owner": "foo", "permissions": DEFAULT_PERMISSIONS, "description": DEFAULT_STORAGE_ADDRESS, - "algorithm": DEFAULT_STORAGE_ADDRESS, + "function": DEFAULT_STORAGE_ADDRESS, } diff --git a/backend/substrapp/tests/compute_tasks/conftest.py b/backend/substrapp/tests/compute_tasks/conftest.py index 28986238b..02eeb0965 100644 --- a/backend/substrapp/tests/compute_tasks/conftest.py +++ b/backend/substrapp/tests/compute_tasks/conftest.py @@ -11,13 +11,13 @@ DOCKERFILE = """ FROM ubuntu:16.04 RUN echo "Hello World" -ENTRYPOINT ["python3", "myalgo.py"] +ENTRYPOINT ["python3", "myfunction.py"] """ @pytest.fixture -def algo() -> orchestrator.Algo: - return orc_mock.AlgoFactory() +def function() -> orchestrator.Function: + return orc_mock.FunctionFactory() @pytest.fixture @@ -27,7 +27,7 @@ def orc_metric() -> dict[str, Any]: "name": "my metric", "owner": "Org1MSP", "description": {"checksum": "", "storage_address": ""}, - "algorithm": {"checksum": "", "storage_address": ""}, + "function": {"checksum": "", "storage_address": ""}, "permissions": { "process": {"public": True, "authorized_ids": []}, "download": {"public": True, "authorized_ids": []}, @@ -46,7 +46,7 @@ def testtuple_context(orc_metric) -> Context: compute_plan_key=cp_key, compute_plan_tag="", input_assets=[], - algo=orc_metric, + function=orc_metric, directories=Directories(cp_key), has_chainkeys=False, ) diff --git a/backend/substrapp/tests/compute_tasks/test_asset_buffer.py b/backend/substrapp/tests/compute_tasks/test_asset_buffer.py index a786acba7..5437e0ab8 100644 --- a/backend/substrapp/tests/compute_tasks/test_asset_buffer.py +++ b/backend/substrapp/tests/compute_tasks/test_asset_buffer.py @@ -202,10 +202,10 @@ class FakeContext: "address": {"storage_address": "test", "checksum": "check"}, } } - algo = { + function = { "key": str(uuid.uuid4()), "owner": "test", - "algorithm": {"storage_address": "test", "checksum": "check"}, + "function": {"storage_address": "test", "checksum": "check"}, } self.ctx = FakeContext() diff --git a/backend/substrapp/tests/compute_tasks/test_command.py b/backend/substrapp/tests/compute_tasks/test_command.py index b5493fa55..5bd9244a6 100644 --- a/backend/substrapp/tests/compute_tasks/test_command.py +++ b/backend/substrapp/tests/compute_tasks/test_command.py @@ -39,11 +39,11 @@ def test_get_args_task_input_one_model_output_one_model(): channel_name=_CHANNEL, task=task, input_assets=input_assets, - algo=orc_mock.AlgoFactory( + function=orc_mock.FunctionFactory( inputs={ - InputIdentifiers.MODEL: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), - InputIdentifiers.OPENER: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), - InputIdentifiers.DATASAMPLES: orc_mock.AlgoInputFactory( + InputIdentifiers.MODEL: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), + InputIdentifiers.OPENER: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), + InputIdentifiers.DATASAMPLES: orc_mock.FunctionInputFactory( kind=orchestrator.AssetKind.ASSET_DATA_SAMPLE, multiple=True ), } @@ -121,12 +121,12 @@ def test_get_args_task_input_two_models_output_two_models(): channel_name=_CHANNEL, task=task, 
input_assets=input_assets, - algo=orc_mock.AlgoFactory( + function=orc_mock.FunctionFactory( inputs={ - InputIdentifiers.SHARED: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), - InputIdentifiers.LOCAL: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), - InputIdentifiers.OPENER: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), - InputIdentifiers.DATASAMPLES: orc_mock.AlgoInputFactory( + InputIdentifiers.SHARED: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), + InputIdentifiers.LOCAL: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), + InputIdentifiers.OPENER: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), + InputIdentifiers.DATASAMPLES: orc_mock.FunctionInputFactory( kind=orchestrator.AssetKind.ASSET_DATA_SAMPLE, multiple=True ), } @@ -208,11 +208,11 @@ def test_get_args_predict_after_train(): channel_name=_CHANNEL, task=task, input_assets=input_assets, - algo=orc_mock.AlgoFactory( + function=orc_mock.FunctionFactory( inputs={ - InputIdentifiers.MODEL: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), - InputIdentifiers.OPENER: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), - InputIdentifiers.DATASAMPLES: orc_mock.AlgoInputFactory( + InputIdentifiers.MODEL: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), + InputIdentifiers.OPENER: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), + InputIdentifiers.DATASAMPLES: orc_mock.FunctionInputFactory( kind=orchestrator.AssetKind.ASSET_DATA_SAMPLE, multiple=True ), } @@ -290,12 +290,12 @@ def test_get_args_predict_input_two_models_output_one_model(): channel_name=_CHANNEL, task=task, input_assets=input_assets, - algo=orc_mock.AlgoFactory( + function=orc_mock.FunctionFactory( inputs={ - InputIdentifiers.SHARED: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), - InputIdentifiers.LOCAL: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), - InputIdentifiers.OPENER: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), - InputIdentifiers.DATASAMPLES: orc_mock.AlgoInputFactory( + InputIdentifiers.SHARED: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), + InputIdentifiers.LOCAL: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), + InputIdentifiers.OPENER: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), + InputIdentifiers.DATASAMPLES: orc_mock.FunctionInputFactory( kind=orchestrator.AssetKind.ASSET_DATA_SAMPLE, multiple=True ), } @@ -350,11 +350,11 @@ def test_get_args_test_input_one_model_output_one_performance(): task = orc_mock.ComputeTaskFactory( rank=0, ) - algo = orc_mock.AlgoFactory( + function = orc_mock.FunctionFactory( inputs={ - InputIdentifiers.PREDICTIONS: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), - InputIdentifiers.OPENER: orc_mock.AlgoInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), - InputIdentifiers.DATASAMPLES: orc_mock.AlgoInputFactory( + InputIdentifiers.PREDICTIONS: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_MODEL), + InputIdentifiers.OPENER: orc_mock.FunctionInputFactory(kind=orchestrator.AssetKind.ASSET_DATA_MANAGER), + InputIdentifiers.DATASAMPLES: orc_mock.FunctionInputFactory( kind=orchestrator.AssetKind.ASSET_DATA_SAMPLE, multiple=True ), } @@ -381,7 +381,7 @@ def 
test_get_args_test_input_one_model_output_one_performance(): channel_name=_CHANNEL, task=task, input_assets=input_assets, - algo=algo, + function=function, has_chainkeys=False, compute_plan=None, directories=Directories(task.compute_plan_key), diff --git a/backend/substrapp/tests/compute_tasks/test_image_builder.py b/backend/substrapp/tests/compute_tasks/test_image_builder.py index 14e9a9a0f..6d6310626 100644 --- a/backend/substrapp/tests/compute_tasks/test_image_builder.py +++ b/backend/substrapp/tests/compute_tasks/test_image_builder.py @@ -11,42 +11,42 @@ _VALID_DOCKERFILE = """ FROM ubuntu:16.04 RUN echo "Hello world" -ENTRYPOINT ["python3", "myalgo.py"] +ENTRYPOINT ["python3", "myfunction.py"] """ _NO_ENTRYPOINT = """ FROM ubuntu:16.04 """ _ENTRYPOINT_SHELL_FORM = """ FROM ubuntu:16.04 -ENTRYPOINT python3 myalgo.py +ENTRYPOINT python3 myfunction.py """ class TestBuildImageIfMissing: - def test_image_already_exists(self, mocker: MockerFixture, algo: orchestrator.Algo): + def test_image_already_exists(self, mocker: MockerFixture, function: orchestrator.Function): ds = mocker.Mock() m_container_image_exists = mocker.patch( "substrapp.compute_tasks.image_builder.container_image_exists", return_value=True ) - algo_image_tag = utils.container_image_tag_from_algo(algo) + function_image_tag = utils.container_image_tag_from_function(function) - image_builder.build_image_if_missing(datastore=ds, algo=algo) + image_builder.build_image_if_missing(datastore=ds, function=function) - m_container_image_exists.assert_called_once_with(algo_image_tag) + m_container_image_exists.assert_called_once_with(function_image_tag) - def test_image_build_needed(self, mocker: MockerFixture, algo: orchestrator.Algo): + def test_image_build_needed(self, mocker: MockerFixture, function: orchestrator.Function): ds = mocker.Mock() m_container_image_exists = mocker.patch( "substrapp.compute_tasks.image_builder.container_image_exists", return_value=False ) - m_build_algo_image = mocker.patch("substrapp.compute_tasks.image_builder._build_algo_image") - algo_image_tag = utils.container_image_tag_from_algo(algo) + m_build_function_image = mocker.patch("substrapp.compute_tasks.image_builder._build_function_image") + function_image_tag = utils.container_image_tag_from_function(function) - image_builder.build_image_if_missing(datastore=ds, algo=algo) + image_builder.build_image_if_missing(datastore=ds, function=function) - m_container_image_exists.assert_called_once_with(algo_image_tag) - m_build_algo_image.assert_called_once() - assert m_build_algo_image.call_args.args[1] == algo + m_container_image_exists.assert_called_once_with(function_image_tag) + m_build_function_image.assert_called_once() + assert m_build_function_image.call_args.args[1] == function class TestGetEntrypointFromDockerfile: @@ -55,7 +55,7 @@ def test_valid_dockerfile(self, tmp_path: pathlib.Path): dockerfile_path.write_text(_VALID_DOCKERFILE) entrypoint = image_builder._get_entrypoint_from_dockerfile(str(tmp_path)) - assert entrypoint == ["python3", "myalgo.py"] + assert entrypoint == ["python3", "myfunction.py"] @pytest.mark.parametrize( "dockerfile,expected_exc_content", diff --git a/backend/substrapp/tests/compute_tasks/test_outputs.py b/backend/substrapp/tests/compute_tasks/test_outputs.py index 5c2bb1c27..ababaa09b 100644 --- a/backend/substrapp/tests/compute_tasks/test_outputs.py +++ b/backend/substrapp/tests/compute_tasks/test_outputs.py @@ -26,7 +26,7 @@ def test_commit_chainkeys(has_chainkeys: bool, mocker: MockerFixture): 
task=mock.ComputeTaskFactory(), compute_plan=None, input_assets=[], - algo=mock.AlgoFactory(), + function=mock.FunctionFactory(), directories=Directories("cpkey"), has_chainkeys=has_chainkeys, ) @@ -54,7 +54,7 @@ def test_save_model(settings, mocker: MockerFixture, orc_raise: bool): task=mock.ComputeTaskFactory(), compute_plan=None, input_assets=[], - algo=mock.AlgoFactory(), + function=mock.FunctionFactory(), directories=Directories("cpkey"), has_chainkeys=False, ) diff --git a/backend/substrapp/tests/compute_tasks/utils.py b/backend/substrapp/tests/compute_tasks/utils.py index 38efa0cb7..157943a76 100644 --- a/backend/substrapp/tests/compute_tasks/utils.py +++ b/backend/substrapp/tests/compute_tasks/utils.py @@ -2,9 +2,9 @@ from substrapp.compute_tasks import utils -def test_container_image_tag_from_algo(): - algo_address = orc_mock.AddressFactory(checksum="test_long_checksum") - algo = orc_mock.AlgoFactory(algorithm=algo_address) +def test_container_image_tag_from_function(): + function_address = orc_mock.AddressFactory(checksum="test_long_checksum") + function = orc_mock.FunctionFactory(function_address=function_address) - tag = utils.container_image_tag_from_algo(algo) - assert tag == "algo-test_long_checks" + tag = utils.container_image_tag_from_function(function) + assert tag == "function-test_long_checks" diff --git a/backend/substrapp/tests/tasks/test_compute_plan.py b/backend/substrapp/tests/tasks/test_compute_plan.py index d89392c45..65706531c 100644 --- a/backend/substrapp/tests/tasks/test_compute_plan.py +++ b/backend/substrapp/tests/tasks/test_compute_plan.py @@ -10,7 +10,7 @@ def test_teardown_compute_plan_resources_cp_doing(mocker: MockerFixture): client = mocker.Mock() client.is_compute_plan_running.return_value = True mocked_teardown = mocker.patch("substrapp.tasks.tasks_compute_plan._teardown_pods_and_dirs") - mocked_algo_delete = mocker.patch("substrapp.tasks.tasks_compute_plan._delete_compute_plan_algos_images") + mocked_function_delete = mocker.patch("substrapp.tasks.tasks_compute_plan._delete_compute_plan_functions_images") cp_key = str(uuid.uuid4()) @@ -18,14 +18,14 @@ def test_teardown_compute_plan_resources_cp_doing(mocker: MockerFixture): client.is_compute_plan_running.assert_called_once() mocked_teardown.assert_not_called() - mocked_algo_delete.assert_not_called() + mocked_function_delete.assert_not_called() def test_teardown_compute_plan_resources_cp_done(mocker: MockerFixture): client = mocker.Mock() client.is_compute_plan_running.return_value = False mocked_teardown = mocker.patch("substrapp.tasks.tasks_compute_plan._teardown_pods_and_dirs") - mocked_algo_delete = mocker.patch("substrapp.tasks.tasks_compute_plan._delete_compute_plan_algos_images") + mocked_function_delete = mocker.patch("substrapp.tasks.tasks_compute_plan._delete_compute_plan_functions_images") cp_key = str(uuid.uuid4()) @@ -33,20 +33,20 @@ def test_teardown_compute_plan_resources_cp_done(mocker: MockerFixture): client.is_compute_plan_running.assert_called_once() mocked_teardown.assert_called_once() - mocked_algo_delete.assert_called_once() + mocked_function_delete.assert_called_once() -def test_delete_cp_algo_images(mocker: MockerFixture): - algo_1_address = orc_mock.AddressFactory(checksum="azerty") - algo_2_address = orc_mock.AddressFactory(checksum="qwerty") - algos = [ - orc_mock.AlgoFactory(algorithm=algo_1_address), - orc_mock.AlgoFactory(algorithm=algo_2_address), +def test_delete_cp_function_images(mocker: MockerFixture): + function_1_address = orc_mock.AddressFactory(checksum="azerty") +
function_2_address = orc_mock.AddressFactory(checksum="qwerty") + functions = [ + orc_mock.FunctionFactory(function_address=function_1_address), + orc_mock.FunctionFactory(function_address=function_2_address), ] mocked_delete_image = mocker.patch("substrapp.tasks.tasks_compute_plan.delete_container_image_safe") - tasks_compute_plan._delete_compute_plan_algos_images(algos) + tasks_compute_plan._delete_compute_plan_functions_images(functions) - mocked_delete_image.assert_any_call("algo-azerty") - mocked_delete_image.assert_any_call("algo-qwerty") + mocked_delete_image.assert_any_call("function-azerty") + mocked_delete_image.assert_any_call("function-qwerty") assert mocked_delete_image.call_count == 2 diff --git a/backend/substrapp/tests/tasks/test_compute_task.py b/backend/substrapp/tests/tasks/test_compute_task.py index 271718407..120574e04 100644 --- a/backend/substrapp/tests/tasks/test_compute_task.py +++ b/backend/substrapp/tests/tasks/test_compute_task.py @@ -45,7 +45,7 @@ class FakeDirectories: task=task, compute_plan=None, input_assets=[], - algo=orc_mock.AlgoFactory(), + function=orc_mock.FunctionFactory(), directories=FakeDirectories(), has_chainkeys=False, ) diff --git a/backend/substrapp/tests/test_model.py b/backend/substrapp/tests/test_model.py index ee017b4d5..4600f8cea 100644 --- a/backend/substrapp/tests/test_model.py +++ b/backend/substrapp/tests/test_model.py @@ -5,9 +5,9 @@ from django.test import TestCase from django.test import override_settings -from substrapp.models import Algo from substrapp.models import DataManager from substrapp.models import DataSample +from substrapp.models import Function from substrapp.models import Model from substrapp.utils import get_hash @@ -36,10 +36,10 @@ def test_create_data(self): data_sample = DataSample.objects.create(file=File(data_file), checksum="checksum") self.assertEqual(data_sample.checksum, "checksum") - def test_create_algo(self): + def test_create_function(self): script, _ = get_sample_script() - algo = Algo.objects.create(file=script) - self.assertEqual(algo.checksum, get_hash(script)) + function = Function.objects.create(file=script) + self.assertEqual(function.checksum, get_hash(script)) def test_create_model(self): modelfile, _ = get_sample_model() diff --git a/backend/substrapp/tests/test_utils.py b/backend/substrapp/tests/test_utils.py index 5eb1a034c..6734a8174 100644 --- a/backend/substrapp/tests/test_utils.py +++ b/backend/substrapp/tests/test_utils.py @@ -11,7 +11,7 @@ from substrapp.utils import get_hash from substrapp.utils import uncompress_content -from .common import get_sample_algo +from .common import get_sample_function CHANNEL = "mychannel" SUBTUPLE_DIR = tempfile.mkdtemp() @@ -21,23 +21,23 @@ class UtilsTests(APITestCase): def setUp(self): self.subtuple_path = SUBTUPLE_DIR - self.algo, self.algo_filename = get_sample_algo() + self.function, self.function_filename = get_sample_function() def test_uncompress_content_tar(self): - algo_content = self.algo.read() - checksum = get_hash(self.algo) + function_content = self.function.read() + checksum = get_hash(self.function) - subtuple = {"key": checksum, "algo": "testalgo"} + subtuple = {"key": checksum, "function": "testfunction"} with mock.patch("substrapp.utils.get_hash") as mget_hash: mget_hash.return_value = checksum - uncompress_content(algo_content, os.path.join(self.subtuple_path, f'subtuple/{subtuple["key"]}/')) + uncompress_content(function_content, os.path.join(self.subtuple_path, f'subtuple/{subtuple["key"]}/')) - 
self.assertTrue(os.path.exists(os.path.join(self.subtuple_path, f'subtuple/{subtuple["key"]}/algo.py'))) + self.assertTrue(os.path.exists(os.path.join(self.subtuple_path, f'subtuple/{subtuple["key"]}/function.py'))) self.assertTrue(os.path.exists(os.path.join(self.subtuple_path, f'subtuple/{subtuple["key"]}/Dockerfile'))) def test_uncompress_content_zip(self): - filename = "algo.py" + filename = "function.py" filepath = os.path.join(self.subtuple_path, filename) with open(filepath, "w") as f: f.write("Hello World") @@ -50,7 +50,7 @@ def test_uncompress_content_zip(self): self.assertTrue(os.path.exists(zippath)) subtuple_key = "testkey" - subtuple = {"key": subtuple_key, "algo": "testalgo"} + subtuple = {"key": subtuple_key, "function": "testfunction"} with mock.patch("substrapp.utils.get_hash") as mget_hash: with open(zippath, "rb") as content: diff --git a/backend/substrapp/utils/__init__.py b/backend/substrapp/utils/__init__.py index 42c2174ff..a9795daa6 100644 --- a/backend/substrapp/utils/__init__.py +++ b/backend/substrapp/utils/__init__.py @@ -118,20 +118,20 @@ def uncompress_content(archive_content, to_directory): raise Exception("Archive must be zip or tar.*") -def timeit(function): +def timeit(function_to_time): def timed(*args, **kw): ts = time.time() exception = None try: - result = function(*args, **kw) + result = function_to_time(*args, **kw) except Exception as ex: exception = ex elaps = (time.time() - ts) * 1000 log_data = { - "function": function.__name__, + "function": function_to_time.__name__, "duration": f"{elaps:.2f}ms", } diff --git a/fixtures/.DS_Store b/fixtures/.DS_Store new file mode 100644 index 000000000..5159b676d Binary files /dev/null and b/fixtures/.DS_Store differ diff --git a/fixtures/chunantes/.DS_Store b/fixtures/chunantes/.DS_Store new file mode 100644 index 000000000..5008ddfcf Binary files /dev/null and b/fixtures/chunantes/.DS_Store differ diff --git a/fixtures/chunantes/algos/algo3/algo.tar.gz b/fixtures/chunantes/algos/algo3/algo.tar.gz deleted file mode 100644 index f3bbb776a..000000000 Binary files a/fixtures/chunantes/algos/algo3/algo.tar.gz and /dev/null differ diff --git a/fixtures/chunantes/functions/.DS_Store b/fixtures/chunantes/functions/.DS_Store new file mode 100644 index 000000000..e0ecf4057 Binary files /dev/null and b/fixtures/chunantes/functions/.DS_Store differ diff --git a/fixtures/chunantes/algos/algo0/description.md b/fixtures/chunantes/functions/function0/description.md similarity index 100% rename from fixtures/chunantes/algos/algo0/description.md rename to fixtures/chunantes/functions/function0/description.md diff --git a/fixtures/chunantes/algos/algo0/algo.tar.gz b/fixtures/chunantes/functions/function0/function.tar.gz similarity index 100% rename from fixtures/chunantes/algos/algo0/algo.tar.gz rename to fixtures/chunantes/functions/function0/function.tar.gz diff --git a/fixtures/chunantes/algos/algo0/algo.zip b/fixtures/chunantes/functions/function0/function.zip similarity index 100% rename from fixtures/chunantes/algos/algo0/algo.zip rename to fixtures/chunantes/functions/function0/function.zip diff --git a/fixtures/chunantes/algos/algo1/description.md b/fixtures/chunantes/functions/function1/description.md similarity index 100% rename from fixtures/chunantes/algos/algo1/description.md rename to fixtures/chunantes/functions/function1/description.md diff --git a/fixtures/chunantes/algos/algo1/algo.tar.gz b/fixtures/chunantes/functions/function1/function.tar.gz similarity index 100% rename from 
fixtures/chunantes/algos/algo1/algo.tar.gz rename to fixtures/chunantes/functions/function1/function.tar.gz diff --git a/fixtures/chunantes/algos/algo2/algo.zip b/fixtures/chunantes/functions/function2/function.zip similarity index 100% rename from fixtures/chunantes/algos/algo2/algo.zip rename to fixtures/chunantes/functions/function2/function.zip diff --git a/fixtures/chunantes/functions/function3/.DS_Store b/fixtures/chunantes/functions/function3/.DS_Store new file mode 100644 index 000000000..7e9d20232 Binary files /dev/null and b/fixtures/chunantes/functions/function3/.DS_Store differ diff --git a/fixtures/chunantes/algos/algo3/description.md b/fixtures/chunantes/functions/function3/description.md similarity index 100% rename from fixtures/chunantes/algos/algo3/description.md rename to fixtures/chunantes/functions/function3/description.md diff --git a/fixtures/chunantes/functions/function3/function.tar.gz b/fixtures/chunantes/functions/function3/function.tar.gz new file mode 100644 index 000000000..a4a8880b4 Binary files /dev/null and b/fixtures/chunantes/functions/function3/function.tar.gz differ diff --git a/fixtures/chunantes/algos/algo4/description.md b/fixtures/chunantes/functions/function4/description.md similarity index 100% rename from fixtures/chunantes/algos/algo4/description.md rename to fixtures/chunantes/functions/function4/description.md diff --git a/fixtures/chunantes/algos/algo4/algo.tar.gz b/fixtures/chunantes/functions/function4/function.tar.gz similarity index 100% rename from fixtures/chunantes/algos/algo4/algo.tar.gz rename to fixtures/chunantes/functions/function4/function.tar.gz diff --git a/fixtures/chunantes/algos/algo4/algo.zip b/fixtures/chunantes/functions/function4/function.zip similarity index 100% rename from fixtures/chunantes/algos/algo4/algo.zip rename to fixtures/chunantes/functions/function4/function.zip diff --git a/fixtures/owkin/compositealgos/compositealgo0/description.md b/fixtures/owkin/compositefunctions/compositefunction0/description.md similarity index 100% rename from fixtures/owkin/compositealgos/compositealgo0/description.md rename to fixtures/owkin/compositefunctions/compositefunction0/description.md diff --git a/fixtures/owkin/compositealgos/compositealgo0/algo.tar.gz b/fixtures/owkin/compositefunctions/compositefunction0/function.tar.gz similarity index 100% rename from fixtures/owkin/compositealgos/compositealgo0/algo.tar.gz rename to fixtures/owkin/compositefunctions/compositefunction0/function.tar.gz
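For completeness, the renamed mock factories compose the way the updated tests use them; note that the factory keyword is now function_address, matching FunctionFactory in orchestrator/mock.py. A small sketch (the import alias and checksum literal are assumptions for illustration):

    # Hedged sketch: composing the renamed factories as the updated tests do.
    from orchestrator import mock as orc_mock  # import path assumed from repo layout
    from substrapp.compute_tasks import utils

    address = orc_mock.AddressFactory(checksum="0123456789abcdef0123")  # illustrative
    function = orc_mock.FunctionFactory(function_address=address)

    # The image tag keeps the first 16 checksum characters, per utils above.
    assert utils.container_image_tag_from_function(function) == "function-0123456789abcdef"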