chore!: rename algo to function #573

Merged (39 commits), Feb 10, 2023

Commits
4849917  rename algo to function (ThibaultFy, Jan 31, 2023)
1f295c3  rename algo to function (ThibaultFy, Jan 31, 2023)
a6f8ac2  rename algo to function (ThibaultFy, Jan 31, 2023)
9bcd4d9  rename algo to function (ThibaultFy, Jan 31, 2023)
7a65aa3  rename algo to function (ThibaultFy, Jan 31, 2023)
7a6a0d2  migration (ThibaultFy, Feb 1, 2023)
7b9b338  migration (ThibaultFy, Feb 1, 2023)
b597a73  migration (ThibaultFy, Feb 1, 2023)
51c2c4a  migration (ThibaultFy, Feb 1, 2023)
0497786  migration (ThibaultFy, Feb 1, 2023)
c64be50  test (ThibaultFy, Feb 1, 2023)
7873769  test (ThibaultFy, Feb 1, 2023)
fabda6b  test (ThibaultFy, Feb 1, 2023)
6f56f54  test (ThibaultFy, Feb 1, 2023)
19b2f6d  rename algo to function (ThibaultFy, Feb 2, 2023)
6681a59  rename algo to function (ThibaultFy, Feb 2, 2023)
4625e0e  rename algo to function (ThibaultFy, Feb 2, 2023)
14e3ce0  rename algo to function (ThibaultFy, Feb 2, 2023)
577c0fc  rename algo to function (ThibaultFy, Feb 2, 2023)
a9fe5c7  migrations (ThibaultFy, Feb 2, 2023)
9392967  rename (ThibaultFy, Feb 2, 2023)
811970c  rename (ThibaultFy, Feb 2, 2023)
617ac9f  rename (ThibaultFy, Feb 2, 2023)
e36f1dc  rename (ThibaultFy, Feb 2, 2023)
20c1865  migration (ThibaultFy, Feb 2, 2023)
7a7ca22  migration (ThibaultFy, Feb 3, 2023)
166ddb5  migration (ThibaultFy, Feb 3, 2023)
8fb792d  migration (ThibaultFy, Feb 3, 2023)
10ffe7a  migration (ThibaultFy, Feb 3, 2023)
3dfe28c  fix (ThibaultFy, Feb 6, 2023)
6844b8f  fic (ThibaultFy, Feb 7, 2023)
f8568a2  rename doc (ThibaultFy, Feb 7, 2023)
9fb4be3  fix (ThibaultFy, Feb 7, 2023)
0f10bdf  fix (ThibaultFy, Feb 7, 2023)
d4f241c  rename (ThibaultFy, Feb 8, 2023)
36fd6fd  rename (ThibaultFy, Feb 8, 2023)
3959c9e  rename (ThibaultFy, Feb 8, 2023)
db7d0b0  changelog (ThibaultFy, Feb 9, 2023)
7c6760a  changelog (ThibaultFy, Feb 9, 2023)

4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]()

### Changed

- BREAKING: rename Algo to Function ([#573](https://github.com/Substra/substra-backend/pull/573))

## [0.35.0](https://github.com/Substra/substra-backend/releases/tag/0.35.0) 2023-02-06

### Added
42 changes: 21 additions & 21 deletions backend/api/events/sync.py
@@ -22,12 +22,12 @@
from api.models import DataManager
from api.models import DataSample
from api.models import Model
from api.serializers import AlgoSerializer
from api.serializers import ChannelOrganizationSerializer
from api.serializers import ComputePlanSerializer
from api.serializers import ComputeTaskSerializer
from api.serializers import DataManagerSerializer
from api.serializers import DataSampleSerializer
from api.serializers import FunctionSerializer
from api.serializers import ModelSerializer
from api.serializers import PerformanceSerializer
from orchestrator import client as orc_client
@@ -66,35 +66,35 @@ def _create_organization(channel: str, data: dict) -> None:
logger.debug("Organization already exists", organization_id=data["id"], channel=data["channel"])


def _on_create_algo_event(event: dict) -> None:
"""Process create algo event to update local database."""
logger.debug("Syncing algo create", asset_key=event["asset_key"], event_id=event["id"])
_create_algo(channel=event["channel"], data=event["algo"])
def _on_create_function_event(event: dict) -> None:
"""Process create function event to update local database."""
logger.debug("Syncing function create", asset_key=event["asset_key"], event_id=event["id"])
_create_function(channel=event["channel"], data=event["function"])


def _create_algo(channel: str, data: dict) -> None:
def _create_function(channel: str, data: dict) -> None:
data["channel"] = channel
serializer = AlgoSerializer(data=data)
serializer = FunctionSerializer(data=data)
try:
serializer.save_if_not_exists()
except AlreadyExistsError:
logger.debug("Algo already exists", asset_key=data["key"])
logger.debug("Function already exists", asset_key=data["key"])


def _on_update_algo_event(event: dict) -> None:
"""Process update algo event to update local database."""
logger.debug("Syncing algo update", asset_key=event["asset_key"], event_id=event["id"])
_update_algo(key=event["asset_key"], data=event["algo"])
def _on_update_function_event(event: dict) -> None:
"""Process update function event to update local database."""
logger.debug("Syncing function update", asset_key=event["asset_key"], event_id=event["id"])
_update_function(key=event["asset_key"], data=event["function"])


def _update_algo(key: str, data: dict) -> None:
"""Process update algo event to update local database."""
def _update_function(key: str, data: dict) -> None:
"""Process update function event to update local database."""

from api.models.algo import Algo
from api.models.function import Function

algo = Algo.objects.get(key=key)
algo.name = data["name"]
algo.save()
function = Function.objects.get(key=key)
function.name = data["name"]
function.save()


def _on_create_computeplan_event(event: dict) -> None:
@@ -385,9 +385,9 @@ def _on_create_failure_report(event: dict) -> None:
event_pb2.EVENT_ASSET_CREATED: _on_create_computeplan_event,
event_pb2.EVENT_ASSET_UPDATED: _on_update_computeplan_event,
},
common_pb2.ASSET_ALGO: {
event_pb2.EVENT_ASSET_CREATED: _on_create_algo_event,
event_pb2.EVENT_ASSET_UPDATED: _on_update_algo_event,
common_pb2.ASSET_FUNCTION: {
event_pb2.EVENT_ASSET_CREATED: _on_create_function_event,
event_pb2.EVENT_ASSET_UPDATED: _on_update_function_event,
},
common_pb2.ASSET_COMPUTE_TASK: {
event_pb2.EVENT_ASSET_CREATED: _on_create_computetask_event,
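For context, a minimal standalone sketch of the dispatch pattern these handlers plug into after the rename: the event payload key moves from event["algo"] to event["function"] along with the handler names, as shown in the diff above. The string constants and the dispatch helper below are simplified stand-ins for the orchestrator protobuf enums and the real sync entry point, which are outside this diff.

# Simplified, self-contained model of the event routing in sync.py after the rename.
# ASSET_FUNCTION / EVENT_* are illustrative stand-ins for the protobuf enum values.

ASSET_FUNCTION = "ASSET_FUNCTION"
EVENT_ASSET_CREATED = "EVENT_ASSET_CREATED"
EVENT_ASSET_UPDATED = "EVENT_ASSET_UPDATED"


def _on_create_function_event(event: dict) -> None:
    # The real handler calls _create_function(channel=..., data=event["function"]).
    print("create function", event["asset_key"])


def _on_update_function_event(event: dict) -> None:
    # The real handler calls _update_function(key=..., data=event["function"]).
    print("update function", event["asset_key"])


CALLBACKS = {
    ASSET_FUNCTION: {
        EVENT_ASSET_CREATED: _on_create_function_event,
        EVENT_ASSET_UPDATED: _on_update_function_event,
    },
}


def dispatch(event: dict) -> None:
    # Route an event to the callback registered for its asset kind and event kind.
    CALLBACKS[event["asset_kind"]][event["kind"]](event)


dispatch({
    "asset_kind": ASSET_FUNCTION,
    "kind": EVENT_ASSET_CREATED,
    "asset_key": "1d9b25ab",  # dummy key, for illustration only
    "function": {"name": "my function"},
})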
110 changes: 55 additions & 55 deletions backend/api/management/commands/generate_fixtures.py
@@ -14,7 +14,7 @@ class Command(BaseCommand):

def handle(self, *args, **options):
logger.debug("Generate test data")
self.create_algos()
self.create_functions()
self.create_data_manager()
self.create_data_samples()
self.create_empty_cp()
@@ -29,56 +29,56 @@ def handle(self, *args, **options):
self.create_aggregate_cp()
self.create_composite_cp()

def create_algos(self):
logger.debug(" Create algos")
self.simple_algo = factory.create_algo(
inputs=factory.build_algo_inputs(["datasamples", "opener", "model"]),
outputs=factory.build_algo_outputs(["model"]),
def create_functions(self):
logger.debug(" Create functions")
self.simple_function = factory.create_function(
inputs=factory.build_function_inputs(["datasamples", "opener", "model"]),
outputs=factory.build_function_outputs(["model"]),
name="simple",
)
factory.create_algo_files(
key=self.simple_algo.key,
description=ContentFile("Simple algo"),
factory.create_function_files(
key=self.simple_function.key,
description=ContentFile("Simple function"),
)

self.aggregate_algo = factory.create_algo(
inputs=factory.build_algo_inputs(["models"]),
outputs=factory.build_algo_outputs(["model"]),
self.aggregate_function = factory.create_function(
inputs=factory.build_function_inputs(["models"]),
outputs=factory.build_function_outputs(["model"]),
name="aggregate",
)
factory.create_algo_files(
key=self.aggregate_algo.key,
description=ContentFile("Aggregate algo"),
factory.create_function_files(
key=self.aggregate_function.key,
description=ContentFile("Aggregate function"),
)

self.composite_algo = factory.create_algo(
inputs=factory.build_algo_inputs(["datasamples", "opener", "local", "shared"]),
outputs=factory.build_algo_outputs(["local", "shared"]),
self.composite_function = factory.create_function(
inputs=factory.build_function_inputs(["datasamples", "opener", "local", "shared"]),
outputs=factory.build_function_outputs(["local", "shared"]),
name="composite",
)
factory.create_algo_files(
key=self.composite_algo.key,
description=ContentFile("Composite algo"),
factory.create_function_files(
key=self.composite_function.key,
description=ContentFile("Composite function"),
)

self.predict_algo = factory.create_algo(
inputs=factory.build_algo_inputs(["datasamples", "opener", "model", "shared"]),
outputs=factory.build_algo_outputs(["predictions"]),
self.predict_function = factory.create_function(
inputs=factory.build_function_inputs(["datasamples", "opener", "model", "shared"]),
outputs=factory.build_function_outputs(["predictions"]),
name="predict",
)
factory.create_algo_files(
key=self.predict_algo.key,
description=ContentFile("Predict algo"),
factory.create_function_files(
key=self.predict_function.key,
description=ContentFile("Predict function"),
)

self.metric_algo = factory.create_algo(
inputs=factory.build_algo_inputs(["datasamples", "opener", "predictions"]),
outputs=factory.build_algo_outputs(["performance"]),
self.metric_function = factory.create_function(
inputs=factory.build_function_inputs(["datasamples", "opener", "predictions"]),
outputs=factory.build_function_outputs(["performance"]),
name="metric",
)
factory.create_algo_files(
key=self.metric_algo.key,
description=ContentFile("Metric algo"),
factory.create_function_files(
key=self.metric_function.key,
description=ContentFile("Metric function"),
)

def create_data_manager(self):
@@ -128,15 +128,15 @@ def create_basic_cp(self, cp_status):
first_task_status = cp_status
train_task = factory.create_computetask(
cp,
self.simple_algo,
self.simple_function,
inputs=factory.build_computetask_inputs(
self.simple_algo,
self.simple_function,
{
"opener": [self.data_manager.key],
"datasamples": self.train_data_sample_keys,
},
),
outputs=factory.build_computetask_outputs(self.simple_algo),
outputs=factory.build_computetask_outputs(self.simple_function),
data_manager=self.data_manager,
data_samples=self.train_data_sample_keys,
status=first_task_status,
@@ -159,16 +159,16 @@
)
predict_task = factory.create_computetask(
cp,
self.predict_algo,
self.predict_function,
inputs=factory.build_computetask_inputs(
self.predict_algo,
self.predict_function,
{
"opener": [self.data_manager.key],
"datasamples": self.predict_data_sample_keys,
"model": [train_task.key],
},
),
outputs=factory.build_computetask_outputs(self.predict_algo),
outputs=factory.build_computetask_outputs(self.predict_function),
data_manager=self.data_manager,
data_samples=self.predict_data_sample_keys,
status=task_status,
@@ -182,24 +182,24 @@

test_task = factory.create_computetask(
cp,
self.metric_algo,
self.metric_function,
inputs=factory.build_computetask_inputs(
self.metric_algo,
self.metric_function,
{
"opener": [self.data_manager.key],
"datasamples": self.test_data_sample_keys,
"predictions": [predict_task.key],
},
),
outputs=factory.build_computetask_outputs(self.metric_algo),
outputs=factory.build_computetask_outputs(self.metric_function),
data_manager=self.data_manager,
data_samples=self.test_data_sample_keys,
status=task_status,
)
if task_status == ComputeTask.Status.STATUS_DONE:
factory.create_performance(
test_task,
self.metric_algo,
self.metric_function,
identifier="performance",
)

@@ -214,44 +214,44 @@ def create_aggregate_cp(self):
)
train_task_1 = factory.create_computetask(
cp,
self.simple_algo,
self.simple_function,
inputs=factory.build_computetask_inputs(
self.simple_algo,
self.simple_function,
{
"opener": [self.data_manager.key],
"datasamples": self.train_data_sample_keys,
},
),
outputs=factory.build_computetask_outputs(self.simple_algo),
outputs=factory.build_computetask_outputs(self.simple_function),
data_manager=self.data_manager,
data_samples=self.train_data_sample_keys,
status=ComputeTask.Status.STATUS_TODO,
)
train_task_2 = factory.create_computetask(
cp,
self.simple_algo,
self.simple_function,
inputs=factory.build_computetask_inputs(
self.simple_algo,
self.simple_function,
{
"opener": [self.data_manager.key],
"datasamples": self.train_data_sample_keys,
},
),
outputs=factory.build_computetask_outputs(self.simple_algo),
outputs=factory.build_computetask_outputs(self.simple_function),
data_manager=self.data_manager,
data_samples=self.train_data_sample_keys,
status=ComputeTask.Status.STATUS_TODO,
)
factory.create_computetask(
cp,
self.aggregate_algo,
self.aggregate_function,
inputs=factory.build_computetask_inputs(
self.aggregate_algo,
self.aggregate_function,
{
"model": [train_task_1.key, train_task_2.key],
},
),
outputs=factory.build_computetask_outputs(self.aggregate_algo),
outputs=factory.build_computetask_outputs(self.aggregate_function),
status=ComputeTask.Status.STATUS_TODO,
)
return cp
@@ -264,15 +264,15 @@ def create_composite_cp(self):
)
composite_task = factory.create_computetask(
cp,
self.composite_algo,
self.composite_function,
inputs=factory.build_computetask_inputs(
self.composite_algo,
self.composite_function,
{
"opener": [self.data_manager.key],
"datasamples": self.train_data_sample_keys,
},
),
outputs=factory.build_computetask_outputs(self.composite_algo),
outputs=factory.build_computetask_outputs(self.composite_function),
data_manager=self.data_manager,
data_samples=self.train_data_sample_keys,
status=ComputeTask.Status.STATUS_DONE,
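As a usage note (not part of this diff): the fixture generator is still invoked as the generate_fixtures management command, since Django derives the command name from the module path under management/commands/; only internal helpers such as create_algos are renamed. A minimal sketch, assuming a configured Django environment for this backend; the settings module name below is illustrative, not taken from this PR.

import os

import django
from django.core.management import call_command

# Illustrative settings module; the real value depends on the deployment.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backend.settings")
django.setup()

# Invocation is unchanged by the create_algos -> create_functions rename.
call_command("generate_fixtures")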
49 changes: 49 additions & 0 deletions backend/api/migrations/0047_rename_algo_to_function.py
@@ -0,0 +1,49 @@
# Generated by Django 4.0.7 on 2023-02-02 17:12

from django.db import migrations


class Migration(migrations.Migration):
dependencies = [
("api", "0046_remove_computeplan_failed_task_category"),
]

operations = [
migrations.RenameModel(
old_name="Algo",
new_name="Function",
),
migrations.RenameField(
model_name="function",
old_name="algorithm_address",
new_name="function_address",
),
migrations.RenameField(
model_name="function",
old_name="algorithm_checksum",
new_name="function_checksum",
),
migrations.RenameModel(
old_name="AlgoInput",
new_name="FunctionInput",
),
migrations.RenameModel(
old_name="AlgoOutput",
new_name="FunctionOutput",
),
migrations.RenameField(
model_name="functionoutput",
old_name="algo",
new_name="function",
),
migrations.RenameField(
model_name="functioninput",
old_name="algo",
new_name="function",
),
migrations.RenameField(
model_name="computetask",
old_name="algo",
new_name="function",
),
]
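A short sketch of applying this migration, as an illustration rather than part of the PR. RenameModel and RenameField are reversible, schema-level operations (table and column renames, with no data copy on common backends such as PostgreSQL), so the pre-rename schema can be restored by migrating back to 0046. Assumes a configured Django environment, e.g. a backend management shell.

from django.core.management import call_command

# Preview the SQL the rename migration will run.
call_command("sqlmigrate", "api", "0047_rename_algo_to_function")

# Apply it (a plain `migrate` also works; targeting the app is shown for clarity).
call_command("migrate", "api", "0047_rename_algo_to_function")

# Roll back to the pre-rename schema if needed:
# call_command("migrate", "api", "0046_remove_computeplan_failed_task_category")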