diff --git a/fiftyone/factory/repo_factory.py b/fiftyone/factory/repo_factory.py index da74c28e009..5859bfe386b 100644 --- a/fiftyone/factory/repo_factory.py +++ b/fiftyone/factory/repo_factory.py @@ -56,17 +56,12 @@ def delegated_operation_repo() -> DelegatedOperationRepo: @staticmethod def execution_store_repo( dataset_id: Optional[ObjectId] = None, + collection_name: Optional[str] = None, ) -> ExecutionStoreRepo: - repo_key = ( - f"{MongoExecutionStoreRepo.COLLECTION_NAME}_{dataset_id}" - if dataset_id - else MongoExecutionStoreRepo.COLLECTION_NAME + collection = _get_db()[ + collection_name or MongoExecutionStoreRepo.COLLECTION_NAME + ] + return MongoExecutionStoreRepo( + collection=collection, + dataset_id=dataset_id, ) - - if repo_key not in RepositoryFactory.repos: - RepositoryFactory.repos[repo_key] = MongoExecutionStoreRepo( - collection=_get_db()[MongoExecutionStoreRepo.COLLECTION_NAME], - dataset_id=dataset_id, - ) - - return RepositoryFactory.repos[repo_key] diff --git a/fiftyone/factory/repos/execution_store.py b/fiftyone/factory/repos/execution_store.py index a47df3d24b1..be0c1d0d6f8 100644 --- a/fiftyone/factory/repos/execution_store.py +++ b/fiftyone/factory/repos/execution_store.py @@ -10,7 +10,7 @@ from bson import ObjectId from pymongo.collection import Collection -from typing import Any +from typing import Any, Dict from fiftyone.operators.store.models import StoreDocument, KeyDocument diff --git a/fiftyone/operators/store/service.py b/fiftyone/operators/store/service.py index 3c16e4c028b..e6e93a29148 100644 --- a/fiftyone/operators/store/service.py +++ b/fiftyone/operators/store/service.py @@ -36,6 +36,7 @@ def __init__( self, repo: Optional["ExecutionStoreRepo"] = None, dataset_id: Optional[ObjectId] = None, + collection_name: str = None, ): from fiftyone.factory.repo_factory import ( @@ -45,7 +46,8 @@ def __init__( if repo is None: repo = RepositoryFactory.execution_store_repo( - dataset_id=dataset_id + dataset_id=dataset_id, + collection_name=collection_name, ) self._dataset_id = dataset_id self._repo: ExecutionStoreRepo = repo diff --git a/fiftyone/operators/types.py b/fiftyone/operators/types.py index 6294eb6ed9d..ee83137590a 100644 --- a/fiftyone/operators/types.py +++ b/fiftyone/operators/types.py @@ -1345,7 +1345,7 @@ class Button(View): label (None): a label for the button description (None): a description for the button caption (None): a caption for the button - operator (None): the name of the operator to execute when the button is + operator (None): the uri of the operator to execute when the button is clicked params (None): the parameters to pass to the operator href (None): the URL to navigate to when the button is clicked @@ -1370,6 +1370,49 @@ def to_json(self): ) +class OperatorExecutionButtonView(View): + """Allows users to choose execution options when triggering an operator. 
+ + Examples:: + + import fiftyone.operators.types as types + + button = types.OperatorExecutionButtonView( + label="Execute My Operator", + operator="my_delegated_operator", + params={"msg": "Hello World"}, + ) + + inputs = types.Object() + inputs.view("btn", button) + + Args: + label (None): a label for the button + description (None): a description for the button + caption (None): a caption for the button + operator (None): the uri of the operator to execute when the button is + clicked + params (None): the parameters to pass to the operator + icon (None): the icon to display on the button + """ + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.operator = kwargs.get("operator", None) + self.prompt = kwargs.get("prompt", False) + self.params = kwargs.get("params", None) + + def to_json(self): + return _convert_callables_to_operator_uris( + { + **super().to_json(), + "operator": self.operator, + "params": self.params, + "prompt": self.prompt, + } + ) + + class OneOfView(View): """Displays one of the given :class:`View` instances. diff --git a/tests/unittests/decorators.py b/tests/unittests/decorators.py index ec0421b474f..9749bd2f37f 100644 --- a/tests/unittests/decorators.py +++ b/tests/unittests/decorators.py @@ -11,7 +11,7 @@ import fnmatch import fiftyone as fo -import fiftyone.operators.store as foos +import fiftyone.core.odm as foo def drop_datasets(func): @@ -45,25 +45,25 @@ async def wrapper(*args, **kwargs): return wrapper -def drop_stores(func, pattern="*"): - """Decorator that drops all stores from the database before running a test.""" +from functools import wraps - @wraps(func) - def wrapper(*args, **kwargs): - svc = foos.ExecutionStoreService() - stores = svc.list_stores_global() - for store in stores: - store_name = store.store_name - if fnmatch.fnmatch(store_name, pattern): - try: - svc.delete_store_global(store_name) - except Exception as e: - raise RuntimeError( - f"Failed to delete store '{store_name}'" - ) from e - return func(*args, **kwargs) - return wrapper +def drop_collection(collection_name): + """Decorator that drops a collection from the database before and after running a test.""" + + def decorator(func): + @wraps(func) + def wrapper(*args, **kwargs): + db = foo.get_db_conn() + db.drop_collection(collection_name) + try: + return func(*args, **kwargs) + finally: + db.drop_collection(collection_name) + + return wrapper + + return decorator def skip_windows(func): diff --git a/tests/unittests/execution_store_service_tests.py b/tests/unittests/execution_store_service_tests.py index 80eeabb8d6c..eca8979299e 100644 --- a/tests/unittests/execution_store_service_tests.py +++ b/tests/unittests/execution_store_service_tests.py @@ -11,22 +11,34 @@ import fiftyone as fo from fiftyone.operators.store import ExecutionStoreService -from decorators import drop_stores, drop_datasets +from decorators import drop_collection, drop_datasets + +TEST_COLLECTION_NAME = "execution_store_test_collection" + + +@pytest.fixture +def dataset(): + return fo.Dataset(name="test_dataset") + + +@pytest.fixture +def dataset_id(dataset): + return dataset._doc.id @pytest.fixture def svc(): - return ExecutionStoreService() + return ExecutionStoreService(collection_name=TEST_COLLECTION_NAME) @pytest.fixture -def svc_with_dataset(): - dataset = fo.Dataset(name="test_dataset") - dataset.save() - return ExecutionStoreService(dataset_id=dataset._doc.id) +def svc_with_dataset(dataset_id): + return ExecutionStoreService( + dataset_id=dataset_id, collection_name=TEST_COLLECTION_NAME + ) 
-@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_store_creation(svc): NAME = "test_store" @@ -41,7 +53,7 @@ def test_store_creation(svc): assert svc.count_stores() == 1, "Store count should be 1" -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_store_creation_with_dataset(svc_with_dataset): NAME = "test_store" @@ -56,7 +68,7 @@ def test_store_creation_with_dataset(svc_with_dataset): assert svc_with_dataset.count_stores() == 1, "Store count should be 1" -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_store_creation_with_metadata(svc): NAME = "test_store" @@ -73,7 +85,7 @@ def test_store_creation_with_metadata(svc): assert svc.count_stores() == 1, "Store count should be 1" -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_set_get_key(svc): NAME = "test_store" @@ -89,7 +101,7 @@ def test_set_get_key(svc): ), "Retrieved value should match the set value" -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_list_global_stores(svc, svc_with_dataset): NO_DATASET_STORE_NAME = "dataset_less_store" @@ -112,7 +124,7 @@ def test_list_global_stores(svc, svc_with_dataset): assert svc_with_dataset._dataset_id in dataset_ids -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_has_store(svc, svc_with_dataset): NAME = "test_store" @@ -147,7 +159,7 @@ def test_has_store(svc, svc_with_dataset): ), "Nonexistent store should return False globally" -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_has_key(svc, svc_with_dataset): NAME = "test_store" @@ -158,7 +170,7 @@ def test_has_key(svc, svc_with_dataset): assert svc_with_dataset.has_key(NAME, KEY) is False -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_get_key(svc): NAME = "test_store" @@ -169,7 +181,7 @@ def test_get_key(svc): assert svc.get_key(NAME, "nonexistent") is None -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_get_store_with_only_keys(svc): NAME = "test_store" @@ -181,7 +193,7 @@ def test_get_store_with_only_keys(svc): assert key_doc.value == "value1" -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_delete_store(svc, svc_with_dataset): NAME = "test_store" @@ -253,7 +265,7 @@ def test_delete_store(svc, svc_with_dataset): ), "Final cleanup of 'test_store' in dataset context should succeed" -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_global_delete_store(svc, svc_with_dataset): SHARED_NAME = "shared_store" @@ -271,7 +283,7 @@ def test_global_delete_store(svc, svc_with_dataset): ), "SHARED_NAME store should not exist globally" -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) @drop_datasets def test_scoping(svc, svc_with_dataset): NAME = "test_store" @@ -303,7 +315,7 @@ def test_scoping(svc, svc_with_dataset): @drop_datasets -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) def test_set_key_with_ttl(svc): NAME = "test_store" KEY = "ttl_key" @@ -316,7 +328,7 @@ def test_set_key_with_ttl(svc): @drop_datasets -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) def test_set_key_with_ttl_and_update(svc): NAME = "test_store" KEY = "ttl_key" @@ -333,7 +345,7 @@ def test_set_key_with_ttl_and_update(svc): @drop_datasets -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) def test_set_key_with_dict_value(svc): NAME = "test_store" KEY = "dict_key" @@ -344,7 +356,7 @@ def test_set_key_with_dict_value(svc): @drop_datasets -@drop_stores 
+@drop_collection(TEST_COLLECTION_NAME) def test_count_stores(svc, svc_with_dataset): assert svc.count_stores() == 0 assert svc.count_stores_global() == 0 @@ -367,7 +379,7 @@ def test_count_stores(svc, svc_with_dataset): @drop_datasets -@drop_stores +@drop_collection(TEST_COLLECTION_NAME) def test_cleanup(svc, svc_with_dataset): A_STORE_NAME = "store_a" B_STORE_NAME = "store_b"
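A minimal sketch, not part of the patch, of how the new `collection_name` plumbing is exercised end to end, assuming only the `ExecutionStoreService` constructor and `drop_collection` decorator introduced above; the test, store, key, and collection names here are illustrative:

import fiftyone as fo
from fiftyone.operators.store import ExecutionStoreService

from decorators import drop_collection, drop_datasets

# Illustrative name; any dedicated test collection works
TEST_COLLECTION_NAME = "execution_store_test_collection"


@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_store_isolation():
    # Global-scope service backed by the dedicated test collection
    svc = ExecutionStoreService(collection_name=TEST_COLLECTION_NAME)

    # Dataset-scoped service sharing the same backing collection
    dataset = fo.Dataset(name="test_dataset")
    svc_ds = ExecutionStoreService(
        dataset_id=dataset._doc.id,
        collection_name=TEST_COLLECTION_NAME,
    )

    svc.create_store("example_store")
    svc.set_key("example_store", "greeting", "hello")

    # Keys written in the global scope are not visible in the dataset scope
    assert svc.has_key("example_store", "greeting")
    assert not svc_ds.has_key("example_store", "greeting")

Because each test targets its own collection, which `drop_collection` drops before and after the run, the tests no longer rely on the pattern-matched global store cleanup that the removed `drop_stores` decorator performed against the shared default collection.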