Commit

refactor drop_stores
ritch committed Nov 13, 2024
1 parent 5495a5e commit ca7c135
Showing 6 changed files with 108 additions and 56 deletions.
19 changes: 7 additions & 12 deletions fiftyone/factory/repo_factory.py
@@ -56,17 +56,12 @@ def delegated_operation_repo() -> DelegatedOperationRepo:
    @staticmethod
    def execution_store_repo(
        dataset_id: Optional[ObjectId] = None,
+        collection_name: Optional[str] = None,
    ) -> ExecutionStoreRepo:
-        repo_key = (
-            f"{MongoExecutionStoreRepo.COLLECTION_NAME}_{dataset_id}"
-            if dataset_id
-            else MongoExecutionStoreRepo.COLLECTION_NAME
-        )
-
-        if repo_key not in RepositoryFactory.repos:
-            RepositoryFactory.repos[repo_key] = MongoExecutionStoreRepo(
-                collection=_get_db()[MongoExecutionStoreRepo.COLLECTION_NAME],
-                dataset_id=dataset_id,
-            )
-
-        return RepositoryFactory.repos[repo_key]
+        collection = _get_db()[
+            collection_name or MongoExecutionStoreRepo.COLLECTION_NAME
+        ]
+        return MongoExecutionStoreRepo(
+            collection=collection,
+            dataset_id=dataset_id,
+        )
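With this refactor, execution_store_repo no longer caches repositories by key in RepositoryFactory.repos; it simply builds a MongoExecutionStoreRepo bound to either the default collection or a caller-supplied one. A minimal sketch of calling the refactored factory; the collection name below is a hypothetical example, not part of this commit:

    from fiftyone.factory.repo_factory import RepositoryFactory

    # Repo backed by the default execution store collection
    repo = RepositoryFactory.execution_store_repo()

    # Repo backed by a dedicated collection, e.g. for isolated tests
    test_repo = RepositoryFactory.execution_store_repo(
        collection_name="my_execution_store_collection",  # hypothetical name
    )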
2 changes: 1 addition & 1 deletion fiftyone/factory/repos/execution_store.py
@@ -10,7 +10,7 @@

from bson import ObjectId
from pymongo.collection import Collection
-from typing import Any
+from typing import Any, Dict

from fiftyone.operators.store.models import StoreDocument, KeyDocument
4 changes: 3 additions & 1 deletion fiftyone/operators/store/service.py
@@ -36,6 +36,7 @@ def __init__(
        self,
        repo: Optional["ExecutionStoreRepo"] = None,
        dataset_id: Optional[ObjectId] = None,
+        collection_name: str = None,
    ):

        from fiftyone.factory.repo_factory import (
@@ -45,7 +46,8 @@

        if repo is None:
            repo = RepositoryFactory.execution_store_repo(
-                dataset_id=dataset_id
+                dataset_id=dataset_id,
+                collection_name=collection_name,
            )
        self._dataset_id = dataset_id
        self._repo: ExecutionStoreRepo = repo
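The new collection_name argument flows straight through ExecutionStoreService to the factory above. A minimal usage sketch, mirroring what the updated tests below do:

    from fiftyone.operators.store import ExecutionStoreService

    # Service backed by the default execution store collection
    svc = ExecutionStoreService()

    # Service backed by a dedicated collection, as the updated tests do
    test_svc = ExecutionStoreService(
        collection_name="execution_store_test_collection"
    )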
45 changes: 44 additions & 1 deletion fiftyone/operators/types.py
@@ -1345,7 +1345,7 @@ class Button(View):
        label (None): a label for the button
        description (None): a description for the button
        caption (None): a caption for the button
-        operator (None): the name of the operator to execute when the button is
+        operator (None): the uri of the operator to execute when the button is
            clicked
        params (None): the parameters to pass to the operator
        href (None): the URL to navigate to when the button is clicked
@@ -1370,6 +1370,49 @@ def to_json(self):
        )


+class OperatorExecutionButtonView(View):
+    """Allows users to choose execution options when triggering an operator.
+
+    Examples::
+
+        import fiftyone.operators.types as types
+
+        button = types.OperatorExecutionButtonView(
+            label="Execute My Operator",
+            operator="my_delegated_operator",
+            params={"msg": "Hello World"},
+        )
+
+        inputs = types.Object()
+        inputs.view("btn", button)
+
+    Args:
+        label (None): a label for the button
+        description (None): a description for the button
+        caption (None): a caption for the button
+        operator (None): the uri of the operator to execute when the button is
+            clicked
+        params (None): the parameters to pass to the operator
+        icon (None): the icon to display on the button
+    """
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.operator = kwargs.get("operator", None)
+        self.prompt = kwargs.get("prompt", False)
+        self.params = kwargs.get("params", None)
+
+    def to_json(self):
+        return _convert_callables_to_operator_uris(
+            {
+                **super().to_json(),
+                "operator": self.operator,
+                "params": self.params,
+                "prompt": self.prompt,
+            }
+        )


class OneOfView(View):
"""Displays one of the given :class:`View` instances.
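The OperatorExecutionButtonView constructor added in this file also reads a prompt kwarg that is not listed in the docstring's Args. A sketch of wiring up the new view, following the docstring's own example; the exact effect of prompt=True (presumably showing the operator's input prompt before execution) is an assumption, not something this diff documents:

    import fiftyone.operators.types as types

    button = types.OperatorExecutionButtonView(
        label="Execute My Operator",
        operator="my_delegated_operator",  # operator URI, as in the docstring example
        params={"msg": "Hello World"},
        prompt=True,  # read by __init__ above; behavior assumed, not documented here
    )

    inputs = types.Object()
    inputs.view("btn", button)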
36 changes: 18 additions & 18 deletions tests/unittests/decorators.py
@@ -11,7 +11,7 @@
import fnmatch

import fiftyone as fo
-import fiftyone.operators.store as foos
+import fiftyone.core.odm as foo


def drop_datasets(func):
@@ -45,25 +45,25 @@ async def wrapper(*args, **kwargs):
    return wrapper


-def drop_stores(func, pattern="*"):
-    """Decorator that drops all stores from the database before running a test."""
-    from functools import wraps
-
-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        svc = foos.ExecutionStoreService()
-        stores = svc.list_stores_global()
-        for store in stores:
-            store_name = store.store_name
-            if fnmatch.fnmatch(store_name, pattern):
-                try:
-                    svc.delete_store_global(store_name)
-                except Exception as e:
-                    raise RuntimeError(
-                        f"Failed to delete store '{store_name}'"
-                    ) from e
-        return func(*args, **kwargs)
-
-    return wrapper
+def drop_collection(collection_name):
+    """Decorator that drops a collection from the database before and after running a test."""
+
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            db = foo.get_db_conn()
+            db.drop_collection(collection_name)
+            try:
+                return func(*args, **kwargs)
+            finally:
+                db.drop_collection(collection_name)
+
+        return wrapper
+
+    return decorator


def skip_windows(func):
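Unlike the old drop_stores, the new drop_collection is a decorator factory: it takes the collection name and returns the decorator to apply. A short usage sketch, mirroring the updated tests below:

    from decorators import drop_collection, drop_datasets

    TEST_COLLECTION_NAME = "execution_store_test_collection"

    @drop_collection(TEST_COLLECTION_NAME)  # note the call, unlike the old @drop_stores
    @drop_datasets
    def test_something():
        ...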
58 changes: 35 additions & 23 deletions tests/unittests/execution_store_service_tests.py
@@ -11,22 +11,34 @@
import fiftyone as fo
from fiftyone.operators.store import ExecutionStoreService

-from decorators import drop_stores, drop_datasets
+from decorators import drop_collection, drop_datasets

+TEST_COLLECTION_NAME = "execution_store_test_collection"
+
+
+@pytest.fixture
+def dataset():
+    return fo.Dataset(name="test_dataset")
+
+
+@pytest.fixture
+def dataset_id(dataset):
+    return dataset._doc.id
+
+
@pytest.fixture
def svc():
-    return ExecutionStoreService()
+    return ExecutionStoreService(collection_name=TEST_COLLECTION_NAME)


@pytest.fixture
-def svc_with_dataset():
-    dataset = fo.Dataset(name="test_dataset")
-    dataset.save()
-    return ExecutionStoreService(dataset_id=dataset._doc.id)
+def svc_with_dataset(dataset_id):
+    return ExecutionStoreService(
+        dataset_id=dataset_id, collection_name=TEST_COLLECTION_NAME
+    )


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_store_creation(svc):
NAME = "test_store"
@@ -41,7 +53,7 @@ def test_store_creation(svc):
assert svc.count_stores() == 1, "Store count should be 1"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_store_creation_with_dataset(svc_with_dataset):
NAME = "test_store"
@@ -56,7 +68,7 @@ def test_store_creation_with_dataset(svc_with_dataset):
assert svc_with_dataset.count_stores() == 1, "Store count should be 1"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_store_creation_with_metadata(svc):
NAME = "test_store"
@@ -73,7 +85,7 @@ def test_store_creation_with_metadata(svc):
assert svc.count_stores() == 1, "Store count should be 1"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_set_get_key(svc):
NAME = "test_store"
@@ -89,7 +101,7 @@ def test_set_get_key(svc):
), "Retrieved value should match the set value"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_list_global_stores(svc, svc_with_dataset):
NO_DATASET_STORE_NAME = "dataset_less_store"
@@ -112,7 +124,7 @@ def test_list_global_stores(svc, svc_with_dataset):
assert svc_with_dataset._dataset_id in dataset_ids


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_has_store(svc, svc_with_dataset):
NAME = "test_store"
@@ -147,7 +159,7 @@ def test_has_store(svc, svc_with_dataset):
), "Nonexistent store should return False globally"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_has_key(svc, svc_with_dataset):
NAME = "test_store"
@@ -158,7 +170,7 @@ def test_has_key(svc, svc_with_dataset):
assert svc_with_dataset.has_key(NAME, KEY) is False


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_get_key(svc):
NAME = "test_store"
@@ -169,7 +181,7 @@ def test_get_key(svc):
assert svc.get_key(NAME, "nonexistent") is None


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_get_store_with_only_keys(svc):
NAME = "test_store"
@@ -181,7 +193,7 @@ def test_get_store_with_only_keys(svc):
assert key_doc.value == "value1"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_delete_store(svc, svc_with_dataset):
NAME = "test_store"
@@ -253,7 +265,7 @@ def test_delete_store(svc, svc_with_dataset):
), "Final cleanup of 'test_store' in dataset context should succeed"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_global_delete_store(svc, svc_with_dataset):
SHARED_NAME = "shared_store"
@@ -271,7 +283,7 @@ def test_global_delete_store(svc, svc_with_dataset):
), "SHARED_NAME store should not exist globally"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
@drop_datasets
def test_scoping(svc, svc_with_dataset):
NAME = "test_store"
@@ -303,7 +315,7 @@ def test_scoping(svc, svc_with_dataset):


@drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
def test_set_key_with_ttl(svc):
NAME = "test_store"
KEY = "ttl_key"
@@ -316,7 +328,7 @@ def test_set_key_with_ttl(svc):


@drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
def test_set_key_with_ttl_and_update(svc):
NAME = "test_store"
KEY = "ttl_key"
@@ -333,7 +345,7 @@ def test_set_key_with_ttl_and_update(svc):


@drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
def test_set_key_with_dict_value(svc):
NAME = "test_store"
KEY = "dict_key"
@@ -344,7 +356,7 @@ def test_set_key_with_dict_value(svc):


@drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
def test_count_stores(svc, svc_with_dataset):
assert svc.count_stores() == 0
assert svc.count_stores_global() == 0
@@ -367,7 +379,7 @@ def test_count_stores(svc, svc_with_dataset):


@drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
def test_cleanup(svc, svc_with_dataset):
A_STORE_NAME = "store_a"
B_STORE_NAME = "store_b"
