From 9bd6bd30f7cdb6f3e8cee4ca3e8d87bd8f5a5d39 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 10:34:45 -0700
Subject: [PATCH 01/13] estore int tests

---
 fiftyone/factory/repo_factory.py              | 17 ++--
 fiftyone/factory/repos/execution_store.py     | 37 ++++++--
 fiftyone/operators/store/models.py            |  1 +
 fiftyone/operators/store/service.py           | 12 +++
 tests/unittests/decorators.py                 | 19 ++++
 tests/unittests/execution_store_int_tests.py  | 95 +++++++++++++++++++
 ...tests.py => execution_store_unit_tests.py} |  0
 7 files changed, 165 insertions(+), 16 deletions(-)
 create mode 100644 tests/unittests/execution_store_int_tests.py
 rename tests/unittests/{execution_store_tests.py => execution_store_unit_tests.py} (100%)

diff --git a/fiftyone/factory/repo_factory.py b/fiftyone/factory/repo_factory.py
index 64768d0ae0..da74c28e00 100644
--- a/fiftyone/factory/repo_factory.py
+++ b/fiftyone/factory/repo_factory.py
@@ -57,15 +57,16 @@ def delegated_operation_repo() -> DelegatedOperationRepo:
     def execution_store_repo(
         dataset_id: Optional[ObjectId] = None,
     ) -> ExecutionStoreRepo:
-        if (
-            MongoExecutionStoreRepo.COLLECTION_NAME
-            not in RepositoryFactory.repos
-        ):
-            RepositoryFactory.repos[
-                MongoExecutionStoreRepo.COLLECTION_NAME
-            ] = MongoExecutionStoreRepo(
+        repo_key = (
+            f"{MongoExecutionStoreRepo.COLLECTION_NAME}_{dataset_id}"
+            if dataset_id
+            else MongoExecutionStoreRepo.COLLECTION_NAME
+        )
+
+        if repo_key not in RepositoryFactory.repos:
+            RepositoryFactory.repos[repo_key] = MongoExecutionStoreRepo(
                 collection=_get_db()[MongoExecutionStoreRepo.COLLECTION_NAME],
                 dataset_id=dataset_id,
             )
-        return RepositoryFactory.repos[MongoExecutionStoreRepo.COLLECTION_NAME]
+        return RepositoryFactory.repos[repo_key]
diff --git a/fiftyone/factory/repos/execution_store.py b/fiftyone/factory/repos/execution_store.py
index f8645b545a..4d4a948fba 100644
--- a/fiftyone/factory/repos/execution_store.py
+++ b/fiftyone/factory/repos/execution_store.py
@@ -198,14 +198,28 @@ def has_store_global(self, store_name):
         )
         return bool(result)

-    def list_stores_global(self) -> list[str]:
-        """Lists the stores in the execution store across all datasets and the
-        global context.
-        """
-        result = self._collection.find(
-            dict(key="__store__"), {"store_name": 1}
-        )
-        return [d["store_name"] for d in result]
+    def list_stores_global(self) -> list[StoreDocument]:
+        """Lists stores across all datasets and the global context."""
+        pipeline = [
+            {
+                "$group": {
+                    "_id": {
+                        "store_name": "$store_name",
+                        "dataset_id": "$dataset_id",
+                    }
+                }
+            },
+            {
+                "$project": {
+                    "_id": 0,
+                    "store_name": "$_id.store_name",
+                    "dataset_id": "$_id.dataset_id",
+                }
+            },
+        ]
+
+        result = self._collection.aggregate(pipeline)
+        return [StoreDocument(**d) for d in result]

     def count_stores_global(self) -> int:
         """Counts the stores in the execution store across all datasets and the
@@ -213,6 +227,13 @@ def count_stores_global(self) -> int:
         """
         return self._collection.count_documents(dict(key="__store__"))

+    def delete_store_global(self, store_name) -> int:
+        """Deletes the specified store across all datasets and the global
+        context.
+        """
+        result = self._collection.delete_many(dict(store_name=store_name))
+        return result.deleted_count
+

 class MongoExecutionStoreRepo(ExecutionStoreRepo):
     """MongoDB implementation of execution store repository."""
diff --git a/fiftyone/operators/store/models.py b/fiftyone/operators/store/models.py
index f044d5abad..92c114bfaa 100644
--- a/fiftyone/operators/store/models.py
+++ b/fiftyone/operators/store/models.py
@@ -41,6 +41,7 @@ def to_mongo_dict(self, exclude_id: bool = True) -> dict[str, Any]:
             data.pop("_id", None)
         if self.dataset_id is None:
             data.pop("dataset_id", None)
+
         return data
diff --git a/fiftyone/operators/store/service.py b/fiftyone/operators/store/service.py
index c4e7cc6993..9a55896d1c 100644
--- a/fiftyone/operators/store/service.py
+++ b/fiftyone/operators/store/service.py
@@ -208,3 +208,15 @@ def count_stores_global(self) -> int:
             the number of stores
         """
         return self._repo.count_stores_global()
+
+    def delete_store_global(self, store_name) -> int:
+        """Deletes the specified store across all datasets and the global
+        context.
+
+        Args:
+            store_name: the name of the store
+
+        Returns:
+            the number of stores deleted
+        """
+        return self._repo.delete_store_global(store_name)
diff --git a/tests/unittests/decorators.py b/tests/unittests/decorators.py
index d034ce4212..0f7a384738 100644
--- a/tests/unittests/decorators.py
+++ b/tests/unittests/decorators.py
@@ -8,8 +8,10 @@
 from functools import wraps
 import platform
 import unittest
+import fnmatch

 import fiftyone as fo
+import fiftyone.operators.store as foos


 def drop_datasets(func):
@@ -43,6 +45,23 @@ async def wrapper(*args, **kwargs):
         return wrapper


+def drop_stores(func, pattern="*"):
+    """Decorator that drops all stores from the database before running a test."""
+
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        svc = foos.ExecutionStoreService()
+        stores = svc.list_stores_global()
+        for store in stores:
+            store_name = store.store_name
+            if fnmatch.fnmatch(store_name, pattern):
+                print(f"Deleting store: {store_name}", pattern)
+                svc.delete_store_global(store_name)
+        return func(*args, **kwargs)
+
+    return wrapper
+
+
 def skip_windows(func):
     """Decorator that skips a test when running on Windows."""
diff --git a/tests/unittests/execution_store_int_tests.py b/tests/unittests/execution_store_int_tests.py
new file mode 100644
index 0000000000..770556cf11
--- /dev/null
+++ b/tests/unittests/execution_store_int_tests.py
@@ -0,0 +1,95 @@
+"""
+FiftyOne execution store related unit tests.
+
+| Copyright 2017-2024, Voxel51, Inc.
+| `voxel51.com <https://voxel51.com/>`_
+|
+"""
+
+import pytest
+
+import fiftyone as fo
+from fiftyone.operators.store import ExecutionStoreService
+
+from decorators import drop_stores
+
+
+@pytest.fixture
+def svc():
+    return ExecutionStoreService()
+
+
+@pytest.fixture
+def svc_with_dataset():
+    dataset = fo.Dataset(name="test_dataset")
+    dataset.save()
+    yield ExecutionStoreService(dataset_id=dataset._doc.id)
+    dataset.delete()
+
+
+@drop_stores
+def test_store_creation(svc):
+    NAME = "test_store"
+    created_store = svc.create_store(NAME)
+
+    assert (
+        created_store.store_name == NAME
+    ), "Store name should match the given name"
+    assert (
+        created_store.dataset_id is None
+    ), "Dataset ID should be None when not provided"
+    assert svc.count_stores() == 1, "Store count should be 1"
+
+
+@drop_stores
+def test_store_creation_with_dataset(svc_with_dataset):
+    NAME = "test_store"
+    created_store = svc_with_dataset.create_store(NAME)
+
+    assert (
+        created_store.store_name == NAME
+    ), "Store name should match the given name"
+    assert (
+        created_store.dataset_id is not None
+    ), "Dataset ID should be set when provided"
+    assert svc_with_dataset.count_stores() == 1, "Store count should be 1"
+
+
+@drop_stores
+def test_set_get_key(svc):
+    NAME = "test_store"
+    KEY = "test_key"
+    VALUE = "test_value"
+
+    svc.set_key(NAME, KEY, VALUE)
+    assert (
+        svc.count_keys(NAME) == 1
+    ), "Store should have 1 key after setting it"
+    assert (
+        svc.get_key(NAME, KEY).value == VALUE
+    ), "Retrieved value should match the set value"
+
+
+@drop_stores
+def test_scoping(svc, svc_with_dataset):
+    NAME = "test_store"
+    KEY = "test_key"
+    VALUE = "test_value"
+    svc.set_key(NAME, KEY, VALUE)
+    svc_with_dataset.set_key(NAME, KEY, VALUE)
+    global_list = svc.list_stores_global()
+    global_names = [store.store_name for store in global_list]
+    assert global_names == [NAME, NAME], "Global store should be listed"
+    assert svc.count_keys(NAME) == 1, "Global store should have 1 key"
+    assert (
+        svc_with_dataset.count_keys(NAME) == 1
+    ), "Dataset store should have 1 key"
+    svc_with_dataset.delete_store(NAME)
+    assert svc.count_keys(NAME) == 1, "Global store should still have 1 key"
+    assert (
+        svc_with_dataset.count_keys(NAME) == 0
+    ), "Dataset store should have 0 keys"
+    svc.delete_store(NAME)
+    assert svc.count_keys(NAME) == 0, "Global store should have 0 keys"
+    global_list = svc.list_stores_global()
+    assert NAME not in global_list, "Global store should not be listed"
diff --git a/tests/unittests/execution_store_tests.py b/tests/unittests/execution_store_unit_tests.py
similarity index 100%
rename from tests/unittests/execution_store_tests.py
rename to tests/unittests/execution_store_unit_tests.py

From e7aa5c7a1e09750342e7e284c273425dfcfdf134 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 11:40:18 -0700
Subject: [PATCH 02/13] add get_store method

---
 tests/unittests/execution_store_int_tests.py | 47 +++++++++++++++++++-
 1 file changed, 45 insertions(+), 2 deletions(-)

diff --git a/tests/unittests/execution_store_int_tests.py b/tests/unittests/execution_store_int_tests.py
index 770556cf11..8a9d296a40 100644
--- a/tests/unittests/execution_store_int_tests.py
+++ b/tests/unittests/execution_store_int_tests.py
@@ -11,7 +11,7 @@
 import fiftyone as fo
 from fiftyone.operators.store import ExecutionStoreService

-from decorators import drop_stores
+from decorators import drop_stores, drop_datasets


 @pytest.fixture
@@ -24,10 +24,10 @@ def svc_with_dataset():
     dataset = fo.Dataset(name="test_dataset")
     dataset.save()
     yield ExecutionStoreService(dataset_id=dataset._doc.id)
-    dataset.delete()


 @drop_stores
+@drop_datasets
 def test_store_creation(svc):
     NAME = "test_store"
     created_store = svc.create_store(NAME)
@@ -42,6 +42,7 @@ def test_store_creation(svc):


 @drop_stores
+@drop_datasets
 def test_store_creation_with_dataset(svc_with_dataset):
     NAME = "test_store"
     created_store = svc_with_dataset.create_store(NAME)
@@ -56,6 +57,24 @@ def test_store_creation_with_dataset(svc_with_dataset):


 @drop_stores
+@drop_datasets
+def test_store_creation_with_metadata(svc):
+    NAME = "test_store"
+    METADATA = {"test": "value"}
+    svc.create_store(NAME, metadata=METADATA)
+    created_store = svc.get_store(NAME)
+
+    assert (
+        created_store.store_name == NAME
+    ), "Store name should match the given name"
+    assert (
+        created_store.metadata == METADATA
+    ), "Metadata should match the provided metadata"
+    assert svc.count_stores() == 1, "Store count should be 1"
+
+
+@drop_stores
+@drop_datasets
 def test_set_get_key(svc):
     NAME = "test_store"
     KEY = "test_key"
@@ -71,6 +90,7 @@ def test_set_get_key(svc):


 @drop_stores
+@drop_datasets
 def test_scoping(svc, svc_with_dataset):
     NAME = "test_store"
     KEY = "test_key"
@@ -93,3 +113,26 @@ def test_scoping(svc, svc_with_dataset):
     assert svc.count_keys(NAME) == 0, "Global store should have 0 keys"
     global_list = svc.list_stores_global()
     assert NAME not in global_list, "Global store should not be listed"
+
+
+@drop_stores
+@drop_datasets
+def test_list_global_stores(svc, svc_with_dataset):
+    NO_DATASET_STORE_NAME = "dataset_less_store"
+    DATASET_STORE_NAME = "dataset_store"
+    KEY_ONLY_STORE_NAME = "key_only_store"
+
+    svc.create_store(NO_DATASET_STORE_NAME)
+    svc_with_dataset.create_store(DATASET_STORE_NAME)
+    svc_with_dataset.set_key(DATASET_STORE_NAME, "key", "value")
+    svc_with_dataset.set_key(KEY_ONLY_STORE_NAME, "key", "value")
+
+    global_list = svc.list_stores_global()
+    store_names = [store.store_name for store in global_list]
+    dataset_ids = [store.dataset_id for store in global_list]
+    assert len(global_list) == 3
+    assert NO_DATASET_STORE_NAME in store_names
+    assert DATASET_STORE_NAME in store_names
+    assert KEY_ONLY_STORE_NAME in store_names
+    assert None in dataset_ids
+    assert svc_with_dataset._dataset_id in dataset_ids

From 82c2707540f051cc7561753c9e82091e58becef7 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 11:58:06 -0700
Subject: [PATCH 03/13] add metadata and other util methods to estores

---
 fiftyone/factory/repos/execution_store.py    | 33 ++++++-
 fiftyone/operators/store/models.py           |  5 ++
 fiftyone/operators/store/service.py          | 28 +++++-
 tests/unittests/execution_store_int_tests.py | 90 +++++++++++++-----
 4 files changed, 127 insertions(+), 29 deletions(-)

diff --git a/fiftyone/factory/repos/execution_store.py b/fiftyone/factory/repos/execution_store.py
index 4d4a948fba..42aae0e4f2 100644
--- a/fiftyone/factory/repos/execution_store.py
+++ b/fiftyone/factory/repos/execution_store.py
@@ -10,6 +10,7 @@
 from bson import ObjectId
 from pymongo.collection import Collection
+from typing import Any

 from fiftyone.operators.store.models import StoreDocument, KeyDocument
@@ -34,23 +35,42 @@ def __init__(self, collection: Collection, dataset_id: ObjectId = None):
         self._collection = collection
         self._dataset_id = dataset_id

-    def create_store(self, store_name) -> StoreDocument:
+    def create_store(
+        self, store_name, metadata: dict[str, Any] = None
+    ) -> StoreDocument:
         """Creates a store associated with the current context."""
         store_doc = StoreDocument(
             store_name=store_name,
             dataset_id=self._dataset_id,
+            value=metadata,
         )
         self._collection.insert_one(store_doc.to_mongo_dict())
         return store_doc

-    def has_store(self, store_name):
+    def get_store(self, store_name) -> StoreDocument:
+        """Gets a store associated with the current context."""
+        raw_store_doc = self._collection.find_one(
+            dict(
+                store_name=store_name,
+                key="__store__",
+                dataset_id=self._dataset_id,
+            )
+        )
+        if not raw_store_doc and self.has_store(store_name):
+            return StoreDocument(
+                store_name=store_name, dataset_id=self._dataset_id
+            )
+
+        store_doc = StoreDocument(**raw_store_doc) if raw_store_doc else None
+        return store_doc
+
+    def has_store(self, store_name) -> bool:
         """Checks whether a store with the given name exists in the current
         context.
         """
         result = self._collection.find_one(
             dict(
                 store_name=store_name,
-                key="__store__",
                 dataset_id=self._dataset_id,
             )
         )
@@ -134,6 +154,13 @@ def set_key(self, store_name, key, value, ttl=None) -> KeyDocument:

         return key_doc

+    def has_key(self, store_name, key) -> bool:
+        """Determines whether a key exists in the specified store."""
+        result = self._collection.find_one(
+            dict(store_name=store_name, key=key, dataset_id=self._dataset_id)
+        )
+        return bool(result)
+
     def get_key(self, store_name, key) -> KeyDocument:
         """Gets a key from the specified store."""
         raw_key_doc = self._collection.find_one(
diff --git a/fiftyone/operators/store/models.py b/fiftyone/operators/store/models.py
index 92c114bfaa..b06eca67ac 100644
--- a/fiftyone/operators/store/models.py
+++ b/fiftyone/operators/store/models.py
@@ -51,3 +51,8 @@ class StoreDocument(KeyDocument):

     key: str = "__store__"
     value: Optional[dict[str, Any]] = None
+
+    @property
+    def metadata(self) -> dict[str, Any]:
+        """The metadata associated with the store."""
+        return self.value or {}
diff --git a/fiftyone/operators/store/service.py b/fiftyone/operators/store/service.py
index 9a55896d1c..3c16e4c028 100644
--- a/fiftyone/operators/store/service.py
+++ b/fiftyone/operators/store/service.py
@@ -47,10 +47,12 @@ def __init__(
             repo = RepositoryFactory.execution_store_repo(
                 dataset_id=dataset_id
             )
-
+        self._dataset_id = dataset_id
         self._repo: ExecutionStoreRepo = repo

-    def create_store(self, store_name: str) -> StoreDocument:
+    def create_store(
+        self, store_name: str, metadata: Optional[dict[str, Any]] = None
+    ) -> StoreDocument:
         """Creates a new store with the specified name.

         Args:
@@ -59,7 +61,18 @@ def create_store(self, store_name: str) -> StoreDocument:
         Returns:
             a :class:`fiftyone.store.models.StoreDocument`
         """
-        return self._repo.create_store(store_name)
+        return self._repo.create_store(store_name, metadata)
+
+    def get_store(self, store_name: str) -> StoreDocument:
+        """Gets the specified store for the current context.
+
+        Args:
+            store_name: the name of the store
+
+        Returns:
+            a :class:`fiftyone.store.models.StoreDocument`
+        """
+        return self._repo.get_store(store_name)

     def list_stores(self) -> list[str]:
         """Lists all stores for the current context.
@@ -116,6 +129,15 @@ def set_key(
         """
         return self._repo.set_key(store_name, key, value, ttl=ttl)

+    def has_key(self, store_name: str, key: str) -> bool:
+        """Determines whether the specified key exists in the specified store.
+
+        Args:
+            store_name: the name of the store
+            key: the key to check
+        """
+        return self._repo.has_key(store_name, key)
+
     def get_key(self, store_name: str, key: str) -> KeyDocument:
         """Retrieves the value of a key from the specified store.
diff --git a/tests/unittests/execution_store_int_tests.py b/tests/unittests/execution_store_int_tests.py
index 8a9d296a40..3a63f98150 100644
--- a/tests/unittests/execution_store_int_tests.py
+++ b/tests/unittests/execution_store_int_tests.py
@@ -89,6 +89,73 @@ def test_set_get_key(svc):
     ), "Retrieved value should match the set value"


+@drop_stores
+@drop_datasets
+def test_list_global_stores(svc, svc_with_dataset):
+    NO_DATASET_STORE_NAME = "dataset_less_store"
+    DATASET_STORE_NAME = "dataset_store"
+    KEY_ONLY_STORE_NAME = "key_only_store"
+
+    svc.create_store(NO_DATASET_STORE_NAME)
+    svc_with_dataset.create_store(DATASET_STORE_NAME)
+    svc_with_dataset.set_key(DATASET_STORE_NAME, "key", "value")
+    svc_with_dataset.set_key(KEY_ONLY_STORE_NAME, "key", "value")
+
+    global_list = svc.list_stores_global()
+    store_names = [store.store_name for store in global_list]
+    dataset_ids = [store.dataset_id for store in global_list]
+    assert len(global_list) == 3
+    assert NO_DATASET_STORE_NAME in store_names
+    assert DATASET_STORE_NAME in store_names
+    assert KEY_ONLY_STORE_NAME in store_names
+    assert None in dataset_ids
+    assert svc_with_dataset._dataset_id in dataset_ids
+
+
+@drop_stores
+@drop_datasets
+def test_has_store(svc, svc_with_dataset):
+    NAME = "test_store"
+    KEY = "key1"
+    svc.set_key(NAME, KEY, "value1")
+    assert svc.has_store(NAME)
+    assert svc.has_store("nonexistent") is False
+    assert svc_with_dataset.has_store(NAME) is False
+
+
+@drop_stores
+@drop_datasets
+def test_has_key(svc, svc_with_dataset):
+    NAME = "test_store"
+    KEY = "key1"
+    svc.set_key(NAME, KEY, "value1")
+    assert svc.has_key(NAME, KEY)
+    assert svc.has_key(NAME, "nonexistent") is False
+    assert svc_with_dataset.has_key(NAME, KEY) is False
+
+
+@drop_stores
+@drop_datasets
+def test_get_key(svc):
+    NAME = "test_store"
+    KEY = "key1"
+    svc.set_key(NAME, KEY, "value1")
+    key_doc = svc.get_key(NAME, KEY)
+    assert key_doc.value == "value1"
+
+
+@drop_stores
+@drop_datasets
+def test_get_store_with_only_keys(svc):
+    NAME = "test_store"
+    KEY = "key1"
+    svc.set_key(NAME, KEY, "value1")
+    store = svc.get_store(NAME)
+    assert store.store_name == NAME
+    key_doc = svc.get_key(NAME, KEY)
+    assert key_doc.value == "value1"
+
+
 @drop_stores
 @drop_datasets
 def test_scoping(svc, svc_with_dataset):
@@ -113,26 +180,3 @@ def test_scoping(svc, svc_with_dataset):
     assert svc.count_keys(NAME) == 0, "Global store should have 0 keys"
     global_list = svc.list_stores_global()
     assert NAME not in global_list, "Global store should not be listed"
-
-
-@drop_stores
-@drop_datasets
-def test_list_global_stores(svc, svc_with_dataset):
-    NO_DATASET_STORE_NAME = "dataset_less_store"
-    DATASET_STORE_NAME = "dataset_store"
-    KEY_ONLY_STORE_NAME = "key_only_store"
-
-    svc.create_store(NO_DATASET_STORE_NAME)
-    svc_with_dataset.create_store(DATASET_STORE_NAME)
-    svc_with_dataset.set_key(DATASET_STORE_NAME, "key", "value")
-    svc_with_dataset.set_key(KEY_ONLY_STORE_NAME, "key", "value")
-
-    global_list = svc.list_stores_global()
-    store_names = [store.store_name for store in global_list]
-    dataset_ids = [store.dataset_id for store in global_list]
-    assert len(global_list) == 3
-    assert NO_DATASET_STORE_NAME in store_names
-    assert DATASET_STORE_NAME in store_names
-    assert KEY_ONLY_STORE_NAME in store_names
-    assert None in dataset_ids
-    assert svc_with_dataset._dataset_id in dataset_ids

From 9db03e930f50afdca5bf0eff5f7c9aa183a15674 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 12:14:39 -0700
Subject: [PATCH 04/13] more estore cleanup
---
 fiftyone/factory/repos/execution_store.py                    | 4 +---
 fiftyone/operators/store/store.py                            | 5 ++++-
 ...n_store_int_tests.py => execution_store_service_tests.py} | 0
 3 files changed, 5 insertions(+), 4 deletions(-)
 rename tests/unittests/{execution_store_int_tests.py => execution_store_service_tests.py} (100%)

diff --git a/fiftyone/factory/repos/execution_store.py b/fiftyone/factory/repos/execution_store.py
index 42aae0e4f2..61fb58b040 100644
--- a/fiftyone/factory/repos/execution_store.py
+++ b/fiftyone/factory/repos/execution_store.py
@@ -249,9 +249,7 @@ def list_stores_global(self) -> list[StoreDocument]:
         return [StoreDocument(**d) for d in result]

     def count_stores_global(self) -> int:
-        """Counts the stores in the execution store across all datasets and the
-        global context.
-        """
+        """Counts stores across all datasets and the global context."""
         return self._collection.count_documents(dict(key="__store__"))

     def delete_store_global(self, store_name) -> int:
diff --git a/fiftyone/operators/store/store.py b/fiftyone/operators/store/store.py
index b28a1c31b4..2f489331c3 100644
--- a/fiftyone/operators/store/store.py
+++ b/fiftyone/operators/store/store.py
@@ -110,7 +110,10 @@ def get_ttl(self, key: str) -> Optional[int]:
         Returns:
             the TTL in seconds, or None if the key does not have a TTL
         """
-        return self._store_service.get_ttl(self.store_name, key)
+        key_doc = self._store_service.get_key(self.store_name, key)
+        if key_doc is None:
+            return None
+        return key_doc.ttl

     def list_keys(self) -> list[str]:
         """Lists all keys in the store.
diff --git a/tests/unittests/execution_store_int_tests.py b/tests/unittests/execution_store_service_tests.py
similarity index 100%
rename from tests/unittests/execution_store_int_tests.py
rename to tests/unittests/execution_store_service_tests.py

From c242324712bd502fb3d71baa6d1b01e3c44813e5 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 12:16:56 -0700
Subject: [PATCH 05/13] remove impl details from estore unit tests

---
 tests/unittests/execution_store_unit_tests.py | 41 -------------------
 1 file changed, 41 deletions(-)

diff --git a/tests/unittests/execution_store_unit_tests.py b/tests/unittests/execution_store_unit_tests.py
index 2056c34ee0..b18eb3244b 100644
--- a/tests/unittests/execution_store_unit_tests.py
+++ b/tests/unittests/execution_store_unit_tests.py
@@ -224,13 +224,6 @@ def test_get(self):
         value = self.store.get("widget_1")
         assert value == {"name": "Widget One", "value": 100}
         self.mock_collection.find_one.assert_called_once()
-        self.mock_collection.find_one.assert_called_with(
-            {
-                "store_name": "mock_store",
-                "key": "widget_1",
-                "dataset_id": None,
-            }
-        )

     def test_list_keys(self):
         self.mock_collection.find.return_value = [
@@ -240,50 +233,16 @@ def test_list_keys(self):
         keys = self.store.list_keys()
         assert keys == ["widget_1", "widget_2"]
         self.mock_collection.find.assert_called_once()
-        self.mock_collection.find.assert_called_with(
-            {
-                "store_name": "mock_store",
-                "key": {"$ne": "__store__"},
-                "dataset_id": None,
-            },
-            {"key": 1},
-        )
-
-    def test_has_store(self):
-        self.mock_collection.find_one.return_value = {
-            "store_name": "mock_store",
-            "key": "__store__",
-        }
-        has_store = self.store_service.has_store("mock_store")
-        assert has_store
-        self.mock_collection.find_one.assert_called_once()
-        self.mock_collection.find_one.assert_called_with(
-            {
-                "store_name": "mock_store",
-                "key": "__store__",
-                "dataset_id": None,
-            }
-        )

     def test_delete(self):
         self.mock_collection.delete_one.return_value = Mock(deleted_count=1)
         deleted = self.store.delete("widget_1")
         assert deleted
         self.mock_collection.delete_one.assert_called_once()
-        self.mock_collection.delete_one.assert_called_with(
-            {
-                "store_name": "mock_store",
-                "key": "widget_1",
-                "dataset_id": None,
-            }
-        )

     def test_clear(self):
         self.store.clear()
         self.mock_collection.delete_many.assert_called_once()
-        self.mock_collection.delete_many.assert_called_with(
-            {"store_name": "mock_store", "dataset_id": None}
-        )


 class ExecutionStoreServiceDatasetIdTests(unittest.TestCase):

From 0d0ec4c15d88078d082056e9742235dfa68fc3b5 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 13:12:37 -0700
Subject: [PATCH 06/13] fix estore counting

---
 .../execution_store_service_tests.py | 75 +++++++++++++++++++
 1 file changed, 75 insertions(+)

diff --git a/tests/unittests/execution_store_service_tests.py b/tests/unittests/execution_store_service_tests.py
index 3a63f98150..160c3337c8 100644
--- a/tests/unittests/execution_store_service_tests.py
+++ b/tests/unittests/execution_store_service_tests.py
@@ -142,6 +142,7 @@ def test_get_key(svc):
     svc.set_key(NAME, KEY, "value1")
     key_doc = svc.get_key(NAME, KEY)
     assert key_doc.value == "value1"
+    assert svc.get_key(NAME, "nonexistent") is None


 @drop_stores
@@ -180,3 +181,77 @@ def test_scoping(svc, svc_with_dataset):
     assert svc.count_keys(NAME) == 0, "Global store should have 0 keys"
     global_list = svc.list_stores_global()
     assert NAME not in global_list, "Global store should not be listed"
+
+
+@drop_datasets
+@drop_stores
+def test_set_key_with_ttl(svc):
+    NAME = "test_store"
+    KEY = "ttl_key"
+    VALUE = "value"
+    TTL = 100
+    svc.set_key(NAME, KEY, VALUE, ttl=TTL)
+    key_doc = svc.get_key(NAME, KEY)
+    assert key_doc.value == VALUE
+    assert key_doc.expires_at is not None
+
+
+@drop_datasets
+@drop_stores
+def test_set_key_with_ttl_and_update(svc):
+    NAME = "test_store"
+    KEY = "ttl_key"
+    VALUE = "value"
+    TTL = 100
+    UPDATED_TTL = 200
+    svc.set_key(NAME, KEY, VALUE, ttl=TTL)
+    key_doc = svc.get_key(NAME, KEY)
+    original_expiry = key_doc.expires_at
+    assert key_doc.value == VALUE
+    svc.update_ttl(NAME, KEY, UPDATED_TTL)
+    updated_key_doc = svc.get_key(NAME, KEY)
+    assert updated_key_doc.expires_at > original_expiry
+
+
+@drop_datasets
+@drop_stores
+def test_set_key_with_dict_value(svc):
+    NAME = "test_store"
+    KEY = "dict_key"
+    VALUE = {"key": "value"}
+    svc.set_key(NAME, KEY, VALUE)
+    key_doc = svc.get_key(NAME, KEY)
+    assert key_doc.value == VALUE
+
+
+@drop_datasets
+@drop_stores
+def test_count_stores(svc, svc_with_dataset):
+    assert svc.count_stores() == 0
+    svc.create_store("store_a")
+    svc.create_store("store_b")
+    assert svc.count_stores() == 2
+    assert svc_with_dataset.count_stores() == 0
+    svc_with_dataset.create_store("store_c")
+    assert svc_with_dataset.count_stores() == 1
+    assert svc.count_stores() == 2
+    svc.set_key("store_x", "key_a", "value")
+    assert svc.count_stores() == 3
+
+
+@drop_datasets
+@drop_stores
+def test_cleanup(svc, svc_with_dataset):
+    A_STORE_NAME = "store_a"
+    B_STORE_NAME = "store_b"
+    KEY_A = "key_a"
+    KEY_B = "key_b"
+    svc.create_store(A_STORE_NAME)
+    svc_with_dataset.set_key(B_STORE_NAME, KEY_B, "value_b")
+    svc.cleanup()
+    assert svc.has_store(A_STORE_NAME) is False
+    assert svc_with_dataset.has_store(B_STORE_NAME) is True
+    assert svc.count_stores() == 0
+    assert svc_with_dataset.count_stores() == 1
+    svc_with_dataset.cleanup()
+    assert svc_with_dataset.has_store(B_STORE_NAME) is False

From f65a6adfe9a673e9a8c4f6ae7d7a01f8e7949144 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 13:20:27 -0700
Subject: [PATCH 07/13] additional estore count fixes

---
 fiftyone/factory/repos/execution_store.py | 40 +++++++++++++----
 fiftyone/operators/store/models.py        |  2 -
 .../execution_store_service_tests.py      | 12 +++++-
 3 files changed, 43 insertions(+), 11 deletions(-)

diff --git a/fiftyone/factory/repos/execution_store.py b/fiftyone/factory/repos/execution_store.py
index 61fb58b040..0a29359cd0 100644
--- a/fiftyone/factory/repos/execution_store.py
+++ b/fiftyone/factory/repos/execution_store.py
@@ -86,9 +86,25 @@ def list_stores(self) -> list[str]:

     def count_stores(self) -> int:
         """Counts the stores associated with the current context."""
-        return self._collection.count_documents(
-            dict(key="__store__", dataset_id=self._dataset_id),
-        )
+        pipeline = [
+            {
+                "$match": {
+                    "dataset_id": self._dataset_id,
+                }
+            },
+            {
+                "$group": {
+                    "_id": {
+                        "store_name": "$store_name",
+                        "dataset_id": "$dataset_id",
+                    }
+                }
+            },
+            {"$count": "total_stores"},
+        ]
+
+        result = list(self._collection.aggregate(pipeline))
+        return result[0]["total_stores"] if result else 0

     def delete_store(self, store_name) -> int:
         """Deletes the specified store."""
@@ -119,9 +135,6 @@ def set_key(self, store_name, key, value, ttl=None) -> KeyDocument:
             "dataset_id": self._dataset_id,
         }

-        if self._dataset_id is None:
-            on_insert_fields.pop("dataset_id")
-
         # Prepare the update operations
         update_fields = {
             "$set": {
@@ -250,7 +263,20 @@ def list_stores_global(self) -> list[StoreDocument]:

     def count_stores_global(self) -> int:
         """Counts stores across all datasets and the global context."""
-        return self._collection.count_documents(dict(key="__store__"))
+        pipeline = [
+            {
+                "$group": {
+                    "_id": {
+                        "store_name": "$store_name",
+                        "dataset_id": "$dataset_id",
+                    }
+                }
+            },
+            {"$count": "total_stores"},
+        ]
+
+        result = list(self._collection.aggregate(pipeline))
+        return result[0]["total_stores"] if result else 0

     def delete_store_global(self, store_name) -> int:
         """Deletes the specified store across all datasets and the global
diff --git a/fiftyone/operators/store/models.py b/fiftyone/operators/store/models.py
index b06eca67ac..f60896bc54 100644
--- a/fiftyone/operators/store/models.py
+++ b/fiftyone/operators/store/models.py
@@ -39,8 +39,6 @@ def to_mongo_dict(self, exclude_id: bool = True) -> dict[str, Any]:
         data = asdict(self)
         if exclude_id:
             data.pop("_id", None)
-        if self.dataset_id is None:
-            data.pop("dataset_id", None)

         return data
diff --git a/tests/unittests/execution_store_service_tests.py b/tests/unittests/execution_store_service_tests.py
index 160c3337c8..4e39581114 100644
--- a/tests/unittests/execution_store_service_tests.py
+++ b/tests/unittests/execution_store_service_tests.py
@@ -228,15 +228,23 @@ def test_set_key_with_dict_value(svc):
 @drop_stores
 def test_count_stores(svc, svc_with_dataset):
     assert svc.count_stores() == 0
+    assert svc.count_stores_global() == 0
+
     svc.create_store("store_a")
     svc.create_store("store_b")
     assert svc.count_stores() == 2
+    assert svc.count_stores_global() == 2
+
     assert svc_with_dataset.count_stores() == 0
+
     svc_with_dataset.create_store("store_c")
     assert svc_with_dataset.count_stores() == 1
-    assert svc.count_stores() == 2
+    assert svc.count_stores() == 2  # global count should still be 2
+    assert svc.count_stores_global() == 3  # total across contexts should be 3
+
     svc.set_key("store_x", "key_a", "value")
     assert svc.count_stores() == 3
+    assert svc.count_stores_global() == 4


 @drop_datasets
@@ -244,7 +252,7 @@ def test_count_stores(svc, svc_with_dataset):
 def test_cleanup(svc, svc_with_dataset):
     A_STORE_NAME = "store_a"
     B_STORE_NAME = "store_b"
-    KEY_A = "key_a"
+
     KEY_B = "key_b"
     svc.create_store(A_STORE_NAME)
     svc_with_dataset.set_key(B_STORE_NAME, KEY_B, "value_b")

From a4db4a1d420f460ec8bc369a8b1cf45136f5bbc0 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 13:41:39 -0700
Subject: [PATCH 08/13] more global estore tests

---
 .../execution_store_service_tests.py | 125 +++++++++++++++++-
 1 file changed, 122 insertions(+), 3 deletions(-)

diff --git a/tests/unittests/execution_store_service_tests.py b/tests/unittests/execution_store_service_tests.py
index 4e39581114..cf8a54ef1c 100644
--- a/tests/unittests/execution_store_service_tests.py
+++ b/tests/unittests/execution_store_service_tests.py
@@ -116,11 +116,35 @@ def test_list_global_stores(svc, svc_with_dataset):
 @drop_datasets
 def test_has_store(svc, svc_with_dataset):
     NAME = "test_store"
+    GLOBAL_STORE = "global_store"
+    DATASET_STORE = "dataset_store"
     KEY = "key1"
+
+    svc.create_store(GLOBAL_STORE)
     svc.set_key(NAME, KEY, "value1")
-    assert svc.has_store(NAME)
-    assert svc.has_store("nonexistent") is False
-    assert svc_with_dataset.has_store(NAME) is False
+
+    svc_with_dataset.create_store(DATASET_STORE)
+
+    assert svc.has_store(NAME), "Global context should have 'test_store'"
+    assert (
+        svc.has_store("nonexistent") is False
+    ), "Nonexistent store should return False in global context"
+    assert (
+        svc_with_dataset.has_store(NAME) is False
+    ), "Dataset context should not have 'test_store'"
+    assert svc_with_dataset.has_store(
+        DATASET_STORE
+    ), "Dataset context should have 'dataset_store'"
+
+    assert svc.has_store_global(
+        GLOBAL_STORE
+    ), "Global store should exist globally"
+    assert svc.has_store_global(
+        DATASET_STORE
+    ), "Dataset store should exist globally"
+    assert (
+        svc.has_store_global("nonexistent") is False
+    ), "Nonexistent store should return False globally"


 @drop_stores
@@ -157,6 +181,96 @@ def test_get_store_with_only_keys(svc):
     assert key_doc.value == "value1"


+@drop_stores
+@drop_datasets
+def test_delete_store(svc, svc_with_dataset):
+    NAME = "test_store"
+    DATASET_STORE = "dataset_store"
+    GLOBAL_KEY = "global_key"
+    DATASET_KEY = "dataset_key"
+    VALUE = "value1"
+
+    svc.create_store(NAME)
+    svc_with_dataset.create_store(DATASET_STORE)
+
+    assert svc.has_store(NAME), "Global context should have 'test_store'"
+    assert svc_with_dataset.has_store(
+        DATASET_STORE
+    ), "Dataset context should have 'dataset_store'"
+
+    svc.set_key(NAME, GLOBAL_KEY, VALUE)
+    svc_with_dataset.set_key(DATASET_STORE, DATASET_KEY, VALUE)
+
+    assert svc.has_key(
+        NAME, GLOBAL_KEY
+    ), "Global store should contain 'global_key'"
+    assert svc_with_dataset.has_key(
+        DATASET_STORE, DATASET_KEY
+    ), "Dataset store should contain 'dataset_key'"
+
+    svc.delete_store(NAME)
+    assert not svc.has_store(NAME), "Global 'test_store' should be deleted"
+    assert not svc.has_key(
+        NAME, GLOBAL_KEY
+    ), "Global 'test_store' should no longer contain 'global_key' after deletion"
+    assert svc_with_dataset.has_store(
+        DATASET_STORE
+    ), "Dataset-specific 'dataset_store' should still exist"
+    assert svc_with_dataset.has_key(
+        DATASET_STORE, DATASET_KEY
+    ), "Dataset-specific 'dataset_key' should remain"
+
+    svc.create_store(NAME)
+    svc.set_key(NAME, GLOBAL_KEY, VALUE)
+    assert svc.has_store(NAME), "Global 'test_store' should be recreated"
+    assert svc.has_key(
+        NAME, GLOBAL_KEY
+    ), "Recreated 'test_store' in global context should contain 'global_key'"
+
+    svc_with_dataset.delete_store(DATASET_STORE)
+    assert not svc_with_dataset.has_store(
+        DATASET_STORE
+    ), "Dataset-specific 'dataset_store' should be deleted"
+    assert not svc_with_dataset.has_key(
+        DATASET_STORE, DATASET_KEY
+    ), "Dataset-specific 'dataset_key' should no longer exist after deletion"
+    assert svc.has_store(
+        NAME
+    ), "Global 'test_store' should remain unaffected by dataset-specific delete"
+
+    svc_with_dataset.set_key(NAME, DATASET_KEY, VALUE)
+    svc.delete_store(NAME)
+    assert not svc.has_store(
+        NAME
+    ), "Global 'test_store' should be deleted again"
+    assert svc_with_dataset.has_key(
+        NAME, DATASET_KEY
+    ), "Dataset-specific 'test_store' should still contain 'dataset_key'"
+
+    svc_with_dataset.delete_store(NAME)
+    assert not svc_with_dataset.has_store(
+        NAME
+    ), "Final cleanup of 'test_store' in dataset context should succeed"
+
+
+@drop_stores
+@drop_datasets
+def test_global_delete_store(svc, svc_with_dataset):
+    SHARED_NAME = "shared_store"
+    KEY = "key1"
+    VALUE = "value1"
+    svc.create_store(SHARED_NAME)
+    svc.set_key(SHARED_NAME, KEY, VALUE)
+    svc.delete_store_global(SHARED_NAME)
+
+    assert not svc.has_store(
+        SHARED_NAME
+    ), "SHARED_NAME store should be deleted"
+    assert not svc_with_dataset.has_store(
+        SHARED_NAME
+    ), "SHARED_NAME store should not exist globally"
+
+
 @drop_stores
 @drop_datasets
 def test_scoping(svc, svc_with_dataset):
@@ -167,17 +281,22 @@ def test_scoping(svc, svc_with_dataset):
     svc_with_dataset.set_key(NAME, KEY, VALUE)
     global_list = svc.list_stores_global()
     global_names = [store.store_name for store in global_list]
+
     assert global_names == [NAME, NAME], "Global store should be listed"
     assert svc.count_keys(NAME) == 1, "Global store should have 1 key"
     assert (
         svc_with_dataset.count_keys(NAME) == 1
     ), "Dataset store should have 1 key"
+
     svc_with_dataset.delete_store(NAME)
+
     assert svc.count_keys(NAME) == 1, "Global store should still have 1 key"
     assert (
         svc_with_dataset.count_keys(NAME) == 0
     ), "Dataset store should have 0 keys"
+
     svc.delete_store(NAME)
+
     assert svc.count_keys(NAME) == 0, "Global store should have 0 keys"
     global_list = svc.list_stores_global()
     assert NAME not in global_list, "Global store should not be listed"

From 8a26d6dfe9817a1ca45dab3a0661b2db07e4a5bf Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 14:29:03 -0700
Subject: [PATCH 09/13] Update tests/unittests/decorators.py

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
---
 tests/unittests/decorators.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/tests/unittests/decorators.py b/tests/unittests/decorators.py
index 0f7a384738..ec0421b474 100644
--- a/tests/unittests/decorators.py
+++ b/tests/unittests/decorators.py
@@ -55,8 +55,12 @@ def wrapper(*args, **kwargs):
         for store in stores:
             store_name = store.store_name
             if fnmatch.fnmatch(store_name, pattern):
-                print(f"Deleting store: {store_name}", pattern)
-                svc.delete_store_global(store_name)
+                try:
+                    svc.delete_store_global(store_name)
+                except Exception as e:
+                    raise RuntimeError(
+                        f"Failed to delete store '{store_name}'"
+                    ) from e
         return func(*args, **kwargs)

     return wrapper

From 62e578daed9057219b879800804b94bb366e9b15 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 14:29:31 -0700
Subject: [PATCH 10/13] fix type def

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
---
 fiftyone/factory/repos/execution_store.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/fiftyone/factory/repos/execution_store.py b/fiftyone/factory/repos/execution_store.py
index 0a29359cd0..a47df3d24b 100644
--- a/fiftyone/factory/repos/execution_store.py
+++ b/fiftyone/factory/repos/execution_store.py
@@ -36,7 +36,7 @@ def __init__(self, collection: Collection, dataset_id: ObjectId = None):
         self._dataset_id = dataset_id

     def create_store(
-        self, store_name, metadata: dict[str, Any] = None
+        self, store_name, metadata: Dict[str, Any] = None
     ) -> StoreDocument:
         """Creates a store associated with the current context."""
         store_doc = StoreDocument(

From d15a0883a78fdcafa8d473f3552a1e960232970c Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Tue, 12 Nov 2024 13:42:52 -0700
Subject: [PATCH 11/13] remove unused yield

---
 tests/unittests/execution_store_service_tests.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/unittests/execution_store_service_tests.py b/tests/unittests/execution_store_service_tests.py
index cf8a54ef1c..80eeabb8d6 100644
--- a/tests/unittests/execution_store_service_tests.py
+++ b/tests/unittests/execution_store_service_tests.py
@@ -23,7 +23,7 @@ def svc():
 def svc_with_dataset():
     dataset = fo.Dataset(name="test_dataset")
     dataset.save()
-    yield ExecutionStoreService(dataset_id=dataset._doc.id)
+    return ExecutionStoreService(dataset_id=dataset._doc.id)

From f0d4db3ec76b61339ad4bf35b2005add6d6fa9ed Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Wed, 13 Nov 2024 10:05:02 -0700
Subject: [PATCH 12/13] refactor drop_stores

---
 fiftyone/factory/repo_factory.py              | 19 +++---
 fiftyone/factory/repos/execution_store.py     |  2 +-
 fiftyone/operators/store/service.py           |  4 +-
 tests/unittests/decorators.py                 | 34 +++++------
 .../execution_store_service_tests.py          | 58 +++++++++++--------
 tests/unittests/execution_store_unit_tests.py |  3 +
 6 files changed, 64 insertions(+), 56 deletions(-)

diff --git a/fiftyone/factory/repo_factory.py b/fiftyone/factory/repo_factory.py
index da74c28e00..5859bfe386 100644
--- a/fiftyone/factory/repo_factory.py
+++ b/fiftyone/factory/repo_factory.py
@@ -56,17 +56,12 @@ def delegated_operation_repo() -> DelegatedOperationRepo:
     @staticmethod
     def execution_store_repo(
         dataset_id: Optional[ObjectId] = None,
+        collection_name: Optional[str] = None,
     ) -> ExecutionStoreRepo:
-        repo_key = (
-            f"{MongoExecutionStoreRepo.COLLECTION_NAME}_{dataset_id}"
-            if dataset_id
-            else MongoExecutionStoreRepo.COLLECTION_NAME
+        collection = _get_db()[
+            collection_name or MongoExecutionStoreRepo.COLLECTION_NAME
+        ]
+        return MongoExecutionStoreRepo(
+            collection=collection,
+            dataset_id=dataset_id,
         )
-
-        if repo_key not in RepositoryFactory.repos:
-            RepositoryFactory.repos[repo_key] = MongoExecutionStoreRepo(
-                collection=_get_db()[MongoExecutionStoreRepo.COLLECTION_NAME],
-                dataset_id=dataset_id,
-            )
-
-        return RepositoryFactory.repos[repo_key]
diff --git a/fiftyone/factory/repos/execution_store.py b/fiftyone/factory/repos/execution_store.py
index a47df3d24b..be0c1d0d6f 100644
--- a/fiftyone/factory/repos/execution_store.py
+++ b/fiftyone/factory/repos/execution_store.py
@@ -10,7 +10,7 @@
 from bson import ObjectId
 from pymongo.collection import Collection
-from typing import Any
+from typing import Any, Dict

 from fiftyone.operators.store.models import StoreDocument, KeyDocument
diff --git a/fiftyone/operators/store/service.py b/fiftyone/operators/store/service.py
index 3c16e4c028..e6e93a2914 100644
--- a/fiftyone/operators/store/service.py
+++ b/fiftyone/operators/store/service.py
@@ -36,6 +36,7 @@ def __init__(
         self,
         repo: Optional["ExecutionStoreRepo"] = None,
         dataset_id: Optional[ObjectId] = None,
+        collection_name: str = None,
     ):

         from fiftyone.factory.repo_factory import (
@@ -45,7 +46,8 @@ def __init__(

         if repo is None:
             repo = RepositoryFactory.execution_store_repo(
-                dataset_id=dataset_id
+                dataset_id=dataset_id,
+                collection_name=collection_name,
             )
         self._dataset_id = dataset_id
         self._repo: ExecutionStoreRepo = repo
diff --git a/tests/unittests/decorators.py b/tests/unittests/decorators.py
index ec0421b474..6fcfae9936 100644
--- a/tests/unittests/decorators.py
+++ b/tests/unittests/decorators.py
@@ -8,10 +8,9 @@
 from functools import wraps
 import platform
 import unittest
-import fnmatch

 import fiftyone as fo
-import fiftyone.operators.store as foos
+import fiftyone.core.odm as foo


 def drop_datasets(func):
@@ -45,25 +44,22 @@ async def wrapper(*args, **kwargs):
         return wrapper


-def drop_stores(func, pattern="*"):
-    """Decorator that drops all stores from the database before running a test."""
+def drop_collection(collection_name):
+    """Decorator that drops a collection from the database before and after running a test."""

-    @wraps(func)
-    def wrapper(*args, **kwargs):
-        svc = foos.ExecutionStoreService()
-        stores = svc.list_stores_global()
-        for store in stores:
-            store_name = store.store_name
-            if fnmatch.fnmatch(store_name, pattern):
-                try:
-                    svc.delete_store_global(store_name)
-                except Exception as e:
-                    raise RuntimeError(
-                        f"Failed to delete store '{store_name}'"
-                    ) from e
-        return func(*args, **kwargs)
+    def decorator(func):
+        @wraps(func)
+        def wrapper(*args, **kwargs):
+            db = foo.get_db_conn()
+            db.drop_collection(collection_name)
+            try:
+                return func(*args, **kwargs)
+            finally:
+                db.drop_collection(collection_name)

-    return wrapper
+        return wrapper
+
+    return decorator


 def skip_windows(func):
diff --git a/tests/unittests/execution_store_service_tests.py b/tests/unittests/execution_store_service_tests.py
index 80eeabb8d6..eca8979299 100644
--- a/tests/unittests/execution_store_service_tests.py
+++ b/tests/unittests/execution_store_service_tests.py
@@ -11,22 +11,34 @@
 import fiftyone as fo
 from fiftyone.operators.store import ExecutionStoreService

-from decorators import drop_stores, drop_datasets
+from decorators import drop_collection, drop_datasets
+
+TEST_COLLECTION_NAME = "execution_store_test_collection"
+
+
+@pytest.fixture
+def dataset():
+    return fo.Dataset(name="test_dataset")
+
+
+@pytest.fixture
+def dataset_id(dataset):
+    return dataset._doc.id


 @pytest.fixture
 def svc():
-    return ExecutionStoreService()
+    return ExecutionStoreService(collection_name=TEST_COLLECTION_NAME)


 @pytest.fixture
-def svc_with_dataset():
-    dataset = fo.Dataset(name="test_dataset")
-    dataset.save()
-    return ExecutionStoreService(dataset_id=dataset._doc.id)
+def svc_with_dataset(dataset_id):
+    return ExecutionStoreService(
+        dataset_id=dataset_id, collection_name=TEST_COLLECTION_NAME
+    )


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_store_creation(svc):
     NAME = "test_store"
@@ -41,7 +53,7 @@ def test_store_creation(svc):
     assert svc.count_stores() == 1, "Store count should be 1"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_store_creation_with_dataset(svc_with_dataset):
     NAME = "test_store"
@@ -56,7 +68,7 @@ def test_store_creation_with_dataset(svc_with_dataset):
     assert svc_with_dataset.count_stores() == 1, "Store count should be 1"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_store_creation_with_metadata(svc):
     NAME = "test_store"
@@ -73,7 +85,7 @@ def test_store_creation_with_metadata(svc):
     assert svc.count_stores() == 1, "Store count should be 1"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_set_get_key(svc):
     NAME = "test_store"
@@ -89,7 +101,7 @@ def test_set_get_key(svc):
     ), "Retrieved value should match the set value"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_list_global_stores(svc, svc_with_dataset):
     NO_DATASET_STORE_NAME = "dataset_less_store"
@@ -112,7 +124,7 @@ def test_list_global_stores(svc, svc_with_dataset):
     assert svc_with_dataset._dataset_id in dataset_ids


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_has_store(svc, svc_with_dataset):
     NAME = "test_store"
@@ -147,7 +159,7 @@ def test_has_store(svc, svc_with_dataset):
     ), "Nonexistent store should return False globally"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_has_key(svc, svc_with_dataset):
     NAME = "test_store"
@@ -158,7 +170,7 @@ def test_has_key(svc, svc_with_dataset):
     assert svc_with_dataset.has_key(NAME, KEY) is False


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_get_key(svc):
     NAME = "test_store"
@@ -169,7 +181,7 @@ def test_get_key(svc):
     assert svc.get_key(NAME, "nonexistent") is None


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_get_store_with_only_keys(svc):
     NAME = "test_store"
@@ -181,7 +193,7 @@ def test_get_store_with_only_keys(svc):
     assert key_doc.value == "value1"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_delete_store(svc, svc_with_dataset):
     NAME = "test_store"
@@ -253,7 +265,7 @@ def test_delete_store(svc, svc_with_dataset):
     ), "Final cleanup of 'test_store' in dataset context should succeed"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_global_delete_store(svc, svc_with_dataset):
     SHARED_NAME = "shared_store"
@@ -271,7 +283,7 @@ def test_global_delete_store(svc, svc_with_dataset):
     ), "SHARED_NAME store should not exist globally"


-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 @drop_datasets
 def test_scoping(svc, svc_with_dataset):
     NAME = "test_store"
@@ -303,7 +315,7 @@ def test_scoping(svc, svc_with_dataset):


 @drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 def test_set_key_with_ttl(svc):
     NAME = "test_store"
     KEY = "ttl_key"
@@ -316,7 +328,7 @@ def test_set_key_with_ttl(svc):


 @drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 def test_set_key_with_ttl_and_update(svc):
     NAME = "test_store"
     KEY = "ttl_key"
@@ -333,7 +345,7 @@ def test_set_key_with_ttl_and_update(svc):


 @drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 def test_set_key_with_dict_value(svc):
     NAME = "test_store"
     KEY = "dict_key"
@@ -344,7 +356,7 @@ def test_set_key_with_dict_value(svc):


 @drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 def test_count_stores(svc, svc_with_dataset):
     assert svc.count_stores() == 0
     assert svc.count_stores_global() == 0
@@ -367,7 +379,7 @@ def test_count_stores(svc, svc_with_dataset):


 @drop_datasets
-@drop_stores
+@drop_collection(TEST_COLLECTION_NAME)
 def test_cleanup(svc, svc_with_dataset):
     A_STORE_NAME = "store_a"
     B_STORE_NAME = "store_b"
diff --git a/tests/unittests/execution_store_unit_tests.py b/tests/unittests/execution_store_unit_tests.py
index b18eb3244b..5f6b4bae26 100644
--- a/tests/unittests/execution_store_unit_tests.py
+++ b/tests/unittests/execution_store_unit_tests.py
@@ -76,6 +76,7 @@ def test_set_key(self):
                     "key": "widget_1",
                     "created_at": IsDateTime(),
                     "expires_at": IsDateTime(),
+                    "dataset_id": None,
                 },
             },
             upsert=True,
@@ -112,6 +113,7 @@ def test_create_store(self):
                 "created_at": IsDateTime(),
                 "updated_at": None,
                 "expires_at": None,
+                "dataset_id": None,
             }
         )
@@ -206,6 +208,7 @@ def test_set(self):
                     "key": "widget_1",
                     "created_at": IsDateTime(),
                     "expires_at": IsDateTime(),
+                    "dataset_id": None,
                 },
             },
             upsert=True,

From 044ddb5d59471e21f6594cbcbb4df24b9815e102 Mon Sep 17 00:00:00 2001
From: Ritchie Martori
Date: Thu, 14 Nov 2024 08:34:51 -0700
Subject: [PATCH 13/13] remove test_dataset name

---
 tests/unittests/execution_store_service_tests.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/unittests/execution_store_service_tests.py b/tests/unittests/execution_store_service_tests.py
index eca8979299..1c728a5f77 100644
--- a/tests/unittests/execution_store_service_tests.py
+++ b/tests/unittests/execution_store_service_tests.py
@@ -18,7 +18,7 @@
 @pytest.fixture
 def dataset():
-    return fo.Dataset(name="test_dataset")
+    return fo.Dataset()


 @pytest.fixture