diff --git a/openedx/core/djangoapps/content/search/api.py b/openedx/core/djangoapps/content/search/api.py
index 7fe964128e5a..b5ed1bde78e1 100644
--- a/openedx/core/djangoapps/content/search/api.py
+++ b/openedx/core/djangoapps/content/search/api.py
@@ -15,7 +15,7 @@
 from django.core.cache import cache
 from django.core.paginator import Paginator
 from meilisearch import Client as MeilisearchClient
-from meilisearch.errors import MeilisearchError
+from meilisearch.errors import MeilisearchApiError, MeilisearchError
 from meilisearch.models.task import TaskInfo
 from opaque_keys.edx.keys import UsageKey
 from opaque_keys.edx.locator import LibraryLocatorV2, LibraryCollectionLocator
@@ -34,6 +34,7 @@
     searchable_doc_for_course_block,
     searchable_doc_for_collection,
     searchable_doc_for_library_block,
+    searchable_doc_for_usage_key,
     searchable_doc_collections,
     searchable_doc_tags,
     searchable_doc_tags_for_collection,
@@ -402,8 +403,8 @@ def index_collection_batch(batch, num_done, library_key) -> int:
         docs = []
         for collection in batch:
             try:
-                doc = searchable_doc_for_collection(collection)
-                doc.update(searchable_doc_tags_for_collection(library_key, collection))
+                doc = searchable_doc_for_collection(library_key, collection.key, collection=collection)
+                doc.update(searchable_doc_tags_for_collection(library_key, collection.key))
                 docs.append(doc)
             except Exception as err:  # pylint: disable=broad-except
                 status_cb(f"Error indexing collection {collection}: {err}")
@@ -512,15 +513,28 @@ def delete_index_doc(usage_key: UsageKey) -> None:
     Args:
         usage_key (UsageKey): The usage key of the XBlock to be removed from the index
     """
-    current_rebuild_index_name = _get_running_rebuild_index_name()
+    doc = searchable_doc_for_usage_key(usage_key)
+    _delete_index_doc(doc[Fields.id])
+
+
+def _delete_index_doc(doc_id) -> None:
+    """
+    Helper function that deletes the document with the given ID from the search index
+
+    If there is a rebuild in progress, the document will also be removed from the new index.
+    """
+    if not doc_id:
+        return
 
     client = _get_meilisearch_client()
+    current_rebuild_index_name = _get_running_rebuild_index_name()
 
     tasks = []
     if current_rebuild_index_name:
-        # If there is a rebuild in progress, the document will also be deleted from the new index.
-        tasks.append(client.index(current_rebuild_index_name).delete_document(meili_id_from_opaque_key(usage_key)))
-    tasks.append(client.index(STUDIO_INDEX_NAME).delete_document(meili_id_from_opaque_key(usage_key)))
+        # If there is a rebuild in progress, the document will also be removed from the new index.
+        tasks.append(client.index(current_rebuild_index_name).delete_document(doc_id))
+
+    tasks.append(client.index(STUDIO_INDEX_NAME).delete_document(doc_id))
 
     _wait_for_meili_tasks(tasks)
 
@@ -563,20 +577,94 @@ def upsert_library_block_index_doc(usage_key: UsageKey) -> None:
     _update_index_docs(docs)
 
 
+def _get_document_from_index(document_id: str) -> dict:
+    """
+    Returns the Document identified by the given ID, from the given index.
+
+    Returns None if the document or index do not exist.
+ """ + client = _get_meilisearch_client() + document = None + index_name = STUDIO_INDEX_NAME + try: + index = client.get_index(index_name) + document = index.get_document(document_id) + except (MeilisearchError, MeilisearchApiError) as err: + # The index or document doesn't exist + log.warning(f"Unable to fetch document {document_id} from {index_name}: {err}") + + return document + + def upsert_library_collection_index_doc(library_key: LibraryLocatorV2, collection_key: str) -> None: """ - Creates or updates the document for the given Library Collection in the search index + Creates, updates, or deletes the document for the given Library Collection in the search index. + + If the Collection is not found or disabled (i.e. soft-deleted), then delete it from the search index. """ - content_library = lib_api.ContentLibrary.objects.get_by_key(library_key) - collection = authoring_api.get_collection( - learning_package_id=content_library.learning_package_id, - collection_key=collection_key, - ) - docs = [ - searchable_doc_for_collection(collection) - ] + doc = searchable_doc_for_collection(library_key, collection_key) + update_components = False - _update_index_docs(docs) + # Soft-deleted/disabled collections are removed from the index + # and their components updated. + if doc.get('_disabled'): + + _delete_index_doc(doc[Fields.id]) + + update_components = True + + # Hard-deleted collections are also deleted from the index, + # but their components are automatically updated as part of the deletion process, so we don't have to. + elif not doc.get(Fields.type): + + _delete_index_doc(doc[Fields.id]) + + # Otherwise, upsert the collection. + # Newly-added/restored collection get their components updated too. + else: + already_indexed = _get_document_from_index(doc[Fields.id]) + if not already_indexed: + update_components = True + + _update_index_docs([doc]) + + # Asynchronously update the collection's components "collections" field + if update_components: + from .tasks import update_library_components_collections as update_task + + update_task.delay(str(library_key), collection_key) + + +def update_library_components_collections( + library_key: LibraryLocatorV2, + collection_key: str, + batch_size: int = 1000, +) -> None: + """ + Updates the "collections" field for all components associated with a given Library Collection. + + Because there may be a lot of components, we send these updates to Meilisearch in batches. 
+ """ + library = lib_api.get_library(library_key) + components = authoring_api.get_collection_components(library.learning_package.id, collection_key) + + paginator = Paginator(components, batch_size) + for page in paginator.page_range: + docs = [] + + for component in paginator.page(page).object_list: + usage_key = lib_api.library_component_usage_key( + library_key, + component, + ) + doc = searchable_doc_collections(usage_key) + docs.append(doc) + + log.info( + f"Updating document.collections for library {library_key} components" + f" page {page} / {paginator.num_pages}" + ) + _update_index_docs(docs) def upsert_content_library_index_docs(library_key: LibraryLocatorV2) -> None: @@ -614,10 +702,8 @@ def upsert_collection_tags_index_docs(collection_usage_key: LibraryCollectionLoc """ Updates the tags data in documents for the given library collection """ - collection = lib_api.get_library_collection_from_usage_key(collection_usage_key) - doc = {Fields.id: collection.id} - doc.update(searchable_doc_tags_for_collection(collection_usage_key.library_key, collection)) + doc = searchable_doc_tags_for_collection(collection_usage_key.library_key, collection_usage_key.collection_id) _update_index_docs([doc]) diff --git a/openedx/core/djangoapps/content/search/documents.py b/openedx/core/djangoapps/content/search/documents.py index f9041468c296..eabeab9654ca 100644 --- a/openedx/core/djangoapps/content/search/documents.py +++ b/openedx/core/djangoapps/content/search/documents.py @@ -16,7 +16,7 @@ from openedx.core.djangoapps.content_libraries import api as lib_api from openedx.core.djangoapps.content_tagging import api as tagging_api from openedx.core.djangoapps.xblock import api as xblock_api -from openedx_learning.api.authoring_models import LearningPackage +from openedx_learning.api.authoring_models import Collection log = logging.getLogger(__name__) @@ -112,6 +112,15 @@ def _meili_access_id_from_context_key(context_key: LearningContextKey) -> int: return access.id +def searchable_doc_for_usage_key(usage_key: UsageKey) -> dict: + """ + Generates a base document identified by its usage key. + """ + return { + Fields.id: meili_id_from_opaque_key(usage_key), + } + + def _fields_from_block(block) -> dict: """ Given an XBlock instance, call its index_dictionary() method to load any @@ -297,14 +306,14 @@ def searchable_doc_for_library_block(xblock_metadata: lib_api.LibraryXBlockMetad library_name = lib_api.get_library(xblock_metadata.usage_key.context_key).title block = xblock_api.load_block(xblock_metadata.usage_key, user=None) - doc = { - Fields.id: meili_id_from_opaque_key(xblock_metadata.usage_key), + doc = searchable_doc_for_usage_key(xblock_metadata.usage_key) + doc.update({ Fields.type: DocType.library_block, Fields.breadcrumbs: [], Fields.created: xblock_metadata.created.timestamp(), Fields.modified: xblock_metadata.modified.timestamp(), Fields.last_published: xblock_metadata.last_published.timestamp() if xblock_metadata.last_published else None, - } + }) doc.update(_fields_from_block(block)) @@ -319,9 +328,7 @@ def searchable_doc_tags(usage_key: UsageKey) -> dict: Generate a dictionary document suitable for ingestion into a search engine like Meilisearch or Elasticsearch, with the tags data for the given content object. 
""" - doc = { - Fields.id: meili_id_from_opaque_key(usage_key), - } + doc = searchable_doc_for_usage_key(usage_key) doc.update(_tags_for_content_object(usage_key)) return doc @@ -332,9 +339,7 @@ def searchable_doc_collections(usage_key: UsageKey) -> dict: Generate a dictionary document suitable for ingestion into a search engine like Meilisearch or Elasticsearch, with the collections data for the given content object. """ - doc = { - Fields.id: meili_id_from_opaque_key(usage_key), - } + doc = searchable_doc_for_usage_key(usage_key) doc.update(_collections_for_content_object(usage_key)) return doc @@ -342,21 +347,17 @@ def searchable_doc_collections(usage_key: UsageKey) -> dict: def searchable_doc_tags_for_collection( library_key: LibraryLocatorV2, - collection, + collection_key: str, ) -> dict: """ Generate a dictionary document suitable for ingestion into a search engine like Meilisearch or Elasticsearch, with the tags data for the given library collection. """ - doc = { - Fields.id: collection.id, - } - collection_usage_key = lib_api.get_library_collection_usage_key( library_key, - collection.key, + collection_key, ) - + doc = searchable_doc_for_usage_key(collection_usage_key) doc.update(_tags_for_content_object(collection_usage_key)) return doc @@ -368,49 +369,65 @@ def searchable_doc_for_course_block(block) -> dict: like Meilisearch or Elasticsearch, so that the given course block can be found using faceted search. """ - doc = { - Fields.id: meili_id_from_opaque_key(block.usage_key), + doc = searchable_doc_for_usage_key(block.usage_key) + doc.update({ Fields.type: DocType.course_block, - } + }) doc.update(_fields_from_block(block)) return doc -def searchable_doc_for_collection(collection) -> dict: +def searchable_doc_for_collection( + library_key: LibraryLocatorV2, + collection_key: str, + *, + # Optionally provide the collection if we've already fetched one + collection: Collection | None = None, +) -> dict: """ Generate a dictionary document suitable for ingestion into a search engine like Meilisearch or Elasticsearch, so that the given collection can be found using faceted search. + + If no collection is found for the given library_key + collection_key, the returned document will contain only basic + information derived from the collection usage key, and no Fields.type value will be included in the returned dict. """ - doc = { - Fields.id: collection.id, - Fields.block_id: collection.key, - Fields.type: DocType.collection, - Fields.display_name: collection.title, - Fields.description: collection.description, - Fields.created: collection.created.timestamp(), - Fields.modified: collection.modified.timestamp(), - # Add related learning_package.key as context_key by default. - # If related contentlibrary is found, it will override this value below. 
-        # Mostly contentlibrary.library_key == learning_package.key
-        Fields.context_key: collection.learning_package.key,
-        Fields.num_children: collection.entities.count(),
-    }
-    # Just in case learning_package is not related to a library
+    collection_usage_key = lib_api.get_library_collection_usage_key(
+        library_key,
+        collection_key,
+    )
+
+    doc = searchable_doc_for_usage_key(collection_usage_key)
+
     try:
-        context_key = collection.learning_package.contentlibrary.library_key
-        org = str(context_key.org)
+        collection = collection or lib_api.get_library_collection_from_usage_key(collection_usage_key)
+    except lib_api.ContentLibraryCollectionNotFound:
+        # Collection not found, so we can only return the base doc
+        pass
+
+    if collection:
+        assert collection.key == collection_key
+
         doc.update({
-            Fields.context_key: str(context_key),
-            Fields.org: org,
-            Fields.usage_key: str(lib_api.get_library_collection_usage_key(context_key, collection.key)),
+            Fields.context_key: str(library_key),
+            Fields.org: str(library_key.org),
+            Fields.usage_key: str(collection_usage_key),
+            Fields.block_id: collection.key,
+            Fields.type: DocType.collection,
+            Fields.display_name: collection.title,
+            Fields.description: collection.description,
+            Fields.created: collection.created.timestamp(),
+            Fields.modified: collection.modified.timestamp(),
+            Fields.num_children: collection.entities.count(),
+            Fields.access_id: _meili_access_id_from_context_key(library_key),
+            Fields.breadcrumbs: [{"display_name": collection.learning_package.title}],
         })
-    except LearningPackage.contentlibrary.RelatedObjectDoesNotExist:
-        log.warning(f"Related library not found for {collection}")
-    doc[Fields.access_id] = _meili_access_id_from_context_key(doc[Fields.context_key])
-    # Add the breadcrumbs.
-    doc[Fields.breadcrumbs] = [{"display_name": collection.learning_package.title}]
+
+        # Disabled collections should be removed from the search index,
+        # so we mark them as _disabled
+        if not collection.enabled:
+            doc['_disabled'] = True
 
     return doc
 
diff --git a/openedx/core/djangoapps/content/search/handlers.py b/openedx/core/djangoapps/content/search/handlers.py
index f50dead8474a..085387d336b1 100644
--- a/openedx/core/djangoapps/content/search/handlers.py
+++ b/openedx/core/djangoapps/content/search/handlers.py
@@ -23,6 +23,7 @@
     LIBRARY_BLOCK_DELETED,
     LIBRARY_BLOCK_UPDATED,
     LIBRARY_COLLECTION_CREATED,
+    LIBRARY_COLLECTION_DELETED,
     LIBRARY_COLLECTION_UPDATED,
     XBLOCK_CREATED,
     XBLOCK_DELETED,
@@ -166,6 +167,7 @@ def content_library_updated_handler(**kwargs) -> None:
 
 
 @receiver(LIBRARY_COLLECTION_CREATED)
+@receiver(LIBRARY_COLLECTION_DELETED)
 @receiver(LIBRARY_COLLECTION_UPDATED)
 @only_if_meilisearch_enabled
 def library_collection_updated_handler(**kwargs) -> None:
diff --git a/openedx/core/djangoapps/content/search/tasks.py b/openedx/core/djangoapps/content/search/tasks.py
index d9dad834db29..98390a12f3b3 100644
--- a/openedx/core/djangoapps/content/search/tasks.py
+++ b/openedx/core/djangoapps/content/search/tasks.py
@@ -90,10 +90,23 @@ def update_content_library_index_docs(library_key_str: str) -> None:
 @set_code_owner_attribute
 def update_library_collection_index_doc(library_key_str: str, collection_key: str) -> None:
     """
-    Celery task to update the content index documents for a library collection
+    Celery task to update the content index document for a library collection
     """
     library_key = LibraryLocatorV2.from_string(library_key_str)
 
     log.info("Updating content index documents for collection %s in library%s", collection_key, library_key)
 
     api.upsert_library_collection_index_doc(library_key, collection_key)
+
+
+@shared_task(base=LoggedTask, autoretry_for=(MeilisearchError, ConnectionError))
+@set_code_owner_attribute
+def update_library_components_collections(library_key_str: str, collection_key: str) -> None:
+    """
+    Celery task to update the "collections" field for components in the given content library collection.
+    """
+    library_key = LibraryLocatorV2.from_string(library_key_str)
+
+    log.info("Updating document.collections for library %s collection %s components", library_key, collection_key)
+
+    api.update_library_components_collections(library_key, collection_key)
diff --git a/openedx/core/djangoapps/content/search/tests/test_api.py b/openedx/core/djangoapps/content/search/tests/test_api.py
index 4aa41a156dab..2f43896c197e 100644
--- a/openedx/core/djangoapps/content/search/tests/test_api.py
+++ b/openedx/core/djangoapps/content/search/tests/test_api.py
@@ -187,7 +187,7 @@ def setUp(self):
         )
         self.collection_usage_key = "lib-collection:org1:lib:MYCOL"
         self.collection_dict = {
-            "id": self.collection.id,
+            "id": "lib-collectionorg1libmycol-5b647617",
             "block_id": self.collection.key,
             "usage_key": self.collection_usage_key,
             "type": "collection",
@@ -461,7 +461,7 @@ def test_index_library_block_and_collections(self, mock_meilisearch):
         # Build expected docs at each stage
         lib_access, _ = SearchAccess.objects.get_or_create(context_key=self.library.key)
         doc_collection1_created = {
-            "id": collection1.id,
+            "id": "lib-collectionorg1libcol1-283a79c9",
             "block_id": collection1.key,
             "usage_key": f"lib-collection:org1:lib:{collection1.key}",
             "type": "collection",
@@ -476,7 +476,7 @@ def test_index_library_block_and_collections(self, mock_meilisearch):
             "breadcrumbs": [{"display_name": "Library"}],
         }
         doc_collection2_created = {
-            "id": collection2.id,
+            "id": "lib-collectionorg1libcol2-46823d4d",
             "block_id": collection2.key,
             "usage_key": f"lib-collection:org1:lib:{collection2.key}",
             "type": "collection",
@@ -491,7 +491,7 @@ def test_index_library_block_and_collections(self, mock_meilisearch):
             "breadcrumbs": [{"display_name": "Library"}],
         }
         doc_collection2_updated = {
-            "id": collection2.id,
+            "id": "lib-collectionorg1libcol2-46823d4d",
             "block_id": collection2.key,
             "usage_key": f"lib-collection:org1:lib:{collection2.key}",
             "type": "collection",
@@ -506,7 +506,7 @@ def test_index_library_block_and_collections(self, mock_meilisearch):
             "breadcrumbs": [{"display_name": "Library"}],
         }
         doc_collection1_updated = {
-            "id": collection1.id,
+            "id": "lib-collectionorg1libcol1-283a79c9",
             "block_id": collection1.key,
             "usage_key": f"lib-collection:org1:lib:{collection1.key}",
             "type": "collection",
@@ -593,14 +593,14 @@ def test_index_tags_in_collections(self, mock_meilisearch):
 
         # Build expected docs with tags at each stage
         doc_collection_with_tags1 = {
-            "id": self.collection.id,
+            "id": "lib-collectionorg1libmycol-5b647617",
             "tags": {
                 'taxonomy': ['A'],
                 'level0': ['A > one', 'A > two']
             }
         }
         doc_collection_with_tags2 = {
-            "id": self.collection.id,
+            "id": "lib-collectionorg1libmycol-5b647617",
             "tags": {
                 'taxonomy': ['A', 'B'],
                 'level0': ['A > one', 'A > two', 'B > four', 'B > three']
@@ -615,3 +615,105 @@ def test_index_tags_in_collections(self, mock_meilisearch):
             ],
             any_order=True,
         )
+
+    @override_settings(MEILISEARCH_ENABLED=True)
+    def test_delete_collection(self, mock_meilisearch):
+        """
+        Test soft-deleting, restoring, and hard-deleting a collection.
+ """ + # Add a component to the collection + updated_date = datetime(2023, 6, 7, 8, 9, 10, tzinfo=timezone.utc) + with freeze_time(updated_date): + library_api.update_library_collection_components( + self.library.key, + collection_key=self.collection.key, + usage_keys=[ + self.problem1.usage_key, + ], + ) + + doc_collection = copy.deepcopy(self.collection_dict) + doc_collection["num_children"] = 1 + doc_collection["modified"] = updated_date.timestamp() + doc_problem_with_collection = { + "id": self.doc_problem1["id"], + "collections": { + "display_name": [self.collection.title], + "key": [self.collection.key], + }, + } + + # Should update the collection and its component + assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 2 + mock_meilisearch.return_value.index.return_value.update_documents.assert_has_calls( + [ + call([doc_collection]), + call([doc_problem_with_collection]), + ], + any_order=True, + ) + mock_meilisearch.return_value.index.reset_mock() + + # Soft-delete the collection + authoring_api.delete_collection( + self.collection.learning_package_id, + self.collection.key, + ) + + doc_problem_without_collection = { + "id": self.doc_problem1["id"], + "collections": {}, + } + + # Should delete the collection document + mock_meilisearch.return_value.index.return_value.delete_document.assert_called_once_with( + self.collection_dict["id"], + ) + # ...and update the component's "collections" field + mock_meilisearch.return_value.index.return_value.update_documents.assert_called_once_with([ + doc_problem_without_collection, + ]) + mock_meilisearch.return_value.index.reset_mock() + + # We need to mock get_document here so that when we restore the collection below, meilisearch knows the + # collection is being re-added, so it will update its components too. 
+        mock_meilisearch.return_value.get_index.return_value.get_document.return_value = None
+
+        # Restore the collection
+        restored_date = datetime(2023, 8, 9, 10, 11, 12, tzinfo=timezone.utc)
+        with freeze_time(restored_date):
+            authoring_api.restore_collection(
+                self.collection.learning_package_id,
+                self.collection.key,
+            )
+
+        doc_collection = copy.deepcopy(self.collection_dict)
+        doc_collection["num_children"] = 1
+        doc_collection["modified"] = restored_date.timestamp()
+
+        # Should update the collection and its component's "collections" field
+        assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 2
+        mock_meilisearch.return_value.index.return_value.update_documents.assert_has_calls(
+            [
+                call([doc_collection]),
+                call([doc_problem_with_collection]),
+            ],
+            any_order=True,
+        )
+        mock_meilisearch.return_value.index.reset_mock()
+
+        # Hard-delete the collection
+        authoring_api.delete_collection(
+            self.collection.learning_package_id,
+            self.collection.key,
+            hard_delete=True,
+        )
+
+        # Should delete the collection document
+        mock_meilisearch.return_value.index.return_value.delete_document.assert_called_once_with(
+            self.collection_dict["id"],
+        )
+        # ...and cascade delete updates the "collections" field for the associated components
+        mock_meilisearch.return_value.index.return_value.update_documents.assert_called_once_with([
+            doc_problem_without_collection,
+        ])
diff --git a/openedx/core/djangoapps/content/search/tests/test_documents.py b/openedx/core/djangoapps/content/search/tests/test_documents.py
index 9d51bd127bb4..755ab4d19ad3 100644
--- a/openedx/core/djangoapps/content/search/tests/test_documents.py
+++ b/openedx/core/djangoapps/content/search/tests/test_documents.py
@@ -5,7 +5,6 @@
 from organizations.models import Organization
 from freezegun import freeze_time
 
-from openedx_learning.api import authoring as authoring_api
 from openedx.core.djangoapps.content_tagging import api as tagging_api
 from openedx.core.djangoapps.content_libraries import api as library_api
 
@@ -299,11 +298,11 @@ def test_html_library_block(self):
         }
 
     def test_collection_with_library(self):
-        doc = searchable_doc_for_collection(self.collection)
-        doc.update(searchable_doc_tags_for_collection(self.library.key, self.collection))
+        doc = searchable_doc_for_collection(self.library.key, self.collection.key)
+        doc.update(searchable_doc_tags_for_collection(self.library.key, self.collection.key))
 
         assert doc == {
-            "id": self.collection.id,
+            "id": "lib-collectionedx2012_falltoy_collection-d1d907a4",
             "block_id": self.collection.key,
             "usage_key": self.collection_usage_key,
             "type": "collection",
@@ -321,33 +320,3 @@ def test_collection_with_library(self):
                 'level0': ['Difficulty > Normal']
             }
         }
-
-    def test_collection_with_no_library(self):
-        created_date = datetime(2023, 4, 5, 6, 7, 8, tzinfo=timezone.utc)
-        with freeze_time(created_date):
-            learning_package = authoring_api.create_learning_package(
-                key="course-v1:edX+toy+2012_Fall",
-                title="some learning_package",
-                description="some description",
-            )
-            collection = authoring_api.create_collection(
-                learning_package_id=learning_package.id,
-                key="MYCOL",
-                title="my_collection",
-                created_by=None,
-                description="my collection description"
-            )
-        doc = searchable_doc_for_collection(collection)
-        assert doc == {
-            "id": collection.id,
-            "block_id": collection.key,
-            "type": "collection",
-            "display_name": "my_collection",
-            "description": "my collection description",
-            "num_children": 0,
-            "context_key": learning_package.key,
"access_id": self.toy_course_access_id, - "breadcrumbs": [{"display_name": "some learning_package"}], - "created": created_date.timestamp(), - "modified": created_date.timestamp(), - } diff --git a/openedx/core/djangoapps/content_libraries/api.py b/openedx/core/djangoapps/content_libraries/api.py index 3dc33aec9616..a9601a4e70a7 100644 --- a/openedx/core/djangoapps/content_libraries/api.py +++ b/openedx/core/djangoapps/content_libraries/api.py @@ -79,20 +79,15 @@ from opaque_keys import InvalidKeyError from openedx_events.content_authoring.data import ( ContentLibraryData, - ContentObjectChangedData, LibraryBlockData, - LibraryCollectionData, ) from openedx_events.content_authoring.signals import ( - CONTENT_OBJECT_ASSOCIATIONS_CHANGED, CONTENT_LIBRARY_CREATED, CONTENT_LIBRARY_DELETED, CONTENT_LIBRARY_UPDATED, LIBRARY_BLOCK_CREATED, LIBRARY_BLOCK_DELETED, LIBRARY_BLOCK_UPDATED, - LIBRARY_COLLECTION_CREATED, - LIBRARY_COLLECTION_UPDATED, ) from openedx_learning.api import authoring as authoring_api from openedx_learning.api.authoring_models import Collection, Component, MediaType, LearningPackage, PublishableEntity @@ -242,10 +237,9 @@ def from_component(cls, library_key, component): last_draft_created_by = draft.publishable_entity_version.created_by if draft else None return cls( - usage_key=LibraryUsageLocatorV2( + usage_key=library_component_usage_key( library_key, - component.component_type.name, - component.local_key, + component, ), display_name=component.versioning.draft.title, created=component.created, @@ -787,6 +781,20 @@ def set_library_block_olx(usage_key, new_olx_str): ) +def library_component_usage_key( + library_key: LibraryLocatorV2, + component: Component, +) -> LibraryUsageLocatorV2: + """ + Returns a LibraryUsageLocatorV2 for the given library + component. + """ + return LibraryUsageLocatorV2( # type: ignore[abstract] + library_key, + block_type=component.component_type.name, + usage_id=component.local_key, + ) + + def validate_can_add_block_to_library( library_key: LibraryLocatorV2, block_type: str, @@ -1103,8 +1111,7 @@ def create_library_collection( content_library: ContentLibrary | None = None, ) -> Collection: """ - Creates a Collection in the given ContentLibrary, - and emits a LIBRARY_COLLECTION_CREATED event. + Creates a Collection in the given ContentLibrary. If you've already fetched a ContentLibrary for the given library_key, pass it in here to avoid refetching. """ @@ -1125,14 +1132,6 @@ def create_library_collection( except IntegrityError as err: raise LibraryCollectionAlreadyExists from err - # Emit event for library collection created - LIBRARY_COLLECTION_CREATED.send_event( - library_collection=LibraryCollectionData( - library_key=library_key, - collection_key=collection.key, - ) - ) - return collection @@ -1146,8 +1145,7 @@ def update_library_collection( content_library: ContentLibrary | None = None, ) -> Collection: """ - Creates a Collection in the given ContentLibrary, - and emits a LIBRARY_COLLECTION_CREATED event. + Updates a Collection in the given ContentLibrary. 
""" if not content_library: content_library = ContentLibrary.objects.get_by_key(library_key) # type: ignore[attr-defined] @@ -1165,14 +1163,6 @@ def update_library_collection( except Collection.DoesNotExist as exc: raise ContentLibraryCollectionNotFound from exc - # Emit event for library collection updated - LIBRARY_COLLECTION_UPDATED.send_event( - library_collection=LibraryCollectionData( - library_key=library_key, - collection_key=collection.key, - ) - ) - return collection @@ -1243,40 +1233,16 @@ def update_library_collection_components( created_by=created_by, ) - # Emit event for library collection updated - LIBRARY_COLLECTION_UPDATED.send_event( - library_collection=LibraryCollectionData( - library_key=library_key, - collection_key=collection.key, - ) - ) - - # Emit a CONTENT_OBJECT_ASSOCIATIONS_CHANGED event for each of the objects added/removed - for usage_key in usage_keys: - CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event( - content_object=ContentObjectChangedData( - object_id=str(usage_key), - changes=["collections"], - ), - ) - return collection def get_library_collection_usage_key( library_key: LibraryLocatorV2, collection_key: str, - # As an optimization, callers may pass in a pre-fetched ContentLibrary instance - content_library: ContentLibrary | None = None, ) -> LibraryCollectionLocator: """ Returns the LibraryCollectionLocator associated to a collection """ - if not content_library: - content_library = ContentLibrary.objects.get_by_key(library_key) # type: ignore[attr-defined] - assert content_library - assert content_library.learning_package_id - assert content_library.library_key == library_key return LibraryCollectionLocator(library_key, collection_key) diff --git a/openedx/core/djangoapps/content_libraries/signal_handlers.py b/openedx/core/djangoapps/content_libraries/signal_handlers.py index 768b49d55f22..fedee045a9f6 100644 --- a/openedx/core/djangoapps/content_libraries/signal_handlers.py +++ b/openedx/core/djangoapps/content_libraries/signal_handlers.py @@ -5,13 +5,28 @@ import logging from django.conf import settings +from django.db.models.signals import post_save, post_delete, m2m_changed from django.dispatch import receiver +from opaque_keys import InvalidKeyError +from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2 +from openedx_events.content_authoring.data import ( + ContentObjectChangedData, + LibraryCollectionData, +) +from openedx_events.content_authoring.signals import ( + CONTENT_OBJECT_ASSOCIATIONS_CHANGED, + LIBRARY_COLLECTION_CREATED, + LIBRARY_COLLECTION_DELETED, + LIBRARY_COLLECTION_UPDATED, +) +from openedx_learning.api.authoring import get_collection_components, get_component, get_components +from openedx_learning.api.authoring_models import Collection, CollectionPublishableEntity, Component + from lms.djangoapps.grades.api import signals as grades_signals -from opaque_keys import InvalidKeyError # lint-amnesty, pylint: disable=wrong-import-order -from opaque_keys.edx.locator import LibraryUsageLocatorV2 # lint-amnesty, pylint: disable=wrong-import-order -from .models import LtiGradedResource +from .api import library_component_usage_key +from .models import ContentLibrary, LtiGradedResource log = logging.getLogger(__name__) @@ -55,3 +70,139 @@ def score_changed_handler(sender, **kwargs): # pylint: disable=unused-argument resource.update_score(weighted_earned, weighted_possible, modified) log.info("LTI 1.3: Score Signal: Grade upgraded: resource; %s", resource) + + +@receiver(post_save, sender=Collection, 
dispatch_uid="library_collection_saved") +def library_collection_saved(sender, instance, created, **kwargs): + """ + Raises LIBRARY_COLLECTION_CREATED if the Collection is new, + or LIBRARY_COLLECTION_UPDATED if updated an existing Collection. + """ + try: + library = ContentLibrary.objects.get(learning_package_id=instance.learning_package_id) + except ContentLibrary.DoesNotExist: + log.error("{instance} is not associated with a content library.") + return + + if created: + LIBRARY_COLLECTION_CREATED.send_event( + library_collection=LibraryCollectionData( + library_key=library.library_key, + collection_key=instance.key, + ) + ) + else: + LIBRARY_COLLECTION_UPDATED.send_event( + library_collection=LibraryCollectionData( + library_key=library.library_key, + collection_key=instance.key, + ) + ) + + +@receiver(post_delete, sender=Collection, dispatch_uid="library_collection_deleted") +def library_collection_deleted(sender, instance, **kwargs): + """ + Raises LIBRARY_COLLECTION_DELETED for the deleted Collection. + """ + try: + library = ContentLibrary.objects.get(learning_package_id=instance.learning_package_id) + except ContentLibrary.DoesNotExist: + log.error("{instance} is not associated with a content library.") + return + + LIBRARY_COLLECTION_DELETED.send_event( + library_collection=LibraryCollectionData( + library_key=library.library_key, + collection_key=instance.key, + ) + ) + + +def _library_collection_component_changed( + component: Component, + library_key: LibraryLocatorV2 | None = None, +) -> None: + """ + Sends a CONTENT_OBJECT_ASSOCIATIONS_CHANGED event for the component. + """ + if not library_key: + try: + library = ContentLibrary.objects.get( + learning_package_id=component.learning_package_id, + ) + except ContentLibrary.DoesNotExist: + log.error("{component} is not associated with a content library.") + return + + library_key = library.library_key + + assert library_key + + usage_key = library_component_usage_key( + library_key, + component, + ) + CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event( + content_object=ContentObjectChangedData( + object_id=str(usage_key), + changes=["collections"], + ), + ) + + +@receiver(post_save, sender=CollectionPublishableEntity, dispatch_uid="library_collection_entity_saved") +def library_collection_entity_saved(sender, instance, created, **kwargs): + """ + Sends a CONTENT_OBJECT_ASSOCIATIONS_CHANGED event for components added to a collection. + """ + if created: + # Component.pk matches its entity.pk + component = get_component(instance.entity_id) + _library_collection_component_changed(component) + + +@receiver(post_delete, sender=CollectionPublishableEntity, dispatch_uid="library_collection_entity_deleted") +def library_collection_entity_deleted(sender, instance, **kwargs): + """ + Sends a CONTENT_OBJECT_ASSOCIATIONS_CHANGED event for components removed from a collection. + """ + # Component.pk matches its entity.pk + component = get_component(instance.entity_id) + _library_collection_component_changed(component) + + +@receiver(m2m_changed, sender=CollectionPublishableEntity, dispatch_uid="library_collection_entities_changed") +def library_collection_entities_changed(sender, instance, action, pk_set, **kwargs): + """ + Sends a CONTENT_OBJECT_ASSOCIATIONS_CHANGED event for components added/removed/cleared from a collection. 
+ """ + if not isinstance(instance, Collection): + return + + if action not in ["post_add", "post_remove", "post_clear"]: + return + + try: + library = ContentLibrary.objects.get( + learning_package_id=instance.learning_package_id, + ) + except ContentLibrary.DoesNotExist: + log.error("{instance} is not associated with a content library.") + return + + if pk_set: + components = get_collection_components( + instance.learning_package_id, + instance.key, + ).filter(pk__in=pk_set) + else: + # When action=="post_clear", pk_set==None + # Since the collection instance now has an empty entities set, + # we don't know which ones were removed, so we need to update associations for all library components. + components = get_components( + instance.learning_package_id, + ) + + for component in components.all(): + _library_collection_component_changed(component, library.library_key) diff --git a/openedx/core/djangoapps/content_libraries/tests/test_api.py b/openedx/core/djangoapps/content_libraries/tests/test_api.py index b02e71b002a3..8041c508dc31 100644 --- a/openedx/core/djangoapps/content_libraries/tests/test_api.py +++ b/openedx/core/djangoapps/content_libraries/tests/test_api.py @@ -20,9 +20,11 @@ from openedx_events.content_authoring.signals import ( CONTENT_OBJECT_ASSOCIATIONS_CHANGED, LIBRARY_COLLECTION_CREATED, + LIBRARY_COLLECTION_DELETED, LIBRARY_COLLECTION_UPDATED, ) from openedx_events.tests.utils import OpenEdxEventsTestMixin +from openedx_learning.api import authoring as authoring_api from .. import api from ..models import ContentLibrary @@ -264,6 +266,7 @@ class ContentLibraryCollectionsTest(ContentLibrariesRestApiTest, OpenEdxEventsTe ENABLED_OPENEDX_EVENTS = [ CONTENT_OBJECT_ASSOCIATIONS_CHANGED.event_type, LIBRARY_COLLECTION_CREATED.event_type, + LIBRARY_COLLECTION_DELETED.event_type, LIBRARY_COLLECTION_UPDATED.event_type, ] @@ -386,6 +389,29 @@ def test_update_library_collection_wrong_library(self): self.col2.key, ) + def test_delete_library_collection(self): + event_receiver = mock.Mock() + LIBRARY_COLLECTION_DELETED.connect(event_receiver) + + authoring_api.delete_collection( + self.lib1.learning_package_id, + self.col1.key, + hard_delete=True, + ) + + assert event_receiver.call_count == 1 + self.assertDictContainsSubset( + { + "signal": LIBRARY_COLLECTION_DELETED, + "sender": None, + "library_collection": LibraryCollectionData( + self.lib1.library_key, + collection_key="COL1", + ), + }, + event_receiver.call_args_list[0].kwargs, + ) + def test_update_library_collection_components(self): assert not list(self.col1.entities.all()) @@ -429,11 +455,11 @@ def test_update_library_collection_components_event(self): assert event_receiver.call_count == 3 self.assertDictContainsSubset( { - "signal": LIBRARY_COLLECTION_UPDATED, + "signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED, "sender": None, - "library_collection": LibraryCollectionData( - self.lib1.library_key, - collection_key="COL1", + "content_object": ContentObjectChangedData( + object_id=self.lib1_problem_block["id"], + changes=["collections"], ), }, event_receiver.call_args_list[0].kwargs, @@ -443,7 +469,7 @@ def test_update_library_collection_components_event(self): "signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED, "sender": None, "content_object": ContentObjectChangedData( - object_id=self.lib1_problem_block["id"], + object_id=self.lib1_html_block["id"], changes=["collections"], ), }, @@ -451,11 +477,11 @@ def test_update_library_collection_components_event(self): ) self.assertDictContainsSubset( { - "signal": 
-                "signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED,
+                "signal": LIBRARY_COLLECTION_UPDATED,
                 "sender": None,
-                "content_object": ContentObjectChangedData(
-                    object_id=self.lib1_html_block["id"],
-                    changes=["collections"],
+                "library_collection": LibraryCollectionData(
+                    self.lib1.library_key,
+                    collection_key="COL1",
                 ),
             },
             event_receiver.call_args_list[2].kwargs,
diff --git a/openedx/core/djangoapps/content_libraries/tests/test_views_collections.py b/openedx/core/djangoapps/content_libraries/tests/test_views_collections.py
index bc600759b5b3..43c1627c2c76 100644
--- a/openedx/core/djangoapps/content_libraries/tests/test_views_collections.py
+++ b/openedx/core/djangoapps/content_libraries/tests/test_views_collections.py
@@ -17,6 +17,7 @@
 URL_PREFIX = '/api/libraries/v2/{lib_key}/'
 URL_LIB_COLLECTIONS = URL_PREFIX + 'collections/'
 URL_LIB_COLLECTION = URL_LIB_COLLECTIONS + '{collection_key}/'
+URL_LIB_COLLECTION_RESTORE = URL_LIB_COLLECTIONS + '{collection_key}/restore/'
 URL_LIB_COLLECTION_COMPONENTS = URL_LIB_COLLECTION + 'components/'
 
 
@@ -330,15 +331,33 @@ def test_update_invalid_library_collection(self):
 
     def test_delete_library_collection(self):
         """
-        Test deleting a Content Library Collection
-
-        Note: Currently not implemented and should return a 405
+        Test soft-deleting and restoring a Content Library Collection
         """
         resp = self.client.delete(
             URL_LIB_COLLECTION.format(lib_key=self.lib2.library_key, collection_key=self.col3.key)
         )
+        assert resp.status_code == 204
 
-        assert resp.status_code == 405
+        resp = self.client.get(
+            URL_LIB_COLLECTION.format(lib_key=self.lib2.library_key, collection_key=self.col3.key)
+        )
+        assert resp.status_code == 404
+
+        resp = self.client.post(
+            URL_LIB_COLLECTION_RESTORE.format(lib_key=self.lib2.library_key, collection_key=self.col3.key)
+        )
+        assert resp.status_code == 204
+
+        resp = self.client.get(
+            URL_LIB_COLLECTION.format(lib_key=self.lib2.library_key, collection_key=self.col3.key)
+        )
+        # Check that the correct Content Library Collection data is retrieved
+        expected_collection = {
+            "title": "Collection 3",
+            "description": "Description for Collection 3",
+        }
+        assert resp.status_code == 200
+        self.assertDictContainsEntries(resp.data, expected_collection)
 
     def test_get_components(self):
         """
diff --git a/openedx/core/djangoapps/content_libraries/views_collections.py b/openedx/core/djangoapps/content_libraries/views_collections.py
index 2f40a1788628..b6c1c999ba94 100644
--- a/openedx/core/djangoapps/content_libraries/views_collections.py
+++ b/openedx/core/djangoapps/content_libraries/views_collections.py
@@ -11,7 +11,7 @@
 from rest_framework.decorators import action
 from rest_framework.response import Response
 from rest_framework.viewsets import ModelViewSet
-from rest_framework.status import HTTP_405_METHOD_NOT_ALLOWED
+from rest_framework.status import HTTP_204_NO_CONTENT
 
 from opaque_keys.edx.locator import LibraryLocatorV2
 from openedx_learning.api import authoring as authoring_api
@@ -163,13 +163,31 @@ def partial_update(self, request, *args, **kwargs) -> Response:
     @convert_exceptions
     def destroy(self, request, *args, **kwargs) -> Response:
         """
-        Deletes a Collection that belongs to a Content Library
-
-        Note: (currently not allowed)
+        Soft-deletes a Collection that belongs to a Content Library
         """
-        # TODO: Implement the deletion logic and emit event signal
+        collection = super().get_object()
+        assert collection.learning_package_id
+        authoring_api.delete_collection(
+            collection.learning_package_id,
+            collection.key,
+            hard_delete=False,
+        )
+        return Response(None, status=HTTP_204_NO_CONTENT)
 
-        return Response(None, status=HTTP_405_METHOD_NOT_ALLOWED)
+    @convert_exceptions
+    @action(detail=True, methods=['post'], url_path='restore', url_name='collection-restore')
+    def restore(self, request, *args, **kwargs) -> Response:
+        """
+        Restores a soft-deleted Collection that belongs to a Content Library
+        """
+        content_library = self.get_content_library()
+        assert content_library.learning_package_id
+        collection_key = kwargs["key"]
+        authoring_api.restore_collection(
+            content_library.learning_package_id,
+            collection_key,
+        )
+        return Response(None, status=HTTP_204_NO_CONTENT)
 
     @convert_exceptions
     @action(detail=True, methods=['delete', 'patch'], url_path='components', url_name='components-update')
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 5afbe5f94e40..43f000a1e8ba 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -93,7 +93,7 @@ libsass==0.10.0
 click==8.1.6
 
 # pinning this version to avoid updates while the library is being developed
-openedx-learning==0.11.5
+openedx-learning==0.13.0
 
 # Open AI version 1.0.0 dropped support for openai.ChatCompletion which is currently in use in enterprise.
 openai<=0.28.1
diff --git a/requirements/edx/base.txt b/requirements/edx/base.txt
index 097d21524190..b725f8e0c2a4 100644
--- a/requirements/edx/base.txt
+++ b/requirements/edx/base.txt
@@ -824,7 +824,7 @@ openedx-filters==1.9.0
     # via
    #   -r requirements/edx/kernel.in
     #   lti-consumer-xblock
     #   ora2
-openedx-learning==0.11.5
+openedx-learning==0.13.0
     # via
     #   -c requirements/edx/../constraints.txt
     #   -r requirements/edx/kernel.in
diff --git a/requirements/edx/development.txt b/requirements/edx/development.txt
index 3fa131288294..058214c647b7 100644
--- a/requirements/edx/development.txt
+++ b/requirements/edx/development.txt
@@ -1373,7 +1373,7 @@ openedx-filters==1.9.0
     # via
     #   -r requirements/edx/testing.txt
     #   lti-consumer-xblock
     #   ora2
-openedx-learning==0.11.5
+openedx-learning==0.13.0
     # via
     #   -c requirements/edx/../constraints.txt
     #   -r requirements/edx/doc.txt
diff --git a/requirements/edx/doc.txt b/requirements/edx/doc.txt
index 1c12d28955c6..c20c28c2e443 100644
--- a/requirements/edx/doc.txt
+++ b/requirements/edx/doc.txt
@@ -983,7 +983,7 @@ openedx-filters==1.9.0
     # via
     #   -r requirements/edx/base.txt
     #   lti-consumer-xblock
     #   ora2
-openedx-learning==0.11.5
+openedx-learning==0.13.0
     # via
     #   -c requirements/edx/../constraints.txt
     #   -r requirements/edx/base.txt
diff --git a/requirements/edx/testing.txt b/requirements/edx/testing.txt
index 6e36d98f04d8..ab0d190b5d8e 100644
--- a/requirements/edx/testing.txt
+++ b/requirements/edx/testing.txt
@@ -1034,7 +1034,7 @@ openedx-filters==1.9.0
     # via
     #   -r requirements/edx/base.txt
     #   lti-consumer-xblock
     #   ora2
-openedx-learning==0.11.5
+openedx-learning==0.13.0
     # via
     #   -c requirements/edx/../constraints.txt
     #   -r requirements/edx/base.txt