diff --git a/cms/djangoapps/modulestore_migrator/constants.py b/cms/djangoapps/modulestore_migrator/constants.py index ec7740ef1938..34ce788504b9 100644 --- a/cms/djangoapps/modulestore_migrator/constants.py +++ b/cms/djangoapps/modulestore_migrator/constants.py @@ -2,5 +2,6 @@ Constants """ -CONTENT_STAGING_PURPOSE_PREFIX = "modulestore_migrator" -CONTENT_STAGING_PURPOSE_TEMPLATE = CONTENT_STAGING_PURPOSE_PREFIX + "({source_key})" +CONTENT_STAGING_PURPOSE = "modulestore_migrator" +CONTENT_STAGING_PURPOSE_META = "modulestore_migrator_meta" +META_BLOCK_TYPES: list[str] = ["about", "course_info", "static_tab"] \ No newline at end of file diff --git a/cms/djangoapps/modulestore_migrator/data.py b/cms/djangoapps/modulestore_migrator/data.py index 22d444873e1e..3b35b07c8937 100644 --- a/cms/djangoapps/modulestore_migrator/data.py +++ b/cms/djangoapps/modulestore_migrator/data.py @@ -23,8 +23,15 @@ class CompositionLevel(Enum): Section = ContainerType.Section.value OutlineRoot = ContainerType.OutlineRoot.value + # Import the outline root, as well as the weird meta blocks (about, + # course_info, static_tab) that exist as parent-less peers of the outline + # root, and get/create the Course instance. Unlike the other + # CompositionLevels, this level does not correspond to any particular kind of + # publishable entity. + CourseRun = "course_run" + @property - def is_container(self) -> bool: + def is_complex(self) -> bool: return self is not self.Component def is_higher_than(self, other: 'CompositionLevel') -> bool: diff --git a/cms/djangoapps/modulestore_migrator/tasks.py b/cms/djangoapps/modulestore_migrator/tasks.py index d3ee0e40d55c..124fc2cbc216 100644 --- a/cms/djangoapps/modulestore_migrator/tasks.py +++ b/cms/djangoapps/modulestore_migrator/tasks.py @@ -26,21 +26,27 @@ from openedx_learning.api.authoring_models import ( Collection, Component, - ContainerVersion, + Course, + CatalogCourse, LearningPackage, PublishableEntity, PublishableEntityVersion, ) from user_tasks.tasks import UserTask, UserTaskStatus +from xblock.core import XBlock from openedx.core.djangoapps.content_libraries.api import ContainerType from openedx.core.djangoapps.content_libraries import api as libraries_api from openedx.core.djangoapps.content_libraries.models import ContentLibrary from openedx.core.djangoapps.content_staging import api as staging_api +from openedx.core.djangoapps.xblock import models as xblock_models +from openedx.core.djangoapps.xblock.api import create_xblock_field_data_for_container + from xmodule.modulestore import exceptions as modulestore_exceptions from xmodule.modulestore.django import modulestore +from xmodule.modulestore.mixed import MixedModuleStore -from .constants import CONTENT_STAGING_PURPOSE_TEMPLATE +from .constants import CONTENT_STAGING_PURPOSE, CONTENT_STAGING_PURPOSE_META, META_BLOCK_TYPES from .data import CompositionLevel from .models import ModulestoreSource, ModulestoreMigration, ModulestoreBlockSource, ModulestoreBlockMigration @@ -63,6 +69,7 @@ class MigrationStep(Enum): PARSING = 'Parsing staged OLX' IMPORTING_ASSETS = 'Importing staged files and resources' IMPORTING_STRUCTURE = 'Importing staged content structure' + IMPORTING_META = 'Importing course info and other meta-components' UNSTAGING = 'Cleaning staged content' MAPPING_OLD_TO_NEW = 'Saving map of legacy content to migrated content' FORWARDING = 'Forwarding legacy content to migrated content' @@ -110,6 +117,7 @@ def migrate_from_modulestore( status: UserTaskStatus = self.status 
status.set_state(MigrationStep.VALIDATING_INPUT.value) + comp_level = CompositionLevel(composition_level) try: source = ModulestoreSource.objects.get(pk=source_pk) target_package = LearningPackage.objects.get(pk=target_package_pk) @@ -118,8 +126,16 @@ def migrate_from_modulestore( except ObjectDoesNotExist as exc: status.fail(str(exc)) return + + course_lc_learning_context = None if isinstance(source.key, CourseLocator): source_root_usage_key = source.key.make_usage_key('course', 'course') + + # Support SplitModuleStore shim from Learning Core, force it off for now because we need to build it using ModuleStore + course_lc_learning_context, _created = xblock_models.LearningCoreLearningContext.objects.get_or_create(key=source.key) + course_lc_learning_context.use_learning_core = False + course_lc_learning_context.save() + elif isinstance(source.key, LibraryLocator): source_root_usage_key = source.key.make_usage_key('library', 'library') else: @@ -128,6 +144,7 @@ def migrate_from_modulestore( "Source key must reference a course or a legacy library." ) return + migration = ModulestoreMigration.objects.create( source=source, composition_level=composition_level, @@ -161,23 +178,37 @@ def migrate_from_modulestore( status.increment_completed_steps() status.set_state(MigrationStep.LOADING) + store: MixedModuleStore = modulestore() try: - legacy_root = modulestore().get_item(source_root_usage_key) + legacy_root = store.get_item(source_root_usage_key) except modulestore_exceptions.ItemNotFoundError as exc: status.fail(f"Failed to load source item '{source_root_usage_key}' from ModuleStore: {exc}") return if not legacy_root: status.fail(f"Could not find source item '{source_root_usage_key}' in ModuleStore") return + meta_blocks: list[XBlock] = ( + store.get_items(source.key, qualifiers={"category": {"$in": META_BLOCK_TYPES}}) + if comp_level == CompositionLevel.CourseRun + else [] + ) status.increment_completed_steps() status.set_state(MigrationStep.STAGING.value) staged_content = staging_api.stage_xblock_temporarily( block=legacy_root, user_id=status.user.pk, - purpose=CONTENT_STAGING_PURPOSE_TEMPLATE.format(source_key=source.key), + purpose=CONTENT_STAGING_PURPOSE, ) migration.staged_content = staged_content + staged_meta_contents = [ + staging_api.stage_xblock_temporarily( + block=meta_block, + user_id=status.user.pk, + purpose=CONTENT_STAGING_PURPOSE_META, + ) + for meta_block in meta_blocks + ] status.increment_completed_steps() status.set_state(MigrationStep.PARSING.value) @@ -186,12 +217,26 @@ def migrate_from_modulestore( root_node = etree.fromstring(staged_content.olx, parser=parser) except etree.ParseError as exc: status.fail(f"Failed to parse source OLX (from staged content with id = {staged_content.id}): {exc}") + return + meta_nodes = [] + for staged_meta_content in staged_meta_contents: + meta_parser = etree.XMLParser(strip_cdata=False) + try: + meta_nodes.append(etree.fromstring(staged_meta_content.olx, parser=meta_parser)) + except etree.ParseError as exc: + status.fail(f"Failed to parse source OLX (from staged content with id = {staged_content.id}): {exc}") + return status.increment_completed_steps() status.set_state(MigrationStep.IMPORTING_ASSETS.value) content_by_filename: dict[str, int] = {} now = datetime.now(tz=timezone.utc) - for staged_content_file_data in staging_api.get_staged_content_static_files(staged_content.id): + all_static_files: list[staging_api.StagedContentFileData] = [ + static_file + for staged in [staged_content, *staged_meta_contents] + for static_file in 
staging_api.get_staged_content_static_files(staged.id) + ] + for staged_content_file_data in all_static_files: old_path = staged_content_file_data.filename file_data = staging_api.get_staged_content_static_file_data(staged_content.id, old_path) if not file_data: @@ -212,21 +257,39 @@ def migrate_from_modulestore( status.increment_completed_steps() status.set_state(MigrationStep.IMPORTING_STRUCTURE.value) + now = datetime.now(timezone.utc) with authoring_api.bulk_draft_changes_for(migration.target.id) as change_log: root_migrated_node = _migrate_node( content_by_filename=content_by_filename, - source_context_key=source_root_usage_key.course_key, + source_context_key=source.key, source_node=root_node, target_library_key=target_library.library_key, target_package_id=target_package_pk, replace_existing=replace_existing, - composition_level=CompositionLevel(composition_level), - created_at=datetime.now(timezone.utc), - created_by=status.user_id, + composition_level=comp_level, + created_at=now, + created_by=user_id, ) migration.change_log = change_log status.increment_completed_steps() + status.set_state(MigrationStep.IMPORTING_META.value) + migrated_meta_nodes: list[_MigratedNode] = [ + _migrate_node( + content_by_filename=content_by_filename, + source_context_key=source.key, + source_node=meta_node, + target_package_id=target_package_pk, + target_library_key=target_library.library_key, + replace_existing=replace_existing, + composition_level=comp_level, + created_at=now, + created_by=user_id, + ) + for meta_node in meta_nodes + ] + status.increment_completed_steps() + status.set_state(MigrationStep.UNSTAGING.value) staged_content.delete() status.increment_completed_steps() @@ -239,8 +302,15 @@ def migrate_from_modulestore( # we did this, we'd want to make sure that the objects are actually visible # to the user mid-import (via django admin, or the library interface, or even just as # as a "progress bar" field in the REST API), otherwise this would be pointless. + migrated_umbrella = _MigratedNode( + # This is a block-less pseudo-node representing an umbrella containing both + # (a) the outline and (b) all the meta blocks. @@TODO this might be too clever + # to leave in the production migrator... revisit. 
+ source_to_target=None, + children=[root_migrated_node, *migrated_meta_nodes], + ) status.set_state(MigrationStep.MAPPING_OLD_TO_NEW.value) - block_source_keys_to_target_vers = dict(root_migrated_node.all_source_to_target_pairs()) + block_source_keys_to_target_vers = dict(migrated_umbrella.all_source_to_target_pairs()) ModulestoreBlockSource.objects.bulk_create( [ ModulestoreBlockSource(overall_source=source, key=source_usage_key) @@ -274,8 +344,22 @@ def migrate_from_modulestore( ], ) block_migrations = ModulestoreBlockMigration.objects.filter(overall_migration=migration) + + xblock_models.Block.objects.bulk_create( + [ + xblock_models.Block( + learning_context=course_lc_learning_context, + key=block_source_key, + entity_id=block_target_ver.entity_id, + ) + for block_source_key, block_target_ver in block_source_keys_to_target_vers.items() + ], + update_conflicts=True, + update_fields=["entity", "learning_context"], + ) status.increment_completed_steps() + status.set_state(MigrationStep.FORWARDING.value) if forward_source_to_target: block_sources_to_block_migrations = { @@ -287,6 +371,24 @@ def migrate_from_modulestore( # ModulestoreBlockSource.objects.bulk_update(block_sources_to_block_migrations.keys(), ["forwarded"]) source.forwarded = migration source.save() + if comp_level == CompositionLevel.CourseRun: + catalog_course, _ = CatalogCourse.objects.get_or_create( + org_id=source.key.org, + course_id=source.key.course, + ) + try: + course = Course.objects.get(catalog_course=catalog_course, run=source.key.run) + except Course.DoesNotExist: + Course.objects.create( + catalog_course=catalog_course, + run=source.key.run, + learning_package=target_package, + outline_root=root_migrated_node.source_to_target[1].entity.container.outlineroot, + ) + else: + course.learning_package = target_package + course.outline_root = root_migrated_node.source_to_target[1].entity.container.outlineroot + course.save() status.increment_completed_steps() status.set_state(MigrationStep.POPULATING_COLLECTION.value) @@ -303,6 +405,10 @@ def migrate_from_modulestore( ) status.increment_completed_steps() + # Now have it use our Learning Core shim for Split instead of Mongo DB + course_lc_learning_context.use_learning_core = True + course_lc_learning_context.save() + @dataclass(frozen=True) class _MigratedNode: @@ -468,7 +574,7 @@ def _migrate_container( entity_id=container.container_pk, version_num=container.draft_version_num, ) - return authoring_api.create_next_container_version( + next_container_version = authoring_api.create_next_container_version( container.container_pk, title=title, entity_rows=[ @@ -478,7 +584,9 @@ def _migrate_container( created=created_at, created_by=created_by, container_version_cls=container_type.container_model_classes[1], - ).publishable_entity_version + ) + create_xblock_field_data_for_container(next_container_version) + return next_container_version.publishable_entity_version def _migrate_component( diff --git a/lms/djangoapps/courseware/toggles.py b/lms/djangoapps/courseware/toggles.py index f9f083cad42e..b97684c2f37f 100644 --- a/lms/djangoapps/courseware/toggles.py +++ b/lms/djangoapps/courseware/toggles.py @@ -201,4 +201,6 @@ def courseware_disable_navigation_sidebar_blocks_caching(course_key=None): """ Return whether the courseware.disable_navigation_sidebar_blocks_caching flag is on. 
""" + return True # For debugging the Learning core shim proof of concept + return COURSEWARE_MICROFRONTEND_NAVIGATION_SIDEBAR_BLOCKS_DISABLE_CACHING.is_enabled(course_key) diff --git a/openedx/core/djangoapps/content/block_structure/manager.py b/openedx/core/djangoapps/content/block_structure/manager.py index 49f423ce7ac3..78f04b6453f4 100644 --- a/openedx/core/djangoapps/content/block_structure/manager.py +++ b/openedx/core/djangoapps/content/block_structure/manager.py @@ -153,9 +153,11 @@ def _bulk_operations(self): """ A context manager for notifying the store of bulk operations. """ + from xmodule.modulestore import ModuleStoreEnum try: course_key = self.root_block_usage_key.course_key except AttributeError: course_key = None - with self.modulestore.bulk_operations(course_key): - yield + with self.modulestore.branch_setting(ModuleStoreEnum.Branch.published_only, course_key): + with self.modulestore.bulk_operations(course_key): + yield diff --git a/openedx/core/djangoapps/content/block_structure/store.py b/openedx/core/djangoapps/content/block_structure/store.py index bb6359b9d3a3..a89b353eb2a1 100644 --- a/openedx/core/djangoapps/content/block_structure/store.py +++ b/openedx/core/djangoapps/content/block_structure/store.py @@ -102,6 +102,8 @@ def is_up_to_date(self, root_block_usage_key, modulestore): Returns whether the data in storage for the given key is already up-to-date with the version in the given modulestore. """ + return False + try: bs_model = self._get_model(root_block_usage_key) root_block = modulestore.get_item(root_block_usage_key) diff --git a/openedx/core/djangoapps/content/block_structure/tasks.py b/openedx/core/djangoapps/content/block_structure/tasks.py index 5796b8167b2b..de23e1bfc5a4 100644 --- a/openedx/core/djangoapps/content/block_structure/tasks.py +++ b/openedx/core/djangoapps/content/block_structure/tasks.py @@ -59,6 +59,7 @@ def _update_course_in_cache(self, **kwargs): """ Updates the course blocks (mongo -> BlockStructure) for the specified course. 
""" + log.info("Inner _update_course_in_cache called.") _call_and_retry_if_needed(self, api.update_course_in_cache, **kwargs) diff --git a/openedx/core/djangoapps/content_libraries/api/containers.py b/openedx/core/djangoapps/content_libraries/api/containers.py index 8d9b9592be91..be633c68b1da 100644 --- a/openedx/core/djangoapps/content_libraries/api/containers.py +++ b/openedx/core/djangoapps/content_libraries/api/containers.py @@ -26,8 +26,7 @@ from openedx_learning.api import authoring as authoring_api from openedx_learning.api.authoring_models import Container, ContainerVersion, Component from openedx.core.djangoapps.content_libraries.api.collections import library_collection_locator - -from openedx.core.djangoapps.xblock.api import get_component_from_usage_key +from openedx.core.djangoapps.xblock.api import create_xblock_field_data_for_container, get_component_from_usage_key from ..models import ContentLibrary from .exceptions import ContentLibraryContainerNotFound @@ -52,6 +51,9 @@ "update_container_children", "get_containers_contains_item", "publish_container_changes", + + # Hacky XBlock data-for-containers + "create_xblock_field_data_for_container", ] log = logging.getLogger(__name__) @@ -286,12 +288,12 @@ def create_container( created = datetime.now(tz=timezone.utc) container: Container - _initial_version: ContainerVersion + initial_version: ContainerVersion # Then try creating the actual container: match container_type: case ContainerType.Unit: - container, _initial_version = authoring_api.create_unit_and_version( + container, initial_version = authoring_api.create_unit_and_version( content_library.learning_package_id, key=slug, title=title, @@ -299,7 +301,7 @@ def create_container( created_by=user_id, ) case ContainerType.Subsection: - container, _initial_version = authoring_api.create_subsection_and_version( + container, initial_version = authoring_api.create_subsection_and_version( content_library.learning_package_id, key=slug, title=title, @@ -307,7 +309,7 @@ def create_container( created_by=user_id, ) case ContainerType.Section: - container, _initial_version = authoring_api.create_section_and_version( + container, initial_version = authoring_api.create_section_and_version( content_library.learning_package_id, key=slug, title=title, @@ -315,7 +317,7 @@ def create_container( created_by=user_id, ) case ContainerType.OutlineRoot: - container, _initial_version = authoring_api.create_outline_root_and_version( + container, initial_version = authoring_api.create_outline_root_and_version( content_library.learning_package_id, key=slug, title=title, @@ -325,6 +327,8 @@ def create_container( case _: raise NotImplementedError(f"Library does not support {container_type} yet") + create_xblock_field_data_for_container(initial_version) + LIBRARY_CONTAINER_CREATED.send_event( library_container=LibraryContainerData( container_key=container_key, @@ -388,6 +392,9 @@ def update_container( case _: raise NotImplementedError(f"Library does not support {container_type} yet") + # Let's add some XBlock data onto the container we just made... 
+ create_xblock_field_data_for_container(version) + # Send event related to the updated container LIBRARY_CONTAINER_UPDATED.send_event( library_container=LibraryContainerData( @@ -625,6 +632,8 @@ def update_container_children( case _: raise ValueError(f"Invalid container type: {container_type}") + create_xblock_field_data_for_container(new_version) + LIBRARY_CONTAINER_UPDATED.send_event( library_container=LibraryContainerData( container_key=container_key, diff --git a/openedx/core/djangoapps/content_libraries/signal_handlers.py b/openedx/core/djangoapps/content_libraries/signal_handlers.py index fe5489493641..4aa006a1d7c5 100644 --- a/openedx/core/djangoapps/content_libraries/signal_handlers.py +++ b/openedx/core/djangoapps/content_libraries/signal_handlers.py @@ -17,7 +17,12 @@ LIBRARY_COLLECTION_UPDATED ) from openedx_learning.api.authoring import get_components, get_containers -from openedx_learning.api.authoring_models import Collection, CollectionPublishableEntity, PublishableEntity +from openedx_learning.api.authoring_models import ( + Collection, + CollectionPublishableEntity, + PublishableEntity, + PublishLog, +) from lms.djangoapps.grades.api import signals as grades_signals diff --git a/openedx/core/djangoapps/content_libraries/tasks.py b/openedx/core/djangoapps/content_libraries/tasks.py index 72d2d1b0884f..fc5d812dd09a 100644 --- a/openedx/core/djangoapps/content_libraries/tasks.py +++ b/openedx/core/djangoapps/content_libraries/tasks.py @@ -124,6 +124,11 @@ def wait_for_post_publish_events(publish_log: PublishLog, library_key: LibraryLo up to some reasonable timeout, and then finish anything remaining asynchonrously. """ + from openedx.core.djangoapps.xblock.api import handle_library_publish + + # Learning Core Shim code (we really want the publish_log) + handle_library_publish(publish_log) + # Update the search index (and anything else) for the affected blocks result = send_events_after_publish.apply_async(args=(publish_log.pk, str(library_key))) # Try waiting a bit for those post-publish events to be handled: diff --git a/openedx/core/djangoapps/xblock/admin.py b/openedx/core/djangoapps/xblock/admin.py index e0c8b3766cbf..bc55a01f3171 100644 --- a/openedx/core/djangoapps/xblock/admin.py +++ b/openedx/core/djangoapps/xblock/admin.py @@ -6,7 +6,14 @@ from django.utils.html import format_html from openedx_learning.lib.admin_utils import ReadOnlyModelAdmin -from .models import XBlockVersionFieldData +from .models import XBlockVersionFieldData, LearningCoreLearningContext + +@admin.register(LearningCoreLearningContext) +class LearningCoreLearningContextAdmin(admin.ModelAdmin): + list_display = [ + "key", + "use_learning_core", + ] @admin.register(XBlockVersionFieldData) diff --git a/openedx/core/djangoapps/xblock/api.py b/openedx/core/djangoapps/xblock/api.py index c806fefc87c5..ab3ee9e2bf71 100644 --- a/openedx/core/djangoapps/xblock/api.py +++ b/openedx/core/djangoapps/xblock/api.py @@ -13,11 +13,12 @@ import logging import threading +import bson.tz_util from django.core.exceptions import PermissionDenied from django.urls import reverse from django.utils.translation import gettext as _ from openedx_learning.api import authoring as authoring_api -from openedx_learning.api.authoring_models import Component, ComponentVersion +from openedx_learning.api.authoring_models import Component, ComponentVersion, ContainerVersion, PublishLog from opaque_keys.edx.keys import UsageKeyV2 from opaque_keys.edx.locator import LibraryUsageLocatorV2 from rest_framework.exceptions import NotFound @@ 
-330,3 +331,331 @@ def get_handler_url( # can be called by the XBlock from python as well and in that case we don't # have access to the request. return site_root_url + path + + +from django.template.defaultfilters import filesizeformat +from opaque_keys.edx.keys import CourseKey +from xmodule.modulestore import BlockData +from xmodule.modulestore.split_mongo import BlockKey +from datetime import datetime, timezone +import bson +from bson import ObjectId +from bson.codec_options import CodecOptions +import zlib +from openedx.core.lib.cache_utils import request_cached + + +from .models import ( + LearningCoreCourseStructure, + LearningCoreLearningContext, + XBlockVersionFieldData, +) + +def get_structure_for_course(course_key: CourseKey): + """Just gets the published version for now, need to update to do both branches later""" + lookup_key = course_key.replace(branch=None, version_guid=None) + lccs = LearningCoreCourseStructure.objects.get(course_key=lookup_key) + uncompressed_data = zlib.decompress(lccs.structure) + return bson.decode(uncompressed_data, codec_options=CodecOptions(tz_aware=True)) + + +def update_learning_core_course(course_key: CourseKey): + """ + This is going to write to LearningCoreCourseStructure. + + Pass 0 of this: just push hardcoded data into the shim + + """ + writer = LearningCoreCourseShimWriter(course_key) + structure = writer.make_structure() + + import pprint + + with open("lc_struct.txt", "w") as struct_file: + printer = pprint.PrettyPrinter(indent=2, stream=struct_file) + printer.pprint(structure) + + # Structure doc is so repetitive that we get a 4-5X reduction in file size + num_blocks = len(structure['blocks']) + encoded_structure = zlib.compress(bson.encode(structure, codec_options=CodecOptions(tz_aware=True))) + + lccs, _created = LearningCoreCourseStructure.objects.get_or_create(course_key=course_key) + lccs.structure = encoded_structure + lccs.save() + + log.info(f"Updated Learning Core Structure (for Split) on course {course_key}.") + log.info(f"Structure size: {filesizeformat(len(encoded_structure))} for {num_blocks} blocks.") + + from xmodule.modulestore.django import SignalHandler + log.info(f"Emitting course_published signal for {course_key}") + SignalHandler.course_published.send_robust(sender=update_learning_core_course, course_key=course_key) + + +@request_cached() +def learning_core_backend_enabled_for_course(course_key: CourseKey): + try: + lookup_key = course_key.replace(branch=None, version_guid=None) + lc_context = LearningCoreLearningContext.objects.get(key=lookup_key) + return lc_context.use_learning_core + except LearningCoreLearningContext.DoesNotExist: + return False + + +def get_definition_doc(def_id: ObjectId): + try: + xb_field_data = XBlockVersionFieldData.objects.get(definition_object_id=str(def_id)) + except XBlockVersionFieldData.DoesNotExist: + return None + + return { + '_id': ObjectId(xb_field_data.definition_object_id), + 'block_type': None, + 'fields': xb_field_data.content, + 'edit_info': { + 'edited_by': xb_field_data.publishable_entity_version.created_by_id, + 'edited_on': xb_field_data.publishable_entity_version.created, + + # These are supposed to be the ObjectIds of the structure docs that + # represent the last time this block was edited and the original + # version at the time of creation. It's actually a common occurrence + # for these values to get pruned in Split, so we're making dummy + # ObjectIds--i.e. 
we're making it look like this was created a while + # ago and the versions for both the original creation and last + # update are no longer available. + 'previous_version': ObjectId(), + 'original_version': ObjectId(), + }, + 'schema_version': 1, + } + + +def handle_library_publish(publish_log: PublishLog): + affected_course_keys = set( + key + for key in publish_log.records.values_list('entity__block__learning_context__key', flat=True) + if key + ) + log.info(f"Affected Courses to update in LC shim: {affected_course_keys}") + for course_key in affected_course_keys: + log.info(f"Type of course_key: {type(course_key)}") + update_learning_core_course(course_key) + + +def create_xblock_field_data_for_container(version: ContainerVersion): + # this whole thing should be in xblock.api instead of here. + + log.info("Am I even being called?") + + from openedx.core.djangoapps.xblock.models import Block + + entity = version.publishable_entity_version.entity + + # If this PublishableEntity isn't associated with an Learning Core backed + # XBlock, then we can't write anything. Note: This is going to be an edge + # case later, when we want to add an existing container to a container that + # was imported from a course. + if not hasattr(entity, 'block'): + log.error("No Block detected???") + return + + parent_block = entity.block + container_usage_key = parent_block.key + course_key = container_usage_key.course_key + + # Generic values for all container types + content_scoped_fields = {} + settings_scoped_fields = { + 'display_name': version.publishable_entity_version.title + } + children = [] + + # Things specific to the course root... + if container_usage_key.block_type == "course": + content_scoped_fields['license'] = None + content_scoped_fields['wiki_slug'] = f'{course_key.org}.{course_key.course}.{course_key.run}' + settings_scoped_fields.update( + _course_block_entry(container_usage_key) + ) + + for child_entity_row in version.entity_list.entitylistrow_set.select_related('entity__block').all(): + log.error(f"Iterating children: {child_entity_row.entity}") + if not hasattr(child_entity_row.entity, 'block'): + # This can happen if we add a new component in a library to a + # container that was imported from a course. 
+            match(container_usage_key.block_type):
+                case "course":
+                    child_block_type = "chapter"
+                    child_block_id = child_entity_row.entity.key
+                case "chapter":
+                    child_block_type = "sequential"
+                    child_block_id = child_entity_row.entity.key
+                case "sequential":
+                    child_block_type = "vertical"
+                    child_block_id = child_entity_row.entity.key
+                case "vertical":
+                    child_block_type = child_entity_row.entity.component.component_type.name
+                    child_block_id = child_entity_row.entity.component.local_key
+
+            child_usage_key = course_key.make_usage_key(child_block_type, child_block_id)
+            log.info(f"Creating child usage key: {child_usage_key}")
+            child_block = Block.objects.create(
+                learning_context_id=parent_block.learning_context_id,
+                entity=child_entity_row.entity,
+                key=child_usage_key,
+            )
+        else:
+            child_block = child_entity_row.entity.block
+            child_usage_key = child_block.key
+        children.append(
+            [child_usage_key.block_type, child_usage_key.block_id]
+        )
+
+    field_data = XBlockVersionFieldData.objects.create(
+        pk=version.pk,
+        content=content_scoped_fields,
+        settings=settings_scoped_fields,
+        children=children,
+    )
+    log.info(f"Wrote XBlock Data for Container: {version}: {field_data}")
+
+
+def _course_block_entry(usage_key):
+    return {
+        'allow_anonymous': True,
+        'allow_anonymous_to_peers': False,
+        'cert_html_view_enabled': True,
+        'discussion_blackouts': [],
+        'discussion_topics': {'General': {'id': 'course'}},
+        'discussions_settings': {
+            'enable_graded_units': False,
+            'enable_in_context': True,
+            'openedx': {'group_at_subsection': False},
+            'posting_restrictions': 'disabled',
+            'provider_type': 'openedx',
+            'unit_level_visibility': True
+        },
+        'end': None,
+        'language': 'en',
+
+        ## HARDCODED START DATE
+        'start': datetime(2020, 1, 1, 0, 0, tzinfo=timezone.utc),
+        'static_asset_path': 'course',
+        'tabs': [
+            {
+                'course_staff_only': False,
+                'name': 'Course',
+                'type': 'courseware'
+            },
+            {
+                'course_staff_only': False,
+                'name': 'Progress',
+                'type': 'progress'
+            },
+            {
+                'course_staff_only': False,
+                'name': 'Dates',
+                'type': 'dates'
+            },
+            {
+                'course_staff_only': False,
+                'name': 'Discussion',
+                'type': 'discussion'
+            },
+            {
+                'course_staff_only': False,
+                'is_hidden': True,
+                'name': 'Wiki',
+                'type': 'wiki'
+            },
+            {
+                'course_staff_only': False,
+                'name': 'Textbooks',
+                'type': 'textbooks'
+            }
+        ],
+        'xml_attributes': {
+            'filename': [f'course/{usage_key.run}.xml', f'course/{usage_key.run}.xml']
+        }
+    }
+
+
+class LearningCoreCourseShimWriter:
+    def __init__(self, course_key: CourseKey):
+        self.course_key = course_key
+        self.structure_obj_id = bson.ObjectId()
+
+        self.edited_on = datetime.now(tz=timezone.utc)
+        self.user_id = -1  # This is "the system did it"
+
+    def make_structure(self):
+        structure = self.base_structure()
+
+        context = LearningCoreLearningContext.objects.get(key=self.course_key)
+        blocks = (
+            context.blocks
+            .select_related(
+                'entity__published__version__xblockversionfielddata',
+                'entity__draft__version__xblockversionfielddata',
+            )
+        )
+        for block in blocks:
+            entity_version = block.entity.published.version
+            if not hasattr(entity_version, 'xblockversionfielddata'):
+                log.error(f"MISSING XBlockVersionFieldData for {block.key}")
+                continue
+            field_data = entity_version.xblockversionfielddata
+            block_entry = self.base_block_entry(
+                block.key.block_type,
+                block.key.block_id,
+                ObjectId(field_data.definition_object_id),
+            )
+            block_entry['fields'].update(field_data.settings)
+            if field_data.children:
+                block_entry['fields']['children'] = field_data.children
+
+            structure['blocks'].append(block_entry)
+
+        return structure
+
+    def base_structure(self):
+        doc_id = bson.ObjectId()
+
+        return {
+            '_id': doc_id,
+            'blocks': [],
+            'schema_version': 1,  # LOL
+
+            'root': ['course', 'course'],  # Root is always the CourseBlock
+            'edited_by': self.user_id,
+            'edited_on': self.edited_on,
+
+            # We're always going to be the "first" version for now, from Split's
+            # perspective.
+            'previous_version': None,
+            'original_version': doc_id
+        }
+
+    def base_block_entry(self, block_type: str, block_id: str, definition_object_id: ObjectId):
+        return {
+            'asides': {},  # We are *so* not doing asides in this prototype
+            'block_id': block_id,
+            'block_type': block_type,
+            'defaults': {},
+            'fields': {'children': []},  # Even blocks without children are written this way.
+            'definition': definition_object_id,
+            'edit_info': self.base_edit_info()
+        }
+
+    def base_edit_info(self):
+        return {
+            'edited_by': self.user_id,
+            'edited_on': self.edited_on,
+
+            # This is v1 libraries data that we're faking
+            'original_usage': None,
+            'original_usage_version': None,
+
+            # Edit history, all of which we're faking
+            'previous_version': None,
+            'source_version': self.structure_obj_id,
+            'update_version': self.structure_obj_id,
+        }
diff --git a/openedx/core/djangoapps/xblock/management/commands/update_lc_course.py b/openedx/core/djangoapps/xblock/management/commands/update_lc_course.py
new file mode 100644
index 000000000000..3226673d90fb
--- /dev/null
+++ b/openedx/core/djangoapps/xblock/management/commands/update_lc_course.py
@@ -0,0 +1,19 @@
+from django.core.management.base import BaseCommand
+from opaque_keys.edx.keys import CourseKey
+
+from ...api import update_learning_core_course
+
+class Command(BaseCommand):
+    """
+    Invoke with:
+
+        python manage.py cms update_lc_course <course_key>
+    """
+    help = "Updates a single course to read from a hybrid LC/Modulestore interface."
+ + def add_arguments(self, parser): + parser.add_argument('course_key') + + def handle(self, *args, **options): + course_key = CourseKey.from_string(options['course_key']) + update_learning_core_course(course_key) diff --git a/openedx/core/djangoapps/xblock/migrations/0002_learningcorecoursestructure_and_more.py b/openedx/core/djangoapps/xblock/migrations/0002_learningcorecoursestructure_and_more.py new file mode 100644 index 000000000000..19207cf017ae --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0002_learningcorecoursestructure_and_more.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.22 on 2025-06-20 01:21 + +from django.db import migrations, models +import opaque_keys.edx.django.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('xblock_new', '0001_add_xblock_version_field_data'), + ] + + operations = [ + migrations.CreateModel( + name='LearningCoreCourseStructure', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('course_key', opaque_keys.edx.django.models.CourseKeyField(max_length=255)), + ('structure', models.BinaryField()), + ], + ), + migrations.AddConstraint( + model_name='learningcorecoursestructure', + constraint=models.UniqueConstraint(models.F('course_key'), name='xblock_lccs_uniq_course_key'), + ), + ] diff --git a/openedx/core/djangoapps/xblock/migrations/0003_xblockversionfielddata_definition_object_id.py b/openedx/core/djangoapps/xblock/migrations/0003_xblockversionfielddata_definition_object_id.py new file mode 100644 index 000000000000..31ab67fdf139 --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0003_xblockversionfielddata_definition_object_id.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.22 on 2025-06-20 02:05 + +from django.db import migrations, models +import openedx.core.djangoapps.xblock.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('xblock_new', '0002_learningcorecoursestructure_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='xblockversionfielddata', + name='definition_object_id', + field=models.CharField( + default=openedx.core.djangoapps.xblock.models.XBlockVersionFieldData.generate_object_id_str, + max_length=24, + null=True, + ), + ), + ] diff --git a/openedx/core/djangoapps/xblock/migrations/0004_generate_def_ids.py b/openedx/core/djangoapps/xblock/migrations/0004_generate_def_ids.py new file mode 100644 index 000000000000..75bc5b451584 --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0004_generate_def_ids.py @@ -0,0 +1,22 @@ +# Generated by Django 4.2.22 on 2025-06-20 02:10 + +from django.db import migrations +from bson import ObjectId + + +def gen_definition_object_ids(apps, schema_editor): + XBlockVersionFieldData = apps.get_model("xblock_new", "XBlockVersionFieldData") + for row in XBlockVersionFieldData.objects.all(): + row.definition_object_id = str(ObjectId()) + row.save(update_fields=["definition_object_id"]) + + +class Migration(migrations.Migration): + + dependencies = [ + ('xblock_new', '0003_xblockversionfielddata_definition_object_id'), + ] + + operations = [ + migrations.RunPython(gen_definition_object_ids, reverse_code=migrations.RunPython.noop), + ] diff --git a/openedx/core/djangoapps/xblock/migrations/0005_unique_def_ids.py b/openedx/core/djangoapps/xblock/migrations/0005_unique_def_ids.py new file mode 100644 index 000000000000..ecc32010801e --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0005_unique_def_ids.py @@ -0,0 +1,24 @@ +# Generated by 
Django 4.2.22 on 2025-06-20 14:07 + +from django.db import migrations, models +import openedx.core.djangoapps.xblock.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('xblock_new', '0004_generate_def_ids'), + ] + + operations = [ + migrations.AlterField( + model_name="XBlockVersionFieldData", + name="definition_object_id", + field=models.CharField( + max_length=24, + unique=True, + null=False, + default=openedx.core.djangoapps.xblock.models.XBlockVersionFieldData.generate_object_id_str, + ) + ) + ] diff --git a/openedx/core/djangoapps/xblock/migrations/0006_block_learningcontext_block_learning_context.py b/openedx/core/djangoapps/xblock/migrations/0006_block_learningcontext_block_learning_context.py new file mode 100644 index 000000000000..801d3c8c197a --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0006_block_learningcontext_block_learning_context.py @@ -0,0 +1,38 @@ +# Generated by Django 4.2.22 on 2025-06-21 02:57 + +from django.db import migrations, models +import django.db.models.deletion +import opaque_keys.edx.django.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('oel_publishing', '0008_alter_draftchangelogrecord_options_and_more'), + ('xblock_new', '0005_unique_def_ids'), + ] + + operations = [ + migrations.CreateModel( + name='Block', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('key', opaque_keys.edx.django.models.UsageKeyField(max_length=255, unique=True)), + ('children', models.JSONField(default=None, null=True)), + ('entity', models.ForeignKey(on_delete=django.db.models.deletion.RESTRICT, to='oel_publishing.publishableentity')), + ], + ), + migrations.CreateModel( + name='LearningContext', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('key', opaque_keys.edx.django.models.LearningContextKeyField(max_length=255, unique=True)), + ('root', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='xblock_new.block')), + ], + ), + migrations.AddField( + model_name='block', + name='learning_context', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xblock_new.learningcontext'), + ), + ] diff --git a/openedx/core/djangoapps/xblock/migrations/0007_remove_block_children_and_more.py b/openedx/core/djangoapps/xblock/migrations/0007_remove_block_children_and_more.py new file mode 100644 index 000000000000..792735db52e7 --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0007_remove_block_children_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.22 on 2025-06-21 03:24 + +from django.db import migrations +import jsonfield.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('xblock_new', '0006_block_learningcontext_block_learning_context'), + ] + + operations = [ + migrations.RemoveField( + model_name='block', + name='children', + ), + migrations.AddField( + model_name='xblockversionfielddata', + name='children', + field=jsonfield.fields.JSONField(default=None, help_text='XBlock children scope fields as JSON'), + ), + ] diff --git a/openedx/core/djangoapps/xblock/migrations/0008_learningcorelearningcontext_and_more.py b/openedx/core/djangoapps/xblock/migrations/0008_learningcorelearningcontext_and_more.py new file mode 100644 index 000000000000..e2d8b5628e94 --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0008_learningcorelearningcontext_and_more.py @@ -0,0 +1,32 @@ +# Generated by Django 
4.2.22 on 2025-06-21 03:39 + +from django.db import migrations, models +import django.db.models.deletion +import opaque_keys.edx.django.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('xblock_new', '0007_remove_block_children_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='LearningCoreLearningContext', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('key', opaque_keys.edx.django.models.LearningContextKeyField(max_length=255, unique=True)), + ('use_learning_core', models.BooleanField(default=True)), + ('root', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='xblock_new.block')), + ], + ), + migrations.AlterField( + model_name='block', + name='learning_context', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='xblock_new.learningcorelearningcontext'), + ), + migrations.DeleteModel( + name='LearningContext', + ), + ] diff --git a/openedx/core/djangoapps/xblock/migrations/0009_alter_block_entity.py b/openedx/core/djangoapps/xblock/migrations/0009_alter_block_entity.py new file mode 100644 index 000000000000..385d9851605e --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0009_alter_block_entity.py @@ -0,0 +1,20 @@ +# Generated by Django 4.2.22 on 2025-06-21 04:29 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('oel_publishing', '0008_alter_draftchangelogrecord_options_and_more'), + ('xblock_new', '0008_learningcorelearningcontext_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='block', + name='entity', + field=models.OneToOneField(on_delete=django.db.models.deletion.RESTRICT, to='oel_publishing.publishableentity'), + ), + ] diff --git a/openedx/core/djangoapps/xblock/migrations/0010_alter_block_learning_context.py b/openedx/core/djangoapps/xblock/migrations/0010_alter_block_learning_context.py new file mode 100644 index 000000000000..edcdf9e24fbf --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0010_alter_block_learning_context.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.22 on 2025-06-21 05:14 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('xblock_new', '0009_alter_block_entity'), + ] + + operations = [ + migrations.AlterField( + model_name='block', + name='learning_context', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='blocks', to='xblock_new.learningcorelearningcontext'), + ), + ] diff --git a/openedx/core/djangoapps/xblock/migrations/0011_remove_learningcorelearningcontext_root.py b/openedx/core/djangoapps/xblock/migrations/0011_remove_learningcorelearningcontext_root.py new file mode 100644 index 000000000000..9e33e1964f8e --- /dev/null +++ b/openedx/core/djangoapps/xblock/migrations/0011_remove_learningcorelearningcontext_root.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.22 on 2025-06-23 13:45 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('xblock_new', '0010_alter_block_learning_context'), + ] + + operations = [ + migrations.RemoveField( + model_name='learningcorelearningcontext', + name='root', + ), + ] diff --git a/openedx/core/djangoapps/xblock/models.py b/openedx/core/djangoapps/xblock/models.py index 761fe3129a2a..c323e0cbf525 100644 --- 
a/openedx/core/djangoapps/xblock/models.py +++ b/openedx/core/djangoapps/xblock/models.py @@ -1,7 +1,10 @@ """Models for XBlock runtime.""" +from django.db import models from jsonfield.fields import JSONField -from openedx_learning.api.authoring_models import PublishableEntityVersionMixin +from openedx_learning.api.authoring_models import PublishableEntity, PublishableEntityVersionMixin +from opaque_keys.edx.django.models import CourseKeyField, LearningContextKeyField, UsageKeyField +import bson class XBlockVersionFieldData(PublishableEntityVersionMixin): """ @@ -12,6 +15,18 @@ class XBlockVersionFieldData(PublishableEntityVersionMixin): When block field data changes, a new ComponentVersion and corresponding XBlockVersionFieldData record are created by the LearningCoreXBlockRuntime. """ + def generate_object_id_str(): + # TODO: This should be a proper field type + return str(bson.ObjectId()) + + # This exists entirely for the Modulestore shim layer. We can get rid of it + # when we've moved entirely off of SplitModuleStore. + definition_object_id = models.CharField( + max_length=24, + null=False, + unique=True, + default=generate_object_id_str, + ) content = JSONField( default=dict, @@ -23,9 +38,45 @@ class XBlockVersionFieldData(PublishableEntityVersionMixin): help_text="XBlock settings scope fields as JSON" ) + children = JSONField( + default=None, + help_text="XBlock children scope fields as JSON" + ) + class Meta: verbose_name = "XBlock Version Field Data" verbose_name_plural = "XBlock Version Field Data" def __str__(self): return f"Field data for {self.publishable_entity_version}" + + +class LearningCoreCourseStructure(models.Model): + course_key = CourseKeyField(max_length=255) + structure = models.BinaryField() + + class Meta: + constraints = [ + models.UniqueConstraint("course_key", name="xblock_lccs_uniq_course_key") + ] + + +class LearningCoreLearningContext(models.Model): + key = LearningContextKeyField(max_length=255, unique=True) + + # This is a way for us to turn off LC as a backend both for rollback + # purposes, but also to temporarily disable when doing a re-import. + use_learning_core = models.BooleanField(default=True) + + def __str__(self): + return str(self.key) + + +class Block(models.Model): + learning_context = models.ForeignKey( + LearningCoreLearningContext, + on_delete=models.CASCADE, + related_name="blocks", + ) + key = UsageKeyField(max_length=255, unique=True) + entity = models.OneToOneField(PublishableEntity, on_delete=models.RESTRICT) diff --git a/xmodule/modulestore/split_mongo/mongo_connection.py b/xmodule/modulestore/split_mongo/mongo_connection.py index 4654511cfe01..36b7019b593a 100644 --- a/xmodule/modulestore/split_mongo/mongo_connection.py +++ b/xmodule/modulestore/split_mongo/mongo_connection.py @@ -22,6 +22,7 @@ from pymongo.errors import DuplicateKeyError # pylint: disable=unused-import from edx_django_utils import monitoring from edx_django_utils.cache import RequestCache +from opaque_keys.edx.keys import CourseKey from common.djangoapps.split_modulestore_django.models import SplitModulestoreCourseIndex from xmodule.exceptions import HeartbeatFailure @@ -153,6 +154,13 @@ def structure_from_mongo(structure, course_context=None): course_context (CourseKey): For metrics gathering, the CourseKey for the course that this data is being processed for. 
""" + import pprint + + #with open("raw_struct.txt", "w") as struct_file: + # struct_file.write(f"Course: {course_context}\n\n") + # printer = pprint.PrettyPrinter(indent=2, stream=struct_file) + # printer.pprint(structure) + with TIMER.timer('structure_from_mongo', course_context) as tagger: tagger.measure('blocks', len(structure['blocks'])) @@ -164,6 +172,11 @@ def structure_from_mongo(structure, course_context=None): new_blocks[BlockKey(block['block_type'], block.pop('block_id'))] = BlockData(**block) structure['blocks'] = new_blocks + #with open("struct.txt", "w") as struct_file: + # struct_file.write(f"Course: {course_context}\n\n") + # printer = pprint.PrettyPrinter(indent=2, stream=struct_file) + # printer.pprint(structure) + return structure @@ -341,13 +354,25 @@ def get_structure(self, key, course_context=None): cache = CourseStructureCache() structure = cache.get(key, course_context) + + structure = None # force cache miss for now + tagger_get_structure.tag(from_cache=str(bool(structure)).lower()) if not structure: # Always log cache misses, because they are unexpected tagger_get_structure.sample_rate = 1 with TIMER.timer("get_structure.find_one", course_context) as tagger_find_one: - doc = self.structures.find_one({'_id': key}) + # Reminder: course_context includes the branch information + from openedx.core.djangoapps.xblock.api import get_structure_for_course, learning_core_backend_enabled_for_course + + if learning_core_backend_enabled_for_course(course_context): + log.info(f"Getting Structure doc from Learning Core: {course_context}: {key}") + doc = get_structure_for_course(course_context) + else: + log.info(f"Getting Structure doc from ModuleStore: {course_context}: {key}") + doc = self.structures.find_one({'_id': key}) + if doc is None: log.warning( "doc was None when attempting to retrieve structure for item with key %s", @@ -537,12 +562,31 @@ def delete_course_index(self, course_key): } return self.course_index.delete_one(query) - def get_definition(self, key, course_context=None): + def get_definition(self, key, course_context: CourseKey | None=None): """ Get the definition from the persistence mechanism whose id is the given key """ + from openedx.core.djangoapps.xblock.api import get_definition_doc, learning_core_backend_enabled_for_course + + log.info(f"Fetching Definition: {key}") with TIMER.timer("get_definition", course_context) as tagger: - definition = self.definitions.find_one({'_id': key}) + # Note that sometimes course_context comes in with version/branch + # information, and sometimes it doesn't. So we can't rely on that to + # only enable the LC shim for the published branch. We also can't do + # switching from Studio to LMS because Studio needs to build things + # off of course publish. + definition = None + if learning_core_backend_enabled_for_course(course_context): + log.info(f"Getting Definition doc from Learning Core: {course_context}: {key}") + definition = get_definition_doc(key) + + if not definition: + # This fallback exists for the random standalone blocks that + # courses expect. Change this to an "else" branch when we're + # importing those for real. + log.info(f"Getting Definition doc from ModuleStore: {course_context}: {key}") + definition = self.definitions.find_one({'_id': key}) + tagger.measure("fields", len(definition['fields'])) tagger.tag(block_type=definition['block_type']) return definition @@ -551,6 +595,7 @@ def get_definitions(self, definitions, course_context=None): """ Retrieve all definitions listed in `definitions`. 
""" + log.info(f"Fetching Definitions: {definitions}") with TIMER.timer("get_definitions", course_context) as tagger: tagger.measure('definitions', len(definitions)) definitions = self.definitions.find({'_id': {'$in': definitions}}) diff --git a/xmodule/modulestore/split_mongo/split.py b/xmodule/modulestore/split_mongo/split.py index e69a2ca0e53c..60d68f4b2af8 100644 --- a/xmodule/modulestore/split_mongo/split.py +++ b/xmodule/modulestore/split_mongo/split.py @@ -432,6 +432,7 @@ def get_definitions(self, course_key, ids): ids (list): A list of definition ids """ definitions = [] + print(ids) ids = set(ids) bulk_write_record = self._get_bulk_ops_record(course_key)