diff --git a/cms/djangoapps/contentstore/management/commands/backfill_orgs_and_org_courses.py b/cms/djangoapps/contentstore/management/commands/backfill_orgs_and_org_courses.py
index 6608c190afc0..cba7ab452b0a 100644
--- a/cms/djangoapps/contentstore/management/commands/backfill_orgs_and_org_courses.py
+++ b/cms/djangoapps/contentstore/management/commands/backfill_orgs_and_org_courses.py
@@ -273,9 +273,9 @@ def find_orgslug_libraryid_pairs() -> Set[Tuple[str, str]]:
Note that this only considers "version 1" (aka "legacy" or "modulestore-based")
content libraries.
- We do not consider "version 2" (aka "blockstore-based") content libraries,
- because those require a database-level link to their authoring organization,
- and thus would not need backfilling via this command.
+ We do not consider "version 2" content libraries, because those require a
+ database-level link to their authoring organization, and thus would not need
+ backfilling via this command.
Returns: set[tuple[str, str]]
"""
diff --git a/cms/djangoapps/contentstore/tasks.py b/cms/djangoapps/contentstore/tasks.py
index b8406ff61e20..e3a5e063929a 100644
--- a/cms/djangoapps/contentstore/tasks.py
+++ b/cms/djangoapps/contentstore/tasks.py
@@ -922,15 +922,14 @@ def _create_metadata(v1_library_key, collection_uuid):
library_license = '' # '' = ALL_RIGHTS_RESERVED
with atomic():
return v2contentlib_api.create_library(
- collection,
- library_type,
org,
slug,
title,
description,
allow_public_learning,
allow_public_read,
- library_license
+ library_license,
+ library_type,
)
diff --git a/cms/djangoapps/contentstore/views/tests/test_clipboard_paste.py b/cms/djangoapps/contentstore/views/tests/test_clipboard_paste.py
index 011c9a10e561..d137959780c6 100644
--- a/cms/djangoapps/contentstore/views/tests/test_clipboard_paste.py
+++ b/cms/djangoapps/contentstore/views/tests/test_clipboard_paste.py
@@ -4,7 +4,6 @@
APIs.
"""
import ddt
-from django.test import LiveServerTestCase
from opaque_keys.edx.keys import UsageKey
from rest_framework.test import APIClient
from organizations.models import Organization
@@ -13,9 +12,7 @@
from xmodule.modulestore.tests.factories import BlockFactory, CourseFactory, ToyCourseFactory
from cms.djangoapps.contentstore.utils import reverse_usage_url
-from openedx.core.lib.blockstore_api.tests.base import BlockstoreAppTestMixin
from openedx.core.djangoapps.content_libraries import api as library_api
-from blockstore.apps import api as blockstore_api
CLIPBOARD_ENDPOINT = "/api/content-staging/v1/clipboard/"
XBLOCK_ENDPOINT = "/xblock/"
@@ -214,7 +211,7 @@ def test_paste_with_assets(self):
assert source_pic2_hash != dest_pic2_hash # Because there was a conflict, this file was unchanged.
-class ClipboardLibraryContentPasteTestCase(BlockstoreAppTestMixin, LiveServerTestCase, ModuleStoreTestCase):
+class ClipboardLibraryContentPasteTestCase(ModuleStoreTestCase):
"""
Test Clipboard Paste functionality with library content
"""
@@ -229,7 +226,6 @@ def setUp(self):
self.store = modulestore()
# Create a content library:
library = library_api.create_library(
- collection_uuid=blockstore_api.create_collection("Collection").uuid,
library_type=library_api.COMPLEX,
org=Organization.objects.create(name="Test Org", short_name="CL-TEST"),
slug="lib",
diff --git a/cms/envs/common.py b/cms/envs/common.py
index 4a6c6cc68ead..754e6e9755a5 100644
--- a/cms/envs/common.py
+++ b/cms/envs/common.py
@@ -1816,7 +1816,13 @@
# alternative swagger generator for CMS API
'drf_spectacular',
+
'openedx_events',
+
+ # Learning Core Apps, used by v2 content libraries (content_libraries app)
+ "openedx_learning.core.components",
+ "openedx_learning.core.contents",
+ "openedx_learning.core.publishing",
]
diff --git a/lms/envs/common.py b/lms/envs/common.py
index c28b68be4099..cfbdd70132b1 100644
--- a/lms/envs/common.py
+++ b/lms/envs/common.py
@@ -40,7 +40,6 @@
# and throws spurious errors. Therefore, we disable invalid-name checking.
# pylint: disable=invalid-name
-
import importlib.util
import sys
import os
@@ -3343,9 +3342,16 @@ def _make_locale_paths(settings): # pylint: disable=missing-function-docstring
# Notifications
'openedx.core.djangoapps.notifications',
+
'openedx_events',
+
+ # Learning Core Apps, used by v2 content libraries (content_libraries app)
+ "openedx_learning.core.components",
+ "openedx_learning.core.contents",
+ "openedx_learning.core.publishing",
]
+
######################### CSRF #########################################
# Forwards-compatibility with Django 1.7
diff --git a/openedx/core/djangoapps/content_libraries/admin.py b/openedx/core/djangoapps/content_libraries/admin.py
index 559d2471cee3..f84cac7f62e2 100644
--- a/openedx/core/djangoapps/content_libraries/admin.py
+++ b/openedx/core/djangoapps/content_libraries/admin.py
@@ -24,12 +24,11 @@ class ContentLibraryAdmin(admin.ModelAdmin):
"library_key",
"org",
"slug",
- "bundle_uuid",
"allow_public_learning",
"allow_public_read",
"authorized_lti_configs",
)
- list_display = ("slug", "org", "bundle_uuid")
+ list_display = ("slug", "org",)
inlines = (ContentLibraryPermissionInline, )
def get_readonly_fields(self, request, obj=None):
@@ -37,6 +36,6 @@ def get_readonly_fields(self, request, obj=None):
Ensure that 'slug' and 'uuid' cannot be edited after creation.
"""
if obj:
- return ["library_key", "org", "slug", "bundle_uuid"]
+ return ["library_key", "org", "slug"]
else:
return ["library_key", ]
diff --git a/openedx/core/djangoapps/content_libraries/api.py b/openedx/core/djangoapps/content_libraries/api.py
index 26e7c652d983..d815782fe5f2 100644
--- a/openedx/core/djangoapps/content_libraries/api.py
+++ b/openedx/core/djangoapps/content_libraries/api.py
@@ -52,8 +52,7 @@
import abc
import collections
-from datetime import datetime
-from uuid import UUID
+from datetime import datetime, timezone
import base64
import hashlib
import logging
@@ -63,14 +62,15 @@
from django.conf import settings
from django.contrib.auth.models import AbstractUser, Group
-from django.core.exceptions import PermissionDenied
+from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
from django.core.validators import validate_unicode_slug
from django.db import IntegrityError, transaction
+from django.db.models import Q, QuerySet
from django.utils.translation import gettext as _
+from edx_rest_api_client.client import OAuthAPIClient
from lxml import etree
-from opaque_keys.edx.keys import LearningContextKey, UsageKey
+from opaque_keys.edx.keys import UsageKey, UsageKeyV2
from opaque_keys.edx.locator import (
- BundleDefinitionLocator,
LibraryLocatorV2,
LibraryUsageLocatorV2,
LibraryLocator as LibraryLocatorV1
@@ -85,59 +85,25 @@
LIBRARY_BLOCK_DELETED,
LIBRARY_BLOCK_UPDATED,
)
+from openedx_learning.core.publishing import api as publishing_api
+from openedx_learning.core.contents import api as contents_api
+from openedx_learning.core.components import api as components_api
+from openedx_learning.core.components.models import Component
+from openedx_tagging.core.tagging import api as tagging_api
from organizations.models import Organization
from xblock.core import XBlock
from xblock.exceptions import XBlockNotFoundError
-from edx_rest_api_client.client import OAuthAPIClient
-from openedx.core.djangoapps.content_libraries import permissions
-# pylint: disable=unused-import
-from openedx.core.djangoapps.content_libraries.constants import (
- ALL_RIGHTS_RESERVED,
- CC_4_BY,
- COMPLEX,
- DRAFT_NAME,
- PROBLEM,
- VIDEO,
-)
-from openedx.core.djangoapps.content_libraries.library_bundle import LibraryBundle
-from openedx.core.djangoapps.content_libraries.models import (
- ContentLibrary,
- ContentLibraryPermission,
- ContentLibraryBlockImportTask,
-)
-from openedx.core.djangoapps.xblock.api import (
- get_block_display_name,
- get_learning_context_impl,
- load_block,
- XBlockInclude,
-)
+from openedx.core.djangoapps.xblock.api import get_component_from_usage_key, xblock_type_display_name
from openedx.core.lib.xblock_serializer.api import serialize_modulestore_block_for_blockstore
-from openedx.core.lib.blockstore_api import (
- get_bundle,
- get_bundles,
- get_bundle_file_data,
- get_bundle_files,
- get_or_create_bundle_draft,
- create_bundle,
- update_bundle,
- delete_bundle,
- write_draft_file,
- set_draft_link,
- commit_draft,
- delete_draft,
- BundleNotFound,
-)
-from openedx.core.djangolib import blockstore_cache
-from openedx.core.djangolib.blockstore_cache import BundleCache
from xmodule.library_root_xblock import LibraryRoot as LibraryRootV1
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
-from openedx_tagging.core.tagging import api as tagging_api
-
-from . import tasks
+from . import permissions, tasks
+from .constants import ALL_RIGHTS_RESERVED, COMPLEX
+from .models import ContentLibrary, ContentLibraryPermission, ContentLibraryBlockImportTask
log = logging.getLogger(__name__)
@@ -187,7 +153,6 @@ class ContentLibraryMetadata:
Class that represents the metadata about a content library.
"""
key = attr.ib(type=LibraryLocatorV2)
- bundle_uuid = attr.ib(type=UUID)
title = attr.ib("")
description = attr.ib("")
num_blocks = attr.ib(0)
@@ -232,11 +197,25 @@ class LibraryXBlockMetadata:
Class that represents the metadata about an XBlock in a content library.
"""
usage_key = attr.ib(type=LibraryUsageLocatorV2)
- def_key = attr.ib(type=BundleDefinitionLocator)
display_name = attr.ib("")
has_unpublished_changes = attr.ib(False)
tags_count = attr.ib(0)
+ @classmethod
+ def from_component(cls, library_key, component):
+ """
+ Construct a LibraryXBlockMetadata from a Component object.
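+
+        The usage key is built from the Component's type name and local key. For
+        example, a ``video`` Component with local key ``1`` in the "VideoTeam"
+        org's "python-intro" library maps to ``lb:VideoTeam:python-intro:video:1``.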
+ """
+ return cls(
+ usage_key=LibraryUsageLocatorV2(
+ library_key,
+ component.component_type.name,
+ component.local_key,
+ ),
+ display_name=component.versioning.draft.title,
+ has_unpublished_changes=component.versioning.has_unpublished_changes
+ )
+
@attr.s
class LibraryXBlockStaticFile:
@@ -262,30 +241,11 @@ class LibraryXBlockType:
display_name = attr.ib("")
-@attr.s
-class LibraryBundleLink:
- """
- A link from a content library blockstore bundle to another blockstore bundle
- """
- # Bundle that is linked to
- bundle_uuid = attr.ib(type=UUID)
- # Link name (slug)
- id = attr.ib("")
- # What version of this bundle we are currently linking to.
- version = attr.ib(0)
- # What the latest version of the linked bundle is:
- # (if latest_version > version), the link can be "updated" to the latest version.
- latest_version = attr.ib(0)
- # Opaque key: If the linked bundle is a library or other learning context whose opaque key we can deduce, then this
- # is the key. If we don't know what type of blockstore bundle this link is pointing to, then this is blank.
- opaque_key = attr.ib(type=LearningContextKey, default=None)
-
-
# General APIs
# ============
-def get_libraries_for_user(user, org=None, library_type=None):
+def get_libraries_for_user(user, org=None, library_type=None, text_search=None):
"""
Return content libraries that the user has permission to view.
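+
+    The optional ``org``, ``library_type``, and ``text_search`` arguments narrow
+    the results; ``text_search`` matches against the library slug, the org short
+    name, and the linked LearningPackage's title and description.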
"""
@@ -294,58 +254,52 @@ def get_libraries_for_user(user, org=None, library_type=None):
filter_kwargs['org__short_name'] = org
if library_type:
filter_kwargs['type'] = library_type
- qs = ContentLibrary.objects.filter(**filter_kwargs)
+ qs = ContentLibrary.objects.filter(**filter_kwargs) \
+ .select_related('learning_package', 'org') \
+ .order_by('org__short_name', 'slug')
+
+ if text_search:
+ qs = qs.filter(
+ Q(slug__icontains=text_search) |
+ Q(org__short_name__icontains=text_search) |
+ Q(learning_package__title__icontains=text_search) |
+ Q(learning_package__description__icontains=text_search)
+ )
+
return permissions.perms[permissions.CAN_VIEW_THIS_CONTENT_LIBRARY].filter(user, qs)
def get_metadata(queryset, text_search=None):
"""
- Take a list of ContentLibrary objects and return metadata from blockstore.
+ Take a list of ContentLibrary objects and return metadata from Learning Core.
"""
- uuids = [lib.bundle_uuid for lib in queryset]
- bundles = get_bundles(uuids=uuids, text_search=text_search)
-
if text_search:
- # Bundle APIs can't apply text_search on a bundle's org, so including those results here
- queryset_org_search = queryset.filter(org__short_name__icontains=text_search)
- if queryset_org_search.exists():
- uuids_org_search = [lib.bundle_uuid for lib in queryset_org_search]
- bundles += get_bundles(uuids=uuids_org_search)
-
- bundle_dict = {
- bundle.uuid: {
- 'uuid': bundle.uuid,
- 'title': bundle.title,
- 'description': bundle.description,
- 'version': bundle.latest_version,
- }
- for bundle in bundles
- }
- metadata = [
- bundle_dict[uuid]
- if uuid in bundle_dict
- else None
- for uuid in uuids
- ]
+ queryset = queryset.filter(org__short_name__icontains=text_search)
libraries = [
+ # TODO: Do we really need these fields for the library listing view?
+ # It's actually going to be pretty expensive to compute this over a
+ # large list. If we do need it, it might need to go into a denormalized
+ # form, e.g. a new table for stats that it can join to, even if we don't
+ # guarantee accuracy (because of possible race conditions).
ContentLibraryMetadata(
key=lib.library_key,
- bundle_uuid=metadata[i]['uuid'],
- title=metadata[i]['title'],
+ title=lib.learning_package.title if lib.learning_package else "",
type=lib.type,
- description=metadata[i]['description'],
- version=metadata[i]['version'],
+ description="",
+ version=0,
allow_public_learning=lib.allow_public_learning,
allow_public_read=lib.allow_public_read,
- num_blocks=metadata[i].get('num_blocks'),
- last_published=metadata[i].get('last_published'),
- has_unpublished_changes=metadata[i].get('has_unpublished_changes'),
- has_unpublished_deletes=metadata[i].get('has_unpublished_deletes'),
+
+ # These are currently dummy values to maintain the REST API contract
+ # while we shift to Learning Core models.
+ num_blocks=0,
+ last_published=None,
+ has_unpublished_changes=False,
+ has_unpublished_deletes=False,
license=lib.license,
)
- for i, lib in enumerate(queryset)
- if metadata[i] is not None
+ for lib in queryset
]
return libraries
@@ -373,20 +327,44 @@ def get_library(library_key):
Raises ContentLibraryNotFound if the library doesn't exist.
"""
ref = ContentLibrary.objects.get_by_key(library_key)
- bundle_metadata = get_bundle(ref.bundle_uuid)
- lib_bundle = LibraryBundle(library_key, ref.bundle_uuid, draft_name=DRAFT_NAME)
- num_blocks = len(lib_bundle.get_top_level_usages())
- last_published = lib_bundle.get_last_published_time()
- (has_unpublished_changes, has_unpublished_deletes) = lib_bundle.has_changes()
+ learning_package = ref.learning_package
+ num_blocks = publishing_api.get_all_drafts(learning_package.id).count()
+ last_publish_log = publishing_api.get_last_publish(learning_package.id)
+ has_unpublished_changes = publishing_api.get_entities_with_unpublished_changes(learning_package.id) \
+ .exists()
+
+    # TODO: I'm doing this to match the existing behavior, but it's something we
+    # should remove. It exists to accommodate some complexities with how
+    # Blockstore staged changes, but Learning Core works differently, and
+    # has_unpublished_changes should be sufficient.
+ has_unpublished_deletes = publishing_api.get_entities_with_unpublished_deletes(learning_package.id) \
+ .exists()
+
+    # Learning Core doesn't really have a notion of a global version number, but
+ # we can sort of approximate it by using the primary key of the last publish
+ # log entry, in the sense that it will be a monotonically increasing
+ # integer, though there will be large gaps. We use 0 to denote that nothing
+ # has been done, since that will never be a valid value for a PublishLog pk.
+ #
+ # That being said, we should figure out if we really even want to keep a top
+ # level version indicator for the Library as a whole. In the v1 libs
+    # implementation, this served as a way to know whether or not there was an
+ # updated version of content that a course could pull in. But more recently,
+ # we've decided to do those version references at the level of the
+ # individual blocks being used, since a Learning Core backed library is
+ # intended to be used for many LibraryContentBlocks and not 1:1 like v1
+ # libraries. The top level version stays for now because LibraryContentBlock
+ # uses it, but that should hopefully change before the Redwood release.
+ version = 0 if last_publish_log is None else last_publish_log.pk
+
return ContentLibraryMetadata(
key=library_key,
- bundle_uuid=ref.bundle_uuid,
- title=bundle_metadata.title,
+ title=learning_package.title,
type=ref.type,
- description=bundle_metadata.description,
+ description=ref.learning_package.description,
num_blocks=num_blocks,
- version=bundle_metadata.latest_version,
- last_published=last_published,
+ version=version,
+ last_published=None if last_publish_log is None else last_publish_log.published_at,
allow_lti=ref.allow_lti,
allow_public_learning=ref.allow_public_learning,
allow_public_read=ref.allow_public_read,
@@ -397,7 +375,6 @@ def get_library(library_key):
def create_library(
- collection_uuid,
org,
slug,
title,
@@ -426,33 +403,29 @@ def create_library(
Returns a ContentLibraryMetadata instance.
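+
+    A LearningPackage is created alongside the ContentLibrary row (keyed by the
+    serialized library key) to hold the library's authored content.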
"""
- assert isinstance(collection_uuid, UUID)
assert isinstance(org, Organization)
- assert not transaction.get_autocommit(), (
- "Call within a django.db.transaction.atomic block so that all created objects are rolled back on error."
- )
-
validate_unicode_slug(slug)
- # First, create the blockstore bundle:
- bundle = create_bundle(
- collection_uuid,
- slug=slug,
- title=title,
- description=description,
- )
- # Now create the library reference in our database:
try:
- ref = ContentLibrary.objects.create(
- org=org,
- slug=slug,
- type=library_type,
- bundle_uuid=bundle.uuid,
- allow_public_learning=allow_public_learning,
- allow_public_read=allow_public_read,
- license=library_license,
- )
+ with transaction.atomic():
+ ref = ContentLibrary.objects.create(
+ org=org,
+ slug=slug,
+ type=library_type,
+ allow_public_learning=allow_public_learning,
+ allow_public_read=allow_public_read,
+ license=library_license,
+ )
+ learning_package = publishing_api.create_learning_package(
+ key=str(ref.library_key),
+ title=title,
+ description=description,
+ )
+ ref.learning_package = learning_package
+ ref.save()
+
except IntegrityError:
raise LibraryAlreadyExists(slug) # lint-amnesty, pylint: disable=raise-missing-from
+
CONTENT_LIBRARY_CREATED.send_event(
content_library=ContentLibraryData(
library_key=ref.library_key
@@ -460,7 +433,6 @@ def create_library(
)
return ContentLibraryMetadata(
key=ref.library_key,
- bundle_uuid=bundle.uuid,
title=title,
type=library_type,
description=description,
@@ -511,6 +483,7 @@ def set_library_user_permissions(library_key, user, access_level):
if current_grant and current_grant.access_level == AccessLevel.ADMIN_LEVEL:
if not ref.permission_grants.filter(access_level=AccessLevel.ADMIN_LEVEL).exclude(user_id=user.id).exists():
raise LibraryPermissionIntegrityError(_('Cannot change or remove the access level for the only admin.'))
+
if access_level is None:
ref.permission_grants.filter(user=user).delete()
else:
@@ -528,6 +501,7 @@ def set_library_group_permissions(library_key, group, access_level):
access_level should be one of the AccessLevel values defined above.
"""
ref = ContentLibrary.objects.get_by_key(library_key)
+
if access_level is None:
ref.permission_grants.filter(group=group).delete()
else:
@@ -553,89 +527,70 @@ def update_library(
A value of None means "don't change".
"""
- ref = ContentLibrary.objects.get_by_key(library_key)
+ lib_obj_fields = [
+ allow_public_learning, allow_public_read, library_type, library_license
+ ]
+ lib_obj_changed = any(field is not None for field in lib_obj_fields)
+ learning_pkg_changed = any(field is not None for field in [title, description])
+
+ # If nothing's changed, just return early.
+ if (not lib_obj_changed) and (not learning_pkg_changed):
+ return
+
+ content_lib = ContentLibrary.objects.get_by_key(library_key)
+
+ with transaction.atomic():
+ # We need to make updates to both the ContentLibrary and its linked
+ # LearningPackage.
+ if lib_obj_changed:
+ if allow_public_learning is not None:
+ content_lib.allow_public_learning = allow_public_learning
+ if allow_public_read is not None:
+ content_lib.allow_public_read = allow_public_read
+ if library_type is not None:
+ # TODO: Get rid of this field entirely, and remove library_type
+ # from any functions that take it as an argument.
+                content_lib.type = library_type
+            if library_license is not None:
+                content_lib.license = library_license
+ content_lib.save()
+
+ if learning_pkg_changed:
+ publishing_api.update_learning_package(
+ content_lib.learning_package_id,
+ title=title,
+ description=description,
+ )
- # Update MySQL model:
- changed = False
- if allow_public_learning is not None:
- ref.allow_public_learning = allow_public_learning
- changed = True
- if allow_public_read is not None:
- ref.allow_public_read = allow_public_read
- changed = True
- if library_type is not None:
- if library_type not in (COMPLEX, ref.type):
- lib_bundle = LibraryBundle(library_key, ref.bundle_uuid, draft_name=DRAFT_NAME)
- (has_unpublished_changes, has_unpublished_deletes) = lib_bundle.has_changes()
- if has_unpublished_changes or has_unpublished_deletes:
- raise IncompatibleTypesError(
- _(
- 'You may not change a library\'s type to {library_type} if it still has unpublished changes.'
- ).format(library_type=library_type)
- )
- for block in get_library_blocks(library_key):
- if block.usage_key.block_type != library_type:
- raise IncompatibleTypesError(
- _(
- 'You can only set a library to {library_type} if all existing blocks are of that type. '
- 'Found incompatible block {block_id} with type {block_type}.'
- ).format(
- library_type=library_type,
- block_type=block.usage_key.block_type,
- block_id=block.usage_key.block_id,
- ),
- )
- ref.type = library_type
-
- changed = True
- if library_license is not None:
- ref.license = library_license
- changed = True
- if changed:
- ref.save()
- # Update Blockstore:
- fields = {
- # We don't ever read the "slug" value from the Blockstore bundle, but
- # we might as well always do our best to keep it in sync with the "slug"
- # value in the LMS that we do use.
- "slug": ref.slug,
- }
- if title is not None:
- assert isinstance(title, str)
- fields["title"] = title
- if description is not None:
- assert isinstance(description, str)
- fields["description"] = description
- update_bundle(ref.bundle_uuid, **fields)
CONTENT_LIBRARY_UPDATED.send_event(
content_library=ContentLibraryData(
- library_key=ref.library_key
+ library_key=content_lib.library_key
)
)
+ return content_lib
+
def delete_library(library_key):
"""
Delete a content library
"""
- ref = ContentLibrary.objects.get_by_key(library_key)
- bundle_uuid = ref.bundle_uuid
- # We can't atomically delete the ref and bundle at the same time.
- # Delete the ref first, then the bundle. An error may cause the bundle not
- # to get deleted, but the library will still be effectively gone from the
- # system, which is a better state than having a reference to a library with
- # no backing blockstore bundle.
- ref.delete()
+ with transaction.atomic():
+ content_lib = ContentLibrary.objects.get_by_key(library_key)
+ learning_package = content_lib.learning_package
+ content_lib.delete()
+
+ # TODO: Move the LearningPackage delete() operation to an API call
+ # TODO: We should eventually detach the LearningPackage and delete it
+ # asynchronously, especially if we need to delete a bunch of stuff
+ # on the filesystem for it.
+ learning_package.delete()
+
CONTENT_LIBRARY_DELETED.send_event(
content_library=ContentLibraryData(
- library_key=ref.library_key
+ library_key=library_key
)
)
- try:
- delete_bundle(bundle_uuid)
- except:
- log.exception("Failed to delete blockstore bundle %s when deleting library. Delete it manually.", bundle_uuid)
- raise
def _get_library_component_tags_count(library_key) -> dict:
@@ -647,116 +602,109 @@ def _get_library_component_tags_count(library_key) -> dict:
return tagging_api.get_object_tag_counts(library_key_pattern, count_implicit=True)
-def get_library_blocks(library_key, text_search=None, block_types=None) -> list[LibraryXBlockMetadata]:
- """
- Get the list of top-level XBlocks in the specified library.
-
- Returns a list of LibraryXBlockMetadata objects
- """
- metadata = []
- ref = ContentLibrary.objects.get_by_key(library_key) # type: ignore[attr-defined]
- lib_bundle = LibraryBundle(library_key, ref.bundle_uuid, draft_name=DRAFT_NAME)
- usages = lib_bundle.get_top_level_usages()
- library_component_tags_count = _get_library_component_tags_count(library_key)
-
- for usage_key in usages:
- # For top-level definitions, we can go from definition key to usage key using the following, but this would
- # not work for non-top-level blocks as they may have multiple usages. Top level blocks are guaranteed to
- # have only a single usage in the library, which is part of the definition of top level block.
- def_key = lib_bundle.definition_for_usage(usage_key)
- display_name = get_block_display_name(def_key)
- text_match = (text_search is None or
- text_search.lower() in display_name.lower() or
- text_search.lower() in str(usage_key).lower())
- type_match = (block_types is None or usage_key.block_type in block_types)
- if text_match and type_match:
- metadata.append({
- "id": usage_key,
- "def_key": def_key,
- "display_name": display_name,
- "has_unpublished_changes": lib_bundle.does_definition_have_unpublished_changes(def_key),
- "tags_count": library_component_tags_count.get(str(usage_key), 0),
- })
-
- return [
- LibraryXBlockMetadata(
- usage_key=item['id'],
- def_key=item['def_key'],
- display_name=item['display_name'],
- has_unpublished_changes=item['has_unpublished_changes'],
- tags_count=item['tags_count']
- )
- for item in metadata
- ]
-
-
-def _lookup_usage_key(usage_key) -> tuple[BundleDefinitionLocator, LibraryBundle]:
+def get_library_components(library_key, text_search=None, block_types=None) -> QuerySet[Component]:
"""
- Given a LibraryUsageLocatorV2 (usage key for an XBlock in a content library)
- return the definition key and LibraryBundle
- or raise ContentLibraryBlockNotFound
+    Get the Components in a library, optionally filtered.
+
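+    Returns a QuerySet of Component objects for the library's current drafts,
+    optionally filtered by block type and by a text match against draft titles.
+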
+ TODO: Full text search needs to be implemented as a custom lookup for MySQL,
+ but it should have a fallback to still work in SQLite.
"""
- assert isinstance(usage_key, LibraryUsageLocatorV2)
- lib_context = get_learning_context_impl(usage_key)
- def_key = lib_context.definition_for_usage(usage_key, force_draft=DRAFT_NAME)
- if def_key is None:
- raise ContentLibraryBlockNotFound(usage_key)
- lib_bundle = LibraryBundle(usage_key.lib_key, def_key.bundle_uuid, draft_name=DRAFT_NAME)
- return def_key, lib_bundle
+ lib = ContentLibrary.objects.get_by_key(library_key) # type: ignore[attr-defined]
+ learning_package = lib.learning_package
+ components = components_api.get_components(
+ learning_package.id,
+ draft=True,
+ namespace='xblock.v1',
+ type_names=block_types,
+ draft_title=text_search,
+ )
+ return components
def get_library_block(usage_key) -> LibraryXBlockMetadata:
"""
- Get metadata (LibraryXBlockMetadata) about one specific XBlock in a library
+ Get metadata about (the draft version of) one specific XBlock in a library.
- To load the actual XBlock instance, use
- openedx.core.djangoapps.xblock.api.load_block()
- instead.
+ This will raise ContentLibraryBlockNotFound if there is no draft version of
+ this block (i.e. it's been soft-deleted from Studio), even if there is a
+ live published version of it in the LMS.
"""
- def_key, lib_bundle = _lookup_usage_key(usage_key)
+ try:
+ component = get_component_from_usage_key(usage_key)
+ except ObjectDoesNotExist as exc:
+ raise ContentLibraryBlockNotFound(usage_key) from exc
+
+    # The Component still exists, but it no longer has a draft version because
+    # it was soft-deleted. This is actually a weird edge case and I'm not
+ # clear on what the proper behavior should be, since (a) the published
+ # version still exists; and (b) we might want to make some queries on the
+ # block even after it's been removed, since there might be versioned
+ # references to it.
+ draft_version = component.versioning.draft
+ if not draft_version:
+ raise ContentLibraryBlockNotFound(usage_key)
+
+ published_version = component.versioning.published
+
return LibraryXBlockMetadata(
usage_key=usage_key,
- def_key=def_key,
- display_name=get_block_display_name(def_key),
- has_unpublished_changes=lib_bundle.does_definition_have_unpublished_changes(def_key),
+ display_name=draft_version.title,
+ has_unpublished_changes=(draft_version != published_version),
)
-def get_library_block_olx(usage_key):
- """
- Get the OLX source of the given XBlock.
- """
- assert isinstance(usage_key, LibraryUsageLocatorV2)
- definition_key = get_library_block(usage_key).def_key
- xml_str = get_bundle_file_data(
- bundle_uuid=definition_key.bundle_uuid, # pylint: disable=no-member
- path=definition_key.olx_path, # pylint: disable=no-member
- use_draft=DRAFT_NAME,
- ).decode('utf-8')
- return xml_str
-
-
def set_library_block_olx(usage_key, new_olx_str):
"""
Replace the OLX source of the given XBlock.
+
This is only meant for use by developers or API client applications, as
very little validation is done and this can easily result in a broken XBlock
that won't load.
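+
+    Example (with a hypothetical library block key):
+        html_block = UsageKey.from_string("lb:VideoTeam:python-intro:html:intro")
+        set_library_block_olx(html_block, '<html display_name="Intro">Hello, world!</html>')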
"""
# because this old pylint can't understand attr.ib() objects, pylint: disable=no-member
assert isinstance(usage_key, LibraryUsageLocatorV2)
+
# Make sure the block exists:
- metadata = get_library_block(usage_key)
- block_type = usage_key.block_type
- # Verify that the OLX parses, at least as generic XML:
+ _block_metadata = get_library_block(usage_key)
+
+ # Verify that the OLX parses, at least as generic XML, and the root tag is correct:
node = etree.fromstring(new_olx_str)
- if node.tag != block_type:
- raise ValueError(f"Invalid root tag in OLX, expected {block_type}")
- # Write the new XML/OLX file into the library bundle's draft
- draft = get_or_create_bundle_draft(metadata.def_key.bundle_uuid, DRAFT_NAME)
- write_draft_file(draft.uuid, metadata.def_key.olx_path, new_olx_str.encode('utf-8'))
- # Clear the bundle cache so everyone sees the new block immediately:
- BundleCache(metadata.def_key.bundle_uuid, draft_name=DRAFT_NAME).clear()
+ if node.tag != usage_key.block_type:
+ raise ValueError(
+ f"Tried to set the OLX of a {usage_key.block_type} block to a <{node.tag}> node. "
+ f"{usage_key=!s}, {new_olx_str=}"
+ )
+
+ # We're intentionally NOT checking if the XBlock type is installed, since
+ # this is one of the only tools you can reach for to edit content for an
+ # XBlock that's broken or missing.
+ component = get_component_from_usage_key(usage_key)
+
+    # Get the title from the new OLX (or fall back to the default display name
+    # for this XBlock type).
+ new_title = node.attrib.get(
+ "display_name",
+ xblock_type_display_name(usage_key.block_type),
+ )
+
+ now = datetime.now(tz=timezone.utc)
+
+ with transaction.atomic():
+ new_content = contents_api.get_or_create_text_content(
+ component.learning_package_id,
+ get_or_create_olx_media_type(usage_key.block_type).id,
+ text=new_olx_str,
+ created=now,
+ )
+ components_api.create_next_version(
+ component.pk,
+ title=new_title,
+ content_to_replace={
+ 'block.xml': new_content.pk,
+ },
+ created=now,
+ )
+
LIBRARY_BLOCK_UPDATED.send_event(
library_block=LibraryBlockData(
library_key=usage_key.context_key,
@@ -768,10 +716,12 @@ def set_library_block_olx(usage_key, new_olx_str):
def create_library_block(library_key, block_type, definition_id):
"""
Create a new XBlock in this library of the specified type (e.g. "html").
-
- The 'definition_id' value (which should be a string like "problem1") will be
- used as both the definition_id and the usage_id.
"""
+ # It's in the serializer as ``definition_id``, but for our purposes, it's
+ # the block_id. See the comments in ``LibraryXBlockCreationSerializer`` for
+ # more details. TODO: Change the param name once we change the serializer.
+ block_id = definition_id
+
assert isinstance(library_key, LibraryLocatorV2)
ref = ContentLibrary.objects.get_by_key(library_key)
if ref.type != COMPLEX:
@@ -781,35 +731,32 @@ def create_library_block(library_key, block_type, definition_id):
block_type=block_type, library_type=ref.type,
)
)
- lib_bundle = LibraryBundle(library_key, ref.bundle_uuid, draft_name=DRAFT_NAME)
- # Total number of blocks should not exceed the maximum allowed
- total_blocks = len(lib_bundle.get_top_level_usages())
- if total_blocks + 1 > settings.MAX_BLOCKS_PER_CONTENT_LIBRARY:
+
+ # If adding a component would take us over our max, return an error.
+ component_count = publishing_api.get_all_drafts(ref.learning_package.id).count()
+ if component_count + 1 > settings.MAX_BLOCKS_PER_CONTENT_LIBRARY:
raise BlockLimitReachedError(
- _("Library cannot have more than {} XBlocks").format(settings.MAX_BLOCKS_PER_CONTENT_LIBRARY)
+ _("Library cannot have more than {} Components").format(
+ settings.MAX_BLOCKS_PER_CONTENT_LIBRARY
+ )
)
+
# Make sure the proposed ID will be valid:
- validate_unicode_slug(definition_id)
+ validate_unicode_slug(block_id)
# Ensure the XBlock type is valid and installed:
XBlock.load_class(block_type) # Will raise an exception if invalid
# Make sure the new ID is not taken already:
- new_usage_id = definition_id # Since this is a top level XBlock, usage_id == definition_id
usage_key = LibraryUsageLocatorV2(
lib_key=library_key,
block_type=block_type,
- usage_id=new_usage_id,
+ usage_id=block_id,
)
- library_context = get_learning_context_impl(usage_key)
- if library_context.definition_for_usage(usage_key) is not None:
- raise LibraryBlockAlreadyExists(f"An XBlock with ID '{new_usage_id}' already exists")
-
- new_definition_xml = f'<{block_type}/>' # xss-lint: disable=python-wrap-html
- path = f"{block_type}/{definition_id}/definition.xml"
- # Write the new XML/OLX file into the library bundle's draft
- draft = get_or_create_bundle_draft(ref.bundle_uuid, DRAFT_NAME)
- write_draft_file(draft.uuid, path, new_definition_xml.encode('utf-8'))
- # Clear the bundle cache so everyone sees the new block immediately:
- BundleCache(ref.bundle_uuid, draft_name=DRAFT_NAME).clear()
+
+ if _component_exists(usage_key):
+ raise LibraryBlockAlreadyExists(f"An XBlock with ID '{usage_key}' already exists")
+
+ _create_component_for_block(ref, usage_key)
+
# Now return the metadata about the new block:
LIBRARY_BLOCK_CREATED.send_event(
library_block=LibraryBlockData(
@@ -817,113 +764,112 @@ def create_library_block(library_key, block_type, definition_id):
usage_key=usage_key
)
)
+
return get_library_block(usage_key)
-def delete_library_block(usage_key, remove_from_parent=True):
+def _component_exists(usage_key: UsageKeyV2) -> bool:
"""
- Delete the specified block from this library (and any children it has).
-
- If the block's definition (OLX file) is within this same library as the
- usage key, both the definition and the usage will be deleted.
-
- If the usage points to a definition in a linked bundle, the usage will be
- deleted but the link and the linked bundle will be unaffected.
-
- If the block is in use by some other bundle that links to this one, that
- will not prevent deletion of the definition.
-
- remove_from_parent: modify the parent to remove the reference to this
- delete block. This should always be true except when this function
- calls itself recursively.
- """
- def_key, lib_bundle = _lookup_usage_key(usage_key)
- # Create a draft:
- draft_uuid = get_or_create_bundle_draft(def_key.bundle_uuid, DRAFT_NAME).uuid
- # Does this block have a parent?
- if usage_key not in lib_bundle.get_top_level_usages() and remove_from_parent:
- # Yes: this is not a top-level block.
- # First need to modify the parent to remove this block as a child.
- raise NotImplementedError
- # Does this block have children?
- block = load_block(usage_key, user=None)
- if block.has_children:
- # Next, recursively call delete_library_block(...) on each child usage
- for child_usage in block.children:
- # Specify remove_from_parent=False to avoid unnecessary work to
- # modify this block's children list when deleting each child, since
- # we're going to delete this block anyways.
- delete_library_block(child_usage, remove_from_parent=False)
- # Delete the definition:
- if def_key.bundle_uuid == lib_bundle.bundle_uuid:
- # This definition is in the library, so delete it:
- path_prefix = lib_bundle.olx_prefix(def_key)
- for bundle_file in get_bundle_files(def_key.bundle_uuid, use_draft=DRAFT_NAME):
- if bundle_file.path.startswith(path_prefix):
- # Delete this file, within this definition's "folder"
- write_draft_file(draft_uuid, bundle_file.path, contents=None)
- else:
- # The definition must be in a linked bundle, so we don't want to delete
-        # it; just the <xblock-include /> in the parent, which was already
- # deleted above.
- pass
- # Clear the bundle cache so everyone sees the deleted block immediately:
- lib_bundle.cache.clear()
- LIBRARY_BLOCK_DELETED.send_event(
- library_block=LibraryBlockData(
- library_key=lib_bundle.library_key,
- usage_key=usage_key
- )
+ Does a Component exist for this usage key?
+
+ This is a lower-level function that will return True if a Component object
+ exists, even if it was soft-deleted, and there is no active draft version.
+ """
+ try:
+ get_component_from_usage_key(usage_key)
+ except ObjectDoesNotExist:
+ return False
+ return True
+
+
+def get_or_create_olx_media_type(block_type: str) -> contents_api.MediaType:
+ """
+ Get or create a MediaType for the block type.
+
+ Learning Core stores all Content with a Media Type (a.k.a. MIME type). For
+ OLX, we use the "application/vnd.*" convention, per RFC 6838.
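+
+    For example, the OLX for an ``html`` block is stored under the media type
+    ``application/vnd.openedx.xblock.v1.html+xml``.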
+ """
+ return contents_api.get_or_create_media_type(
+ f"application/vnd.openedx.xblock.v1.{block_type}+xml"
)
-def create_library_block_child(parent_usage_key, block_type, definition_id) -> LibraryXBlockMetadata:
+def _create_component_for_block(content_lib, usage_key):
"""
- Create a new XBlock definition in this library of the specified type (e.g.
- "html"), and add it as a child of the specified existing block.
+ Create a Component for an XBlock type, and initialize it.
+
+ This will create a Component, along with its first ComponentVersion. The tag
+ in the OLX will have no attributes, e.g. ``. This first version
+ will be set as the current draft. This function does not publish the
+ Component.
- The 'definition_id' value (which should be a string like "problem1") will be
- used as both the definition_id and the usage_id of the child.
+ TODO: We should probably shift this to openedx.core.djangoapps.xblock.api
+    (along with its caller) since it deals with runtime storage specifics. The
+ Library-specific logic stays in this module, so "create a block for my lib"
+ should stay here, but "making a block means creating a component with
+ text data like X" goes in xblock.api.
"""
- assert isinstance(parent_usage_key, LibraryUsageLocatorV2)
- # Load the parent block to make sure it exists and so we can modify its 'children' field:
- parent_block = load_block(parent_usage_key, user=None)
- if not parent_block.has_children:
- raise ValueError("The specified parent XBlock does not allow child XBlocks.")
- # Create the new block in the library:
- metadata = create_library_block(parent_usage_key.context_key, block_type, definition_id)
- # Set the block as a child.
- # This will effectively "move" the newly created block from being a top-level block in the library to a child.
- include_data = XBlockInclude(link_id=None, block_type=block_type, definition_id=definition_id, usage_hint=None)
- parent_block.runtime.add_child_include(parent_block, include_data)
- parent_block.save()
- ref = ContentLibrary.objects.get_by_key(parent_usage_key.context_key) # type: ignore[attr-defined]
- LIBRARY_BLOCK_UPDATED.send_event(
+ display_name = xblock_type_display_name(usage_key.block_type)
+ now = datetime.now(tz=timezone.utc)
+ xml_text = f'<{usage_key.block_type} />'
+
+ learning_package = content_lib.learning_package
+
+ with transaction.atomic():
+ component_type = components_api.get_or_create_component_type(
+ "xblock.v1", usage_key.block_type
+ )
+ component, component_version = components_api.create_component_and_version(
+ learning_package.id,
+ component_type=component_type,
+ local_key=usage_key.block_id,
+ title=display_name,
+ created=now,
+ created_by=None,
+ )
+ content = contents_api.get_or_create_text_content(
+ learning_package.id,
+ get_or_create_olx_media_type(usage_key.block_type).id,
+ text=xml_text,
+ created=now,
+ )
+ components_api.create_component_version_content(
+ component_version.pk,
+ content.id,
+ key="block.xml",
+ learner_downloadable=False
+ )
+
+
+def delete_library_block(usage_key, remove_from_parent=True):
+ """
+ Delete the specified block from this library (soft delete).
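+
+    Only the current draft is removed. If a published version of this block
+    exists, it remains live until the library's changes are published again.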
+ """
+ component = get_component_from_usage_key(usage_key)
+ publishing_api.soft_delete_draft(component.pk)
+
+ LIBRARY_BLOCK_DELETED.send_event(
library_block=LibraryBlockData(
- library_key=ref.library_key,
- usage_key=metadata.usage_key
+ library_key=usage_key.context_key,
+ usage_key=usage_key
)
)
- return metadata
-def get_library_block_static_asset_files(usage_key):
+def get_library_block_static_asset_files(usage_key) -> list[LibraryXBlockStaticFile]:
"""
Given an XBlock in a content library, list all the static asset files
associated with that XBlock.
- Returns a list of LibraryXBlockStaticFile objects.
+ Returns a list of LibraryXBlockStaticFile objects, sorted by path.
+
+ TODO: This is not yet implemented for Learning Core backed libraries.
+ TODO: Should this be in the general XBlock API rather than the libraries API?
"""
- def_key, lib_bundle = _lookup_usage_key(usage_key)
- result = [
- LibraryXBlockStaticFile(path=f.path, url=f.url, size=f.size)
- for f in lib_bundle.get_static_files_for_definition(def_key)
- ]
- result.sort(key=lambda f: f.path)
- return result
+ return []
-def add_library_block_static_asset_file(usage_key, file_name, file_content):
+def add_library_block_static_asset_file(usage_key, file_name, file_content) -> LibraryXBlockStaticFile:
"""
Upload a static asset file into the library, to be associated with the
specified XBlock. Will silently overwrite an existing file of the same name.
@@ -934,57 +880,26 @@ def add_library_block_static_asset_file(usage_key, file_name, file_content):
Returns a LibraryXBlockStaticFile object.
+ Sends a LIBRARY_BLOCK_UPDATED event.
+
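+    TODO: This is not yet implemented for Learning Core backed libraries.
+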
Example:
video_block = UsageKey.from_string("lb:VideoTeam:python-intro:video:1")
add_library_block_static_asset_file(video_block, "subtitles-en.srt", subtitles.encode('utf-8'))
"""
- assert isinstance(file_content, bytes)
- def_key, lib_bundle = _lookup_usage_key(usage_key)
- if file_name != file_name.strip().strip('/'):
- raise InvalidNameError("file name cannot start/end with / or whitespace.")
- if '//' in file_name or '..' in file_name:
- raise InvalidNameError("Invalid sequence (// or ..) in filename.")
- file_path = lib_bundle.get_static_prefix_for_definition(def_key) + file_name
- # Write the new static file into the library bundle's draft
- draft = get_or_create_bundle_draft(def_key.bundle_uuid, DRAFT_NAME)
- write_draft_file(draft.uuid, file_path, file_content)
- # Clear the bundle cache so everyone sees the new file immediately:
- lib_bundle.cache.clear()
- file_metadata = blockstore_cache.get_bundle_file_metadata_with_cache(
- bundle_uuid=def_key.bundle_uuid, path=file_path, draft_name=DRAFT_NAME,
- )
- LIBRARY_BLOCK_UPDATED.send_event(
- library_block=LibraryBlockData(
- library_key=lib_bundle.library_key,
- usage_key=usage_key
- )
- )
- return LibraryXBlockStaticFile(path=file_metadata.path, url=file_metadata.url, size=file_metadata.size)
+ raise NotImplementedError("Static assets not yet implemented for Learning Core")
def delete_library_block_static_asset_file(usage_key, file_name):
"""
Delete a static asset file from the library.
+ Sends a LIBRARY_BLOCK_UPDATED event.
+
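+    TODO: This is not yet implemented for Learning Core backed libraries.
+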
Example:
video_block = UsageKey.from_string("lb:VideoTeam:python-intro:video:1")
delete_library_block_static_asset_file(video_block, "subtitles-en.srt")
"""
- def_key, lib_bundle = _lookup_usage_key(usage_key)
- if '..' in file_name:
- raise InvalidNameError("Invalid .. in file name.")
- file_path = lib_bundle.get_static_prefix_for_definition(def_key) + file_name
- # Delete the file from the library bundle's draft
- draft = get_or_create_bundle_draft(def_key.bundle_uuid, DRAFT_NAME)
- write_draft_file(draft.uuid, file_path, contents=None)
- # Clear the bundle cache so everyone sees the new file immediately:
- lib_bundle.cache.clear()
- LIBRARY_BLOCK_UPDATED.send_event(
- library_block=LibraryBlockData(
- library_key=lib_bundle.library_key,
- usage_key=usage_key
- )
- )
+ raise NotImplementedError("Static assets not yet implemented for Learning Core")
def get_allowed_block_types(library_key): # pylint: disable=unused-argument
@@ -995,7 +910,7 @@ def get_allowed_block_types(library_key): # pylint: disable=unused-argument
# This import breaks in the LMS so keep it here. The LMS doesn't generally
# use content libraries APIs directly but some tests may want to use them to
# create libraries and then test library learning or course-library integration.
- from cms.djangoapps.contentstore.helpers import xblock_type_display_name
+ from cms.djangoapps.contentstore import helpers as studio_helpers
# TODO: return support status and template options
# See cms/djangoapps/contentstore/views/component.py
block_types = sorted(name for name, class_ in XBlock.load_classes())
@@ -1005,126 +920,23 @@ def get_allowed_block_types(library_key): # pylint: disable=unused-argument
block_types = (name for name in block_types if name == lib.type)
info = []
for block_type in block_types:
- display_name = xblock_type_display_name(block_type, None)
+ # TODO: unify the contentstore helper with the xblock.api version of
+ # xblock_type_display_name
+ display_name = studio_helpers.xblock_type_display_name(block_type, None)
# For now as a crude heuristic, we exclude blocks that don't have a display_name
if display_name:
info.append(LibraryXBlockType(block_type=block_type, display_name=display_name))
return info
-def get_bundle_links(library_key):
- """
- Get the list of bundles/libraries linked to this content library.
-
- Returns LibraryBundleLink objects (defined above).
-
- Because every content library is a blockstore bundle, it can have "links" to
- other bundles, which may or may not be content libraries. This allows using
- XBlocks (or perhaps even static assets etc.) from another bundle without
- needing to duplicate/copy the data.
-
- Links always point to a specific published version of the target bundle.
- Links are identified by a slug-like ID, e.g. "link1"
- """
- ref = ContentLibrary.objects.get_by_key(library_key)
- links = blockstore_cache.get_bundle_draft_direct_links_cached(ref.bundle_uuid, DRAFT_NAME)
- results = []
- # To be able to quickly get the library ID from the bundle ID for links which point to other libraries, build a map:
- bundle_uuids = {link_data.bundle_uuid for link_data in links.values()}
- libraries_linked = {
- lib.bundle_uuid: lib
- for lib in ContentLibrary.objects.select_related('org').filter(bundle_uuid__in=bundle_uuids)
- }
- for link_name, link_data in links.items():
- # Is this linked bundle a content library?
- try:
- opaque_key = libraries_linked[link_data.bundle_uuid].library_key
- except KeyError:
- opaque_key = None
- try:
- latest_version = blockstore_cache.get_bundle_version_number(link_data.bundle_uuid)
- except BundleNotFound:
- latest_version = 0
- results.append(LibraryBundleLink(
- id=link_name,
- bundle_uuid=link_data.bundle_uuid,
- version=link_data.version,
- latest_version=latest_version,
- opaque_key=opaque_key,
- ))
- return results
-
-
-def create_bundle_link(library_key, link_id, target_opaque_key, version=None):
- """
- Create a new link to the resource with the specified opaque key.
-
- For now, only LibraryLocatorV2 opaque keys are supported.
- """
- ref = ContentLibrary.objects.get_by_key(library_key)
- # Make sure this link ID/name is not already in use:
- links = blockstore_cache.get_bundle_draft_direct_links_cached(ref.bundle_uuid, DRAFT_NAME)
- if link_id in links:
- raise InvalidNameError("That link ID is already in use.")
- # Determine the target:
- if not isinstance(target_opaque_key, LibraryLocatorV2):
- raise TypeError("For now, only LibraryLocatorV2 opaque keys are supported by create_bundle_link")
- target_bundle_uuid = ContentLibrary.objects.get_by_key(target_opaque_key).bundle_uuid
- if version is None:
- version = get_bundle(target_bundle_uuid).latest_version
- # Create the new link:
- draft = get_or_create_bundle_draft(ref.bundle_uuid, DRAFT_NAME)
- set_draft_link(draft.uuid, link_id, target_bundle_uuid, version)
- # Clear the cache:
- LibraryBundle(library_key, ref.bundle_uuid, draft_name=DRAFT_NAME).cache.clear()
- CONTENT_LIBRARY_UPDATED.send_event(
- content_library=ContentLibraryData(
- library_key=library_key
- )
- )
-
-
-def update_bundle_link(library_key, link_id, version=None, delete=False):
- """
- Update a bundle's link to point to the specified version of its target
- bundle. Use version=None to automatically point to the latest version.
- Use delete=True to delete the link.
- """
- ref = ContentLibrary.objects.get_by_key(library_key)
- draft = get_or_create_bundle_draft(ref.bundle_uuid, DRAFT_NAME)
- if delete:
- set_draft_link(draft.uuid, link_id, None, None)
- else:
- links = blockstore_cache.get_bundle_draft_direct_links_cached(ref.bundle_uuid, DRAFT_NAME)
- try:
- link = links[link_id]
- except KeyError:
- raise InvalidNameError("That link does not exist.") # lint-amnesty, pylint: disable=raise-missing-from
- if version is None:
- version = get_bundle(link.bundle_uuid).latest_version
- set_draft_link(draft.uuid, link_id, link.bundle_uuid, version)
- # Clear the cache:
- LibraryBundle(library_key, ref.bundle_uuid, draft_name=DRAFT_NAME).cache.clear()
- CONTENT_LIBRARY_UPDATED.send_event(
- content_library=ContentLibraryData(
- library_key=library_key
- )
- )
-
-
def publish_changes(library_key):
"""
Publish all pending changes to the specified library.
"""
- ref = ContentLibrary.objects.get_by_key(library_key)
- bundle = get_bundle(ref.bundle_uuid)
- if DRAFT_NAME in bundle.drafts: # pylint: disable=unsupported-membership-test
- draft_uuid = bundle.drafts[DRAFT_NAME] # pylint: disable=unsubscriptable-object
- commit_draft(draft_uuid)
- else:
- return # If there is no draft, no action is needed.
- LibraryBundle(library_key, ref.bundle_uuid).cache.clear()
- LibraryBundle(library_key, ref.bundle_uuid, draft_name=DRAFT_NAME).cache.clear()
+ learning_package = ContentLibrary.objects.get_by_key(library_key).learning_package
+
+ publishing_api.publish_all_drafts(learning_package.id)
+
CONTENT_LIBRARY_UPDATED.send_event(
content_library=ContentLibraryData(
library_key=library_key,
@@ -1138,14 +950,9 @@ def revert_changes(library_key):
Revert all pending changes to the specified library, restoring it to the
last published version.
"""
- ref = ContentLibrary.objects.get_by_key(library_key)
- bundle = get_bundle(ref.bundle_uuid)
- if DRAFT_NAME in bundle.drafts: # pylint: disable=unsupported-membership-test
- draft_uuid = bundle.drafts[DRAFT_NAME] # pylint: disable=unsubscriptable-object
- delete_draft(draft_uuid)
- else:
- return # If there is no draft, no action is needed.
- LibraryBundle(library_key, ref.bundle_uuid, draft_name=DRAFT_NAME).cache.clear()
+ learning_package = ContentLibrary.objects.get_by_key(library_key).learning_package
+ publishing_api.reset_drafts_to_published(learning_package.id)
+
CONTENT_LIBRARY_UPDATED.send_event(
content_library=ContentLibraryData(
library_key=library_key,
@@ -1429,6 +1236,9 @@ def get_block_static_data(self, asset_file):
class EdxApiImportClient(BaseEdxImportClient):
"""
An import client based on a remote Open Edx API interface.
+
+ TODO: Look over this class. We'll probably need to completely re-implement
+ the import process.
"""
URL_COURSES = "/api/courses/v1/courses/{course_key}"
diff --git a/openedx/core/djangoapps/content_libraries/library_bundle.py b/openedx/core/djangoapps/content_libraries/library_bundle.py
deleted file mode 100644
index 3665365f7962..000000000000
--- a/openedx/core/djangoapps/content_libraries/library_bundle.py
+++ /dev/null
@@ -1,407 +0,0 @@
-"""
-Helper code for working with Blockstore bundles that contain OLX
-"""
-
-import logging # lint-amnesty, pylint: disable=wrong-import-order
-
-from functools import lru_cache # lint-amnesty, pylint: disable=wrong-import-order
-from opaque_keys.edx.locator import BundleDefinitionLocator, LibraryUsageLocatorV2
-from xblock.core import XBlock
-from xblock.plugin import PluginMissingError
-
-from openedx.core.djangoapps.content_libraries.models import ContentLibrary
-from openedx.core.djangoapps.xblock.api import (
- BundleFormatException,
- definition_for_include,
- parse_xblock_include,
- xml_for_definition,
-)
-from openedx.core.djangolib.blockstore_cache import (
- BundleCache,
- get_bundle_direct_links_with_cache,
- get_bundle_files_cached,
- get_bundle_file_metadata_with_cache,
- get_bundle_version_number,
-)
-from openedx.core.lib import blockstore_api
-
-log = logging.getLogger(__name__)
-
-
-@lru_cache()
-def bundle_uuid_for_library_key(library_key):
- """
- Given a library slug, look up its bundle UUID.
- Can be cached aggressively since bundle UUID is immutable.
-
- May raise ContentLibrary.DoesNotExist
- """
- library_metadata = ContentLibrary.objects.get_by_key(library_key)
- return library_metadata.bundle_uuid
-
-
-def usage_for_child_include(parent_usage, parent_definition, parsed_include):
- """
- Get the usage ID for a child XBlock, given the parent's keys and the
-    <xblock-include /> element that specifies the child.
-
- Consider two bundles, one with three definitions:
- main-unit, html1, subunit1
- And a second bundle with two definitions:
- unit1, html1
- Note that both bundles have a definition called "html1". Now, with the
- following tree structure, where "unit/unit1" and the second "html/html1"
- are in a linked bundle:
-
-    <unit> in unit/main-unit/definition.xml
-        <xblock-include definition="html/html1" />
-        <xblock-include definition="unit/subunit1" />
-            <xblock-include link="linked_bundle" definition="unit/unit1" usage="alias1" />
-                <xblock-include definition="html/html1" />
-
- The following usage IDs would result:
-
- main-unit
- html1
- subunit1
- alias1
- alias1-html1
-
- Notice that "html1" in the linked bundle is prefixed so its ID stays
- unique from the "html1" in the original library.
- """
- assert isinstance(parent_usage, LibraryUsageLocatorV2)
- usage_id = parsed_include.usage_hint if parsed_include.usage_hint else parsed_include.definition_id
- library_bundle_uuid = bundle_uuid_for_library_key(parent_usage.context_key)
- # Is the parent usage from the same bundle as the library?
- parent_usage_from_library_bundle = parent_definition.bundle_uuid == library_bundle_uuid
- if not parent_usage_from_library_bundle:
- # This XBlock has been linked in to the library via a chain of one
- # or more bundle links. In order to keep usage_id collisions from
-        # happening, any descendants of the first linked block must have
- # their usage_id prefixed with the parent usage's usage_id.
- # (It would be possible to only change the prefix when the block is
- # a child of a block with an explicit usage="" attribute on its
-        # <xblock-include />, but that requires much more complex logic.)
- usage_id = parent_usage.usage_id + "-" + usage_id
- return LibraryUsageLocatorV2(
- lib_key=parent_usage.lib_key,
- block_type=parsed_include.block_type,
- usage_id=usage_id,
- )
-
-
-class LibraryBundle:
- """
- Wrapper around a Content Library Blockstore bundle that contains OLX.
- """
-
- def __init__(self, library_key, bundle_uuid, draft_name=None):
- """
- Instantiate this wrapper for the bundle with the specified library_key,
- UUID, and optionally the specified draft name.
- """
- self.library_key = library_key
- self.bundle_uuid = bundle_uuid
- self.draft_name = draft_name
- self.cache = BundleCache(bundle_uuid, draft_name)
-
- def get_olx_files(self):
- """
- Get the list of OLX files in this bundle (using a heuristic)
-
- Because this uses a heuristic, it will only return files with filenames
- that seem like OLX files that are in the expected locations of OLX
- files. They are not guaranteed to be valid OLX nor will OLX files in
- nonstandard locations be returned.
-
- Example return value: [
- 'html/intro/definition.xml',
- 'unit/unit1/definition.xml',
- ]
- """
- bundle_files = get_bundle_files_cached(self.bundle_uuid, draft_name=self.draft_name)
- return [f.path for f in bundle_files if f.path.endswith("/definition.xml")]
-
- def definition_for_usage(self, usage_key):
- """
- Given the usage key for an XBlock in this library bundle, return the
- BundleDefinitionLocator which specifies the actual XBlock definition (as
- a path to an OLX in a specific blockstore bundle).
-
- Must return a BundleDefinitionLocator if the XBlock exists in this
- context, or None otherwise.
-
- For a content library, the rules are simple:
- * If the usage key points to a block in this library, the filename
- (definition) of the OLX file is always
- {block_type}/{usage_id}/definition.xml
- Each library has exactly one usage per definition for its own blocks.
- * However, block definitions from other content libraries may be linked
- into this library via <xblock-include /> directives. In that case,
- it's necessary to inspect every OLX file in this library that might
- have an <xblock-include /> directive in order to find what external
- block the usage ID refers to.
- """
- # Now that we know the library/bundle, find the block's definition
- if self.draft_name:
- version_arg = {"draft_name": self.draft_name}
- else:
- version_arg = {"bundle_version": get_bundle_version_number(self.bundle_uuid)}
- olx_path = f"{usage_key.block_type}/{usage_key.usage_id}/definition.xml"
- try:
- get_bundle_file_metadata_with_cache(self.bundle_uuid, olx_path, **version_arg)
- return BundleDefinitionLocator(self.bundle_uuid, usage_key.block_type, olx_path, **version_arg)
- except blockstore_api.BundleFileNotFound:
- # This must be a usage of a block from a linked bundle. One of the
- # OLX files in this bundle contains an <xblock-include /> reference to it.
- bundle_includes = self.get_bundle_includes()
- try:
- return bundle_includes[usage_key]
- except KeyError:
- return None
-
- def get_all_usages(self):
- """
- Get usage keys of all the blocks in this bundle
- """
- usage_keys = []
- for olx_file_path in self.get_olx_files():
- block_type, usage_id, _unused = olx_file_path.split('/')
- usage_key = LibraryUsageLocatorV2(self.library_key, block_type, usage_id)
- usage_keys.append(usage_key)
-
- return usage_keys
-
- def get_top_level_usages(self):
- """
- Get the set of usage keys in this bundle that have no parent.
- """
- own_usage_keys = self.get_all_usages()
- usage_keys_with_parents = self.get_bundle_includes().keys()
- return [usage_key for usage_key in own_usage_keys if usage_key not in usage_keys_with_parents]
-
- def get_bundle_includes(self):
- """
- Scan through the bundle and all linked bundles as needed to generate
- a complete list of all the blocks that are included as
- child/grandchild/... blocks of the blocks in this bundle.
-
- Returns a dict of {usage_key -> BundleDefinitionLocator}
-
- Blocks in the bundle that have no parent are not included.
- """
- cache_key = ("bundle_includes", )
- usages_found = self.cache.get(cache_key)
- if usages_found is not None:
- return usages_found
-
- usages_found = {}
-
- def add_definitions_children(usage_key, def_key):
- """
- Recursively add any children of the given XBlock usage+definition to
- usages_found.
- """
- if not does_block_type_support_children(def_key.block_type):
- return
- try:
- xml_node = xml_for_definition(def_key)
- except: # pylint:disable=bare-except
- log.exception(f"Unable to load definition {def_key}")
- return
-
- for child in xml_node:
- if child.tag != 'xblock-include':
- continue
- try:
- parsed_include = parse_xblock_include(child)
- child_usage = usage_for_child_include(usage_key, def_key, parsed_include)
- child_def_key = definition_for_include(parsed_include, def_key)
- except BundleFormatException:
- log.exception(f"Unable to parse a child of {def_key}")
- continue
- usages_found[child_usage] = child_def_key
- add_definitions_children(child_usage, child_def_key)
-
- # Find all the definitions in this bundle and recursively add all their descendants:
- bundle_files = get_bundle_files_cached(self.bundle_uuid, draft_name=self.draft_name)
- if self.draft_name:
- version_arg = {"draft_name": self.draft_name}
- else:
- version_arg = {"bundle_version": get_bundle_version_number(self.bundle_uuid)}
- for bfile in bundle_files:
- if not bfile.path.endswith("/definition.xml") or bfile.path.count('/') != 2:
- continue # Not an OLX file.
- block_type, usage_id, _unused = bfile.path.split('/')
- def_key = BundleDefinitionLocator(
- bundle_uuid=self.bundle_uuid,
- block_type=block_type,
- olx_path=bfile.path,
- **version_arg
- )
- usage_key = LibraryUsageLocatorV2(self.library_key, block_type, usage_id)
- add_definitions_children(usage_key, def_key)
-
- self.cache.set(cache_key, usages_found)
- return usages_found
-
- def does_definition_have_unpublished_changes(self, definition_key):
- """
- Given the definition key of an XBlock, which exists in an OLX file like
- problem/quiz1/definition.xml
- Check if the bundle's draft has _any_ unpublished changes in the
- problem/quiz1/
- directory.
- """
- if self.draft_name is None:
- return False # No active draft, so there can't be any changes
- prefix = self.olx_prefix(definition_key)
- return prefix in self._get_changed_definitions()
-
- def _get_changed_definitions(self):
- """
- Helper method to get a list of all paths with changes, where a path is
- problem/quiz1/
- Or similar (a type and an ID), excluding 'definition.xml'
- """
- cached_result = self.cache.get(('changed_definition_prefixes', ))
- if cached_result is not None:
- return cached_result
- changed = []
- bundle_files = get_bundle_files_cached(self.bundle_uuid, draft_name=self.draft_name)
- for file_ in bundle_files:
- if getattr(file_, 'modified', False) and file_.path.count('/') >= 2:
- (type_part, id_part, _rest) = file_.path.split('/', 2)
- prefix = type_part + '/' + id_part + '/'
- if prefix not in changed:
- changed.append(prefix)
- self.cache.set(('changed_definition_prefixes', ), changed)
- return changed
-
- def has_changes(self):
- """
- Helper method to check if this OLX bundle has any pending changes,
- including any deleted blocks.
-
- Returns a tuple of (
- has_unpublished_changes,
- has_unpublished_deletes,
- )
- Where has_unpublished_changes is true if there is any type of change,
- including deletes, and has_unpublished_deletes is only true if one or
- more blocks has been deleted since the last publish.
- """
- if not self.draft_name:
- return (False, False)
- cached_result = self.cache.get(('has_changes', ))
- if cached_result is not None:
- return cached_result
- draft_files = get_bundle_files_cached(self.bundle_uuid, draft_name=self.draft_name)
-
- has_unpublished_changes = False
- has_unpublished_deletes = False
-
- for file_ in draft_files:
- if getattr(file_, 'modified', False):
- has_unpublished_changes = True
- break
-
- if not has_unpublished_changes:
- # Check if any links have changed:
- old_links = set(get_bundle_direct_links_with_cache(self.bundle_uuid).items())
- new_links = set(get_bundle_direct_links_with_cache(self.bundle_uuid, draft_name=self.draft_name).items())
- has_unpublished_changes = new_links != old_links
-
- published_file_paths = {f.path for f in get_bundle_files_cached(self.bundle_uuid)}
- draft_file_paths = {f.path for f in draft_files}
- for file_path in published_file_paths:
- if file_path not in draft_file_paths:
- has_unpublished_changes = True
- if file_path.endswith('/definition.xml'):
- # only set 'has_unpublished_deletes' if the actual main definition XML
- # file was deleted, not if only some asset file was deleted, etc.
- has_unpublished_deletes = True
- break
-
- result = (has_unpublished_changes, has_unpublished_deletes)
- self.cache.set(('has_changes', ), result)
- return result
-
- def get_static_prefix_for_definition(self, definition_key):
- """
- Given a definition key, get the path prefix used for all (public) static
- asset files.
-
- Example: problem/quiz1/static/
- """
- return self.olx_prefix(definition_key) + 'static/'
-
- def get_static_files_for_definition(self, definition_key):
- """
- Return a list of the static asset files related to a particular XBlock
- definition.
-
- If the bundle contains files like:
- problem/quiz1/definition.xml
- problem/quiz1/static/image1.png
- Then this will return
- [BundleFileData(path="image1.png", size, url, hash_digest)]
- """
- path_prefix = self.get_static_prefix_for_definition(definition_key)
- path_prefix_len = len(path_prefix)
- return [
- blockstore_api.BundleFileData(
- path=f.path[path_prefix_len:],
- size=f.size,
- url=f.url,
- hash_digest=f.hash_digest,
- )
- for f in get_bundle_files_cached(self.bundle_uuid, draft_name=self.draft_name)
- if f.path.startswith(path_prefix)
- ]
-
- def get_last_published_time(self):
- """
- Return the timestamp when the current library was last published. If the
- current draft has never been published, return None.
- """
- cache_key = ("last_published_time", )
- usages_found = self.cache.get(cache_key)
- if usages_found is not None:
- return usages_found
- version = get_bundle_version_number(self.bundle_uuid)
- if version == 0:
- return None
- last_published_time = blockstore_api.get_bundle_version(self.bundle_uuid, version).created_at
- self.cache.set(cache_key, last_published_time)
- return last_published_time
-
- @staticmethod
- def olx_prefix(definition_key):
- """
- Given a definition key in a compatible bundle, whose olx_path refers to
- block_type/some_id/definition.xml
- Return the "folder name" / "path prefix"
- block-type/some_id/
-
- This method is here rather than a method of BundleDefinitionLocator
- because BundleDefinitionLocator is more generic and doesn't require
- that its olx_path always ends in /definition.xml
- """
- if not definition_key.olx_path.endswith('/definition.xml'):
- raise ValueError
- return definition_key.olx_path[:-14] # Remove 'definition.xml', keep trailing slash
-
-
-def does_block_type_support_children(block_type):
- """
- Does the specified block type (e.g. "html", "vertical") support child
- blocks?
- """
- try:
- return XBlock.load_class(block_type).has_children
- except PluginMissingError:
- # We don't know if this now-uninstalled block type had children
- # but to be conservative, assume it may have.
- return True
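The removed module above encodes a strict path convention for OLX in Blockstore bundles. As a quick reference, here is a minimal, self-contained sketch of just that convention; the helper names are hypothetical and are not part of any Open edX API:

    def olx_prefix(olx_path: str) -> str:
        """Return "problem/quiz1/" for "problem/quiz1/definition.xml"."""
        suffix = "definition.xml"
        if not olx_path.endswith("/" + suffix):
            raise ValueError(f"Not an OLX definition path: {olx_path}")
        return olx_path[:-len(suffix)]  # drop 'definition.xml', keep the trailing slash

    def static_prefix(olx_path: str) -> str:
        """Return the prefix under which a definition's public static assets live."""
        return olx_prefix(olx_path) + "static/"

    assert olx_prefix("problem/quiz1/definition.xml") == "problem/quiz1/"
    assert static_prefix("problem/quiz1/definition.xml") == "problem/quiz1/static/"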
diff --git a/openedx/core/djangoapps/content_libraries/library_context.py b/openedx/core/djangoapps/content_libraries/library_context.py
index 8526193f9af5..9408f51e511a 100644
--- a/openedx/core/djangoapps/content_libraries/library_context.py
+++ b/openedx/core/djangoapps/content_libraries/library_context.py
@@ -7,14 +7,11 @@
from django.core.exceptions import PermissionDenied
from openedx.core.djangoapps.content_libraries import api, permissions
-from openedx.core.djangoapps.content_libraries.library_bundle import (
- LibraryBundle,
- bundle_uuid_for_library_key,
- usage_for_child_include,
-)
from openedx.core.djangoapps.content_libraries.models import ContentLibrary
from openedx.core.djangoapps.xblock.api import LearningContext
+from openedx_learning.core.components import api as components_api
+
log = logging.getLogger(__name__)
@@ -46,10 +43,8 @@ def can_edit_block(self, user, usage_key):
api.require_permission_for_library_key(usage_key.lib_key, user, permissions.CAN_EDIT_THIS_CONTENT_LIBRARY)
except (PermissionDenied, api.ContentLibraryNotFound):
return False
- def_key = self.definition_for_usage(usage_key)
- if not def_key:
- return False
- return True
+
+ return self.block_exists(usage_key)
def can_view_block(self, user, usage_key):
"""
@@ -69,37 +64,32 @@ def can_view_block(self, user, usage_key):
)
except (PermissionDenied, api.ContentLibraryNotFound):
return False
- def_key = self.definition_for_usage(usage_key)
- if not def_key:
- return False
- return True
- def definition_for_usage(self, usage_key, **kwargs):
- """
- Given a usage key for an XBlock in this context, return the
- BundleDefinitionLocator which specifies the actual XBlock definition
- (as a path to an OLX in a specific blockstore bundle).
+ return self.block_exists(usage_key)
- Must return a BundleDefinitionLocator if the XBlock exists in this
- context, or None otherwise.
+ def block_exists(self, usage_key):
+ """
+ Does the block for this usage_key exist in this Library?
+
+ Note that this applies to all versions, i.e. you can pass a usage key for
+ a piece of content that has been soft-deleted (removed from Drafts), and
+ it will still return True here. That's because for the purposes of
+ permission checking, we just want to know whether that block has ever
+ existed in this Library, because we could be looking at any older
+ version of it.
"""
- library_key = usage_key.context_key
try:
- bundle_uuid = bundle_uuid_for_library_key(library_key)
+ content_lib = ContentLibrary.objects.get_by_key(usage_key.context_key)
except ContentLibrary.DoesNotExist:
- return None
- if 'force_draft' in kwargs: # lint-amnesty, pylint: disable=consider-using-get
- use_draft = kwargs['force_draft']
- else:
- use_draft = self.use_draft
- bundle = LibraryBundle(library_key, bundle_uuid, use_draft)
- return bundle.definition_for_usage(usage_key)
-
- def usage_for_child_include(self, parent_usage, parent_definition, parsed_include):
- """
- Method that the runtime uses when loading a block's child, to get the
- ID of the child.
+ return False
- The child is always from an <xblock-include /> element.
- """
- return usage_for_child_include(parent_usage, parent_definition, parsed_include)
+ learning_package = content_lib.learning_package
+ if learning_package is None:
+ return False
+
+ return components_api.component_exists_by_key(
+ learning_package.id,
+ namespace='xblock.v1',
+ type_name=usage_key.block_type,
+ local_key=usage_key.block_id,
+ )
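To make the new block_exists() check concrete: a v2 library usage key such as "lb:CL-TEST:lib:problem:problem1" is looked up as a Learning Core component inside the library's LearningPackage. A small illustrative sketch of that mapping (string parsing shown only for clarity; real code should use LibraryUsageLocatorV2 instead):

    def component_lookup_args(usage_key_str: str) -> dict:
        # Usage key format: "lb:<org>:<lib slug>:<block type>:<block id>"
        _scheme, _org, _slug, block_type, block_id = usage_key_str.split(":")
        return {
            # learning_package_id comes from ContentLibrary.objects.get_by_key(...)
            "namespace": "xblock.v1",   # namespace used for XBlock components, as in the code above
            "type_name": block_type,    # e.g. "problem"
            "local_key": block_id,      # e.g. "problem1"
        }

    assert component_lookup_args("lb:CL-TEST:lib:problem:problem1")["type_name"] == "problem"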
diff --git a/openedx/core/djangoapps/content_libraries/migrations/0010_contentlibrary_learning_package_and_more.py b/openedx/core/djangoapps/content_libraries/migrations/0010_contentlibrary_learning_package_and_more.py
new file mode 100644
index 000000000000..372f51de310c
--- /dev/null
+++ b/openedx/core/djangoapps/content_libraries/migrations/0010_contentlibrary_learning_package_and_more.py
@@ -0,0 +1,25 @@
+# Generated by Django 4.2.9 on 2024-02-14 18:26
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('oel_publishing', '0001_initial'),
+ ('content_libraries', '0009_alter_contentlibrary_authorized_lti_configs'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='contentlibrary',
+ name='learning_package',
+ field=models.OneToOneField(default=None, null=True, on_delete=django.db.models.deletion.RESTRICT, to='oel_publishing.learningpackage'),
+ ),
+ migrations.AlterField(
+ model_name='contentlibrary',
+ name='bundle_uuid',
+ field=models.UUIDField(default=None, null=True, unique=True),
+ ),
+ ]
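A practical note on this migration: learning_package is nullable, so rows that predate it are simply left unlinked. One way to see which libraries still need to be attached to a LearningPackage is the read-only sketch below, intended for a Django shell in Studio and not part of the migration itself:

    from openedx.core.djangoapps.content_libraries.models import ContentLibrary

    for lib in ContentLibrary.objects.filter(learning_package__isnull=True):
        print(lib.org.short_name, lib.slug)  # libraries not yet backed by Learning Core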
diff --git a/openedx/core/djangoapps/content_libraries/models.py b/openedx/core/djangoapps/content_libraries/models.py
index 7bf8792699f9..a4c128c9bcc1 100644
--- a/openedx/core/djangoapps/content_libraries/models.py
+++ b/openedx/core/djangoapps/content_libraries/models.py
@@ -32,6 +32,7 @@
reasoning applies to steps beyond the data model, such as at the XBlock
runtime, authentication, and score handling, etc.
"""
+from __future__ import annotations
import contextlib
import logging
@@ -56,6 +57,7 @@
LIBRARY_TYPES, COMPLEX, LICENSE_OPTIONS,
ALL_RIGHTS_RESERVED,
)
+from openedx_learning.core.publishing.models import LearningPackage
from organizations.models import Organization # lint-amnesty, pylint: disable=wrong-import-order
from .apps import ContentLibrariesConfig
@@ -75,7 +77,8 @@ def get_by_key(self, library_key):
Get the ContentLibrary for the given LibraryLocatorV2 key.
"""
assert isinstance(library_key, LibraryLocatorV2)
- return self.get(org__short_name=library_key.org, slug=library_key.slug)
+ return self.select_related('learning_package') \
+ .get(org__short_name=library_key.org, slug=library_key.slug)
class ContentLibrary(models.Model):
@@ -88,7 +91,7 @@ class ContentLibrary(models.Model):
model in Studio should only be used to track settings specific to this Open
edX instance, like who has permission to edit this content library.
"""
- objects = ContentLibraryManager()
+ objects: ContentLibraryManager[ContentLibrary] = ContentLibraryManager()
id = models.AutoField(primary_key=True)
# Every Library is uniquely and permanently identified by an 'org' and a
@@ -97,9 +100,31 @@ class ContentLibrary(models.Model):
# e.g. "lib:org:slug" is the opaque key for a library.
org = models.ForeignKey(Organization, on_delete=models.PROTECT, null=False)
slug = models.SlugField(allow_unicode=True)
- bundle_uuid = models.UUIDField(unique=True, null=False)
+
+ # We no longer use the ``bundle_uuid`` and ``type`` fields, but we'll leave
+ # them in the model until after the Redwood release, just in case someone
+ # out there was using v2 libraries. We don't expect this, since it wasn't in
+ # a usable state, but there's always a chance someone managed to do it and
+ # is still using it. By keeping the schema backwards compatible, the thought
+ # is that they would update to the latest version, notice their libraries
+ # aren't working correctly, and still have the ability to recover their data
+ # if the code was rolled back.
+ # TODO: Remove these fields after the Redwood release is cut.
+ bundle_uuid = models.UUIDField(unique=True, null=True, default=None)
type = models.CharField(max_length=25, default=COMPLEX, choices=LIBRARY_TYPES)
+
license = models.CharField(max_length=25, default=ALL_RIGHTS_RESERVED, choices=LICENSE_OPTIONS)
+ learning_package = models.OneToOneField(
+ LearningPackage,
+ # We can't delete the LearningPackage that holds a Library's content
+ # unless we're deleting both at the same time.
+ on_delete=models.RESTRICT,
+ # This is nullable mostly for backwards compatibility, though it should
+ # be possible to have the abstract notion of a Library with no actual
+ # content in it yet.
+ null=True,
+ default=None,
+ )
# How is this library going to be used?
allow_public_learning = models.BooleanField(
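For reference, the lookup path that get_by_key() now optimizes with select_related("learning_package") looks roughly like this (the library key string is made up for illustration):

    from opaque_keys.edx.locator import LibraryLocatorV2
    from openedx.core.djangoapps.content_libraries.models import ContentLibrary

    library_key = LibraryLocatorV2.from_string("lib:CL-TEST:my-lib")
    content_lib = ContentLibrary.objects.get_by_key(library_key)  # single query, joins learning_package
    learning_package = content_lib.learning_package  # may be None for legacy or still-empty libraries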
diff --git a/openedx/core/djangoapps/content_libraries/serializers.py b/openedx/core/djangoapps/content_libraries/serializers.py
index 800c8cb10585..ee0e48b59c87 100644
--- a/openedx/core/djangoapps/content_libraries/serializers.py
+++ b/openedx/core/djangoapps/content_libraries/serializers.py
@@ -110,10 +110,17 @@ class LibraryXBlockMetadataSerializer(serializers.Serializer):
Serializer for LibraryXBlockMetadata
"""
id = serializers.CharField(source="usage_key", read_only=True)
- def_key = serializers.CharField(read_only=True)
+
+ # TODO: Remove this serializer field once the frontend no longer relies on
+ # it. Learning Core doesn't use definition IDs, but we're passing this dummy
+ # value back to preserve the REST API contract (just to reduce the number of
+ # things we're changing at one time).
+ def_key = serializers.ReadOnlyField(default=None)
+
block_type = serializers.CharField(source="usage_key.block_type")
display_name = serializers.CharField(read_only=True)
has_unpublished_changes = serializers.BooleanField(read_only=True)
+
# When creating a new XBlock in a library, the slug becomes the ID part of
# the definition key and usage key:
slug = serializers.CharField(write_only=True)
@@ -128,41 +135,25 @@ class LibraryXBlockTypeSerializer(serializers.Serializer):
display_name = serializers.CharField()
-class LibraryBundleLinkSerializer(serializers.Serializer):
- """
- Serializer for a link from a content library blockstore bundle to another
- blockstore bundle.
- """
- id = serializers.SlugField() # Link name
- bundle_uuid = serializers.UUIDField(format='hex_verbose', read_only=True)
- # What version of this bundle we are currently linking to.
- # This is never NULL but can optionally be set to null when creating a new link, which means "use latest version."
- version = serializers.IntegerField(allow_null=True)
- # What the latest version of the linked bundle is:
- # (if latest_version > version), the link can be "updated" to the latest version.
- latest_version = serializers.IntegerField(read_only=True)
- # Opaque key: If the linked bundle is a library or other learning context whose opaque key we can deduce, then this
- # is the key. If we don't know what type of blockstore bundle this link is pointing to, then this is blank.
- opaque_key = serializers.CharField()
-
-
-class LibraryBundleLinkUpdateSerializer(serializers.Serializer):
- """
- Serializer for updating an existing link in a content library blockstore
- bundle.
- """
- version = serializers.IntegerField(allow_null=True)
-
-
class LibraryXBlockCreationSerializer(serializers.Serializer):
"""
Serializer for adding a new XBlock to a content library
"""
# Parent block: optional usage key of an existing block to add this child
- # block to.
+ # block to. TODO: Remove this, because we don't support it.
parent_block = serializers.CharField(required=False)
+
block_type = serializers.CharField()
- definition_id = serializers.SlugField()
+
+ # TODO: Rename to ``block_id`` or ``slug``. The Learning Core XBlock runtime
+ # doesn't use definition_ids, but this field is really just about requesting
+ # a specific block_id, e.g. the "best_tropical_vacation_spots" portion of a
+ # problem with UsageKey:
+ # lb:Axim:VacationsLib:problem:best_tropical_vacation_spots
+ #
+ # It doesn't look like the frontend actually uses this to put meaningful
+ # slugs at the moment, but hopefully we can change this soon.
+ definition_id = serializers.CharField(validators=(validate_unicode_slug, ))
class LibraryXBlockOlxSerializer(serializers.Serializer):
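With these serializer changes, a block's metadata payload keeps its old shape but def_key is now a constant null placeholder. An illustrative example of the serialized output (all values made up):

    example_block_metadata = {
        "id": "lb:CL-TEST:my-lib:problem:problem1",
        "def_key": None,            # dummy value kept only for REST API compatibility
        "block_type": "problem",
        "display_name": "Blank Problem",
        "has_unpublished_changes": True,
    }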
diff --git a/openedx/core/djangoapps/content_libraries/tasks.py b/openedx/core/djangoapps/content_libraries/tasks.py
index 82a2c48ed8df..3714dc55f8d6 100644
--- a/openedx/core/djangoapps/content_libraries/tasks.py
+++ b/openedx/core/djangoapps/content_libraries/tasks.py
@@ -343,8 +343,10 @@ def _sync_children(
elif isinstance(library, library_api.ContentLibraryMetadata):
# TODO: add filtering by capa_type when V2 library will support different problem types
try:
- source_blocks = library_api.get_library_blocks(library_key)
- source_block_ids = [str(block.usage_key) for block in source_blocks]
+ source_block_ids = [
+ str(library_api.LibraryXBlockMetadata.from_component(library_key, component).usage_key)
+ for component in library_api.get_library_components(library_key)
+ ]
_import_from_blockstore(user_id, store, dest_block, source_block_ids)
dest_block.source_library_version = str(library.version)
store.update_item(dest_block, user_id)
diff --git a/openedx/core/djangoapps/content_libraries/tests/base.py b/openedx/core/djangoapps/content_libraries/tests/base.py
index 63b93509a3cb..5f837628c935 100644
--- a/openedx/core/djangoapps/content_libraries/tests/base.py
+++ b/openedx/core/djangoapps/content_libraries/tests/base.py
@@ -1,18 +1,17 @@
"""
Tests for Blockstore-based Content Libraries
"""
+import uuid
from contextlib import contextmanager
from io import BytesIO
from urllib.parse import urlencode
-from django.test import LiveServerTestCase
from organizations.models import Organization
-from rest_framework.test import APITestCase, APIClient
+from rest_framework.test import APITransactionTestCase, APIClient
from common.djangoapps.student.tests.factories import UserFactory
from openedx.core.djangoapps.content_libraries.constants import COMPLEX, ALL_RIGHTS_RESERVED
from openedx.core.djangolib.testing.utils import skip_unless_cms
-from openedx.core.lib import blockstore_api
from openedx.core.lib.blockstore_api.tests.base import (
BlockstoreAppTestMixin,
)
@@ -47,7 +46,7 @@
@skip_unless_cms # Content Libraries REST API is only available in Studio
-class _ContentLibrariesRestApiTestMixin:
+class ContentLibrariesRestApiTest(BlockstoreAppTestMixin, APITransactionTestCase):
"""
Base class for Blockstore-based Content Libraries test that use the REST API
@@ -71,21 +70,14 @@ class _ContentLibrariesRestApiTestMixin:
and cached forever.
"""
- @classmethod
- def setUpClass(cls):
- super().setUpClass()
- cls.user = UserFactory.create(username="Bob", email="bob@example.com", password="edx")
- # Create a collection using Blockstore API directly only because there
- # is not yet any Studio REST API for doing so:
- cls.collection = blockstore_api.create_collection("Content Library Test Collection")
+ def setUp(self):
+ super().setUp()
+ self.user = UserFactory.create(username="Bob", email="bob@example.com", password="edx")
# Create an organization
- cls.organization, _ = Organization.objects.get_or_create(
+ self.organization, _ = Organization.objects.get_or_create(
short_name="CL-TEST",
defaults={"name": "Content Libraries Tachyon Exploration & Survey Team"},
)
-
- def setUp(self):
- super().setUp()
self.clients_by_user = {}
self.client.login(username=self.user.username, password="edx")
@@ -139,7 +131,10 @@ def _create_library(
"description": description,
"type": library_type,
"license": license_type,
- "collection_uuid": str(self.collection.uuid),
+ # We're not actually using this value any more, but we're keeping it
+ # in the API testing for backwards compatibility for just a little
+ # longer. TODO: Remove this once the frontend stops sending it.
+ "collection_uuid": uuid.uuid4(),
}, expect_response)
def _list_libraries(self, query_params_dict=None, expect_response=200):
@@ -307,17 +302,3 @@ def _get_block_handler_url(self, block_key, handler_name):
"""
url = URL_BLOCK_GET_HANDLER_URL.format(block_key=block_key, handler_name=handler_name)
return self._api('get', url, None, expect_response=200)["handler_url"]
-
-
-class ContentLibrariesRestApiTest(
- _ContentLibrariesRestApiTestMixin,
- BlockstoreAppTestMixin,
- APITestCase,
- LiveServerTestCase,
-):
- """
- Base class for Blockstore-based Content Libraries test that use the REST API
- and the installed Blockstore app.
-
- We run this test with a live server, so that the blockstore asset files can be served.
- """
diff --git a/openedx/core/djangoapps/content_libraries/tests/test_content_libraries.py b/openedx/core/djangoapps/content_libraries/tests/test_content_libraries.py
index 69ee1755188f..c23e728c4b4d 100644
--- a/openedx/core/djangoapps/content_libraries/tests/test_content_libraries.py
+++ b/openedx/core/djangoapps/content_libraries/tests/test_content_libraries.py
@@ -1,8 +1,8 @@
"""
Tests for Blockstore-based Content Libraries
"""
-from uuid import UUID
from unittest.mock import Mock, patch
+from unittest import skip
import ddt
from django.contrib.auth.models import Group
@@ -20,6 +20,7 @@
LIBRARY_BLOCK_DELETED,
LIBRARY_BLOCK_UPDATED,
)
+from openedx_events.tests.utils import OpenEdxEventsTestMixin
from openedx.core.djangoapps.content_libraries.tests.base import (
ContentLibrariesRestApiTest,
URL_BLOCK_METADATA_URL,
@@ -27,14 +28,14 @@
URL_BLOCK_GET_HANDLER_URL,
URL_BLOCK_XBLOCK_HANDLER,
)
-from openedx.core.djangoapps.content_libraries.constants import VIDEO, COMPLEX, PROBLEM, CC_4_BY, ALL_RIGHTS_RESERVED
-from openedx.core.djangolib.blockstore_cache import cache
-from openedx.core.lib import blockstore_api
+from openedx.core.djangoapps.content_libraries.constants import VIDEO, COMPLEX, PROBLEM, CC_4_BY
+from openedx.core.djangolib.testing.utils import skip_unless_cms
from common.djangoapps.student.tests.factories import UserFactory
+@skip_unless_cms
@ddt.ddt
-class ContentLibrariesTestMixin:
+class ContentLibrariesTestCase(ContentLibrariesRestApiTest, OpenEdxEventsTestMixin):
"""
General tests for Blockstore-based Content Libraries
@@ -57,30 +58,51 @@ class ContentLibrariesTestMixin:
library slug and bundle UUID does not because it's assumed to be immutable
and cached forever.
"""
+ ENABLED_OPENEDX_EVENTS = [
+ CONTENT_LIBRARY_CREATED.event_type,
+ CONTENT_LIBRARY_DELETED.event_type,
+ CONTENT_LIBRARY_UPDATED.event_type,
+ LIBRARY_BLOCK_CREATED.event_type,
+ LIBRARY_BLOCK_DELETED.event_type,
+ LIBRARY_BLOCK_UPDATED.event_type,
+ ]
+
+ @classmethod
+ def setUpClass(cls):
+ """
+ Set up class method for the Test class.
+
+ TODO: It's unclear why we need to call start_events_isolation ourselves rather than relying on
+ OpenEdxEventsTestMixin.setUpClass to handle it. It fails if we don't, and many other test cases do it,
+ so we're following a pattern here. But that pattern doesn't really make sense.
+ """
+ super().setUpClass()
+ cls.start_events_isolation()
+
def test_library_crud(self):
"""
Test Create, Read, Update, and Delete of a Content Library
+
+ Tests with some non-ASCII chars in slug, title, description.
"""
# Create:
lib = self._create_library(
- slug="lib-crud", title="A Test Library", description="Just Testing", license_type=CC_4_BY,
+ slug="téstlꜟط", title="A Tést Lꜟطrary", description="Just Téstꜟng", license_type=CC_4_BY,
)
expected_data = {
- "id": "lib:CL-TEST:lib-crud",
+ "id": "lib:CL-TEST:téstlꜟط",
"org": "CL-TEST",
- "slug": "lib-crud",
- "title": "A Test Library",
- "description": "Just Testing",
+ "slug": "téstlꜟط",
+ "title": "A Tést Lꜟطrary",
+ "description": "Just Téstꜟng",
"version": 0,
"type": COMPLEX,
"license": CC_4_BY,
"has_unpublished_changes": False,
"has_unpublished_deletes": False,
}
- self.assertDictContainsEntries(lib, expected_data)
- # Check that bundle_uuid looks like a valid UUID
- UUID(lib["bundle_uuid"]) # will raise an exception if not valid
+ self.assertDictContainsEntries(lib, expected_data)
# Read:
lib2 = self._get_library(lib["id"])
self.assertDictContainsEntries(lib2, expected_data)
@@ -96,120 +118,51 @@ def test_library_crud(self):
self._get_library(lib["id"], expect_response=404)
self._delete_library(lib["id"], expect_response=404)
- @ddt.data(VIDEO, PROBLEM, COMPLEX)
- def test_library_alternative_type(self, target_type):
- """
- Create a library with a specific type
- """
- lib = self._create_library(
- slug="some-slug", title="Video Library", description="Test Video Library", library_type=target_type,
- )
- expected_data = {
- "id": "lib:CL-TEST:some-slug",
- "org": "CL-TEST",
- "slug": "some-slug",
- "title": "Video Library",
- "type": target_type,
- "description": "Test Video Library",
- "version": 0,
- "has_unpublished_changes": False,
- "has_unpublished_deletes": False,
- "license": ALL_RIGHTS_RESERVED,
- }
- self.assertDictContainsEntries(lib, expected_data)
-
- # Need to use a different slug each time here. Seems to be a race condition on test cleanup that will break things
- # otherwise.
- @ddt.data(
- ('to-video-fail', COMPLEX, VIDEO, (("problem", "problemA"),), 400),
- ('to-video-empty', COMPLEX, VIDEO, tuple(), 200),
- ('to-problem', COMPLEX, PROBLEM, (("problem", "problemB"),), 200),
- ('to-problem-fail', COMPLEX, PROBLEM, (("video", "videoA"),), 400),
- ('to-problem-empty', COMPLEX, PROBLEM, tuple(), 200),
- ('to-complex-from-video', VIDEO, COMPLEX, (("video", "videoB"),), 200),
- ('to-complex-from-problem', PROBLEM, COMPLEX, (("problem", "problemC"),), 200),
- ('to-complex-from-problem-empty', PROBLEM, COMPLEX, tuple(), 200),
- ('to-problem-from-video-empty', PROBLEM, VIDEO, tuple(), 200),
- )
- @ddt.unpack
- def test_library_update_type_conversion(self, slug, start_type, target_type, xblock_specs, expect_response):
- """
- Test conversion of one library type to another. Restricts options based on type/block matching.
- """
- lib = self._create_library(
- slug=slug, title="A Test Library", description="Just Testing", library_type=start_type,
- )
- assert lib['type'] == start_type
- for block_type, block_slug in xblock_specs:
- self._add_block_to_library(lib['id'], block_type, block_slug)
- self._commit_library_changes(lib['id'])
- result = self._update_library(lib['id'], type=target_type, expect_response=expect_response)
- if expect_response == 200:
- assert result['type'] == target_type
- assert 'type' in result
- else:
- lib = self._get_library(lib['id'])
- assert lib['type'] == start_type
-
- def test_no_convert_on_unpublished(self):
- """
- Verify that you can't change a library's type, even if it would normally be valid,
- when there are unpublished changes. This is so that a reversion of blocks won't cause an inconsistency.
- """
- lib = self._create_library(
- slug='resolute', title="A complex library", description="Unconvertable", library_type=COMPLEX,
- )
- self._add_block_to_library(lib['id'], "video", 'vid-block')
- result = self._update_library(lib['id'], type=VIDEO, expect_response=400)
- assert 'type' in result
-
- def test_no_convert_on_pending_deletes(self):
- """
- Verify that you can't change a library's type, even if it would normally be valid,
- when there are unpublished changes. This is so that a reversion of blocks won't cause an inconsistency.
- """
- lib = self._create_library(
- slug='still-alive', title="A complex library", description="Unconvertable", library_type=COMPLEX,
- )
- block = self._add_block_to_library(lib['id'], "video", 'vid-block')
- self._commit_library_changes(lib['id'])
- self._delete_library_block(block['id'])
- result = self._update_library(lib['id'], type=VIDEO, expect_response=400)
- assert 'type' in result
-
def test_library_validation(self):
"""
You can't create a library with the same slug as an existing library,
or an invalid slug.
"""
- assert 0 == len(blockstore_api.get_bundles(text_search='some-slug'))
self._create_library(slug="some-slug", title="Existing Library")
- assert 1 == len(blockstore_api.get_bundles(text_search='some-slug'))
# Try to create a library+bundle with a duplicate slug
response = self._create_library(slug="some-slug", title="Duplicate Library", expect_response=400)
assert response == {
'slug': 'A library with that ID already exists.',
}
- # The second bundle created with that slug is removed when the transaction rolls back.
- assert 1 == len(blockstore_api.get_bundles(text_search='some-slug'))
response = self._create_library(slug="Invalid Slug!", title="Library with Bad Slug", expect_response=400)
assert response == {
'slug': ['Enter a valid “slug” consisting of Unicode letters, numbers, underscores, or hyphens.'],
}
+ @skip("This endpoint shouldn't support num_blocks and has_unpublished_*.")
@patch("openedx.core.djangoapps.content_libraries.views.LibraryApiPagination.page_size", new=2)
def test_list_library(self):
"""
Test the /libraries API and its pagination
+
+ TODO: This test will technically pass, but it's not really meaningful
+ because we don't have real data behind num_blocks, last_published,
+ has_unpublished_changes, and has_unpublished_deletes. The has_* in
+ particular are going to be expensive to compute, particularly if we have
+ many large libraries. We also don't use that data for the library list
+ page yet.
+
+ We're looking at re-doing a lot of the UX right now, and so I'm holding
+ off on making deeper changes. We should either make sure we don't need
+ those fields and remove them from the returned results, or else we
+ should figure out how to make them more performant.
+
+ I've marked this as @skip to flag it for future review.
"""
lib1 = self._create_library(slug="some-slug-1", title="Existing Library")
lib2 = self._create_library(slug="some-slug-2", title="Existing Library")
- lib1['num_blocks'] = lib2['num_blocks'] = None
+ lib1['num_blocks'] = lib2['num_blocks'] = 0
lib1['last_published'] = lib2['last_published'] = None
- lib1['has_unpublished_changes'] = lib2['has_unpublished_changes'] = None
- lib1['has_unpublished_deletes'] = lib2['has_unpublished_deletes'] = None
+ lib1['version'] = lib2['version'] = None
+ lib1['has_unpublished_changes'] = lib2['has_unpublished_changes'] = False
+ lib1['has_unpublished_deletes'] = lib2['has_unpublished_deletes'] = False
result = self._list_libraries()
assert len(result) == 2
@@ -283,18 +236,20 @@ def test_library_blocks(self):
"""
Test the happy path of creating and working with XBlocks in a content
library.
+
+ Tests with some non-ASCII chars in slugs, titles, descriptions.
"""
- lib = self._create_library(slug="testlib1", title="A Test Library", description="Testing XBlocks")
+ lib = self._create_library(slug="téstlꜟط", title="A Tést Lꜟطrary", description="Tésting XBlocks")
lib_id = lib["id"]
assert lib['has_unpublished_changes'] is False
# A library starts out empty:
- assert self._get_library_blocks(lib_id) == []
+ assert self._get_library_blocks(lib_id)['results'] == []
# Add a 'problem' XBlock to the library:
- block_data = self._add_block_to_library(lib_id, "problem", "problem1")
+ block_data = self._add_block_to_library(lib_id, "problem", "ࠒröblæm1")
self.assertDictContainsEntries(block_data, {
- "id": "lb:CL-TEST:testlib1:problem:problem1",
+ "id": "lb:CL-TEST:téstlꜟط:problem:ࠒröblæm1",
"display_name": "Blank Problem",
"block_type": "problem",
"has_unpublished_changes": True,
@@ -305,7 +260,7 @@ def test_library_blocks(self):
assert 'def_key' in block_data
# now the library should contain one block and have unpublished changes:
- assert self._get_library_blocks(lib_id) == [block_data]
+ assert self._get_library_blocks(lib_id)['results'] == [block_data]
assert self._get_library(lib_id)['has_unpublished_changes'] is True
# Publish the changes:
@@ -314,7 +269,7 @@ def test_library_blocks(self):
# And now the block information should also show that block has no unpublished changes:
block_data["has_unpublished_changes"] = False
self.assertDictContainsEntries(self._get_library_block(block_id), block_data)
- assert self._get_library_blocks(lib_id) == [block_data]
+ assert self._get_library_blocks(lib_id)['results'] == [block_data]
# Now update the block's OLX:
orig_olx = self._get_library_block_olx(block_id)
@@ -377,7 +332,7 @@ def test_library_blocks_studio_view(self):
assert lib['has_unpublished_changes'] is False
# A library starts out empty:
- assert self._get_library_blocks(lib_id) == []
+ assert self._get_library_blocks(lib_id)['results'] == []
# Add a 'html' XBlock to the library:
block_data = self._add_block_to_library(lib_id, "html", "html1")
@@ -388,12 +343,9 @@ def test_library_blocks_studio_view(self):
"has_unpublished_changes": True,
})
block_id = block_data["id"]
- # Confirm that the result contains a definition key, but don't check its value,
- # which for the purposes of these tests is an implementation detail.
- assert 'def_key' in block_data
# now the library should contain one block and have unpublished changes:
- assert self._get_library_blocks(lib_id) == [block_data]
+ assert self._get_library_blocks(lib_id)['results'] == [block_data]
assert self._get_library(lib_id)['has_unpublished_changes'] is True
# Publish the changes:
@@ -402,7 +354,7 @@ def test_library_blocks_studio_view(self):
# And now the block information should also show that block has no unpublished changes:
block_data["has_unpublished_changes"] = False
self.assertDictContainsEntries(self._get_library_block(block_id), block_data)
- assert self._get_library_blocks(lib_id) == [block_data]
+ assert self._get_library_blocks(lib_id)['results'] == [block_data]
# Now update the block's OLX:
orig_olx = self._get_library_block_olx(block_id)
@@ -429,27 +381,22 @@ def test_list_library_blocks(self):
"""
lib = self._create_library(slug="list_blocks-slug", title="Library 1")
block1 = self._add_block_to_library(lib["id"], "problem", "problem1")
- block2 = self._add_block_to_library(lib["id"], "unit", "unit1")
-
- self._add_block_to_library(lib["id"], "problem", "problem2", parent_block=block2["id"])
+ self._add_block_to_library(lib["id"], "unit", "unit1")
- result = self._get_library_blocks(lib["id"])
- assert len(result) == 2
+ response = self._get_library_blocks(lib["id"])
+ result = response['results']
+ assert len(response['results']) == 2
assert block1 in result
-
- result = self._get_library_blocks(lib["id"], {'pagination': 'true'})
- assert len(result['results']) == 2
- assert result['next'] is None
+ assert response['next'] is None
self._add_block_to_library(lib["id"], "problem", "problem3")
+
# Test pagination
result = self._get_library_blocks(lib["id"])
- assert len(result) == 3
- result = self._get_library_blocks(lib["id"], {'pagination': 'true'})
assert len(result['results']) == 2
+
assert 'page=2' in result['next']
- assert 'pagination=true' in result['next']
- result = self._get_library_blocks(lib["id"], {'pagination': 'true', 'page': '2'})
+ result = self._get_library_blocks(lib["id"], {'page': '2'})
assert len(result['results']) == 1
assert result['next'] is None
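The listing endpoint is now always paginated, so the tests above read response['results'] and response['next'] instead of a bare list. An illustrative response (only the two keys these tests rely on are shown; values are made up):

    example_response = {
        "next": None,  # URL of the next page, or None on the last page
        "results": [
            {"id": "lb:CL-TEST:list_blocks-slug:problem:problem1", "block_type": "problem"},
        ],
    }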
@@ -466,17 +413,21 @@ def test_library_blocks_filters(self):
self._set_library_block_olx(block1["id"], "")
- assert len(self._get_library_blocks(lib['id'])) == 5
- assert len(self._get_library_blocks(lib['id'], {'text_search': 'Foo'})) == 2
- assert len(self._get_library_blocks(lib['id'], {'text_search': 'Display'})) == 1
- assert len(self._get_library_blocks(lib['id'], {'text_search': 'Video'})) == 1
- assert len(self._get_library_blocks(lib['id'], {'text_search': 'Foo', 'block_type': 'video'})) == 0
- assert len(self._get_library_blocks(lib['id'], {'text_search': 'Baz', 'block_type': 'video'})) == 1
- assert len(self._get_library_blocks(lib['id'], {'text_search': 'Baz', 'block_type': ['video', 'html']})) ==\
- 2
- assert len(self._get_library_blocks(lib['id'], {'block_type': 'video'})) == 1
- assert len(self._get_library_blocks(lib['id'], {'block_type': 'problem'})) == 3
- assert len(self._get_library_blocks(lib['id'], {'block_type': 'squirrel'})) == 0
+ assert len(self._get_library_blocks(lib['id'])['results']) == 5
+ assert len(self._get_library_blocks(lib['id'], {'text_search': 'Foo'})['results']) == 2
+ assert len(self._get_library_blocks(lib['id'], {'text_search': 'Display'})['results']) == 1
+ assert len(self._get_library_blocks(lib['id'], {'text_search': 'Video'})['results']) == 1
+ assert len(self._get_library_blocks(lib['id'], {'text_search': 'Foo', 'block_type': 'video'})['results']) == 0
+ assert len(self._get_library_blocks(lib['id'], {'text_search': 'Baz', 'block_type': 'video'})['results']) == 1
+ assert 2 == len(
+ self._get_library_blocks(
+ lib['id'],
+ {'text_search': 'Baz', 'block_type': ['video', 'html']}
+ )['results']
+ )
+ assert len(self._get_library_blocks(lib['id'], {'block_type': 'video'})['results']) == 1
+ assert len(self._get_library_blocks(lib['id'], {'block_type': 'problem'})['results']) == 3
+ assert len(self._get_library_blocks(lib['id'], {'block_type': 'squirrel'})['results']) == 0
@ddt.data(
('video-problem', VIDEO, 'problem', 400),
@@ -499,44 +450,6 @@ def test_library_blocks_type_constrained(self, slug, library_type, block_type, e
# Add a 'problem' XBlock to the library:
self._add_block_to_library(lib_id, block_type, 'test-block', expect_response=expect_response)
- def test_library_blocks_with_hierarchy(self):
- """
- Test library blocks with children
- """
- lib = self._create_library(slug="hierarchy_test_lib", title="A Test Library")
- lib_id = lib["id"]
-
- # Add a 'unit' XBlock to the library:
- unit_block = self._add_block_to_library(lib_id, "unit", "unit1")
- # Add an HTML child block:
- child1 = self._add_block_to_library(lib_id, "html", "html1", parent_block=unit_block["id"])
- self._set_library_block_olx(child1["id"], "<html>Hello world</html>")
- # Add a problem child block:
- child2 = self._add_block_to_library(lib_id, "problem", "problem1", parent_block=unit_block["id"])
- self._set_library_block_olx(child2["id"], """
- <problem><multiplechoiceresponse>
-     <p>What is an even number?</p>
-     <choicegroup type="MultipleChoice">
-         <choice correct="false">3</choice>
-         <choice correct="true">2</choice>
-     </choicegroup>
- </multiplechoiceresponse></problem>
- """)
-
- # Check the resulting OLX of the unit:
- assert self._get_library_block_olx(unit_block['id']) ==\
- '<unit xblock-family="xblock.v1">\n  <xblock-include definition="html/html1"/>\n' \
- '  <xblock-include definition="problem/problem1"/>\n</unit>\n'
-
- # The unit can see and render its children:
- fragment = self._render_block_view(unit_block["id"], "student_view")
- assert 'Hello world' in fragment['content']
- assert 'What is an even number?' in fragment['content']
-
- # We cannot add a duplicate ID to the library, either at the top level or as a child:
- self._add_block_to_library(lib_id, "problem", "problem1", expect_response=400)
- self._add_block_to_library(lib_id, "problem", "problem1", parent_block=unit_block["id"], expect_response=400)
-
# Test that permissions are enforced for content libraries
def test_library_permissions(self): # pylint: disable=too-many-statements
@@ -547,6 +460,10 @@ def test_library_permissions(self): # pylint: disable=too-many-statements
This is a single giant test case, because that optimizes for the fastest
test run time, even though it can make debugging failures harder.
+
+ TODO: The asset permissions parts of this test have been commented out
+ for now. These should be re-enabled after we re-implement them over
+ Learning Core data models.
"""
# Create a few users to use for all of these tests:
admin = UserFactory.create(username="Admin", email="admin@example.com")
@@ -671,17 +588,17 @@ def test_library_permissions(self): # pylint: disable=too-many-statements
# But if we grant allow_public_read, then they can:
with self.as_user(admin):
self._update_library(lib_id, allow_public_read=True)
- self._set_library_block_asset(block3_key, "whatever.png", b"data")
+ # self._set_library_block_asset(block3_key, "whatever.png", b"data")
with self.as_user(random_user):
self._get_library_block_olx(block3_key)
- self._get_library_block_assets(block3_key)
- self._get_library_block_asset(block3_key, file_name="whatever.png")
+ # self._get_library_block_assets(block3_key)
+ # self._get_library_block_asset(block3_key, file_name="whatever.png")
# Users without authoring permission cannot edit nor delete XBlocks (this library has allow_public_read False):
for user in [reader, random_user]:
with self.as_user(user):
self._set_library_block_olx(block3_key, "", expect_response=403)
- self._set_library_block_asset(block3_key, "test.txt", b"data", expect_response=403)
+ # self._set_library_block_asset(block3_key, "test.txt", b"data", expect_response=403)
self._delete_library_block(block3_key, expect_response=403)
self._commit_library_changes(lib_id, expect_response=403)
self._revert_library_changes(lib_id, expect_response=403)
@@ -690,9 +607,9 @@ def test_library_permissions(self): # pylint: disable=too-many-statements
with self.as_user(author_group_member):
olx = self._get_library_block_olx(block3_key)
self._set_library_block_olx(block3_key, olx)
- self._get_library_block_assets(block3_key)
- self._set_library_block_asset(block3_key, "test.txt", b"data")
- self._get_library_block_asset(block3_key, file_name="test.txt")
+ # self._get_library_block_assets(block3_key)
+ # self._set_library_block_asset(block3_key, "test.txt", b"data")
+ # self._get_library_block_asset(block3_key, file_name="test.txt")
self._delete_library_block(block3_key)
self._commit_library_changes(lib_id)
self._revert_library_changes(lib_id) # This is a no-op after the commit, but should still have 200 response
@@ -713,194 +630,20 @@ def test_no_lockout(self):
)
self._remove_user_access(lib_key=lib['id'], username=admin.username)
- def test_library_blocks_with_links(self):
- """
- Test that libraries can link to XBlocks in other content libraries
- """
- # Create a problem bank:
- bank_lib = self._create_library(slug="problem_bank", title="Problem Bank")
- bank_lib_id = bank_lib["id"]
- # Add problem1 to the problem bank:
- p1 = self._add_block_to_library(bank_lib_id, "problem", "problem1")
- self._set_library_block_olx(p1["id"], """
- <problem><multiplechoiceresponse>
-     <p>What is an even number?</p>
-     <choicegroup type="MultipleChoice">
-         <choice correct="false">3</choice>
-         <choice correct="true">2</choice>
-     </choicegroup>
- </multiplechoiceresponse></problem>
- """)
- # Commit the changes, creating version 1:
- self._commit_library_changes(bank_lib_id)
- # Now update problem 1 and create a new problem 2:
- self._set_library_block_olx(p1["id"], """
- <problem><multiplechoiceresponse>
-     <p>What is an odd number?</p>
-     <choicegroup type="MultipleChoice">
-         <choice correct="true">3</choice>
-         <choice correct="false">2</choice>
-     </choicegroup>
- </multiplechoiceresponse></problem>
- """)
- p2 = self._add_block_to_library(bank_lib_id, "problem", "problem2")
- self._set_library_block_olx(p2["id"], """
- <problem><multiplechoiceresponse>
-     <p>What holds this XBlock?</p>
-     <choicegroup type="MultipleChoice">
-         <choice correct="false">A course</choice>
-         <choice correct="true">A problem bank</choice>
-     </choicegroup>
- </multiplechoiceresponse></problem>
- """)
- # Commit the changes, creating version 2:
- self._commit_library_changes(bank_lib_id)
- # At this point, bank_lib contains two problems and has two versions.
- # In version 1, problem1 is "What is an even number", and in version 2 it's "What is an odd number".
- # Problem2 exists only in version 2 and asks "What holds this XBlock?"
-
- lib = self._create_library(slug="links_test_lib", title="Link Test Library")
- lib_id = lib["id"]
- # Link to the problem bank:
- self._link_to_library(lib_id, "problem_bank", bank_lib_id)
- self._link_to_library(lib_id, "problem_bank_v1", bank_lib_id, version=1)
-
- # Add a 'unit' XBlock to the library:
- unit_block = self._add_block_to_library(lib_id, "unit", "unit1")
- self._set_library_block_olx(unit_block["id"], """
-
-
-
-
-
-
-
-
- """)
-
- # The unit can see and render its children:
- fragment = self._render_block_view(unit_block["id"], "student_view")
- assert 'What is an odd number?' in fragment['content']
- assert 'What is an even number?' in fragment['content']
- assert 'What holds this XBlock?' in fragment['content']
-
- # Also check the API for retrieving links:
- links_created = self._get_library_links(lib_id)
- links_created.sort(key=lambda link: link["id"])
- assert len(links_created) == 2
-
- assert links_created[0]['id'] == 'problem_bank'
- assert links_created[0]['bundle_uuid'] == bank_lib['bundle_uuid']
- assert links_created[0]['version'] == 2
- assert links_created[0]['latest_version'] == 2
- assert links_created[0]['opaque_key'] == bank_lib_id
-
- assert links_created[1]['id'] == 'problem_bank_v1'
- assert links_created[1]['bundle_uuid'] == bank_lib['bundle_uuid']
- assert links_created[1]['version'] == 1
- assert links_created[1]['latest_version'] == 2
- assert links_created[1]['opaque_key'] == bank_lib_id
-
- def test_library_blocks_with_deleted_links(self):
- """
- Test that libraries can handle deleted links to bundles
- """
- # Create a problem bank:
- bank_lib = self._create_library(slug="problem_bank1X", title="Problem Bank")
- bank_lib_id = bank_lib["id"]
- # Add problem1 to the problem bank:
- p1 = self._add_block_to_library(bank_lib_id, "problem", "problem1X")
- self._set_library_block_olx(p1["id"], """
- <problem><multiplechoiceresponse>
-     <p>What is an even number?</p>
-     <choicegroup type="MultipleChoice">
-         <choice correct="false">3</choice>
-         <choice correct="true">2</choice>
-     </choicegroup>
- </multiplechoiceresponse></problem>
- """)
- # Commit the changes, creating version 1:
- self._commit_library_changes(bank_lib_id)
-
- # Create another problem bank:
- bank_lib2 = self._create_library(slug="problem_bank2", title="Problem Bank 2")
- bank_lib2_id = bank_lib2["id"]
- # Add problem1 to the problem bank:
- p2 = self._add_block_to_library(bank_lib2_id, "problem", "problem1X")
- self._set_library_block_olx(p2["id"], """
- <problem><multiplechoiceresponse>
-     <p>What is an odd number?</p>
-     <choicegroup type="MultipleChoice">
-         <choice correct="true">3</choice>
-         <choice correct="false">2</choice>
-     </choicegroup>
- </multiplechoiceresponse></problem>
- """)
- # Commit the changes, creating version 1:
- self._commit_library_changes(bank_lib2_id)
-
- lib = self._create_library(slug="problem_bank2X", title="Link Test Library")
- lib_id = lib["id"]
- # Link to the other libraries:
- self._link_to_library(lib_id, "problem_bank", bank_lib_id)
- self._link_to_library(lib_id, "problem_bank_v1", bank_lib2_id)
-
- # check the API for retrieving links:
- links_created = self._get_library_links(lib_id)
- links_created.sort(key=lambda link: link["id"])
- assert len(links_created) == 2
-
- assert links_created[0]['id'] == 'problem_bank'
- assert links_created[0]['bundle_uuid'] == bank_lib['bundle_uuid']
- assert links_created[0]['version'] == 1
- assert links_created[0]['latest_version'] == 1
- assert links_created[0]['opaque_key'] == bank_lib_id
-
- assert links_created[1]['id'] == 'problem_bank_v1'
- assert links_created[1]['bundle_uuid'] == bank_lib2['bundle_uuid']
- assert links_created[1]['version'] == 1
- assert links_created[1]['latest_version'] == 1
- assert links_created[1]['opaque_key'] == bank_lib2_id
-
- # Delete one of the linked bundles/libraries
- self._delete_library(bank_lib2_id)
-
- # update the cache so we're not getting cached links in the next step
- cache_key = 'bundle_version:{}:'.format(bank_lib['bundle_uuid'])
- cache.delete(cache_key)
- cache_key = 'bundle_version:{}:'.format(bank_lib2['bundle_uuid'])
- cache.delete(cache_key)
-
- links_created = self._get_library_links(lib_id)
- links_created.sort(key=lambda link: link["id"])
- assert len(links_created) == 2
-
- assert links_created[0]['id'] == 'problem_bank'
- assert links_created[0]['bundle_uuid'] == bank_lib['bundle_uuid']
- assert links_created[0]['version'] == 1
- assert links_created[0]['latest_version'] == 1
- assert links_created[0]['opaque_key'] == bank_lib_id
-
- # If a link has been deleted, the latest version will be 0,
- # and the opaque key will be `None`.
- assert links_created[1]['id'] == 'problem_bank_v1'
- assert links_created[1]['bundle_uuid'] == bank_lib2['bundle_uuid']
- assert links_created[1]['version'] == 1
- assert links_created[1]['latest_version'] == 0
- assert links_created[1]['opaque_key'] is None
-
def test_library_blocks_limit(self):
"""
Test that libraries don't allow more than specified blocks
"""
with self.settings(MAX_BLOCKS_PER_CONTENT_LIBRARY=1):
- lib = self._create_library(slug="test_lib_limits", title="Limits Test Library", description="Testing XBlocks limits in a library") # lint-amnesty, pylint: disable=line-too-long
+ lib = self._create_library(
+ slug="test_lib_limits",
+ title="Limits Test Library",
+ description="Testing XBlocks limits in a library"
+ )
lib_id = lib["id"]
- block_data = self._add_block_to_library(lib_id, "unit", "unit1")
+ self._add_block_to_library(lib_id, "unit", "unit1")
# Second block should throw error
self._add_block_to_library(lib_id, "problem", "problem1", expect_response=400)
- # Also check that limit applies to child blocks too
- self._add_block_to_library(lib_id, "html", "html1", parent_block=block_data['id'], expect_response=400)
@ddt.data(
('complex-types', COMPLEX, False),
@@ -926,7 +669,11 @@ def test_content_library_create_event(self):
"""
event_receiver = Mock()
CONTENT_LIBRARY_CREATED.connect(event_receiver)
- lib = self._create_library(slug="test_lib_event_create", title="Event Test Library", description="Testing event in library") # lint-amnesty, pylint: disable=line-too-long
+ lib = self._create_library(
+ slug="test_lib_event_create",
+ title="Event Test Library",
+ description="Testing event in library"
+ )
library_key = LibraryLocatorV2.from_string(lib['id'])
event_receiver.assert_called_once()
@@ -948,7 +695,11 @@ def test_content_library_update_event(self):
"""
event_receiver = Mock()
CONTENT_LIBRARY_UPDATED.connect(event_receiver)
- lib = self._create_library(slug="test_lib_event_update", title="Event Test Library", description="Testing event in library") # lint-amnesty, pylint: disable=line-too-long
+ lib = self._create_library(
+ slug="test_lib_event_update",
+ title="Event Test Library",
+ description="Testing event in library"
+ )
lib2 = self._update_library(lib["id"], title="New Title")
library_key = LibraryLocatorV2.from_string(lib2['id'])
@@ -972,7 +723,11 @@ def test_content_library_delete_event(self):
"""
event_receiver = Mock()
CONTENT_LIBRARY_DELETED.connect(event_receiver)
- lib = self._create_library(slug="test_lib_event_delete", title="Event Test Library", description="Testing event in library") # lint-amnesty, pylint: disable=line-too-long
+ lib = self._create_library(
+ slug="test_lib_event_delete",
+ title="Event Test Library",
+ description="Testing event in library"
+ )
library_key = LibraryLocatorV2.from_string(lib['id'])
self._delete_library(lib["id"])
@@ -996,7 +751,11 @@ def test_library_block_create_event(self):
"""
event_receiver = Mock()
LIBRARY_BLOCK_CREATED.connect(event_receiver)
- lib = self._create_library(slug="test_lib_block_event_create", title="Event Test Library", description="Testing event in library") # lint-amnesty, pylint: disable=line-too-long
+ lib = self._create_library(
+ slug="test_lib_block_event_create",
+ title="Event Test Library",
+ description="Testing event in library"
+ )
lib_id = lib["id"]
self._add_block_to_library(lib_id, "problem", "problem1")
@@ -1026,7 +785,11 @@ def test_library_block_olx_update_event(self):
"""
event_receiver = Mock()
LIBRARY_BLOCK_UPDATED.connect(event_receiver)
- lib = self._create_library(slug="test_lib_block_event_olx_update", title="Event Test Library", description="Testing event in library") # lint-amnesty, pylint: disable=line-too-long
+ lib = self._create_library(
+ slug="test_lib_block_event_olx_update",
+ title="Event Test Library",
+ description="Testing event in library"
+ )
lib_id = lib["id"]
library_key = LibraryLocatorV2.from_string(lib_id)
@@ -1069,48 +832,19 @@ def test_library_block_olx_update_event(self):
event_receiver.call_args.kwargs
)
- def test_library_block_child_update_event(self):
- """
- Check that LIBRARY_BLOCK_CREATED event is sent when a child is created.
- """
- event_receiver = Mock()
- LIBRARY_BLOCK_UPDATED.connect(event_receiver)
- lib = self._create_library(slug="test_lib_block_event_child_update", title="Event Test Library", description="Testing event in library") # lint-amnesty, pylint: disable=line-too-long
- lib_id = lib["id"]
-
- library_key = LibraryLocatorV2.from_string(lib_id)
-
- parent_block = self._add_block_to_library(lib_id, "unit", "u1")
- parent_block_id = parent_block["id"]
-
- self._add_block_to_library(lib["id"], "problem", "problem1", parent_block=parent_block_id)
-
- usage_key = LibraryUsageLocatorV2(
- lib_key=library_key,
- block_type="problem",
- usage_id="problem1"
- )
-
- event_receiver.assert_called_once()
- self.assertDictContainsSubset(
- {
- "signal": LIBRARY_BLOCK_UPDATED,
- "sender": None,
- "library_block": LibraryBlockData(
- library_key=library_key,
- usage_key=usage_key
- ),
- },
- event_receiver.call_args.kwargs
- )
-
+ @skip("We still need to re-implement static asset handling.")
def test_library_block_add_asset_update_event(self):
"""
- Check that LIBRARY_BLOCK_CREATED event is sent when a static asset is uploaded associated with the XBlock.
+        Check that the LIBRARY_BLOCK_UPDATED event is sent when a static asset
+        associated with the XBlock is uploaded.
"""
event_receiver = Mock()
LIBRARY_BLOCK_UPDATED.connect(event_receiver)
- lib = self._create_library(slug="test_lib_block_event_add_asset_update", title="Event Test Library", description="Testing event in library") # lint-amnesty, pylint: disable=line-too-long
+ lib = self._create_library(
+ slug="test_lib_block_event_add_asset_update",
+ title="Event Test Library",
+ description="Testing event in library"
+ )
lib_id = lib["id"]
library_key = LibraryLocatorV2.from_string(lib_id)
@@ -1138,13 +872,19 @@ def test_library_block_add_asset_update_event(self):
event_receiver.call_args.kwargs
)
+ @skip("We still need to re-implement static asset handling.")
def test_library_block_del_asset_update_event(self):
"""
- Check that LIBRARY_BLOCK_CREATED event is sent when a static asset is removed from XBlock.
+        Check that the LIBRARY_BLOCK_UPDATED event is sent when a static asset
+        is removed from the XBlock.
"""
event_receiver = Mock()
LIBRARY_BLOCK_UPDATED.connect(event_receiver)
- lib = self._create_library(slug="test_lib_block_event_del_asset_update", title="Event Test Library", description="Testing event in library") # lint-amnesty, pylint: disable=line-too-long
+ lib = self._create_library(
+ slug="test_lib_block_event_del_asset_update",
+ title="Event Test Library",
+ description="Testing event in library"
+ )
lib_id = lib["id"]
library_key = LibraryLocatorV2.from_string(lib_id)
@@ -1180,7 +920,11 @@ def test_library_block_delete_event(self):
"""
event_receiver = Mock()
LIBRARY_BLOCK_DELETED.connect(event_receiver)
- lib = self._create_library(slug="test_lib_block_event_delete", title="Event Test Library", description="Testing event in library") # lint-amnesty, pylint: disable=line-too-long
+ lib = self._create_library(
+ slug="test_lib_block_event_delete",
+ title="Event Test Library",
+ description="Testing event in library"
+ )
lib_id = lib["id"]
library_key = LibraryLocatorV2.from_string(lib_id)
@@ -1210,15 +954,6 @@ def test_library_block_delete_event(self):
)
-class ContentLibrariesTest(
- ContentLibrariesTestMixin,
- ContentLibrariesRestApiTest,
-):
- """
- General tests for Blockstore-based Content Libraries, using the installed Blockstore app.
- """
-
-
@ddt.ddt
class ContentLibraryXBlockValidationTest(APITestCase):
"""Tests only focused on service validation, no Blockstore needed."""
diff --git a/openedx/core/djangoapps/content_libraries/tests/test_models.py b/openedx/core/djangoapps/content_libraries/tests/test_models.py
index 3f8b51b6ad74..81a5a8fa32f3 100644
--- a/openedx/core/djangoapps/content_libraries/tests/test_models.py
+++ b/openedx/core/djangoapps/content_libraries/tests/test_models.py
@@ -4,7 +4,6 @@
from unittest import mock
-import uuid
from django.test import TestCase
from django.test import RequestFactory
@@ -30,14 +29,13 @@ class ContentLibraryTest(TestCase):
def _create_library(self, **kwds):
"""
- Create a library model, without a blockstore bundle attached to it.
+ Create a library model, without a LearningPackage attached to it.
"""
org = Organization.objects.create(name='foo', short_name='foo')
return ContentLibrary.objects.create(
org=org,
slug='foobar',
type=COMPLEX,
- bundle_uuid=uuid.uuid4(),
allow_public_learning=False,
allow_public_read=False,
license=ALL_RIGHTS_RESERVED,
diff --git a/openedx/core/djangoapps/content_libraries/tests/test_runtime.py b/openedx/core/djangoapps/content_libraries/tests/test_runtime.py
index 430306d936e2..f35ba7ea7b90 100644
--- a/openedx/core/djangoapps/content_libraries/tests/test_runtime.py
+++ b/openedx/core/djangoapps/content_libraries/tests/test_runtime.py
@@ -6,7 +6,6 @@
from completion.test_utils import CompletionWaffleTestMixin
from django.db import connections, transaction
-from django.test import LiveServerTestCase
from django.utils.text import slugify
from organizations.models import Organization
from rest_framework.test import APIClient
@@ -22,13 +21,11 @@
URL_BLOCK_FIELDS_URL,
)
from openedx.core.djangoapps.content_libraries.tests.user_state_block import UserStateTestBlock
-from openedx.core.djangoapps.content_libraries.constants import COMPLEX, ALL_RIGHTS_RESERVED, CC_4_BY
+from openedx.core.djangoapps.content_libraries.constants import COMPLEX, ALL_RIGHTS_RESERVED
from openedx.core.djangoapps.dark_lang.models import DarkLangConfig
from openedx.core.djangoapps.xblock import api as xblock_api
from openedx.core.djangolib.testing.utils import skip_unless_lms, skip_unless_cms
-from openedx.core.lib import blockstore_api
from common.djangoapps.student.tests.factories import UserFactory
-from xmodule.unit_block import UnitBlock # lint-amnesty, pylint: disable=wrong-import-order
class ContentLibraryContentTestMixin:
@@ -44,9 +41,6 @@ def setUp(self):
# staff user
self.staff_user = UserFactory(password="edx", is_staff=True)
- # Create a collection using Blockstore API directly only because there
- # is not yet any Studio REST API for doing so:
- self.collection = blockstore_api.create_collection("Content Library Test Collection")
# Create an organization
self.organization = Organization.objects.create(
name="Content Libraries Tachyon Exploration & Survey Team",
@@ -55,7 +49,6 @@ def setUp(self):
_, slug = self.id().rsplit('.', 1)
with transaction.atomic():
self.library = library_api.create_library(
- collection_uuid=self.collection.uuid,
library_type=COMPLEX,
org=self.organization,
slug=slugify(slug),
@@ -73,40 +66,6 @@ class ContentLibraryRuntimeTestMixin(ContentLibraryContentTestMixin):
content library.
"""
- @skip_unless_cms # creating child blocks only works properly in Studio
- def test_identical_olx(self):
- """
- Test library blocks with children that also have identical OLX. Since
- the blockstore runtime caches authored field data based on the hash of
- the OLX, this can catch some potential bugs, especially given that the
- "children" field stores usage IDs, not definition IDs.
- """
-        # Create a unit containing a <problem>
- unit_block_key = library_api.create_library_block(self.library.key, "unit", "u1").usage_key
- library_api.create_library_block_child(unit_block_key, "problem", "p1")
- library_api.publish_changes(self.library.key)
- # Now do the same in a different library:
- with transaction.atomic():
- library2 = library_api.create_library(
- collection_uuid=self.collection.uuid,
- org=self.organization,
- slug="idolx",
- title=("Identical OLX Test Lib 2"),
- description="",
- library_type=COMPLEX,
- allow_public_learning=True,
- allow_public_read=False,
- library_license=CC_4_BY,
- )
- unit_block2_key = library_api.create_library_block(library2.key, "unit", "u1").usage_key
- library_api.create_library_block_child(unit_block2_key, "problem", "p1")
- library_api.publish_changes(library2.key)
- # Load both blocks:
- unit_block = xblock_api.load_block(unit_block_key, self.student_a)
- unit_block2 = xblock_api.load_block(unit_block2_key, self.student_a)
- assert library_api.get_library_block_olx(unit_block_key) == library_api.get_library_block_olx(unit_block2_key)
- assert unit_block.children != unit_block2.children
-
def test_dndv2_sets_translator(self):
dnd_block_key = library_api.create_library_block(self.library.key, "drag-and-drop-v2", "dnd1").usage_key
library_api.publish_changes(self.library.key)
@@ -119,17 +78,10 @@ def test_has_score(self):
Test that the LMS-specific 'has_score' attribute is getting added to
blocks.
"""
- unit_block_key = library_api.create_library_block(self.library.key, "unit", "score-unit1").usage_key
problem_block_key = library_api.create_library_block(self.library.key, "problem", "score-prob1").usage_key
library_api.publish_changes(self.library.key)
- unit_block = xblock_api.load_block(unit_block_key, self.student_a)
problem_block = xblock_api.load_block(problem_block_key, self.student_a)
-
- assert not hasattr(UnitBlock, 'has_score')
- # The block class doesn't declare 'has_score'
- assert unit_block.has_score is False
- # But it gets added by the runtime and defaults to False
- # And problems do have has_score True:
+ # problems do have has_score True:
assert problem_block.has_score is True
@skip_unless_cms # creating child blocks only works properly in Studio
@@ -211,7 +163,7 @@ def test_xblock_fields(self):
assert fields_get_result.data['metadata']['display_name'] == 'New Text Block'
# Check the POST API for the block:
- fields_post_result = client.post(URL_BLOCK_FIELDS_URL.format(block_key=block_key), data={
+ client.post(URL_BLOCK_FIELDS_URL.format(block_key=block_key), data={
'data': 'test
',
'metadata': {
'display_name': 'New Display Name',
@@ -219,10 +171,10 @@ def test_xblock_fields(self):
}, format='json')
block_saved = xblock_api.load_block(block_key, self.staff_user)
assert block_saved.data == '\ntest
\n'
- assert xblock_api.get_block_display_name(block_saved) == 'New Display Name'
+ assert block_saved.display_name == 'New Display Name'
-class ContentLibraryRuntimeTest(ContentLibraryRuntimeTestMixin, BlockstoreAppTestMixin, LiveServerTestCase):
+class ContentLibraryRuntimeTest(ContentLibraryRuntimeTestMixin, BlockstoreAppTestMixin):
"""
Tests XBlock runtime using XBlocks in a content library using the installed Blockstore app.
@@ -538,7 +490,6 @@ def test_i18n(self):
class ContentLibraryXBlockUserStateTest( # type: ignore[misc]
ContentLibraryXBlockUserStateTestMixin,
BlockstoreAppTestMixin,
- LiveServerTestCase,
):
"""
Tests XBlock user state for XBlocks in a content library using the installed Blockstore app.
@@ -605,7 +556,6 @@ class ContentLibraryXBlockCompletionTest(
ContentLibraryXBlockCompletionTestMixin,
CompletionWaffleTestMixin,
BlockstoreAppTestMixin,
- LiveServerTestCase,
):
"""
Test that the Blockstore-based XBlocks can track their completion status
diff --git a/openedx/core/djangoapps/content_libraries/tests/test_static_assets.py b/openedx/core/djangoapps/content_libraries/tests/test_static_assets.py
index e330101eb3bc..6a75d63110b8 100644
--- a/openedx/core/djangoapps/content_libraries/tests/test_static_assets.py
+++ b/openedx/core/djangoapps/content_libraries/tests/test_static_assets.py
@@ -1,6 +1,7 @@
"""
Tests for static asset files in Blockstore-based Content Libraries
"""
+from unittest import skip
from openedx.core.djangoapps.content_libraries.tests.base import (
ContentLibrariesRestApiTest,
@@ -22,7 +23,8 @@
"""
-class ContentLibrariesStaticAssetsTestMixin:
+@skip("Assets are being reimplemented in Learning Core. Disable until that's ready.")
+class ContentLibrariesStaticAssetsTest(ContentLibrariesRestApiTest):
"""
Tests for static asset files in Blockstore-based Content Libraries
@@ -106,12 +108,3 @@ def check_download():
self._commit_library_changes(library["id"])
check_sjson()
check_download()
-
-
-class ContentLibrariesStaticAssetsTest(
- ContentLibrariesStaticAssetsTestMixin,
- ContentLibrariesRestApiTest,
-):
- """
- Tests for static asset files in Blockstore-based Content Libraries, using the installed Blockstore app.
- """
diff --git a/openedx/core/djangoapps/content_libraries/urls.py b/openedx/core/djangoapps/content_libraries/urls.py
index dd6ada45dc26..022c288e363f 100644
--- a/openedx/core/djangoapps/content_libraries/urls.py
+++ b/openedx/core/djangoapps/content_libraries/urls.py
@@ -31,10 +31,6 @@
path('', views.LibraryDetailsView.as_view()),
# Get the list of XBlock types that can be added to this library
path('block_types/', views.LibraryBlockTypesView.as_view()),
- # Get the list of Blockstore Bundle Links for this library, or add a new one:
- path('links/', views.LibraryLinksView.as_view()),
- # Update or delete a link:
-    path('links/<str:link_id>/', views.LibraryLinkDetailView.as_view()),
# Get the list of XBlocks in this library, or add a new one:
path('blocks/', views.LibraryBlocksView.as_view()),
# Commit (POST) or revert (DELETE) all pending changes to this library:
diff --git a/openedx/core/djangoapps/content_libraries/views.py b/openedx/core/djangoapps/content_libraries/views.py
index bd5f091b136c..0df670b1aea6 100644
--- a/openedx/core/djangoapps/content_libraries/views.py
+++ b/openedx/core/djangoapps/content_libraries/views.py
@@ -3,39 +3,84 @@
Content Libraries Views
=======================
-This module contains the REST APIs for blockstore-based content libraries, and
-LTI 1.3 views.
+This module contains the REST APIs for Learning Core-based content libraries,
+and LTI 1.3 views (though I'm not sure how functional the LTI piece of this is
+right now).
+
+Most of the real work is intended to happen in the api.py module. The views are
+intended to be thin ones that do:
+
+1. Permissions checking
+2. Input/output data conversion via serializers
+3. Pagination
+
+Everything else should be delegated to api.py for the actual business logic. If
+you see business logic happening in these views, consider refactoring them into
+the api module instead.
+
+.. warning::
+ **NOTICE: DO NOT USE THE @atomic DECORATOR FOR THESE VIEWS!!!**
+
+    Views in this module are decorated with:
+ @method_decorator(non_atomic_requests, name="dispatch")
+
+ This forces the views to execute without an implicit view-level transaction,
+ even if the project is configured to use view-level transactions by default.
+ (So no matter what you set the ATOMIC_REQUESTS setting to.)
+
+ We *must* use manual transactions for content libraries related views, or
+ we'll run into mysterious race condition bugs. We should NOT use the @atomic
+ decorator over any of these views.
+
+ The problem is this: Code outside of this app will want to listen for
+ content lifecycle events like ``LIBRARY_BLOCK_CREATED`` and take certain
+ actions based on them. We see this pattern used extensively with courses.
+ Another common pattern is to use celery to queue up an asynchronous task to
+ do that work.
+
+ If there is an implicit database transaction around the entire view
+ execution, the celery task may start up just before the view finishes
+ executing. When that happens, the celery task doesn't see the new content
+ change, because the view transaction hasn't finished committing it to the
+ database yet.
+
+ The worst part of this is that dev environments and tests often won't catch
+ this because celery is typically configured to run in-process in those
+ situations. When it's run in-process, celery is already inside the view's
+ transaction so it will "see" the new changes and everything will appear to
+    be fine, only to fail intermittently when deployed to production.
+
+ We can and should continue to use atomic() as a context manager when we want
+ to make changes to multiple models. But this should happen at the api module
+ layer, not in the view. Other apps are permitted to call functions in the
+ public api.py module, and we want to make sure those api calls manage their
+ own transactions and don't assume that they're being called in an atomic
+ block.
+
+ Historical note: These views used to be wrapped with @atomic because we
+ wanted to make all views that operated on Blockstore data atomic:
+ https://github.com/openedx/edx-platform/pull/30456
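+
+    As a rough sketch only (``SomeLibraryThingView`` and ``api.create_thing``
+    are hypothetical names, not code that exists in this module), the intended
+    split between the view layer and the api layer looks like this::
+
+        # views.py: no implicit transaction wrapping the whole request
+        @method_decorator(non_atomic_requests, name="dispatch")
+        @view_auth_classes()
+        class SomeLibraryThingView(APIView):
+            def post(self, request, lib_key_str):
+                key = LibraryLocatorV2.from_string(lib_key_str)
+                api.require_permission_for_library_key(
+                    key, request.user, permissions.CAN_EDIT_THIS_CONTENT_LIBRARY
+                )
+                return Response(api.create_thing(key))
+
+        # api.py: a short, explicit transaction; the event fires after commit,
+        # so celery tasks listening for it will see the committed data.
+        def create_thing(library_key):
+            with atomic():
+                ...  # write to multiple models here
+            LIBRARY_BLOCK_CREATED.send_event(...)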
"""
-
from functools import wraps
import itertools
import json
import logging
from django.conf import settings
-from django.contrib.auth import authenticate
-from django.contrib.auth import get_user_model
-from django.contrib.auth import login
+from django.contrib.auth import authenticate, get_user_model, login
from django.contrib.auth.models import Group
-from django.db.transaction import atomic
-from django.http import Http404
-from django.http import HttpResponseBadRequest
-from django.http import JsonResponse
+from django.db.transaction import atomic, non_atomic_requests
+from django.http import Http404, HttpResponseBadRequest, JsonResponse
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.translation import gettext as _
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.csrf import csrf_exempt
-from django.views.generic.base import TemplateResponseMixin
-from django.views.generic.base import View
-from pylti1p3.contrib.django import DjangoCacheDataStorage
-from pylti1p3.contrib.django import DjangoDbToolConf
-from pylti1p3.contrib.django import DjangoMessageLaunch
-from pylti1p3.contrib.django import DjangoOIDCLogin
-from pylti1p3.exception import LtiException
-from pylti1p3.exception import OIDCException
+from django.views.generic.base import TemplateResponseMixin, View
+from pylti1p3.contrib.django import DjangoCacheDataStorage, DjangoDbToolConf, DjangoMessageLaunch, DjangoOIDCLogin
+from pylti1p3.exception import LtiException, OIDCException
import edx_api_doc_tools as apidocs
from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2
@@ -62,8 +107,6 @@
LibraryXBlockCreationSerializer,
LibraryXBlockMetadataSerializer,
LibraryXBlockTypeSerializer,
- LibraryBundleLinkSerializer,
- LibraryBundleLinkUpdateSerializer,
LibraryXBlockOlxSerializer,
LibraryXBlockStaticFileSerializer,
LibraryXBlockStaticFilesSerializer,
@@ -74,9 +117,7 @@
from openedx.core.djangoapps.safe_sessions.middleware import mark_user_change_as_expected
from openedx.core.djangoapps.xblock import api as xblock_api
-from .models import ContentLibrary
-from .models import LtiGradedResource
-from .models import LtiProfile
+from .models import ContentLibrary, LtiGradedResource, LtiProfile
User = get_user_model()
@@ -137,13 +178,13 @@ class LibraryApiPagination(PageNumberPagination):
]
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryRootView(APIView):
"""
Views to list, search for, and create content libraries.
"""
- @atomic
@apidocs.schema(
parameters=[
*LibraryApiPagination.apidoc_params,
@@ -170,14 +211,14 @@ def get(self, request):
text_search = serializer.validated_data['text_search']
paginator = LibraryApiPagination()
- queryset = api.get_libraries_for_user(request.user, org=org, library_type=library_type)
- if text_search:
- result = api.get_metadata(queryset, text_search=text_search)
- result = paginator.paginate_queryset(result, request)
- else:
- # We can paginate queryset early and prevent fetching unneeded metadata
- paginated_qs = paginator.paginate_queryset(queryset, request)
- result = api.get_metadata(paginated_qs)
+ queryset = api.get_libraries_for_user(
+ request.user,
+ org=org,
+ library_type=library_type,
+ text_search=text_search,
+ )
+ paginated_qs = paginator.paginate_queryset(queryset, request)
+ result = api.get_metadata(paginated_qs)
serializer = ContentLibraryMetadataSerializer(result, many=True)
# Verify `pagination` param to maintain compatibility with older
@@ -186,7 +227,6 @@ def get(self, request):
return paginator.get_paginated_response(serializer.data)
return Response(serializer.data)
- @atomic
def post(self, request):
"""
Create a new content library.
@@ -212,21 +252,31 @@ def post(self, request):
detail={"org": f"No such organization '{org_name}' found."}
)
org = Organization.objects.get(short_name=org_name)
+
+ # Backwards compatibility: ignore the no-longer used "collection_uuid"
+ # parameter. This was necessary with Blockstore, but not used for
+ # Learning Core. TODO: This can be removed once the frontend stops
+ # sending it to us. This whole bit of deserialization is kind of weird
+    # though, with the renames and such. Look into this later for cleanup.
+ data.pop("collection_uuid", None)
+
try:
- result = api.create_library(org=org, **data)
+ with atomic():
+ result = api.create_library(org=org, **data)
+ # Grant the current user admin permissions on the library:
+ api.set_library_user_permissions(result.key, request.user, api.AccessLevel.ADMIN_LEVEL)
except api.LibraryAlreadyExists:
raise ValidationError(detail={"slug": "A library with that ID already exists."}) # lint-amnesty, pylint: disable=raise-missing-from
- # Grant the current user admin permissions on the library:
- api.set_library_user_permissions(result.key, request.user, api.AccessLevel.ADMIN_LEVEL)
+
return Response(ContentLibraryMetadataSerializer(result).data)
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryDetailsView(APIView):
"""
Views to work with a specific content library
"""
- @atomic
@convert_exceptions
def get(self, request, lib_key_str):
"""
@@ -237,7 +287,6 @@ def get(self, request, lib_key_str):
result = api.get_library(key)
return Response(ContentLibraryMetadataSerializer(result).data)
- @atomic
@convert_exceptions
def patch(self, request, lib_key_str):
"""
@@ -260,7 +309,6 @@ def patch(self, request, lib_key_str):
result = api.get_library(key)
return Response(ContentLibraryMetadataSerializer(result).data)
- @atomic
@convert_exceptions
def delete(self, request, lib_key_str): # pylint: disable=unused-argument
"""
@@ -272,6 +320,7 @@ def delete(self, request, lib_key_str): # pylint: disable=unused-argument
return Response({})
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryTeamView(APIView):
"""
@@ -281,7 +330,6 @@ class LibraryTeamView(APIView):
Note also the 'allow_public_' settings which can be edited by PATCHing the
library itself (LibraryDetailsView.patch).
"""
- @atomic
@convert_exceptions
def post(self, request, lib_key_str):
"""
@@ -309,7 +357,6 @@ def post(self, request, lib_key_str):
grant = api.get_library_user_permissions(key, user)
return Response(ContentLibraryPermissionSerializer(grant).data)
- @atomic
@convert_exceptions
def get(self, request, lib_key_str):
"""
@@ -322,13 +369,13 @@ def get(self, request, lib_key_str):
return Response(ContentLibraryPermissionSerializer(team, many=True).data)
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryTeamUserView(APIView):
"""
View to add/remove/edit an individual user's permissions for a content
library.
"""
- @atomic
@convert_exceptions
def put(self, request, lib_key_str, username):
"""
@@ -347,7 +394,6 @@ def put(self, request, lib_key_str, username):
grant = api.get_library_user_permissions(key, user)
return Response(ContentLibraryPermissionSerializer(grant).data)
- @atomic
@convert_exceptions
def get(self, request, lib_key_str, username):
"""
@@ -361,7 +407,6 @@ def get(self, request, lib_key_str, username):
raise NotFound
return Response(ContentLibraryPermissionSerializer(grant).data)
- @atomic
@convert_exceptions
def delete(self, request, lib_key_str, username):
"""
@@ -378,12 +423,12 @@ def delete(self, request, lib_key_str, username):
return Response({})
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryTeamGroupView(APIView):
"""
View to add/remove/edit a group's permissions for a content library.
"""
- @atomic
@convert_exceptions
def put(self, request, lib_key_str, group_name):
"""
@@ -398,7 +443,6 @@ def put(self, request, lib_key_str, group_name):
api.set_library_group_permissions(key, group, access_level=serializer.validated_data["access_level"])
return Response({})
- @atomic
@convert_exceptions
def delete(self, request, lib_key_str, username):
"""
@@ -412,12 +456,12 @@ def delete(self, request, lib_key_str, username):
return Response({})
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryBlockTypesView(APIView):
"""
View to get the list of XBlock types that can be added to this library
"""
- @atomic
@convert_exceptions
def get(self, request, lib_key_str):
"""
@@ -429,90 +473,12 @@ def get(self, request, lib_key_str):
return Response(LibraryXBlockTypeSerializer(result, many=True).data)
-@view_auth_classes()
-class LibraryLinksView(APIView):
- """
- View to get the list of bundles/libraries linked to this content library.
-
- Because every content library is a blockstore bundle, it can have "links" to
- other bundles, which may or may not be content libraries. This allows using
- XBlocks (or perhaps even static assets etc.) from another bundle without
- needing to duplicate/copy the data.
-
- Links always point to a specific published version of the target bundle.
- Links are identified by a slug-like ID, e.g. "link1"
- """
- @atomic
- @convert_exceptions
- def get(self, request, lib_key_str):
- """
- Get the list of bundles that this library links to, if any
- """
- key = LibraryLocatorV2.from_string(lib_key_str)
- api.require_permission_for_library_key(key, request.user, permissions.CAN_VIEW_THIS_CONTENT_LIBRARY)
- result = api.get_bundle_links(key)
- return Response(LibraryBundleLinkSerializer(result, many=True).data)
-
- @atomic
- @convert_exceptions
- def post(self, request, lib_key_str):
- """
- Create a new link in this library.
- """
- key = LibraryLocatorV2.from_string(lib_key_str)
- api.require_permission_for_library_key(key, request.user, permissions.CAN_EDIT_THIS_CONTENT_LIBRARY)
- serializer = LibraryBundleLinkSerializer(data=request.data)
- serializer.is_valid(raise_exception=True)
- target_key = LibraryLocatorV2.from_string(serializer.validated_data['opaque_key'])
- api.create_bundle_link(
- library_key=key,
- link_id=serializer.validated_data['id'],
- target_opaque_key=target_key,
- version=serializer.validated_data['version'], # a number, or None for "use latest version"
- )
- return Response({})
-
-
-@view_auth_classes()
-class LibraryLinkDetailView(APIView):
- """
- View to update/delete an existing library link
- """
- @atomic
- @convert_exceptions
- def patch(self, request, lib_key_str, link_id):
- """
- Update the specified link to point to a different version of its
- target bundle.
-
- Pass e.g. {"version": 40} or pass {"version": None} to update to the
- latest published version.
- """
- key = LibraryLocatorV2.from_string(lib_key_str)
- api.require_permission_for_library_key(key, request.user, permissions.CAN_EDIT_THIS_CONTENT_LIBRARY)
- serializer = LibraryBundleLinkUpdateSerializer(data=request.data)
- serializer.is_valid(raise_exception=True)
- api.update_bundle_link(key, link_id, version=serializer.validated_data['version'])
- return Response({})
-
- @atomic
- @convert_exceptions
- def delete(self, request, lib_key_str, link_id): # pylint: disable=unused-argument
- """
- Delete a link from this library.
- """
- key = LibraryLocatorV2.from_string(lib_key_str)
- api.require_permission_for_library_key(key, request.user, permissions.CAN_EDIT_THIS_CONTENT_LIBRARY)
- api.update_bundle_link(key, link_id, delete=True)
- return Response({})
-
-
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryCommitView(APIView):
"""
Commit/publish or revert all of the draft changes made to the library.
"""
- @atomic
@convert_exceptions
def post(self, request, lib_key_str):
"""
@@ -524,7 +490,6 @@ def post(self, request, lib_key_str):
api.publish_changes(key)
return Response({})
- @atomic
@convert_exceptions
def delete(self, request, lib_key_str): # pylint: disable=unused-argument
"""
@@ -537,12 +502,12 @@ def delete(self, request, lib_key_str): # pylint: disable=unused-argument
return Response({})
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryBlocksView(APIView):
"""
Views to work with XBlocks in a specific content library.
"""
- @atomic
@apidocs.schema(
parameters=[
*LibraryApiPagination.apidoc_params,
@@ -569,19 +534,16 @@ def get(self, request, lib_key_str):
block_types = request.query_params.getlist('block_type') or None
api.require_permission_for_library_key(key, request.user, permissions.CAN_VIEW_THIS_CONTENT_LIBRARY)
- result = api.get_library_blocks(key, text_search=text_search, block_types=block_types)
-
- # Verify `pagination` param to maintain compatibility with older
- # non pagination-aware clients
- if request.GET.get('pagination', 'false').lower() == 'true':
- paginator = LibraryApiPagination()
- result = paginator.paginate_queryset(result, request)
- serializer = LibraryXBlockMetadataSerializer(result, many=True)
- return paginator.get_paginated_response(serializer.data)
+ components = api.get_library_components(key, text_search=text_search, block_types=block_types)
- return Response(LibraryXBlockMetadataSerializer(result, many=True).data)
+ paginator = LibraryApiPagination()
+ paginated_xblock_metadata = [
+ api.LibraryXBlockMetadata.from_component(key, component)
+ for component in paginator.paginate_queryset(components, request)
+ ]
+ serializer = LibraryXBlockMetadataSerializer(paginated_xblock_metadata, many=True)
+ return paginator.get_paginated_response(serializer.data)
- @atomic
@convert_exceptions
def post(self, request, lib_key_str):
"""
@@ -591,30 +553,24 @@ def post(self, request, lib_key_str):
api.require_permission_for_library_key(library_key, request.user, permissions.CAN_EDIT_THIS_CONTENT_LIBRARY)
serializer = LibraryXBlockCreationSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
- parent_block_usage_str = serializer.validated_data.pop("parent_block", None)
- if parent_block_usage_str:
- # Add this as a child of an existing block:
- parent_block_usage = LibraryUsageLocatorV2.from_string(parent_block_usage_str)
- if parent_block_usage.context_key != library_key:
- raise ValidationError(detail={"parent_block": "Usage ID doesn't match library ID in the URL."})
- result = api.create_library_block_child(parent_block_usage, **serializer.validated_data)
- else:
- # Create a new regular top-level block:
- try:
- result = api.create_library_block(library_key, **serializer.validated_data)
- except api.IncompatibleTypesError as err:
- raise ValidationError( # lint-amnesty, pylint: disable=raise-missing-from
- detail={'block_type': str(err)},
- )
+
+ # Create a new regular top-level block:
+ try:
+ result = api.create_library_block(library_key, **serializer.validated_data)
+ except api.IncompatibleTypesError as err:
+ raise ValidationError( # lint-amnesty, pylint: disable=raise-missing-from
+ detail={'block_type': str(err)},
+ )
+
return Response(LibraryXBlockMetadataSerializer(result).data)
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryBlockView(APIView):
"""
Views to work with an existing XBlock in a content library.
"""
- @atomic
@convert_exceptions
def get(self, request, usage_key_str):
"""
@@ -623,9 +579,9 @@ def get(self, request, usage_key_str):
key = LibraryUsageLocatorV2.from_string(usage_key_str)
api.require_permission_for_library_key(key.lib_key, request.user, permissions.CAN_VIEW_THIS_CONTENT_LIBRARY)
result = api.get_library_block(key)
+
return Response(LibraryXBlockMetadataSerializer(result).data)
- @atomic
@convert_exceptions
def delete(self, request, usage_key_str): # pylint: disable=unused-argument
"""
@@ -645,6 +601,7 @@ def delete(self, request, usage_key_str): # pylint: disable=unused-argument
return Response({})
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryBlockLtiUrlView(APIView):
"""
@@ -652,7 +609,6 @@ class LibraryBlockLtiUrlView(APIView):
Returns 404 in case the block not found by the given key.
"""
- @atomic
@convert_exceptions
def get(self, request, usage_key_str):
"""
@@ -667,12 +623,12 @@ def get(self, request, usage_key_str):
return Response({"lti_url": lti_login_url})
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryBlockOlxView(APIView):
"""
Views to work with an existing XBlock's OLX
"""
- @atomic
@convert_exceptions
def get(self, request, usage_key_str):
"""
@@ -680,10 +636,9 @@ def get(self, request, usage_key_str):
"""
key = LibraryUsageLocatorV2.from_string(usage_key_str)
api.require_permission_for_library_key(key.lib_key, request.user, permissions.CAN_VIEW_THIS_CONTENT_LIBRARY)
- xml_str = api.get_library_block_olx(key)
+ xml_str = xblock_api.get_block_draft_olx(key)
return Response(LibraryXBlockOlxSerializer({"olx": xml_str}).data)
- @atomic
@convert_exceptions
def post(self, request, usage_key_str):
"""
@@ -704,12 +659,12 @@ def post(self, request, usage_key_str):
return Response(LibraryXBlockOlxSerializer({"olx": new_olx_str}).data)
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryBlockAssetListView(APIView):
"""
Views to list an existing XBlock's static asset files
"""
- @atomic
@convert_exceptions
def get(self, request, usage_key_str):
"""
@@ -721,6 +676,7 @@ def get(self, request, usage_key_str):
return Response(LibraryXBlockStaticFilesSerializer({"files": files}).data)
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryBlockAssetView(APIView):
"""
@@ -728,7 +684,6 @@ class LibraryBlockAssetView(APIView):
"""
parser_classes = (MultiPartParser, )
- @atomic
@convert_exceptions
def get(self, request, usage_key_str, file_path):
"""
@@ -742,7 +697,6 @@ def get(self, request, usage_key_str, file_path):
return Response(LibraryXBlockStaticFileSerializer(f).data)
raise NotFound
- @atomic
@convert_exceptions
def put(self, request, usage_key_str, file_path):
"""
@@ -765,7 +719,6 @@ def put(self, request, usage_key_str, file_path):
raise ValidationError("Invalid file path") # lint-amnesty, pylint: disable=raise-missing-from
return Response(LibraryXBlockStaticFileSerializer(result).data)
- @atomic
@convert_exceptions
def delete(self, request, usage_key_str, file_path):
"""
@@ -782,13 +735,13 @@ def delete(self, request, usage_key_str, file_path):
return Response(status=status.HTTP_204_NO_CONTENT)
+@method_decorator(non_atomic_requests, name="dispatch")
@view_auth_classes()
class LibraryImportTaskViewSet(ViewSet):
"""
Import blocks from Courseware through modulestore.
"""
- @atomic
@convert_exceptions
def list(self, request, lib_key_str):
"""
@@ -807,7 +760,6 @@ def list(self, request, lib_key_str):
paginator.paginate_queryset(result, request)
)
- @atomic
@convert_exceptions
def create(self, request, lib_key_str):
"""
@@ -828,7 +780,6 @@ def create(self, request, lib_key_str):
import_task = api.import_blocks_create_task(library_key, course_key)
return Response(ContentLibraryBlockImportTaskSerializer(import_task).data)
- @atomic
@convert_exceptions
def retrieve(self, request, lib_key_str, pk=None):
"""
@@ -864,6 +815,7 @@ def wrapped_view(*args, **kwargs):
return wrapped_view
+@method_decorator(non_atomic_requests, name="dispatch")
@method_decorator(requires_lti_enabled, name='dispatch')
class LtiToolView(View):
"""
@@ -880,6 +832,7 @@ def setup(self, request, *args, **kwds):
self.lti_tool_storage = DjangoCacheDataStorage(cache_name='default')
+@method_decorator(non_atomic_requests, name="dispatch")
@method_decorator(csrf_exempt, name='dispatch')
class LtiToolLoginView(LtiToolView):
"""
@@ -913,6 +866,7 @@ def post(self, request):
return HttpResponseBadRequest('Invalid LTI login request.')
+@method_decorator(non_atomic_requests, name="dispatch")
@method_decorator(csrf_exempt, name='dispatch')
@method_decorator(xframe_options_exempt, name='dispatch')
class LtiToolLaunchView(TemplateResponseMixin, LtiToolView):
@@ -1100,6 +1054,7 @@ def handle_ags(self):
resource)
+@method_decorator(non_atomic_requests, name="dispatch")
class LtiToolJwksView(LtiToolView):
"""
JSON Web Key Sets view.
diff --git a/openedx/core/djangoapps/content_tagging/rest_api/v1/tests/test_views.py b/openedx/core/djangoapps/content_tagging/rest_api/v1/tests/test_views.py
index eb5598c96a50..a01981e60008 100644
--- a/openedx/core/djangoapps/content_tagging/rest_api/v1/tests/test_views.py
+++ b/openedx/core/djangoapps/content_tagging/rest_api/v1/tests/test_views.py
@@ -35,7 +35,7 @@
from openedx.core.djangoapps.content_tagging import api as tagging_api
from openedx.core.djangoapps.content_tagging.models import TaxonomyOrg
from openedx.core.djangolib.testing.utils import skip_unless_cms
-from openedx.core.lib import blockstore_api
+
from .test_objecttag_export_helpers import TaggedCourseMixin
@@ -103,9 +103,7 @@ def _setUp_library(self):
"""
Create library for testing
"""
- self.collection = blockstore_api.create_collection("Test library collection")
self.content_libraryA = create_library(
- collection_uuid=self.collection.uuid,
org=self.orgA,
slug="lib_a",
title="Library Org A",
diff --git a/openedx/core/djangoapps/content_tagging/tests/test_tasks.py b/openedx/core/djangoapps/content_tagging/tests/test_tasks.py
index 52a6fcb5b883..5e3a49dc79f9 100644
--- a/openedx/core/djangoapps/content_tagging/tests/test_tasks.py
+++ b/openedx/core/djangoapps/content_tagging/tests/test_tasks.py
@@ -14,7 +14,6 @@
from common.djangoapps.student.tests.factories import UserFactory
from openedx.core.djangolib.testing.utils import skip_unless_cms
from xmodule.modulestore.tests.django_utils import TEST_DATA_SPLIT_MODULESTORE, ModuleStoreTestCase
-from openedx.core.lib.blockstore_api import create_collection
from openedx.core.djangoapps.content_libraries.api import create_library, create_library_block, delete_library_block
from openedx.core.lib.blockstore_api.tests.base import BlockstoreAppTestMixin
@@ -252,10 +251,8 @@ def test_waffle_disabled_create_delete_xblock(self):
assert self._check_tag(usage_key_str, LANGUAGE_TAXONOMY_ID, None)
def test_create_delete_library_block(self):
- # Create collection and library
- collection = create_collection("Test library collection")
+ # Create library
library = create_library(
- collection_uuid=collection.uuid,
org=self.orgA,
slug="lib_a",
title="Library Org A",
@@ -281,10 +278,8 @@ def test_create_delete_library_block(self):
@override_waffle_flag(CONTENT_TAGGING_AUTO, active=False)
def test_waffle_disabled_create_delete_library_block(self):
- # Create collection and library
- collection = create_collection("Test library collection 2")
+ # Create library
library = create_library(
- collection_uuid=collection.uuid,
org=self.orgA,
slug="lib_a2",
title="Library Org A 2",
diff --git a/openedx/core/djangoapps/content_tagging/utils.py b/openedx/core/djangoapps/content_tagging/utils.py
index 3d7c340162da..3005cc7793a6 100644
--- a/openedx/core/djangoapps/content_tagging/utils.py
+++ b/openedx/core/djangoapps/content_tagging/utils.py
@@ -93,7 +93,9 @@ def get_library_orgs(self, user, org_names: list[str]) -> list[Organization]:
if library_orgs is None:
library_orgs = {
library.org.short_name: library.org
- for library in get_libraries_for_user(user).select_related('org').only('org')
+ # Note: We don't actually need .learning_package here, but it's already select_related'ed by
+ # get_libraries_for_user(), so we need to include it in .only() otherwise we get an ORM error.
+ for library in get_libraries_for_user(user).select_related('org').only('org', 'learning_package')
}
self.request_cache.set(cache_key, library_orgs)
diff --git a/openedx/core/djangoapps/xblock/api.py b/openedx/core/djangoapps/xblock/api.py
index 7c24308f6bfe..98ba9e222dd0 100644
--- a/openedx/core/djangoapps/xblock/api.py
+++ b/openedx/core/djangoapps/xblock/api.py
@@ -9,32 +9,39 @@
"""
# pylint: disable=unused-import
+from datetime import datetime
import logging
import threading
from django.urls import reverse
from django.utils.translation import gettext as _
+from openedx_learning.core.components import api as components_api
+from openedx_learning.core.components.models import Component
+from openedx_learning.core.publishing import api as publishing_api
from opaque_keys.edx.keys import UsageKeyV2
-from opaque_keys.edx.locator import BundleDefinitionLocator
+from opaque_keys.edx.locator import BundleDefinitionLocator, LibraryUsageLocatorV2
+
from rest_framework.exceptions import NotFound
from xblock.core import XBlock
from xblock.exceptions import NoSuchViewError
+from xblock.plugin import PluginMissingError
from openedx.core.djangoapps.xblock.apps import get_xblock_app_config
from openedx.core.djangoapps.xblock.learning_context.manager import get_learning_context_impl
-from openedx.core.djangoapps.xblock.runtime.blockstore_runtime import BlockstoreXBlockRuntime, xml_for_definition
+
+from openedx.core.djangoapps.xblock.runtime.learning_core_runtime import (
+ LearningCoreFieldData,
+ LearningCoreXBlockRuntime,
+)
+
+
from openedx.core.djangoapps.xblock.runtime.runtime import XBlockRuntimeSystem as _XBlockRuntimeSystem
-from openedx.core.djangolib.blockstore_cache import BundleCache
from .utils import get_secure_token_for_xblock_handler, get_xblock_id_for_anonymous_user
+from .runtime.learning_core_runtime import LearningCoreXBlockRuntime
+
# Made available as part of this package's public API:
from openedx.core.djangoapps.xblock.learning_context import LearningContext
-from openedx.core.djangoapps.xblock.runtime.olx_parsing import (
- BundleFormatException,
- definition_for_include,
- parse_xblock_include,
- XBlockInclude,
-)
# Implementation:
@@ -43,31 +50,33 @@
def get_runtime_system():
"""
- Get the XBlockRuntimeSystem, which is a single long-lived factory that can
- create user-specific runtimes.
-
- The Runtime System isn't always needed (e.g. for management commands), so to
- keep application startup faster, it's only initialized when first accessed
- via this method.
+ Return a new XBlockRuntimeSystem.
+
+ TODO: Refactor to get rid of the XBlockRuntimeSystem entirely and just
+ create the LearningCoreXBlockRuntime and return it. We used to want to keep
+ around a long lived runtime system (a factory that returns runtimes) for
+ caching purposes, and have it dynamically construct a runtime on request.
+ Now we're just re-constructing both the system and the runtime in this call
+ and returning it every time, because:
+
+ 1. We no longer have slow, Blockstore-style definitions to cache, so the
+ performance of this is perfectly acceptable.
+ 2. Having a singleton increases complexity and the chance of bugs.
+ 3. Creating the XBlockRuntimeSystem every time only takes about 10-30 µs.
+
+ Given that, the extra XBlockRuntimeSystem class just adds confusion. But
+ despite that, it's tested, working code, and so I'm putting off refactoring
+ for now.
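+
+    Typical usage (this is essentially what ``load_block()`` below does)::
+
+        runtime = get_runtime_system().get_runtime(user=user)
+        block = runtime.get_block(usage_key)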
"""
- # The runtime system should not be shared among threads, as there is currently a race condition when parsing XML
- # that can lead to duplicate children.
- # (In BlockstoreXBlockRuntime.get_block(), has_cached_definition(def_id) returns false so parse_xml is called, but
- # meanwhile another thread parses the XML and caches the definition; then when parse_xml gets to XML nodes for
- # child blocks, it appends them to the children already cached by the other thread and saves the doubled list of
- # children; this happens only occasionally but is very difficult to avoid in a clean way due to the API of parse_xml
- # and XBlock field data in general [does not distinguish between setting initial values during parsing and changing
- # values at runtime due to user interaction], and how it interacts with BlockstoreFieldData. Keeping the caches
- # local to each thread completely avoids this problem.)
- cache_name = f'_system_{threading.get_ident()}'
- if not hasattr(get_runtime_system, cache_name):
- params = dict(
- handler_url=get_handler_url,
- runtime_class=BlockstoreXBlockRuntime,
- )
- params.update(get_xblock_app_config().get_runtime_system_params())
- setattr(get_runtime_system, cache_name, _XBlockRuntimeSystem(**params))
- return getattr(get_runtime_system, cache_name)
+ params = get_xblock_app_config().get_runtime_system_params()
+ params.update(
+ runtime_class=LearningCoreXBlockRuntime,
+ handler_url=get_handler_url,
+ authored_data_store=LearningCoreFieldData(),
+ )
+ runtime = _XBlockRuntimeSystem(**params)
+
+ return runtime
def load_block(usage_key, user):
@@ -87,6 +96,7 @@ def load_block(usage_key, user):
# Is this block part of a course, a library, or what?
# Get the Learning Context Implementation based on the usage key
context_impl = get_learning_context_impl(usage_key)
+
# Now, check if the block exists in this context and if the user has
# permission to render this XBlock view:
if user is not None and not context_impl.can_view_block(user, usage_key):
@@ -98,7 +108,6 @@ def load_block(usage_key, user):
# e.g. a course might specify that all 'problem' XBlocks have 'max_attempts'
# set to 3.
# field_overrides = context_impl.get_field_overrides(usage_key)
-
runtime = get_runtime_system().get_runtime(user=user)
return runtime.get_block(usage_key)
@@ -146,65 +155,68 @@ def get_block_metadata(block, includes=()):
return data
-def resolve_definition(block_or_key):
- """
- Given an XBlock, definition key, or usage key, return the definition key.
- """
- if isinstance(block_or_key, BundleDefinitionLocator):
- return block_or_key
- elif isinstance(block_or_key, UsageKeyV2):
- context_impl = get_learning_context_impl(block_or_key)
- return context_impl.definition_for_usage(block_or_key)
- elif isinstance(block_or_key, XBlock):
- return block_or_key.scope_ids.def_id
- else:
- raise TypeError(block_or_key)
-
-
def xblock_type_display_name(block_type):
"""
Get the display name for the specified XBlock class.
"""
- block_class = XBlock.load_class(block_type)
+ try:
+ # We want to be able to give *some* value, even if the XBlock is later
+ # uninstalled.
+ block_class = XBlock.load_class(block_type)
+ except PluginMissingError:
+ return block_type
+
if hasattr(block_class, 'display_name') and block_class.display_name.default:
return _(block_class.display_name.default) # pylint: disable=translation-of-non-string
else:
return block_type # Just use the block type as the name
-def get_block_display_name(block_or_key):
+def get_block_display_name(block: XBlock) -> str:
+ """
+    Get the display name from an instantiated XBlock, falling back to the XBlock type's default.
"""
- Efficiently get the display name of the specified block. This is done in a
- way that avoids having to load and parse the block's entire XML field data
- using its parse_xml() method, which may be very expensive (e.g. the video
- XBlock parse_xml leads to various slow edxval API calls in some cases).
+ display_name = getattr(block, "display_name", None)
+ if display_name is not None:
+ return display_name
+ else:
+ return xblock_type_display_name(block.block_type)
- This method also defines and implements various fallback mechanisms in case
- the ID can't be loaded.
- block_or_key can be an XBlock instance, a usage key or a definition key.
+def get_component_from_usage_key(usage_key: UsageKeyV2) -> Component:
+ """
+ Fetch the Component object for a given usage key.
+
+    Raises an ObjectDoesNotExist error if no such Component exists.
- Returns the display name as a string
+ This is a lower-level function that will return a Component even if there is
+ no current draft version of that Component (because it's been soft-deleted).
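+
+    Illustrative usage only; handling the missing-Component case is left to
+    the caller (``usage_key`` here is assumed to be a LibraryUsageLocatorV2)::
+
+        from django.core.exceptions import ObjectDoesNotExist
+
+        try:
+            component = get_component_from_usage_key(usage_key)
+        except ObjectDoesNotExist:
+            ...  # e.g. translate into a 404 for REST callers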
+ """
+ learning_package = publishing_api.get_learning_package_by_key(
+ str(usage_key.context_key)
+ )
+ return components_api.get_component_by_key(
+ learning_package.id,
+ namespace='xblock.v1',
+ type_name=usage_key.block_type,
+ local_key=usage_key.block_id,
+ )
+
+
+def get_block_draft_olx(usage_key: UsageKeyV2) -> str:
"""
- def_key = resolve_definition(block_or_key)
- use_draft = get_xblock_app_config().get_learning_context_params().get('use_draft')
- cache = BundleCache(def_key.bundle_uuid, draft_name=use_draft)
- cache_key = ('block_display_name', str(def_key))
- display_name = cache.get(cache_key)
- if display_name is None:
- # Instead of loading the block, just load its XML and parse it
- try:
- olx_node = xml_for_definition(def_key)
- except Exception: # pylint: disable=broad-except
- log.exception("Error when trying to get display_name for block definition %s", def_key)
- # Return now so we don't cache the error result
- return xblock_type_display_name(def_key.block_type)
- try:
- display_name = olx_node.attrib['display_name']
- except KeyError:
- display_name = xblock_type_display_name(def_key.block_type)
- cache.set(cache_key, display_name)
- return display_name
+ Get the OLX source of the draft version of the given Learning-Core-backed XBlock.
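+
+    Illustrative usage (the usage key string below is a made-up example)::
+
+        key = LibraryUsageLocatorV2.from_string("lb:CL-TEST:lib:problem:problem1")
+        olx = get_block_draft_olx(key)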
+ """
+ # Inefficient but simple approach. Optimize later if needed.
+ component = get_component_from_usage_key(usage_key)
+ component_version = component.versioning.draft
+
+ # TODO: we should probably make a method on ComponentVersion that returns
+ # a content based on the name. Accessing by componentversioncontent__key is
+ # awkward.
+ content = component_version.contents.get(componentversioncontent__key="block.xml")
+
+ return content.text
def render_block_view(block, view_name, user): # pylint: disable=unused-argument
diff --git a/openedx/core/djangoapps/xblock/learning_context/learning_context.py b/openedx/core/djangoapps/xblock/learning_context/learning_context.py
index ed4f8d12a3a0..72a6a7645e9f 100644
--- a/openedx/core/djangoapps/xblock/learning_context/learning_context.py
+++ b/openedx/core/djangoapps/xblock/learning_context/learning_context.py
@@ -65,36 +65,3 @@ def definition_for_usage(self, usage_key, **kwargs):
context, or None otherwise.
"""
raise NotImplementedError
-
- def usage_for_child_include(self, parent_usage, parent_definition, parsed_include):
- """
- Method that the runtime uses when loading a block's child, to get the
- ID of the child. Must return a usage key.
-
    - The child is always from an <xblock-include /> element.
-
- parent_usage: the UsageKeyV2 subclass key of the parent
-
- parent_definition: the BundleDefinitionLocator key of the parent (same
- as returned by definition_for_usage(parent_usage) but included here
- as an optimization since it's already known.)
-
- parsed_include: the XBlockInclude tuple containing the data from the
    - parsed <xblock-include /> element. See xblock.runtime.olx_parsing.
-
- Must return a UsageKeyV2 subclass
- """
- raise NotImplementedError
-
- # Future functionality:
- # def get_field_overrides(self, user, usage_key):
- # """
- # Each learning context may have a way for authors to specify field
- # overrides that apply to XBlocks in the context.
-
- # For example, courses might allow an instructor to specify that all
- # 'problem' blocks in her course have 'num_attempts' set to '5',
- # regardless of the 'num_attempts' value in the underlying problem XBlock
- # definitions.
- # """
- # raise NotImplementedError
diff --git a/openedx/core/djangoapps/xblock/rest_api/views.py b/openedx/core/djangoapps/xblock/rest_api/views.py
index 4af890bb4323..3722d9d8ab15 100644
--- a/openedx/core/djangoapps/xblock/rest_api/views.py
+++ b/openedx/core/djangoapps/xblock/rest_api/views.py
@@ -16,14 +16,15 @@
from rest_framework.response import Response
from rest_framework.views import APIView
from xblock.django.request import DjangoWebobRequest, webob_to_django_response
+from xblock.exceptions import NoSuchUsage
from xblock.fields import Scope
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import UsageKey
from openedx.core.lib.api.view_utils import view_auth_classes
from ..api import (
- get_block_display_name,
get_block_metadata,
+ get_block_display_name,
get_handler_url as _get_handler_url,
load_block,
render_block_view as _render_block_view,
@@ -74,7 +75,11 @@ def render_block_view(request, usage_key_str, view_name):
except InvalidKeyError as e:
raise NotFound(invalid_not_found_fmt.format(usage_key=usage_key_str)) from e
- block = load_block(usage_key, request.user)
+ try:
+ block = load_block(usage_key, request.user)
+ except NoSuchUsage as exc:
+ raise NotFound(f"{usage_key} not found") from exc
+
fragment = _render_block_view(block, view_name, request.user)
response_data = get_block_metadata(block)
response_data.update(fragment.to_dict())
diff --git a/openedx/core/djangoapps/xblock/runtime/blockstore_field_data.py b/openedx/core/djangoapps/xblock/runtime/blockstore_field_data.py
deleted file mode 100644
index ac17a9550b0d..000000000000
--- a/openedx/core/djangoapps/xblock/runtime/blockstore_field_data.py
+++ /dev/null
@@ -1,352 +0,0 @@
-"""
-Key-value store that holds XBlock field data read out of Blockstore
-"""
-
-from collections import namedtuple
-from weakref import WeakKeyDictionary
-import logging
-
-from xblock.exceptions import InvalidScopeError, NoSuchDefinition
-from xblock.fields import Field, BlockScope, Scope, UserScope, Sentinel
-from xblock.field_data import FieldData
-
-from openedx.core.djangoapps.xblock.learning_context.manager import get_learning_context_impl
-from openedx.core.djangolib.blockstore_cache import (
- get_bundle_version_files_cached,
- get_bundle_draft_files_cached,
-)
-
-log = logging.getLogger(__name__)
-
-ActiveBlock = namedtuple('ActiveBlock', ['olx_hash', 'changed_fields'])
-
-DELETED = Sentinel('DELETED') # Special value indicating a field was reset to its default value
-CHILDREN_INCLUDES = Sentinel('CHILDREN_INCLUDES') # Key for a pseudo-field that stores the XBlock's children info
-
-MAX_DEFINITIONS_LOADED = 100 # How many of the most recently used XBlocks' field data to keep in memory at max.
-
-
-class BlockInstanceUniqueKey:
- """
- An empty object used as a unique key for each XBlock instance, see
- get_weak_key_for_block() and BlockstoreFieldData._get_active_block(). Every
- XBlock instance will get a unique one of these keys, even if they are
- otherwise identical. Its purpose is similar to `id(block)`.
- """
-
-
-def get_weak_key_for_block(block):
- """
- Given an XBlock instance, return an object with the same lifetime as the
- block, suitable as a key to hold block-specific data in a WeakKeyDictionary.
- """
- # We would like to make the XBlock instance 'block' itself the key of
- # BlockstoreFieldData.active_blocks, so that we have exactly one entry per
- # XBlock instance in memory, and they'll each be automatically freed by the
- # WeakKeyDictionary as needed. But because XModules implement
- # __eq__() in a way that reads all field values, just attempting to use
- # the block as a dict key here will trigger infinite recursion. So
- # instead we key the dict on an arbitrary object,
- # block key = BlockInstanceUniqueKey() which we create here. That way
- # the weak reference will still cause the entry in the WeakKeyDictionary to
- # be freed automatically when the block is no longer needed, and we
- # still get one entry per XBlock instance.
- if not hasattr(block, '_field_data_key_obj'):
- block._field_data_key_obj = BlockInstanceUniqueKey() # pylint: disable=protected-access
- return block._field_data_key_obj # pylint: disable=protected-access
-
-
-def get_olx_hash_for_definition_key(def_key):
- """
- Given a BundleDefinitionLocator, which identifies a specific version of an
- OLX file, return the hash of the OLX file as given by the Blockstore API.
- """
- if def_key.bundle_version:
- # This is referring to an immutable file (BundleVersions are immutable so this can be aggressively cached)
- files_list = get_bundle_version_files_cached(def_key.bundle_uuid, def_key.bundle_version)
- else:
- # This is referring to a draft OLX file which may be recently updated:
- files_list = get_bundle_draft_files_cached(def_key.bundle_uuid, def_key.draft_name)
- for entry in files_list:
- if entry.path == def_key.olx_path:
- return entry.hash_digest
- raise NoSuchDefinition(f"Could not load OLX file for key {def_key}")
-
-
-class BlockstoreFieldData(FieldData):
- """
- An XBlock FieldData implementation that reads XBlock field data directly out
- of Blockstore.
-
- It requires that every XBlock have a BundleDefinitionLocator as its
- "definition key", since the BundleDefinitionLocator is what specifies the
- OLX file path and version to use.
-
- Within Blockstore there is no mechanism for setting different field values
- at the usage level compared to the definition level, so we treat
- usage-scoped fields identically to definition-scoped fields.
- """
- def __init__(self):
- """
- Initialize this BlockstoreFieldData instance.
- """
- # loaded definitions: a dict where the key is the hash of the XBlock's
- # olx file (as stated by the Blockstore API), and the values is the
- # dict of field data as loaded from that OLX file. The field data dicts
- # in this should be considered immutable, and never modified.
- self.loaded_definitions = {}
- # Active blocks: this holds the field data *changes* for all the XBlocks
- # that are currently in memory being used for something. We only keep a
- # weak reference so that the memory will be freed when the XBlock is no
- # longer needed (e.g. at the end of a request)
- # The key of this dictionary is on ID object owned by the XBlock itself
- # (see _get_active_block()) and the value is an ActiveBlock object
- # (which holds olx_hash and changed_fields)
- self.active_blocks = WeakKeyDictionary()
- super().__init__()
-
- def _getfield(self, block, name):
- """
- Return the field with the given `name` from `block`.
- If the XBlock doesn't have such a field, raises a KeyError.
- """
- # First, get the field from the class, if defined
- block_field = getattr(block.__class__, name, None)
- if block_field is not None and isinstance(block_field, Field):
- return block_field
- # Not in the class, so name really doesn't name a field
- raise KeyError(name)
-
- def _check_field(self, block, name):
- """
- Given a block and the name of one of its fields, check that we will be
- able to read/write it.
- """
- if name == CHILDREN_INCLUDES:
- return # This is a pseudo-field used in conjunction with BlockstoreChildrenData
- field = self._getfield(block, name)
- if field.scope in (Scope.children, Scope.parent): # lint-amnesty, pylint: disable=no-else-raise
- # This field data store is focused on definition-level field data, and children/parent is mostly
- # relevant at the usage level. Scope.parent doesn't even seem to be used?
- raise NotImplementedError("Setting Scope.children/parent is not supported by BlockstoreFieldData.")
- else:
- if field.scope.user != UserScope.NONE:
- raise InvalidScopeError("BlockstoreFieldData only supports UserScope.NONE fields")
- if field.scope.block not in (BlockScope.DEFINITION, BlockScope.USAGE):
- raise InvalidScopeError(
- f"BlockstoreFieldData does not support BlockScope.{field.scope.block} fields"
- )
- # There is also BlockScope.TYPE but we don't need to support that;
- # it's mostly relevant as Scope.preferences(UserScope.ONE, BlockScope.TYPE)
- # Which would be handled by a user-aware FieldData implementation
-
- def _get_active_block(self, block):
- """
- Get the ActiveBlock entry for the specified block, creating it if
- necessary.
- """
- key = get_weak_key_for_block(block)
- if key not in self.active_blocks:
- self.active_blocks[key] = ActiveBlock(
- olx_hash=get_olx_hash_for_definition_key(block.scope_ids.def_id),
- changed_fields={},
- )
- return self.active_blocks[key]
-
- def get(self, block, name):
- """
- Get the given field value from Blockstore
-
- If the XBlock has been making changes to its fields, the value will be
- in self._get_active_block(block).changed_fields[name]
-
- Otherwise, the value comes from self.loaded_definitions which is a dict
- of OLX file field data, keyed by the hash of the OLX file.
- """
- self._check_field(block, name)
- entry = self._get_active_block(block)
- if name in entry.changed_fields:
- value = entry.changed_fields[name]
- if value == DELETED:
- raise KeyError # KeyError means use the default value, since this field was deliberately set to default
- return value
- try:
- saved_fields = self.loaded_definitions[entry.olx_hash]
- except KeyError:
- if name == CHILDREN_INCLUDES:
- # Special case: parse_xml calls add_node_as_child which calls 'block.children.append()'
- # BEFORE parse_xml is done, and .append() needs to read the value of children. So
- return [] # start with an empty list, it will get filled in.
- # Otherwise, this is an anomalous get() before the XML was fully loaded:
- # This could happen if an XBlock's parse_xml() method tried to read a field before setting it,
- # if an XBlock read field data in its constructor (forbidden), or if an XBlock was loaded via
- # some means other than runtime.get_block(). One way this can happen is if you log/print an XBlock during
- # XML parsing, because ScopedStorageMixin.__repr__ will try to print all field values, and any fields which
- # aren't mentioned in the XML (which are left at their default) will be "not loaded yet."
- log.exception(
- "XBlock %s tried to read from field data (%s) that wasn't loaded from Blockstore!",
- block.scope_ids.usage_id, name,
- )
- raise # Just use the default value for now; any exception raised here is caught anyways
- return saved_fields[name]
- # If 'name' is not found, this will raise KeyError, which means to use the default value
-
- def set(self, block, name, value):
- """
- Set the value of the field named `name`
- """
- entry = self._get_active_block(block)
- entry.changed_fields[name] = value
-
- def delete(self, block, name):
- """
- Reset the value of the field named `name` to the default
- """
- self.set(block, name, DELETED)
-
- def default(self, block, name):
- """
- Get the default value for block's field 'name'.
- The XBlock class will provide the default if KeyError is raised; this is
- mostly for the purpose of context-specific overrides.
- """
- raise KeyError(name)
-
- def cache_fields(self, block):
- """
- Cache field data:
- This is called by the runtime after a block has parsed its OLX via its
- parse_xml() methods and written all of its field values into this field
- data store. The values will be stored in
- self._get_active_block(block).changed_fields
- so we know at this point that that isn't really "changed" field data,
- it's the result of parsing the OLX. Save a copy into loaded_definitions.
- """
- entry = self._get_active_block(block)
- self.loaded_definitions[entry.olx_hash] = entry.changed_fields.copy()
- # Reset changed_fields to indicate this block hasn't actually made any field data changes, just loaded from XML:
- entry.changed_fields.clear()
-
- if len(self.loaded_definitions) > MAX_DEFINITIONS_LOADED:
- self.free_unused_definitions()
-
- def has_changes(self, block):
- """
- Does the specified block have any unsaved changes?
- """
- entry = self._get_active_block(block)
- return bool(entry.changed_fields)
-
- def has_cached_definition(self, definition_key):
- """
- Has the specified OLX file been loaded into memory?
- """
- olx_hash = get_olx_hash_for_definition_key(definition_key)
- return olx_hash in self.loaded_definitions
-
- def free_unused_definitions(self):
- """
- Free unused field data cache entries from self.loaded_definitions
- as long as they're not in use.
- """
- olx_hashes = set(self.loaded_definitions.keys())
- olx_hashes_needed = {entry.olx_hash for entry in self.active_blocks.values()}
-
- olx_hashes_safe_to_delete = olx_hashes - olx_hashes_needed
-
- # To avoid doing this too often, randomly cull unused entries until
- # we have only half as many as MAX_DEFINITIONS_LOADED in memory, if possible.
- while olx_hashes_safe_to_delete and (len(self.loaded_definitions) > MAX_DEFINITIONS_LOADED / 2):
- del self.loaded_definitions[olx_hashes_safe_to_delete.pop()]
-
-
-class BlockstoreChildrenData(FieldData):
- """
- An XBlock FieldData implementation that reads 'children' data out of
- the definition fields in BlockstoreFieldData.
-
- The children field contains usage keys and so is usage-specific; the
- BlockstoreFieldData can only store field data that is not usage-specific. So
- we store data about the <xblock-include /> elements that define the children
- in BlockstoreFieldData (since that is not usage-specific), and this field
- data implementation loads that data and transforms it
- into the usage keys that comprise the standard .children field.
- """
- def __init__(self, blockstore_field_data):
- """
- Initialize this BlockstoreChildrenData instance.
- """
- # The data store that holds Scope.usage and Scope.definition data:
- self.authored_data_store = blockstore_field_data
- super().__init__()
-
- def _check_field(self, block, name): # pylint: disable=unused-argument
- """
- Given a block and the name of one of its fields, check that we will be
- able to read/write it.
- """
- if name != 'children':
- raise InvalidScopeError("BlockstoreChildrenData can only read/write from a field named 'children'")
-
- def get(self, block, name):
- """
- Get the 'children' field value.
-
- We do this by reading the parsed values from
- the regular authored data store and then transforming them to usage IDs.
- """
- self._check_field(block, name)
- children_includes = self.get_includes(block)
- if not children_includes:
- return []
- # Now the .children field is required to be a list of usage IDs:
- learning_context = get_learning_context_impl(block.scope_ids.usage_id)
- child_usages = []
- for parsed_include in children_includes:
- child_usages.append(
- learning_context.usage_for_child_include(
- block.scope_ids.usage_id, block.scope_ids.def_id, parsed_include,
- )
- )
- return child_usages
-
- def set(self, block, name, value):
- """
- Set the value of the field; requires name='children'
- """
- self._check_field(block, name)
- children_includes = self.authored_data_store.get(block, CHILDREN_INCLUDES)
- if len(value) != len(children_includes):
- raise RuntimeError(
- "This runtime does not allow changing .children directly - use runtime.add_child_include instead."
- )
- # This is a no-op; the value of 'children' is derived from CHILDREN_INCLUDES
- # so we never write to the children field directly. All we do is make sure it
- # looks like it's still in sync with CHILDREN_INCLUDES
-
- def get_includes(self, block):
- """
- Get the list of <xblock-include /> elements representing this XBlock's
- children.
- """
- try:
- return self.authored_data_store.get(block, CHILDREN_INCLUDES)
- except KeyError:
- # KeyError raised by an XBlock field data store means "use the
- # default value", and the default value for the children field is an
- # empty list.
- return []
-
- def append_include(self, block, parsed_include):
- """
- Append an <xblock-include /> element to this XBlock's list of children
- """
- self.authored_data_store.set(block, CHILDREN_INCLUDES, self.get_includes(block) + [parsed_include])
-
- def delete(self, block, name):
- """
- Reset the value of the field named `name` to the default
- """
- self._check_field(block, name)
- self.authored_data_store.set(block, CHILDREN_INCLUDES, [])
- self.set(block, name, [])
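
The comment removed near the top of this file documents a subtle pattern: per-block state is keyed on a sentinel object owned by the block rather than on the block itself, because XModule-style __eq__ implementations read field data and would recurse. A minimal standalone sketch of that pattern; the names mirror the removed code, but nothing below is part of this diff:

    from weakref import WeakKeyDictionary

    class BlockInstanceUniqueKey:
        """An arbitrary, weak-referenceable object; one per block instance."""

    active_blocks = WeakKeyDictionary()  # sentinel key -> per-block state

    def state_for(block):
        # Key the mapping on a sentinel the block owns instead of the block itself,
        # so the block's own __eq__/__hash__ (which may read field data) is never
        # invoked, yet the entry is still freed when the block is garbage collected.
        if not hasattr(block, "_field_data_key_obj"):
            block._field_data_key_obj = BlockInstanceUniqueKey()
        return active_blocks.setdefault(block._field_data_key_obj, {"changed_fields": {}})
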
diff --git a/openedx/core/djangoapps/xblock/runtime/blockstore_runtime.py b/openedx/core/djangoapps/xblock/runtime/blockstore_runtime.py
deleted file mode 100644
index 33ea8514b632..000000000000
--- a/openedx/core/djangoapps/xblock/runtime/blockstore_runtime.py
+++ /dev/null
@@ -1,202 +0,0 @@
-"""
-A runtime designed to work with Blockstore, reading and writing
-XBlock field data directly from Blockstore.
-"""
-
-import logging
-import os.path
-
-from lxml import etree
-from opaque_keys.edx.locator import BundleDefinitionLocator
-from xblock.exceptions import NoSuchDefinition, NoSuchUsage
-from xblock.fields import ScopeIds
-
-from openedx.core.djangoapps.xblock.learning_context.manager import get_learning_context_impl
-from openedx.core.djangoapps.xblock.runtime.runtime import XBlockRuntime
-from openedx.core.djangoapps.xblock.runtime.olx_parsing import parse_xblock_include, BundleFormatException
-from openedx.core.djangolib.blockstore_cache import (
- BundleCache,
- get_bundle_file_data_with_cache,
- get_bundle_file_metadata_with_cache,
-)
-from openedx.core.lib import blockstore_api
-from openedx.core.lib.xblock_serializer.api import serialize_modulestore_block_for_blockstore
-
-log = logging.getLogger(__name__)
-
-
-class BlockstoreXBlockRuntime(XBlockRuntime):
- """
- A runtime designed to work with Blockstore, reading and writing
- XBlock field data directly from Blockstore.
- """
- def parse_xml_file(self, fileobj, id_generator=None):
- raise NotImplementedError("Use parse_olx_file() instead")
-
- def get_block(self, usage_id, for_parent=None):
- """
- Create an XBlock instance in this runtime.
-
- Args:
- usage_key(OpaqueKey): identifier used to find the XBlock class and data.
- """
- def_id = self.id_reader.get_definition_id(usage_id)
- if def_id is None:
- raise ValueError(f"Definition not found for usage {usage_id}")
- if not isinstance(def_id, BundleDefinitionLocator):
- raise TypeError("This runtime can only load blocks stored in Blockstore bundles.")
- try:
- block_type = self.id_reader.get_block_type(def_id)
- except NoSuchDefinition:
- raise NoSuchUsage(repr(usage_id)) # lint-amnesty, pylint: disable=raise-missing-from
- keys = ScopeIds(self.user_id, block_type, def_id, usage_id)
-
- if self.system.authored_data_store.has_cached_definition(def_id):
- return self.construct_xblock(block_type, keys, for_parent=for_parent)
- else:
- # We need to load this block's field data from its OLX file in blockstore:
- xml_node = xml_for_definition(def_id)
- if xml_node.get("url_name", None):
- log.warning("XBlock at %s should not specify an old-style url_name attribute.", def_id.olx_path)
- block_class = self.mixologist.mix(self.load_block_type(block_type))
- if hasattr(block_class, 'parse_xml_new_runtime'):
- # This is a (former) XModule with messy XML parsing code; let its parse_xml() method continue to work
- # as it currently does in the old runtime, but let this parse_xml_new_runtime() method parse the XML in
- # a simpler way that's free of tech debt, if defined.
- # In particular, XmlMixin doesn't play well with this new runtime, so this is mostly about
- # bypassing that mixin's code.
- # When a former XModule no longer needs to support the old runtime, its parse_xml_new_runtime method
- # should be removed and its parse_xml() method should be simplified to just call the super().parse_xml()
- # plus some minor additional lines of code as needed.
- block = block_class.parse_xml_new_runtime(xml_node, runtime=self, keys=keys)
- else:
- block = block_class.parse_xml(xml_node, runtime=self, keys=keys, id_generator=None)
- # Update field data with parsed values. We can't call .save() because it will call save_block(), below.
- block.force_save_fields(block._get_fields_to_save()) # pylint: disable=protected-access
- self.system.authored_data_store.cache_fields(block)
- # There is no way to set the parent via parse_xml, so do what
- # HierarchyMixin would do:
- if for_parent is not None:
- block._parent_block = for_parent # pylint: disable=protected-access
- block._parent_block_id = for_parent.scope_ids.usage_id # pylint: disable=protected-access
- return block
-
- def add_node_as_child(self, block, node, id_generator=None):
- """
- This runtime API should normally be used via
- runtime.get_block() -> block.parse_xml() -> runtime.add_node_as_child
- """
- try:
- parsed_include = parse_xblock_include(node)
- except BundleFormatException:
- # We need to log the XBlock ID or this will be hard to debug
- log.error("BundleFormatException when parsing XBlock %s", block.scope_ids.usage_id)
- raise # Also log details and stack trace
- self.add_child_include(block, parsed_include)
-
- def add_child_include(self, block, parsed_include):
- """
- Given an XBlockInclude tuple that represents a new <xblock-include />
- node, add it as a child of the specified XBlock. This is the only
- supported API for adding a new child to an XBlock - one cannot just
- modify block.children to append a usage ID, since that doesn't provide
- enough information to serialize the block's <xblock-include /> elements.
- """
- self.system.children_data_store.append_include(block, parsed_include)
- block.children = self.system.children_data_store.get(block, 'children')
-
- def child_includes_of(self, block):
- """
- Get the list of directives that define the children
- of this block's definition.
- """
- return self.system.children_data_store.get_includes(block)
-
- def save_block(self, block):
- """
- Save any pending field data values to Blockstore.
-
- This gets called by block.save() - do not call this directly.
- """
- if not self.system.authored_data_store.has_changes(block):
- return # No changes, so no action needed.
- definition_key = block.scope_ids.def_id
- if definition_key.draft_name is None:
- raise RuntimeError(
- "The Blockstore runtime does not support saving changes to blockstore without a draft. "
- "Are you making changes to UserScope.NONE fields from the LMS rather than Studio?"
- )
- # Verify that the user has permission to write to authored data in this
- # learning context:
- if self.user is not None:
- learning_context = get_learning_context_impl(block.scope_ids.usage_id)
- if not learning_context.can_edit_block(self.user, block.scope_ids.usage_id):
- log.warning("User %s does not have permission to edit %s", self.user.username, block.scope_ids.usage_id)
- raise RuntimeError("You do not have permission to edit this XBlock")
- serialized = serialize_modulestore_block_for_blockstore(block)
- olx_str = serialized.olx_str
- static_files = serialized.static_files
- # Write the OLX file to the bundle:
- draft_uuid = blockstore_api.get_or_create_bundle_draft(
- definition_key.bundle_uuid, definition_key.draft_name
- ).uuid
- olx_path = definition_key.olx_path
- blockstore_api.write_draft_file(draft_uuid, olx_path, olx_str)
- # And the other files, if any:
- olx_static_path = os.path.dirname(olx_path) + '/static/'
- for fh in static_files:
- new_path = olx_static_path + fh.name
- blockstore_api.write_draft_file(draft_uuid, new_path, fh.data)
- # Now invalidate the blockstore data cache for the bundle:
- BundleCache(definition_key.bundle_uuid, draft_name=definition_key.draft_name).clear()
-
- def _lookup_asset_url(self, block, asset_path):
- """
- Return an absolute URL for the specified static asset file that may
- belong to this XBlock.
-
- e.g. if the XBlock settings have a field value like "/static/foo.png"
- then this method will be called with asset_path="foo.png" and should
- return a URL like https://cdn.none/xblock/f843u89789/static/foo.png
-
- If the asset file is not recognized, return None
- """
- if '..' in asset_path:
- return None # Illegal path
- definition_key = block.scope_ids.def_id
- # Compute the full path to the static file in the bundle,
- # e.g. "problem/prob1/static/illustration.svg"
- expanded_path = os.path.dirname(definition_key.olx_path) + '/static/' + asset_path
- try:
- metadata = get_bundle_file_metadata_with_cache(
- bundle_uuid=definition_key.bundle_uuid,
- path=expanded_path,
- bundle_version=definition_key.bundle_version,
- draft_name=definition_key.draft_name,
- )
- except blockstore_api.BundleFileNotFound:
- log.warning("XBlock static file not found: %s for %s", asset_path, block.scope_ids.usage_id)
- return None
- # Make sure the URL is one that will work from the user's browser,
- # not one that only works from within a docker container:
- url = blockstore_api.force_browser_url(metadata.url)
- return url
-
-
-def xml_for_definition(definition_key):
- """
- Method for loading OLX from Blockstore and parsing it to an etree.
- """
- try:
- xml_str = get_bundle_file_data_with_cache(
- bundle_uuid=definition_key.bundle_uuid,
- path=definition_key.olx_path,
- bundle_version=definition_key.bundle_version,
- draft_name=definition_key.draft_name,
- )
- except blockstore_api.BundleFileNotFound:
- raise NoSuchDefinition("OLX file {} not found in bundle {}.".format( # lint-amnesty, pylint: disable=raise-missing-from
- definition_key.olx_path, definition_key.bundle_uuid,
- ))
- node = etree.fromstring(xml_str)
- return node
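
The docstrings above make add_child_include the only supported way to attach a child in this (removed) runtime, since a bare usage ID cannot be serialized back to an <xblock-include /> element. A hedged usage sketch; `runtime` and `parent_block` are assumed to be an existing BlockstoreXBlockRuntime and an already-loaded block:

    from lxml import etree
    from openedx.core.djangoapps.xblock.runtime.olx_parsing import parse_xblock_include

    node = etree.fromstring('<xblock-include definition="html/intro" usage="intro1"/>')
    parsed = parse_xblock_include(node)  # -> XBlockInclude(link_id=None, block_type='html', ...)

    # What runtime.add_node_as_child(parent_block, node) did during OLX parsing:
    #   runtime.add_child_include(parent_block, parsed)
    #   parent_block.children now ends with the usage key derived from `parsed`
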
diff --git a/openedx/core/djangoapps/xblock/runtime/id_managers.py b/openedx/core/djangoapps/xblock/runtime/id_managers.py
index 53891c4fa9ae..214a901b3b51 100644
--- a/openedx/core/djangoapps/xblock/runtime/id_managers.py
+++ b/openedx/core/djangoapps/xblock/runtime/id_managers.py
@@ -3,12 +3,9 @@
our newer Open edX-specific opaque key formats.
"""
-
from opaque_keys.edx.keys import UsageKeyV2
from xblock.runtime import IdReader
-from openedx.core.djangoapps.xblock.learning_context.manager import get_learning_context_impl
-
class OpaqueKeyReader(IdReader):
"""
@@ -24,7 +21,7 @@ def get_definition_id(self, usage_id):
The `definition_id` the usage is derived from
"""
if isinstance(usage_id, UsageKeyV2):
- return get_learning_context_impl(usage_id).definition_for_usage(usage_id)
+ return None
raise TypeError("This version of get_definition_id doesn't support the given key type.")
def get_block_type(self, def_id):
diff --git a/openedx/core/djangoapps/xblock/runtime/learning_core_runtime.py b/openedx/core/djangoapps/xblock/runtime/learning_core_runtime.py
new file mode 100644
index 000000000000..f476e36c7f8e
--- /dev/null
+++ b/openedx/core/djangoapps/xblock/runtime/learning_core_runtime.py
@@ -0,0 +1,302 @@
+"""
+Learning Core XBlock Runtime code
+"""
+from __future__ import annotations
+
+import logging
+from collections import defaultdict
+from datetime import datetime, timezone
+
+from django.core.exceptions import ObjectDoesNotExist
+from django.db.transaction import atomic
+
+from openedx_learning.core.components import api as components_api
+from openedx_learning.core.contents import api as contents_api
+from openedx_learning.core.publishing import api as publishing_api
+
+from lxml import etree
+
+from xblock.core import XBlock
+from xblock.exceptions import NoSuchUsage
+from xblock.fields import Field, Scope, ScopeIds
+from xblock.field_data import FieldData
+
+from openedx.core.lib.xblock_serializer.api import serialize_modulestore_block_for_blockstore
+from ..learning_context.manager import get_learning_context_impl
+from .runtime import XBlockRuntime
+
+
+log = logging.getLogger(__name__)
+
+
+class LearningCoreFieldData(FieldData):
+ """
+ FieldData for the Learning Core XBlock Runtime
+
+ LearningCoreFieldData only supports the ``content`` and ``settings`` scopes.
+ Any attempt to read or write fields with other scopes will raise a
+ ``NotImplementedError``. This class does NOT support the parent and children
+ scopes.
+
+ LearningCoreFieldData should only live for the duration of one request. The
+ interaction between LearningCoreXBlockRuntime and LearningCoreFieldData is
+ as follows:
+
+ 1. LearningCoreXBlockRuntime knows how to retrieve authored content data from
+ the Learning Core APIs in openedx-learning. This content is stored as
+ OLX, and LearningCoreXBlockRuntime won't know how to parse it into
+ fields, since serialization logic can happen in the XBlock itself.
+ 2. LearningCoreXBlockRuntime will then invoke the block to parse the OLX and
+ then force_save its field data into LearningCoreFieldData.
+ 3. After this point, various handler and API calls might alter fields for
+ a given block using the XBlock.
+ 4. The main thing that LearningCoreXBlockRuntime will want to know later on
+ is whether it needs to write any changes when its save_block method is
+ invoked. To support this, LearningCoreFieldData needs to track which
+ blocks have changes to any of their fields. See the mark_unchanged
+ method docstring for more details.
+ """
+
+ def __init__(self):
+ # set of UsageKeyV2 for blocks that were modified and need to be saved
+ self.changed = set()
+ # mapping of { UsageKeyV2 : { field_name: field_value } }
+ self.field_data = defaultdict(dict)
+
+ def mark_unchanged(self, block):
+ """
+ Mark a block as being unchanged (i.e. no need to write this to the DB).
+
+ Calling set or delete on a field always marks the block with that field
+ as changed, by adding its usage key to self.changed. But set() is also
+ called at the very beginning, when a block is first loaded from the
+ database by the LearningCoreXBlockRuntime's get_block call.
+
+ This method exists so that LearningCoreXBlockRuntime can call it
+ whenever it has either just done a get_block operation (because those
+ set() calls represent the already-persisted content state), or a
+ save_block operation (since those changes will have been persisted).
+
+ This is not a standard part of the FieldData interface.
+ """
+ usage_key = block.scope_ids.usage_id
+ if usage_key in self.changed:
+ self.changed.remove(usage_key)
+
+ def delete(self, block, name):
+ """
+ Delete a field value from a block.
+ """
+ self._check_field(block, name)
+ usage_key = block.scope_ids.usage_id
+ del self.field_data[usage_key][name]
+ self.changed.add(usage_key)
+
+ def get(self, block, name):
+ """
+ Get a field value from a block.
+
+ Raises a KeyError if the value is not found. It is very common to raise
+ this error. XBlocks have many fields with default values, and the
+ FieldData is not expected to store those defaults (that information
+ lives on the Field object). A FieldData subclass only has to store field
+ values that have explicitly been set.
+ """
+ self._check_field(block, name)
+ usage_key = block.scope_ids.usage_id
+ return self.field_data[usage_key][name]
+
+ def set(self, block, name, value):
+ """
+ Set a field for a block to a value.
+ """
+ self._check_field(block, name)
+ usage_key = block.scope_ids.usage_id
+
+ # Check to see if we're just setting the same value. If so, return
+ # without doing anything.
+ block_fields = self.field_data[usage_key]
+ if (name in block_fields) and (block_fields[name] == value):
+ return
+
+ block_fields[name] = value
+ self.changed.add(usage_key)
+
+ def has_changes(self, block):
+ """
+ Does this block have changes that need to be persisted?
+ """
+ return block.scope_ids.usage_id in self.changed
+
+ def _getfield(self, block, name):
+ """
+ Return the field with the given `name` from `block`.
+ If the XBlock doesn't have such a field, raises a KeyError.
+ """
+ # First, get the field from the class, if defined
+ block_field = getattr(block.__class__, name, None)
+ if block_field is not None and isinstance(block_field, Field):
+ return block_field
+ # Not in the class, so name really doesn't name a field
+ raise KeyError(name)
+
+ def _check_field(self, block, name):
+ """
+ Given a block and the name of one of its fields, check that we will be
+ able to read/write it.
+ """
+ field = self._getfield(block, name)
+ if field.scope not in (Scope.content, Scope.settings):
+ raise NotImplementedError(
+ f"Scope {field.scope} (field {name} of {block.scope_ids.usage_id}) "
+ "is unsupported. LearningCoreFieldData only supports the content"
+ " and settings scopes."
+ )
+
+
+class LearningCoreXBlockRuntime(XBlockRuntime):
+ """
+ XBlock runtime that uses openedx-learning apps for content storage.
+
+ The superclass is doing all the hard stuff. This class only has to
+ worry about the block storage, block serialization/de-serialization, and
+ (eventually) asset storage.
+ """
+
+ def get_block(self, usage_key, for_parent=None):
+ """
+ Fetch an XBlock from Learning Core data models.
+
+ This method will find the OLX for the content in Learning Core, parse it
+ into an XBlock (with mixins) instance, and properly initialize our
+ internal LearningCoreFieldData instance with the field values from the
+ parsed OLX.
+ """
+ # We can do this more efficiently in a single query later, but for now
+ # just get it the easy way.
+ component = self._get_component_from_usage_key(usage_key)
+ # TODO: For now, this runtime will only be used in CMS, so it's fine to just return the Draft version.
+ # However, we will need the runtime to return the Published version for LMS (and Draft for LMS-Preview).
+ # We should base this Draft vs Published decision on a runtime initialization parameter.
+ component_version = component.versioning.draft
+ if component_version is None:
+ raise NoSuchUsage(usage_key)
+
+ content = component_version.contents.get(
+ componentversioncontent__key="block.xml"
+ )
+ xml_node = etree.fromstring(content.text)
+ block_type = usage_key.block_type
+ keys = ScopeIds(self.user_id, block_type, None, usage_key)
+
+ if xml_node.get("url_name", None):
+ log.warning("XBlock at %s should not specify an old-style url_name attribute.", usage_key)
+
+ block_class = self.mixologist.mix(self.load_block_type(block_type))
+
+ if hasattr(block_class, 'parse_xml_new_runtime'):
+ # This is a (former) XModule with messy XML parsing code; let its parse_xml() method continue to work
+ # as it currently does in the old runtime, but let this parse_xml_new_runtime() method parse the XML in
+ # a simpler way that's free of tech debt, if defined.
+ # In particular, XmlMixin doesn't play well with this new runtime, so this is mostly about
+ # bypassing that mixin's code.
+ # When a former XModule no longer needs to support the old runtime, its parse_xml_new_runtime method
+ # should be removed and its parse_xml() method should be simplified to just call the super().parse_xml()
+ # plus some minor additional lines of code as needed.
+ block = block_class.parse_xml_new_runtime(xml_node, runtime=self, keys=keys)
+ else:
+ block = block_class.parse_xml(xml_node, runtime=self, keys=keys, id_generator=None)
+
+ # Update field data with parsed values. We can't call .save() because it will call save_block(), below.
+ block.force_save_fields(block._get_fields_to_save()) # pylint: disable=protected-access
+
+ # We've pre-loaded the fields for this block, so the FieldData shouldn't
+ # consider these values "changed" in its sense of "you have to persist
+ # these because we've altered the field values from what was stored".
+ self.system.authored_data_store.mark_unchanged(block)
+
+ return block
+
+ def save_block(self, block):
+ """
+ Save any pending field data values to Learning Core data models.
+
+ This gets called by block.save() - do not call this directly.
+ """
+ if not self.system.authored_data_store.has_changes(block):
+ return # No changes, so no action needed.
+
+ # Verify that the user has permission to write to authored data in this
+ # learning context:
+ if self.user is not None:
+ learning_context = get_learning_context_impl(block.scope_ids.usage_id)
+ if not learning_context.can_edit_block(self.user, block.scope_ids.usage_id):
+ log.warning("User %s does not have permission to edit %s", self.user.username, block.scope_ids.usage_id)
+ raise RuntimeError("You do not have permission to edit this XBlock")
+
+ # We need Blockstore's serialization so we don't have `url_name` showing
+ # up in all the OLX. TODO: Rename this later, after we figure out what
+ # other changes we need to make in the serialization as part of the
+ # Blockstore -> Learning Core conversion.
+ serialized = serialize_modulestore_block_for_blockstore(block)
+ now = datetime.now(tz=timezone.utc)
+ usage_key = block.scope_ids.usage_id
+ with atomic():
+ component = self._get_component_from_usage_key(usage_key)
+ block_media_type = contents_api.get_or_create_media_type(
+ f"application/vnd.openedx.xblock.v1.{usage_key.block_type}+xml"
+ )
+ content = contents_api.get_or_create_text_content(
+ component.learning_package_id,
+ block_media_type.id,
+ text=serialized.olx_str,
+ created=now,
+ )
+ components_api.create_next_version(
+ component.pk,
+ title=block.display_name,
+ content_to_replace={
+ "block.xml": content.id,
+ },
+ created=now,
+ )
+ self.system.authored_data_store.mark_unchanged(block)
+
+ def _get_component_from_usage_key(self, usage_key):
+ """
+ Note that Components are never truly deleted, so this will
+ return a Component if this usage key has ever been used, even if it was
+ later deleted.
+
+ TODO: This is the third place where we're implementing this. Figure out
+ where the definitive place should be and have everything else call that.
+ """
+ learning_package = publishing_api.get_learning_package_by_key(str(usage_key.lib_key))
+ try:
+ component = components_api.get_component_by_key(
+ learning_package.id,
+ namespace='xblock.v1',
+ type_name=usage_key.block_type,
+ local_key=usage_key.block_id,
+ )
+ except ObjectDoesNotExist as exc:
+ raise NoSuchUsage(usage_key) from exc
+
+ return component
+
+ def _lookup_asset_url(self, block: XBlock, asset_path: str) -> str | None: # pylint: disable=unused-argument
+ """
+ Return an absolute URL for the specified static asset file that may
+ belong to this XBlock.
+
+ e.g. if the XBlock settings have a field value like "/static/foo.png"
+ then this method will be called with asset_path="foo.png" and should
+ return a URL like https://cdn.none/xblock/f843u89789/static/foo.png
+
+ If the asset file is not recognized, return None
+
+ This is called by the XBlockRuntime superclass in the .runtime module.
+
+ TODO: Implement as part of larger static asset effort.
+ """
+ return None
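
A minimal sketch of the change-tracking contract documented in LearningCoreFieldData above. FakeBlock and the usage-key string are hypothetical stand-ins used only for illustration; only the LearningCoreFieldData calls come from this diff:

    from xblock.fields import Scope, ScopeIds, String

    from openedx.core.djangoapps.xblock.runtime.learning_core_runtime import LearningCoreFieldData

    class FakeBlock:
        """Hypothetical block class: one settings-scoped field is enough here."""
        display_name = String(scope=Scope.settings)

    block = FakeBlock()
    block.scope_ids = ScopeIds("user-1", "html", None, "some-usage-key")

    field_data = LearningCoreFieldData()

    field_data.set(block, "display_name", "Intro")  # records a change for this usage key
    assert field_data.has_changes(block)

    field_data.mark_unchanged(block)                # what get_block()/save_block() do afterwards
    assert not field_data.has_changes(block)
    assert field_data.get(block, "display_name") == "Intro"

    field_data.set(block, "display_name", "Intro")  # same value again: no new change recorded
    assert not field_data.has_changes(block)
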
diff --git a/openedx/core/djangoapps/xblock/runtime/olx_parsing.py b/openedx/core/djangoapps/xblock/runtime/olx_parsing.py
deleted file mode 100644
index 4bfb648d89d0..000000000000
--- a/openedx/core/djangoapps/xblock/runtime/olx_parsing.py
+++ /dev/null
@@ -1,86 +0,0 @@
-"""
-Helpful methods to use when parsing OLX (XBlock XML)
-"""
-
-from collections import namedtuple
-
-from opaque_keys.edx.locator import BundleDefinitionLocator
-
-from openedx.core.djangolib.blockstore_cache import get_bundle_direct_links_with_cache
-
-
-class BundleFormatException(Exception):
- """
- Raised when certain errors are found when parsing the OLX in a content
- library bundle.
- """
-
-
-XBlockInclude = namedtuple('XBlockInclude', ['link_id', 'block_type', 'definition_id', 'usage_hint'])
-
-
-def parse_xblock_include(include_node):
- """
- Given an etree XML node that represents an <xblock-include /> element,
- parse it and return the BundleDefinitionLocator that it points to.
- """
- # An XBlock include looks like:
- # <xblock-include source="..." definition="block_type/definition_id" usage="..." />
- # Where "source" and "usage" are optional.
- if include_node.tag != 'xblock-include':
- # xss-lint: disable=python-wrap-html
- raise BundleFormatException(f"Expected an <xblock-include /> XML node, but got <{include_node.tag}>")
- try:
- definition_path = include_node.attrib['definition']
- except KeyError:
- raise BundleFormatException("<xblock-include /> is missing the required definition=\"...\" attribute") # lint-amnesty, pylint: disable=raise-missing-from
- usage_hint = include_node.attrib.get("usage", None)
- link_id = include_node.attrib.get("source", None)
- # This is pointing to another definition in the same bundle. It looks like:
- # <xblock-include definition="block_type/definition_id" />
- try:
- block_type, definition_id = definition_path.split("/")
- except ValueError:
- raise BundleFormatException(f"Invalid definition attribute: {definition_path}") # lint-amnesty, pylint: disable=raise-missing-from
- return XBlockInclude(link_id=link_id, block_type=block_type, definition_id=definition_id, usage_hint=usage_hint)
-
-
-def definition_for_include(parsed_include, parent_definition_key):
- """
- Given a parsed <xblock-include /> element as an XBlockInclude tuple, get the
- definition (OLX file) that it is pointing to.
-
- Arguments:
-
- parsed_include: An XBlockInclude tuple
-
- parent_definition_key: The BundleDefinitionLocator for the XBlock whose OLX
- contained the <xblock-include /> (i.e. the parent).
-
- Returns: a BundleDefinitionLocator
- """
- if parsed_include.link_id:
- links = get_bundle_direct_links_with_cache(
- parent_definition_key.bundle_uuid,
- # And one of the following will be set:
- bundle_version=parent_definition_key.bundle_version,
- draft_name=parent_definition_key.draft_name,
- )
- try:
- link = links[parsed_include.link_id]
- except KeyError:
- raise BundleFormatException(f"Link not found: {parsed_include.link_id}") # lint-amnesty, pylint: disable=raise-missing-from
- return BundleDefinitionLocator(
- bundle_uuid=link.bundle_uuid,
- block_type=parsed_include.block_type,
- olx_path=f"{parsed_include.block_type}/{parsed_include.definition_id}/definition.xml",
- bundle_version=link.version,
- )
- else:
- return BundleDefinitionLocator(
- bundle_uuid=parent_definition_key.bundle_uuid,
- block_type=parsed_include.block_type,
- olx_path=f"{parsed_include.block_type}/{parsed_include.definition_id}/definition.xml",
- bundle_version=parent_definition_key.bundle_version,
- draft_name=parent_definition_key.draft_name,
- )
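
For reference, the second helper above resolves a parsed include against its parent's BundleDefinitionLocator. A short sketch of the no-`source` case; the UUID and draft name are placeholders, and the imports refer to this now-removed module:

    from uuid import uuid4
    from opaque_keys.edx.locator import BundleDefinitionLocator
    from openedx.core.djangoapps.xblock.runtime.olx_parsing import XBlockInclude, definition_for_include

    parent_def = BundleDefinitionLocator(
        bundle_uuid=uuid4(),                       # placeholder bundle
        block_type="unit",
        olx_path="unit/unit1/definition.xml",
        draft_name="studio_draft",
    )
    child_def = definition_for_include(
        XBlockInclude(link_id=None, block_type="html", definition_id="intro", usage_hint=None),
        parent_def,
    )
    # Same bundle and draft as the parent, with olx_path "html/intro/definition.xml"
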
diff --git a/openedx/core/djangoapps/xblock/runtime/runtime.py b/openedx/core/djangoapps/xblock/runtime/runtime.py
index 74771833fedf..0b6e7a9ce2f8 100644
--- a/openedx/core/djangoapps/xblock/runtime/runtime.py
+++ b/openedx/core/djangoapps/xblock/runtime/runtime.py
@@ -3,7 +3,7 @@
"""
from __future__ import annotations
import logging
-from typing import Callable
+from typing import Callable, Optional
from urllib.parse import urljoin # pylint: disable=import-error
import crum
@@ -21,7 +21,7 @@
from xblock.exceptions import NoSuchServiceError
from xblock.field_data import FieldData, SplitFieldData
from xblock.fields import Scope, ScopeIds
-from xblock.runtime import KvsFieldData, MemoryIdManager, Runtime
+from xblock.runtime import IdReader, KvsFieldData, MemoryIdManager, Runtime
from xmodule.errortracker import make_error_tracker
from xmodule.contentstore.django import contentstore
@@ -38,7 +38,6 @@
from openedx.core.types import User as UserType
from openedx.core.djangoapps.xblock.apps import get_xblock_app_config
from openedx.core.djangoapps.xblock.data import StudentDataMode
-from openedx.core.djangoapps.xblock.runtime.blockstore_field_data import BlockstoreChildrenData, BlockstoreFieldData
from openedx.core.djangoapps.xblock.runtime.ephemeral_field_data import EphemeralKeyValueStore
from openedx.core.djangoapps.xblock.runtime.mixin import LmsBlockMixin
from openedx.core.djangoapps.xblock.utils import get_xblock_id_for_anonymous_user
@@ -420,6 +419,8 @@ def __init__(
handler_url: Callable[[UsageKey, str, UserType | None], str],
student_data_mode: StudentDataMode,
runtime_class: type[XBlockRuntime],
+ id_reader: Optional[IdReader] = None,
+ authored_data_store: Optional[FieldData] = None,
):
"""
args:
@@ -436,11 +437,11 @@ def __init__(
runtime_class: What runtime to use, e.g. BlockstoreXBlockRuntime
"""
self.handler_url = handler_url
- self.id_reader = OpaqueKeyReader()
+ self.id_reader = id_reader or OpaqueKeyReader()
self.id_generator = MemoryIdManager() # We don't really use id_generator until we need to support asides
self.runtime_class = runtime_class
- self.authored_data_store = BlockstoreFieldData()
- self.children_data_store = BlockstoreChildrenData(self.authored_data_store)
+ self.authored_data_store = authored_data_store
+ self.children_data_store = None
assert student_data_mode in (StudentDataMode.Ephemeral, StudentDataMode.Persisted)
self.student_data_mode = student_data_mode
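
With id_reader and authored_data_store now injectable, the Learning Core pieces added in this PR can be wired in by the caller. A hedged sketch; the enclosing class is assumed to be XBlockRuntimeSystem (its name is not visible in this hunk) and `my_handler_url` is a placeholder:

    from openedx.core.djangoapps.xblock.data import StudentDataMode
    from openedx.core.djangoapps.xblock.runtime.id_managers import OpaqueKeyReader
    from openedx.core.djangoapps.xblock.runtime.learning_core_runtime import (
        LearningCoreFieldData,
        LearningCoreXBlockRuntime,
    )
    from openedx.core.djangoapps.xblock.runtime.runtime import XBlockRuntimeSystem  # assumed name

    system = XBlockRuntimeSystem(
        handler_url=my_handler_url,                     # placeholder callable
        student_data_mode=StudentDataMode.Persisted,
        runtime_class=LearningCoreXBlockRuntime,
        id_reader=OpaqueKeyReader(),
        authored_data_store=LearningCoreFieldData(),
    )
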
diff --git a/openedx/core/djangolib/blockstore_cache.py b/openedx/core/djangolib/blockstore_cache.py
deleted file mode 100644
index 590f6ca68629..000000000000
--- a/openedx/core/djangolib/blockstore_cache.py
+++ /dev/null
@@ -1,282 +0,0 @@
-"""
-An API for caching data related to Blockstore bundles
-
-The whole point of this is to make the hard problem of cache invalidation
-somewhat less hard.
-
-This cache prefixes all keys with the bundle/draft version number, so that when
-any change is made to the bundle/draft, we will look up entries using a new key
-and won't find the now-invalid cached data.
-"""
-
-from datetime import datetime
-from uuid import UUID
-
-from django.conf import settings
-from django.core.cache import caches, InvalidCacheBackendError
-from pytz import UTC
-import requests
-
-from openedx.core.lib import blockstore_api
-
-try:
- # Use a dedicated cache for blockstore, if available:
- cache = caches['blockstore']
-except InvalidCacheBackendError:
- cache = caches['default']
-
-# MAX_BLOCKSTORE_CACHE_DELAY:
-# The per-bundle/draft caches are automatically invalidated when a newer version
-# of the bundle/draft is available, but that automatic check for the current
-# version is cached for this many seconds. So in the absence of explicit calls
-# to invalidate the cache, data may be out of date by up to this many seconds.
-# (Note that we do usually explicitly invalidate this cache during write
-# operations though, so this setting mostly affects actions by external systems
-# on Blockstore or bugs where we left out the cache invalidation step.)
-MAX_BLOCKSTORE_CACHE_DELAY = 60 * 5
-
-
-class BundleCache:
- """
- Data cache that ties every key-value to a particular version of a blockstore
- bundle/draft, so that if/when the bundle/draft is updated, the cache is
- automatically invalidated.
-
- The automatic invalidation may take up to MAX_BLOCKSTORE_CACHE_DELAY
- seconds, although the cache can also be manually invalidated for any
- particular bundle version/draft by calling .clear()
- """
-
- def __init__(self, bundle_uuid, draft_name=None):
- """
- Instantiate this wrapper for the bundle with the specified UUID, and
- optionally the specified draft name.
- """
- self.bundle_uuid = bundle_uuid
- self.draft_name = draft_name
-
- def get(self, key_parts, default=None):
- """
- Get a cached value related to this Blockstore bundle/draft.
-
- key_parts: an arbitrary list of strings to identify the cached value.
- For example, if caching the XBlock type of an OLX file, one could
- request:
- get(bundle_uuid, ["olx_type", "/path/to/file"])
- default: default value if the key is not set in the cache
- draft_name: read keys related to the specified draft
- """
- assert isinstance(key_parts, (list, tuple))
- full_key = _get_versioned_cache_key(self.bundle_uuid, self.draft_name, key_parts)
- return cache.get(full_key, default)
-
- def set(self, key_parts, value):
- """
- Set a cached value related to this Blockstore bundle/draft.
-
- key_parts: an arbitrary list of strings to identify the cached value.
- For example, if caching the XBlock type of an OLX file, one could
- request:
- set(bundle_uuid, ["olx_type", "/path/to/file"], "html")
- value: value to set in the cache
- """
- assert isinstance(key_parts, (list, tuple))
- full_key = _get_versioned_cache_key(self.bundle_uuid, self.draft_name, key_parts)
- return cache.set(full_key, value, timeout=settings.BLOCKSTORE_BUNDLE_CACHE_TIMEOUT)
-
- def clear(self):
- """
- Clear the cache for the specified bundle or draft.
-
- This doesn't actually delete keys from the cache, but if the bundle or
- draft has been modified, this will ensure we use the latest version
- number, which will change the key prefix used by this cache, causing the
- old version's keys to become unaddressable and eventually expire.
- """
- # Note: if we switch from memcached to redis at some point, this can be
- # improved because Redis makes it easy to delete all keys with a
- # specific prefix (e.g. a specific bundle UUID), which memcached cannot.
- # With memcached, we just have to leave the invalid keys in the cache
- # (unused) until they expire.
- cache_key = 'bundle_version:{}:{}'.format(self.bundle_uuid, self.draft_name or '')
- cache.delete(cache_key)
-
-
-def _construct_versioned_cache_key(bundle_uuid, version_num, key_parts, draft_name=None): # lint-amnesty, pylint: disable=missing-function-docstring
- cache_key = str(bundle_uuid)
- if draft_name:
- cache_key += ":" + draft_name
- cache_key += ":" + str(version_num) + ":" + ":".join(key_parts)
- return cache_key
-
-
-def _get_versioned_cache_key(bundle_uuid, draft_name, key_parts):
- """
- Generate a cache key string that can be used to store data about the current
- version/draft of the given bundle. The key incorporates the bundle/draft's
- current version number such that if the bundle/draft is updated, a new key
- will be used and the old key will no longer be valid and will expire.
-
- Pass draft_name=None if you want to use the published version of the bundle.
- """
- assert isinstance(bundle_uuid, UUID)
- version_num = get_bundle_version_number(bundle_uuid, draft_name)
- return _construct_versioned_cache_key(bundle_uuid, version_num, key_parts, draft_name)
-
-
-def get_bundle_version_number(bundle_uuid, draft_name=None):
- """
- Get the current version number of the specified bundle/draft. If a draft is
- specified, the update timestamp is used in lieu of a version number.
- """
- cache_key = 'bundle_version:{}:{}'.format(bundle_uuid, draft_name or '')
- version = cache.get(cache_key)
- if version is not None:
- return version
- else:
- version = 0 # Default to 0 in case bundle/draft is empty or doesn't exist
-
- bundle_metadata = blockstore_api.get_bundle(bundle_uuid)
- if draft_name:
- draft_uuid = bundle_metadata.drafts.get(draft_name) # pylint: disable=no-member
- if draft_uuid:
- draft_metadata = blockstore_api.get_draft(draft_uuid)
- # Convert the 'updated_at' datetime into an integer value with microsecond accuracy.
- updated_at_timestamp = (draft_metadata.updated_at - datetime(1970, 1, 1, tzinfo=UTC)).total_seconds()
- version = int(updated_at_timestamp * 1e6)
- # Cache the draft files using the version. This saves an API call when the draft is first retrieved.
- draft_files = list(draft_metadata.files.values())
- draft_files_cache_key = _construct_versioned_cache_key(
- bundle_uuid, version, ('bundle_draft_files', ), draft_name)
- cache.set(draft_files_cache_key, draft_files)
- # If we're not using a draft or the draft does not exist [anymore], fall
- # back to the bundle version, if any versions have been published:
- if version == 0 and bundle_metadata.latest_version:
- version = bundle_metadata.latest_version
- cache.set(cache_key, version, timeout=MAX_BLOCKSTORE_CACHE_DELAY)
- return version
-
-
-def get_bundle_version_files_cached(bundle_uuid, bundle_version):
- """
- Get the files in the specified BundleVersion. Since BundleVersions are
- immutable, this should be cached as aggressively as possible.
- """
- # Use the blockstore django cache directly; this can't use BundleCache because BundleCache only associates data
- # with the most recent bundleversion, not a specified bundleversion
- # This key is '_v2' to avoid reading invalid values cached by a past version of this code with no timeout.
- cache_key = f'bundle_version_files_v2:{bundle_uuid}:{bundle_version}'
- result = cache.get(cache_key)
- if result is None:
- result = blockstore_api.get_bundle_version_files(bundle_uuid, bundle_version)
- # Cache this result. We should be able to cache this forever, since bundle versions are immutable, but currently
- # this result may contain signed S3 URLs which become invalid after 3600 seconds. If Blockstore is improved to
- # return URLs that redirect to the signed S3 URLs, then this can be changed to cache forever.
- cache.set(cache_key, result, timeout=1800)
- return result
-
-
-def get_bundle_draft_files_cached(bundle_uuid, draft_name):
- """
- Get the files in the specified bundle draft. Cached using BundleCache so we
- get automatic cache invalidation when the draft is updated.
- """
- bundle_cache = BundleCache(bundle_uuid, draft_name)
-
- cache_key = ('bundle_draft_files', )
- result = bundle_cache.get(cache_key)
- if result is None:
- result = list(blockstore_api.get_bundle_files(bundle_uuid, use_draft=draft_name))
- bundle_cache.set(cache_key, result)
- return result
-
-
-def get_bundle_files_cached(bundle_uuid, bundle_version=None, draft_name=None):
- """
- Get the list of files in the bundle, optionally with a version and/or draft
- specified.
- """
- if draft_name:
- return get_bundle_draft_files_cached(bundle_uuid, draft_name)
- else:
- if bundle_version is None:
- bundle_version = get_bundle_version_number(bundle_uuid)
- return get_bundle_version_files_cached(bundle_uuid, bundle_version)
-
-
-def get_bundle_file_metadata_with_cache(bundle_uuid, path, bundle_version=None, draft_name=None):
- """
- Get metadata about a file in a Blockstore Bundle[Version] or Draft, using the
- cached list of files in each bundle if available.
- """
- for file_info in get_bundle_files_cached(bundle_uuid, bundle_version, draft_name):
- if file_info.path == path:
- return file_info
- raise blockstore_api.BundleFileNotFound(f"Could not load {path} from bundle {bundle_uuid}")
-
-
-def get_bundle_file_data_with_cache(bundle_uuid, path, bundle_version=None, draft_name=None):
- """
- Method to read a file out of a Blockstore Bundle[Version] or Draft, using the
- cached list of files in each bundle if available.
- """
- file_info = get_bundle_file_metadata_with_cache(bundle_uuid, path, bundle_version, draft_name)
- response = requests.get(file_info.url)
- if response.status_code != 200:
- try:
- error_response = response.content.decode('utf-8')[:500]
- except UnicodeDecodeError:
- error_response = '(error details unavailable - response was not a [unicode] string)'
- raise blockstore_api.BundleStorageError(
- "Unexpected error ({}) trying to read {} from bundle {} using URL {}: \n{}".format(
- response.status_code, path, bundle_uuid, file_info.url, error_response,
- )
- )
- return response.content
-
-
-def get_bundle_version_direct_links_cached(bundle_uuid, bundle_version):
- """
- Get the direct links in the specified BundleVersion. Since BundleVersions
- are immutable, this should be cached as aggressively as possible.
- """
- # Use the blockstore django cache directly; this can't use BundleCache because BundleCache only associates data
- # with the most recent bundleversion, not a specified bundleversion
- cache_key = f'bundle_version_direct_links:{bundle_uuid}:{bundle_version}'
- result = cache.get(cache_key)
- if result is None:
- result = {
- link.name: link.direct
- for link in blockstore_api.get_bundle_version_links(bundle_uuid, bundle_version).values()
- }
- cache.set(cache_key, result, timeout=None) # Cache forever since bundle versions are immutable
- return result
-
-
-def get_bundle_draft_direct_links_cached(bundle_uuid, draft_name):
- """
- Get the direct links in the specified bundle draft. Cached using BundleCache
- so we get automatic cache invalidation when the draft is updated.
- """
- bundle_cache = BundleCache(bundle_uuid, draft_name)
- cache_key = ('bundle_draft_direct_links', )
- result = bundle_cache.get(cache_key)
- if result is None:
- links = blockstore_api.get_bundle_links(bundle_uuid, use_draft=draft_name).values()
- result = {link.name: link.direct for link in links}
- bundle_cache.set(cache_key, result)
- return result
-
-
-def get_bundle_direct_links_with_cache(bundle_uuid, bundle_version=None, draft_name=None):
- """
- Get a dictionary of the direct links of the specified bundle, from cache if
- possible.
- """
- if draft_name:
- links = get_bundle_draft_direct_links_cached(bundle_uuid, draft_name)
- else:
- if bundle_version is None:
- bundle_version = get_bundle_version_number(bundle_uuid)
- links = get_bundle_version_direct_links_cached(bundle_uuid, bundle_version)
- return links
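
The removed cache above avoids explicit invalidation by folding the bundle/draft's current version number into every key, so stale entries simply become unreachable. A small usage sketch of that (now-removed) API, with the UUID and draft name as placeholder values:

    from uuid import UUID
    from openedx.core.djangolib.blockstore_cache import BundleCache

    bundle_uuid = UUID("00000000-0000-0000-0000-000000000000")  # placeholder
    cache = BundleCache(bundle_uuid, draft_name="studio_draft")

    cache.set(("olx_type", "html/intro/definition.xml"), "html")
    cache.get(("olx_type", "html/intro/definition.xml"))  # -> "html" while this version is current

    # After the draft changes, a new version prefix is used; clear() just forces the
    # version check immediately instead of waiting up to MAX_BLOCKSTORE_CACHE_DELAY seconds.
    cache.clear()
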
diff --git a/openedx/core/djangolib/tests/test_blockstore_cache.py b/openedx/core/djangolib/tests/test_blockstore_cache.py
deleted file mode 100644
index 5ad1993d3ecd..000000000000
--- a/openedx/core/djangolib/tests/test_blockstore_cache.py
+++ /dev/null
@@ -1,109 +0,0 @@
-"""
-Tests for BundleCache
-"""
-from unittest.mock import patch
-
-from django.test import TestCase
-from openedx.core.djangolib.blockstore_cache import BundleCache
-from openedx.core.lib import blockstore_api as api
-
-
-class TestWithBundleMixin:
- """
- Mixin that gives every test method access to a bundle + draft
- """
-
- @classmethod
- def setUpClass(cls):
- super().setUpClass()
- cls.collection = api.create_collection(title="Collection")
- cls.bundle = api.create_bundle(cls.collection.uuid, title="Test Bundle", slug="test")
- cls.draft = api.get_or_create_bundle_draft(cls.bundle.uuid, draft_name="test-draft")
-
-
-@patch('openedx.core.djangolib.blockstore_cache.MAX_BLOCKSTORE_CACHE_DELAY', 0)
-class BundleCacheTestMixin(TestWithBundleMixin, TestCase):
- """
- Tests for BundleCache
- """
-
- def test_bundle_cache(self):
- """
- Test caching data related to a bundle (no draft)
- """
- cache = BundleCache(self.bundle.uuid)
-
- key1 = ("some", "key", "1")
- key2 = ("key2", )
-
- value1 = "value1"
- cache.set(key1, value1)
- value2 = {"this is": "a dict", "for": "key2"}
- cache.set(key2, value2)
- assert cache.get(key1) == value1
- assert cache.get(key2) == value2
-
- # Now publish a new version of the bundle:
- api.write_draft_file(self.draft.uuid, "test.txt", "we need a changed file in order to publish a new version")
- api.commit_draft(self.draft.uuid)
-
- # Now the cache should be invalidated
- # (immediately since we set MAX_BLOCKSTORE_CACHE_DELAY to 0)
- assert cache.get(key1) is None
- assert cache.get(key2) is None
-
- def test_bundle_draft_cache(self):
- """
- Test caching data related to a bundle draft
- """
- cache = BundleCache(self.bundle.uuid, draft_name=self.draft.name)
-
- key1 = ("some", "key", "1")
- key2 = ("key2", )
-
- value1 = "value1"
- cache.set(key1, value1)
- value2 = {"this is": "a dict", "for": "key2"}
- cache.set(key2, value2)
- assert cache.get(key1) == value1
- assert cache.get(key2) == value2
-
- # Now make a change to the draft (doesn't matter if we commit it or not)
- api.write_draft_file(self.draft.uuid, "test.txt", "we need a changed file in order to publish a new version")
-
- # Now the cache should be invalidated
- # (immediately since we set MAX_BLOCKSTORE_CACHE_DELAY to 0)
- assert cache.get(key1) is None
- assert cache.get(key2) is None
-
-
-class BundleCacheClearTest(TestWithBundleMixin, TestCase):
- """
- Tests for BundleCache's clear() method.
- Requires MAX_BLOCKSTORE_CACHE_DELAY to be non-zero. This clear() method does
- not actually clear the cache but rather just means "a new bundle/draft
- version has been created, so immediately start reading/writing cache keys
- using the new version number."
- """
-
- def test_bundle_cache_clear(self):
- """
- Test the cache clear() method
- """
- cache = BundleCache(self.bundle.uuid)
- key1 = ("some", "key", "1")
- value1 = "value1"
- cache.set(key1, value1)
- assert cache.get(key1) == value1
-
- # Now publish a new version of the bundle:
- api.write_draft_file(self.draft.uuid, "test.txt", "we need a changed file in order to publish a new version")
- api.commit_draft(self.draft.uuid)
-
- # Now the cache will not be immediately invalidated; it takes up to MAX_BLOCKSTORE_CACHE_DELAY seconds.
- # Since this is a new bundle and we _just_ accessed the cache for the first time, we can be confident
- # it won't yet be automatically invalidated.
- assert cache.get(key1) == value1
- # Now "clear" the cache, forcing the check of the new version:
- cache.clear()
- assert cache.get(key1) is None
diff --git a/openedx/core/lib/blockstore_api/__init__.py b/openedx/core/lib/blockstore_api/__init__.py
index d9855ef1812f..855d8a1f96a0 100644
--- a/openedx/core/lib/blockstore_api/__init__.py
+++ b/openedx/core/lib/blockstore_api/__init__.py
@@ -1,9 +1,7 @@
"""
API Client for Blockstore
-This API does not do any caching; consider using BundleCache or (in
-openedx.core.djangolib.blockstore_cache) together with these API methods for
-improved performance.
+TODO: This should all get ripped out.
TODO: This wrapper is extraneous now that Blockstore-as-a-service isn't supported.
This whole directory tree should be removed by https://github.com/openedx/blockstore/issues/296.
diff --git a/openedx/core/lib/xblock_serializer/api.py b/openedx/core/lib/xblock_serializer/api.py
index 97d04580f24b..30dbc8321b3c 100644
--- a/openedx/core/lib/xblock_serializer/api.py
+++ b/openedx/core/lib/xblock_serializer/api.py
@@ -23,5 +23,10 @@ def serialize_modulestore_block_for_blockstore(block):
contain the OLX of its children, just refers to them, so you have to
separately serialize them.)
(3) a list of any static files required by the XBlock and their URL
+
+ TODO: We should deprecate this in favor of a new Learning Core implementation.
+ We've left it as-is for now partly because there are bigger questions that
+ we have around how we should rewrite this (e.g. are we going to
+ remove ?).
"""
return XBlockSerializerForBlockstore(block)
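
For context, the new Learning Core runtime and the removed Blockstore runtime in this diff consume the serializer's return value roughly like this (a hedged sketch; `block` is an already-loaded XBlock and `store_file` is a placeholder for whatever storage call the caller uses):

    serialized = serialize_modulestore_block_for_blockstore(block)

    olx = serialized.olx_str                 # OLX for this block; children are referenced, not inlined
    for static_file in serialized.static_files:
        # each entry exposes .name and .data, as used by the removed Blockstore runtime's save_block()
        store_file(static_file.name, static_file.data)
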
diff --git a/xmodule/tests/test_library_tools.py b/xmodule/tests/test_library_tools.py
index 265d0f31f174..c6185f8754b1 100644
--- a/xmodule/tests/test_library_tools.py
+++ b/xmodule/tests/test_library_tools.py
@@ -11,15 +11,12 @@
import ddt
from django.conf import settings
from django.test import override_settings
-from opaque_keys.edx.keys import UsageKey
from opaque_keys.edx.locator import LibraryLocator, LibraryLocatorV2
from common.djangoapps.student.roles import CourseInstructorRole
from common.djangoapps.student.tests.factories import UserFactory
from openedx.core.djangolib.testing.utils import skip_unless_cms
-from openedx.core.djangoapps.content_libraries import api as library_api
from openedx.core.djangoapps.content_libraries.tests.base import ContentLibrariesRestApiTest
-from openedx.core.djangoapps.xblock.api import load_block
from xmodule.library_tools import LibraryToolsService
from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory
from xmodule.modulestore.tests.utils import MixedSplitTestCase
@@ -129,66 +126,6 @@ def test_update_children_for_v2_lib(self):
assert len(content_block.children) == 1
- def test_update_children_for_v2_lib_recursive(self):
- """
- Test update_children for a V2 library containing a unit.
-
- Ensures that _import_from_blockstore works on nested blocks.
- """
- # Create a blockstore content library
- library = self._create_library(slug="testlib1_import", title="A Test Library", description="Testing XBlocks")
- # Create a unit block with an HTML block in it.
- unit_block_id = self._add_block_to_library(library["id"], "unit", "unit1")["id"]
- html_block_id = self._add_block_to_library(library["id"], "html", "html1", parent_block=unit_block_id)["id"]
- html_block = load_block(UsageKey.from_string(html_block_id), self.user)
- # Add assets and content to the HTML block
- self._set_library_block_asset(html_block_id, "test.txt", b"data", expect_response=200)
- self._set_library_block_olx(html_block_id, '<html>Hello world</html>')
-
- # Create a modulestore course
- course = CourseFactory.create(modulestore=self.store, user_id=self.user.id)
- CourseInstructorRole(course.id).add_users(self.user)
- # Add Source from library block to the course
- lc_block = self.make_block(
- "library_content",
- course,
- user_id=self.user_id,
- max_count=1,
- source_library_id=str(library["id"]),
- )
-
- # Import the unit block from the library to the course
- self.tools.trigger_library_sync(lc_block, library_version=None)
- lc_block = self.store.get_item(lc_block.location)
-
- # Verify imported block with its children
- assert len(lc_block.children) == 1
- assert lc_block.children[0].category == 'unit'
-
- imported_unit_block = self.store.get_item(lc_block.children[0])
- assert len(imported_unit_block.children) == 1
- assert imported_unit_block.children[0].category == 'html'
-
- imported_html_block = self.store.get_item(imported_unit_block.children[0])
- assert 'Hello world' in imported_html_block.data
-
- # Check that assets were imported and static paths were modified after importing
- assets = library_api.get_library_block_static_asset_files(html_block.scope_ids.usage_id)
- assert len(assets) == 1
- assert assets[0].url in imported_html_block.data
-
- # Check that reimporting updates the target block
- self._set_library_block_olx(html_block_id, 'Foo bar')
- self.tools.trigger_library_sync(lc_block, library_version=None)
- lc_block = self.store.get_item(lc_block.location)
-
- assert len(lc_block.children) == 1
- imported_unit_block = self.store.get_item(lc_block.children[0])
- assert len(imported_unit_block.children) == 1
- imported_html_block = self.store.get_item(imported_unit_block.children[0])
- assert 'Hello world' not in imported_html_block.data
- assert 'Foo bar' in imported_html_block.data
-
def test_update_children_for_v1_lib(self):
"""
Test update_children with V1 library as a source.
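For context, the flat (non-recursive) v2 sync path that remains covered by test_update_children_for_v2_lib boils down to roughly the following sketch. It reuses the same helpers as the deleted recursive test, but the fixture values are illustrative:

    # Sketch only: sync a single HTML block from a v2 library into a course.
    library = self._create_library(slug="testlib_flat", title="A Test Library", description="Testing")
    self._add_block_to_library(library["id"], "html", "html1")

    course = CourseFactory.create(modulestore=self.store, user_id=self.user.id)
    CourseInstructorRole(course.id).add_users(self.user)
    lc_block = self.make_block(
        "library_content",
        course,
        user_id=self.user_id,
        max_count=1,
        source_library_id=str(library["id"]),
    )

    # Pull the library content into the course and re-read the block.
    self.tools.trigger_library_sync(lc_block, library_version=None)
    lc_block = self.store.get_item(lc_block.location)
    assert len(lc_block.children) == 1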
diff --git a/xmodule/video_block/transcripts_utils.py b/xmodule/video_block/transcripts_utils.py
index 1cc17928de7f..d81ce2ab08eb 100644
--- a/xmodule/video_block/transcripts_utils.py
+++ b/xmodule/video_block/transcripts_utils.py
@@ -19,8 +19,6 @@
from pysrt import SubRipFile, SubRipItem, SubRipTime
from pysrt.srtexc import Error
-from openedx.core.djangolib import blockstore_cache
-from openedx.core.lib import blockstore_api
from xmodule.contentstore.content import StaticContent
from xmodule.contentstore.django import contentstore
from xmodule.exceptions import NotFoundError
@@ -488,7 +486,7 @@ def manage_video_subtitles_save(item, user, old_metadata=None, generate_translat
{speed: subs_id for subs_id, speed in youtube_speed_dict(item).items()},
lang,
)
- except TranscriptException as ex: # lint-amnesty, pylint: disable=unused-variable
+ except TranscriptException:
pass
if reraised_message:
item.save_with_metadata(user)
@@ -1047,42 +1045,9 @@ def get_transcript_from_blockstore(video_block, language, output_format, transcr
Returns:
tuple containing content, filename, mimetype
"""
- if output_format not in (Transcript.SRT, Transcript.SJSON, Transcript.TXT):
- raise NotFoundError(f'Invalid transcript format `{output_format}`')
- transcripts = transcripts_info['transcripts']
- if language not in transcripts:
- raise NotFoundError("Video {} does not have a transcript file defined for the '{}' language in its OLX.".format(
- video_block.scope_ids.usage_id,
- language,
- ))
- filename = transcripts[language]
- if not filename.endswith('.srt'):
- # We want to standardize on .srt
- raise NotFoundError("Video XBlocks in Blockstore only support .srt transcript files.")
- # Try to load the transcript file out of Blockstore
- # In lieu of an XBlock API for this (like block.runtime.resources_fs), we use the blockstore API directly.
- bundle_uuid = video_block.scope_ids.def_id.bundle_uuid
- path = video_block.scope_ids.def_id.olx_path.rpartition('/')[0] + '/static/' + filename
- bundle_version = video_block.scope_ids.def_id.bundle_version # Either bundle_version or draft_name will be set.
- draft_name = video_block.scope_ids.def_id.draft_name
- try:
- content_binary = blockstore_cache.get_bundle_file_data_with_cache(bundle_uuid, path, bundle_version, draft_name)
- except blockstore_api.BundleFileNotFound:
- raise NotFoundError("Transcript file '{}' missing for video XBlock {}".format( # lint-amnesty, pylint: disable=raise-missing-from
- path,
- video_block.scope_ids.usage_id,
- ))
- # Now convert the transcript data to the requested format:
- filename_no_extension = os.path.splitext(filename)[0]
- output_filename = f'{filename_no_extension}.{output_format}'
- output_transcript = Transcript.convert(
- content_binary.decode('utf-8'),
- input_format=Transcript.SRT,
- output_format=output_format,
- )
- if not output_transcript.strip():
- raise NotFoundError('No transcript content')
- return output_transcript, output_filename, Transcript.mime_types[output_format]
+ # TODO: Update to use Learning Core data models once static asset support
+ # has been added.
+ raise NotImplementedError("Transcripts not supported.")
def get_transcript(video, lang=None, output_format=Transcript.SRT, youtube_id=None):
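Until that TODO is addressed, any remaining caller of get_transcript_from_blockstore will hit the NotImplementedError above. A defensive wrapper (hypothetical, shown only to illustrate the failure mode; the real get_transcript dispatch may handle this differently) could look like:

    def get_transcript_or_none(video_block, language, transcripts_info):
        # Degrade gracefully while Learning Core static asset support is absent.
        try:
            return get_transcript_from_blockstore(
                video_block, language, Transcript.SRT, transcripts_info,
            )
        except (NotImplementedError, NotFoundError):
            return None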