Commit 46f64305 by John Eskew

Merge pull request #6767 from edx/jeskew/split_course_export_slowdown_take_II

Fix performance problem with Split course import/export.
parents b01f7adc 9388ba59
...@@ -37,6 +37,7 @@ log = logging.getLogger('edx.modulestore') ...@@ -37,6 +37,7 @@ log = logging.getLogger('edx.modulestore')
new_contract('CourseKey', CourseKey) new_contract('CourseKey', CourseKey)
new_contract('AssetKey', AssetKey) new_contract('AssetKey', AssetKey)
new_contract('AssetMetadata', AssetMetadata) new_contract('AssetMetadata', AssetMetadata)
new_contract('XBlock', XBlock)
class ModuleStoreEnum(object): class ModuleStoreEnum(object):
...@@ -276,6 +277,122 @@ class BulkOperationsMixin(object): ...@@ -276,6 +277,122 @@ class BulkOperationsMixin(object):
return self._get_bulk_ops_record(course_key, ignore_case).active return self._get_bulk_ops_record(course_key, ignore_case).active
class EditInfo(object):
    """
    Holds the versioning/editing metadata for a single block.
    """
    def __init__(self, **kwargs):
        # Populate the public edit attributes from the keyword arguments.
        self.from_storable(kwargs)

        # Cached subtree-edit data; for details, see
        # caching_descriptor_system.py get_subtree_edited_by/on.
        self._subtree_edited_on = kwargs.get('_subtree_edited_on')
        self._subtree_edited_by = kwargs.get('_subtree_edited_by')

    def to_storable(self):
        """
        Serialize to a Mongo-storable format.
        """
        return dict(
            previous_version=self.previous_version,
            update_version=self.update_version,
            source_version=self.source_version,
            edited_on=self.edited_on,
            edited_by=self.edited_by,
            original_usage=self.original_usage,
            original_usage_version=self.original_usage_version,
        )

    def from_storable(self, edit_info):
        """
        De-serialize from Mongo-storable format to an object.
        """
        lookup = edit_info.get
        # Guid for the structure which previously changed this XBlock
        # (the previous value of 'update_version').
        self.previous_version = lookup('previous_version')
        # Guid for the structure where this XBlock got its current field
        # values. May point to a structure not in this structure's history
        # (e.g., to a draft branch from which this version was published).
        self.update_version = lookup('update_version')
        self.source_version = lookup('source_version')
        # Datetime when this XBlock's fields last changed.
        self.edited_on = lookup('edited_on')
        # User ID which changed this XBlock last.
        self.edited_by = lookup('edited_by')
        self.original_usage = lookup('original_usage')
        self.original_usage_version = lookup('original_usage_version')

    def __str__(self):
        return ("EditInfo(previous_version={0.previous_version}, "
                "update_version={0.update_version}, "
                "source_version={0.source_version}, "
                "edited_on={0.edited_on}, "
                "edited_by={0.edited_by}, "
                "original_usage={0.original_usage}, "
                "original_usage_version={0.original_usage_version}, "
                "_subtree_edited_on={0._subtree_edited_on}, "
                "_subtree_edited_by={0._subtree_edited_by})").format(self)
class BlockData(object):
    """
    Wrap the block data in an object instead of using a straight Python dictionary.
    Allows the storing of meta-information about a structure that doesn't persist along with
    the structure itself.
    """
    def __init__(self, **kwargs):
        # Tracks whether the definition payload has been fetched from the DB.
        self.definition_loaded = False
        self.from_storable(kwargs)

    def to_storable(self):
        """
        Serialize to a Mongo-storable format.
        """
        storable = {
            'fields': self.fields,
            'block_type': self.block_type,
            'definition': self.definition,
            'defaults': self.defaults,
        }
        storable['edit_info'] = self.edit_info.to_storable()
        return storable

    def from_storable(self, block_data):
        """
        De-serialize from Mongo-storable format to an object.
        """
        # Scope.settings and 'children' field values; 'children' is stored
        # as a list of (block_type, block_id) pairs.
        self.fields = block_data.get('fields', {})
        # XBlock type ID.
        self.block_type = block_data.get('block_type')
        # DB id of the record containing the content of this XBlock.
        self.definition = block_data.get('definition')
        # Scope.settings default values copied from a template block
        # (used e.g. when blocks are copied from a library to a course).
        self.defaults = block_data.get('defaults', {})
        # EditInfo object containing all versioning/editing data.
        self.edit_info = EditInfo(**block_data.get('edit_info', {}))

    def __str__(self):
        return ("BlockData(fields={0.fields}, "
                "block_type={0.block_type}, "
                "definition={0.definition}, "
                "definition_loaded={0.definition_loaded}, "
                "defaults={0.defaults}, "
                "edit_info={0.edit_info})").format(self)
new_contract('BlockData', BlockData)
class IncorrectlySortedList(Exception): class IncorrectlySortedList(Exception):
""" """
Thrown when calling find() on a SortedAssetList not sorted by filename. Thrown when calling find() on a SortedAssetList not sorted by filename.
...@@ -615,27 +732,32 @@ class ModuleStoreRead(ModuleStoreAssetBase): ...@@ -615,27 +732,32 @@ class ModuleStoreRead(ModuleStoreAssetBase):
""" """
pass pass
def _block_matches(self, fields_or_xblock, qualifiers): @contract(block='XBlock | BlockData | dict', qualifiers=dict)
''' def _block_matches(self, block, qualifiers):
"""
Return True or False depending on whether the field value (block contents) Return True or False depending on whether the field value (block contents)
matches the qualifiers as per get_items. Note, only finds directly set not matches the qualifiers as per get_items.
inherited nor default value matches. NOTE: Method only finds directly set value matches - not inherited nor default value matches.
For substring matching pass a regex object. For substring matching:
for arbitrary function comparison such as date time comparison, pass pass a regex object.
the function as in start=lambda x: x < datetime.datetime(2014, 1, 1, 0, tzinfo=pytz.UTC) For arbitrary function comparison such as date time comparison:
pass the function as in start=lambda x: x < datetime.datetime(2014, 1, 1, 0, tzinfo=pytz.UTC)
Args: Args:
fields_or_xblock (dict or XBlock): either the json blob (from the db or get_explicitly_set_fields) block (dict, XBlock, or BlockData): either the BlockData (transformed from the db) -or-
or the xblock.fields() value or the XBlock from which to get those values a dict (from BlockData.fields or get_explicitly_set_fields_by_scope) -or-
qualifiers (dict): field: searchvalue pairs. the xblock.fields() value -or-
''' the XBlock from which to get the 'fields' value.
if isinstance(fields_or_xblock, XBlock): qualifiers (dict): {field: value} search pairs.
fields = fields_or_xblock.fields """
xblock = fields_or_xblock if isinstance(block, XBlock):
is_xblock = True # If an XBlock is passed-in, just match its fields.
xblock, fields = (block, block.fields)
elif isinstance(block, BlockData):
# BlockData is an object - compare its attributes in dict form.
xblock, fields = (None, block.__dict__)
else: else:
fields = fields_or_xblock xblock, fields = (None, block)
is_xblock = False
def _is_set_on(key): def _is_set_on(key):
""" """
...@@ -646,8 +768,8 @@ class ModuleStoreRead(ModuleStoreAssetBase): ...@@ -646,8 +768,8 @@ class ModuleStoreRead(ModuleStoreAssetBase):
if key not in fields: if key not in fields:
return False, None return False, None
field = fields[key] field = fields[key]
if is_xblock: if xblock is not None:
return field.is_set_on(fields_or_xblock), getattr(xblock, key) return field.is_set_on(block), getattr(xblock, key)
else: else:
return True, field return True, field
...@@ -660,7 +782,7 @@ class ModuleStoreRead(ModuleStoreAssetBase): ...@@ -660,7 +782,7 @@ class ModuleStoreRead(ModuleStoreAssetBase):
return True return True
def _value_matches(self, target, criteria): def _value_matches(self, target, criteria):
''' """
helper for _block_matches: does the target (field value) match the criteria? helper for _block_matches: does the target (field value) match the criteria?
If target is a list, do any of the list elements meet the criteria If target is a list, do any of the list elements meet the criteria
...@@ -668,7 +790,7 @@ class ModuleStoreRead(ModuleStoreAssetBase): ...@@ -668,7 +790,7 @@ class ModuleStoreRead(ModuleStoreAssetBase):
If the criteria is a function, does invoking it on the target yield something truthy? If the criteria is a function, does invoking it on the target yield something truthy?
If criteria is a dict {($nin|$in): []}, then do (none|any) of the list elements meet the criteria If criteria is a dict {($nin|$in): []}, then do (none|any) of the list elements meet the criteria
Otherwise, is the target == criteria Otherwise, is the target == criteria
''' """
if isinstance(target, list): if isinstance(target, list):
return any(self._value_matches(ele, criteria) for ele in target) return any(self._value_matches(ele, criteria) for ele in target)
elif isinstance(criteria, re._pattern_type): # pylint: disable=protected-access elif isinstance(criteria, re._pattern_type): # pylint: disable=protected-access
......
...@@ -21,78 +21,3 @@ class BlockKey(namedtuple('BlockKey', 'type id')): ...@@ -21,78 +21,3 @@ class BlockKey(namedtuple('BlockKey', 'type id')):
CourseEnvelope = namedtuple('CourseEnvelope', 'course_key structure') CourseEnvelope = namedtuple('CourseEnvelope', 'course_key structure')
class BlockData(object):
    """
    Wrap the block data in an object instead of using a straight Python dictionary.
    Allows the storing of meta-information about a structure that doesn't persist along with
    the structure itself.

    Exposes a dict-like interface (get/__getitem__/__setitem__/__delitem__/
    __iter__/setdefault) so callers that treated block data as a plain
    dictionary keep working unchanged.
    """
    @contract(block_dict='dict|None')
    def __init__(self, block_dict=None):
        """
        Args:
            block_dict (dict or None): Mongo-storable data to populate from;
                treated as an empty dict when None.
        """
        # Has the definition been loaded?
        self.definition_loaded = False
        # Default to None instead of a mutable default argument ({}):
        # a shared default dict would be reused across all calls.
        self.from_storable(block_dict if block_dict is not None else {})

    def to_storable(self):
        """
        Serialize to a Mongo-storable format.
        """
        return {
            'fields': self.fields,
            'block_type': self.block_type,
            'definition': self.definition,
            'defaults': self.defaults,
            'edit_info': self.edit_info
        }

    @contract(stored=dict)
    def from_storable(self, stored):
        """
        De-serialize from Mongo-storable format to an object.
        """
        # Scope.settings and 'children' field values.
        self.fields = stored.get('fields', {})
        # XBlock type ID.
        self.block_type = stored.get('block_type', None)
        # DB id of the record containing the content of this XBlock.
        self.definition = stored.get('definition', None)
        # Scope.settings defaults copied from a template block.
        self.defaults = stored.get('defaults', {})
        # Versioning/editing metadata (a plain dict in this representation).
        self.edit_info = stored.get('edit_info', {})

    def get(self, key, *args, **kwargs):
        """
        Dict-like 'get' method. Raises AttributeError if requesting non-existent attribute and no default.
        """
        if len(args) > 0:
            return getattr(self, key, args[0])
        elif 'default' in kwargs:
            return getattr(self, key, kwargs['default'])
        else:
            return getattr(self, key)

    def __getitem__(self, key):
        """
        Dict-like '__getitem__'. Raises KeyError (carrying the key) when absent.
        """
        if not hasattr(self, key):
            # Include the missing key in the exception for easier debugging.
            raise KeyError(key)
        else:
            return getattr(self, key)

    def __setitem__(self, key, value):
        # Dict-like assignment: stores the value as an attribute.
        setattr(self, key, value)

    def __delitem__(self, key):
        # Dict-like deletion.
        delattr(self, key)

    def __iter__(self):
        # Iterate over attribute names, mirroring dict key iteration.
        # iter(dict) is equivalent to iterkeys() on Python 2 and also
        # works on Python 3.
        return iter(self.__dict__)

    def setdefault(self, key, default=None):
        """
        Dict-like 'setdefault'.
        """
        try:
            return getattr(self, key)
        except AttributeError:
            setattr(self, key, default)
            return default
...@@ -5,11 +5,13 @@ from fs.osfs import OSFS ...@@ -5,11 +5,13 @@ from fs.osfs import OSFS
from lazy import lazy from lazy import lazy
from xblock.runtime import KvsFieldData from xblock.runtime import KvsFieldData
from xblock.fields import ScopeIds from xblock.fields import ScopeIds
from xblock.core import XBlock
from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator, LibraryLocator, DefinitionLocator from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator, LibraryLocator, DefinitionLocator
from xmodule.library_tools import LibraryToolsService from xmodule.library_tools import LibraryToolsService
from xmodule.mako_module import MakoDescriptorSystem from xmodule.mako_module import MakoDescriptorSystem
from xmodule.error_module import ErrorDescriptor from xmodule.error_module import ErrorDescriptor
from xmodule.errortracker import exc_info_to_str from xmodule.errortracker import exc_info_to_str
from xmodule.modulestore import BlockData
from xmodule.modulestore.edit_info import EditInfoRuntimeMixin from xmodule.modulestore.edit_info import EditInfoRuntimeMixin
from xmodule.modulestore.exceptions import ItemNotFoundError from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.inheritance import inheriting_field_data, InheritanceMixin from xmodule.modulestore.inheritance import inheriting_field_data, InheritanceMixin
...@@ -24,7 +26,9 @@ new_contract('BlockUsageLocator', BlockUsageLocator) ...@@ -24,7 +26,9 @@ new_contract('BlockUsageLocator', BlockUsageLocator)
new_contract('CourseLocator', CourseLocator) new_contract('CourseLocator', CourseLocator)
new_contract('LibraryLocator', LibraryLocator) new_contract('LibraryLocator', LibraryLocator)
new_contract('BlockKey', BlockKey) new_contract('BlockKey', BlockKey)
new_contract('BlockData', BlockData)
new_contract('CourseEnvelope', CourseEnvelope) new_contract('CourseEnvelope', CourseEnvelope)
new_contract('XBlock', XBlock)
class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
...@@ -79,7 +83,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -79,7 +83,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
def _parent_map(self): def _parent_map(self):
parent_map = {} parent_map = {}
for block_key, block in self.course_entry.structure['blocks'].iteritems(): for block_key, block in self.course_entry.structure['blocks'].iteritems():
for child in block['fields'].get('children', []): for child in block.fields.get('children', []):
parent_map[child] = block_key parent_map[child] = block_key
return parent_map return parent_map
...@@ -119,7 +123,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -119,7 +123,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
block_data = self.get_module_data(block_key, course_key) block_data = self.get_module_data(block_key, course_key)
class_ = self.load_block_type(block_data.get('block_type')) class_ = self.load_block_type(block_data.block_type)
block = self.xblock_from_json(class_, course_key, block_key, block_data, course_entry_override, **kwargs) block = self.xblock_from_json(class_, course_key, block_key, block_data, course_entry_override, **kwargs)
self.modulestore.cache_block(course_key, version_guid, block_key, block) self.modulestore.cache_block(course_key, version_guid, block_key, block)
return block return block
...@@ -164,17 +168,17 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -164,17 +168,17 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
# most recent retrieval is most likely the right one for next caller (see comment above fn) # most recent retrieval is most likely the right one for next caller (see comment above fn)
self.course_entry = CourseEnvelope(course_entry_override.course_key, self.course_entry.structure) self.course_entry = CourseEnvelope(course_entry_override.course_key, self.course_entry.structure)
definition_id = block_data.get('definition') definition_id = block_data.definition
# If no usage id is provided, generate an in-memory id # If no usage id is provided, generate an in-memory id
if block_key is None: if block_key is None:
block_key = BlockKey(block_data['block_type'], LocalId()) block_key = BlockKey(block_data.block_type, LocalId())
convert_fields = lambda field: self.modulestore.convert_references_to_keys( convert_fields = lambda field: self.modulestore.convert_references_to_keys(
course_key, class_, field, self.course_entry.structure['blocks'], course_key, class_, field, self.course_entry.structure['blocks'],
) )
if definition_id is not None and not block_data['definition_loaded']: if definition_id is not None and not block_data.definition_loaded:
definition_loader = DefinitionLazyLoader( definition_loader = DefinitionLazyLoader(
self.modulestore, self.modulestore,
course_key, course_key,
...@@ -195,8 +199,8 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -195,8 +199,8 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
block_id=block_key.id, block_id=block_key.id,
) )
converted_fields = convert_fields(block_data.get('fields', {})) converted_fields = convert_fields(block_data.fields)
converted_defaults = convert_fields(block_data.get('defaults', {})) converted_defaults = convert_fields(block_data.defaults)
if block_key in self._parent_map: if block_key in self._parent_map:
parent_key = self._parent_map[block_key] parent_key = self._parent_map[block_key]
parent = course_key.make_usage_key(parent_key.type, parent_key.id) parent = course_key.make_usage_key(parent_key.type, parent_key.id)
...@@ -221,7 +225,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -221,7 +225,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
ScopeIds(None, block_key.type, definition_id, block_locator), ScopeIds(None, block_key.type, definition_id, block_locator),
field_data, field_data,
) )
except Exception: except Exception: # pylint: disable=broad-except
log.warning("Failed to load descriptor", exc_info=True) log.warning("Failed to load descriptor", exc_info=True)
return ErrorDescriptor.from_json( return ErrorDescriptor.from_json(
block_data, block_data,
...@@ -233,12 +237,12 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -233,12 +237,12 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
error_msg=exc_info_to_str(sys.exc_info()) error_msg=exc_info_to_str(sys.exc_info())
) )
edit_info = block_data.get('edit_info', {}) edit_info = block_data.edit_info
module._edited_by = edit_info.get('edited_by') # pylint: disable=protected-access module._edited_by = edit_info.edited_by # pylint: disable=protected-access
module._edited_on = edit_info.get('edited_on') # pylint: disable=protected-access module._edited_on = edit_info.edited_on # pylint: disable=protected-access
module.previous_version = edit_info.get('previous_version') module.previous_version = edit_info.previous_version
module.update_version = edit_info.get('update_version') module.update_version = edit_info.update_version
module.source_version = edit_info.get('source_version', None) module.source_version = edit_info.source_version
module.definition_locator = DefinitionLocator(block_key.type, definition_id) module.definition_locator = DefinitionLocator(block_key.type, definition_id)
# decache any pending field settings # decache any pending field settings
module.save() module.save()
...@@ -261,31 +265,35 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -261,31 +265,35 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
""" """
return xblock._edited_on return xblock._edited_on
@contract(xblock='XBlock')
def get_subtree_edited_by(self, xblock): def get_subtree_edited_by(self, xblock):
""" """
See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin
""" """
# pylint: disable=protected-access
if not hasattr(xblock, '_subtree_edited_by'): if not hasattr(xblock, '_subtree_edited_by'):
json_data = self.module_data[BlockKey.from_usage_key(xblock.location)] block_data = self.module_data[BlockKey.from_usage_key(xblock.location)]
if '_subtree_edited_by' not in json_data.setdefault('edit_info', {}): if block_data.edit_info._subtree_edited_by is None:
self._compute_subtree_edited_internal( self._compute_subtree_edited_internal(
xblock.location.block_id, json_data, xblock.location.course_key block_data, xblock.location.course_key
) )
setattr(xblock, '_subtree_edited_by', json_data['edit_info']['_subtree_edited_by']) setattr(xblock, '_subtree_edited_by', block_data.edit_info._subtree_edited_by)
return getattr(xblock, '_subtree_edited_by') return getattr(xblock, '_subtree_edited_by')
@contract(xblock='XBlock')
def get_subtree_edited_on(self, xblock): def get_subtree_edited_on(self, xblock):
""" """
See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin
""" """
# pylint: disable=protected-access
if not hasattr(xblock, '_subtree_edited_on'): if not hasattr(xblock, '_subtree_edited_on'):
json_data = self.module_data[BlockKey.from_usage_key(xblock.location)] block_data = self.module_data[BlockKey.from_usage_key(xblock.location)]
if '_subtree_edited_on' not in json_data.setdefault('edit_info', {}): if block_data.edit_info._subtree_edited_on is None:
self._compute_subtree_edited_internal( self._compute_subtree_edited_internal(
xblock.location.block_id, json_data, xblock.location.course_key block_data, xblock.location.course_key
) )
setattr(xblock, '_subtree_edited_on', json_data['edit_info']['_subtree_edited_on']) setattr(xblock, '_subtree_edited_on', block_data.edit_info._subtree_edited_on)
return getattr(xblock, '_subtree_edited_on') return getattr(xblock, '_subtree_edited_on')
...@@ -307,20 +315,22 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -307,20 +315,22 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
return getattr(xblock, '_published_on', None) return getattr(xblock, '_published_on', None)
def _compute_subtree_edited_internal(self, block_id, json_data, course_key): @contract(block_data='BlockData')
def _compute_subtree_edited_internal(self, block_data, course_key):
""" """
Recurse the subtree finding the max edited_on date and its concomitant edited_by. Cache it Recurse the subtree finding the max edited_on date and its corresponding edited_by. Cache it.
""" """
max_date = json_data['edit_info']['edited_on'] # pylint: disable=protected-access
max_by = json_data['edit_info']['edited_by'] max_date = block_data.edit_info.edited_on
max_date_by = block_data.edit_info.edited_by
for child in json_data.get('fields', {}).get('children', []): for child in block_data.fields.get('children', []):
child_data = self.get_module_data(BlockKey(*child), course_key) child_data = self.get_module_data(BlockKey(*child), course_key)
if '_subtree_edited_on' not in json_data.setdefault('edit_info', {}): if block_data.edit_info._subtree_edited_on is None:
self._compute_subtree_edited_internal(child, child_data, course_key) self._compute_subtree_edited_internal(child_data, course_key)
if child_data['edit_info']['_subtree_edited_on'] > max_date: if child_data.edit_info._subtree_edited_on > max_date:
max_date = child_data['edit_info']['_subtree_edited_on'] max_date = child_data.edit_info._subtree_edited_on
max_by = child_data['edit_info']['_subtree_edited_by'] max_date_by = child_data.edit_info._subtree_edited_by
json_data['edit_info']['_subtree_edited_on'] = max_date block_data.edit_info._subtree_edited_on = max_date
json_data['edit_info']['_subtree_edited_by'] = max_by block_data.edit_info._subtree_edited_by = max_date_by
...@@ -26,7 +26,7 @@ class SplitMongoIdManager(OpaqueKeyReader, AsideKeyGenerator): # pylint: disabl ...@@ -26,7 +26,7 @@ class SplitMongoIdManager(OpaqueKeyReader, AsideKeyGenerator): # pylint: disabl
block_key = BlockKey.from_usage_key(usage_id) block_key = BlockKey.from_usage_key(usage_id)
module_data = self._cds.get_module_data(block_key, usage_id.course_key) module_data = self._cds.get_module_data(block_key, usage_id.course_key)
if 'definition' in module_data: if module_data.definition is not None:
return DefinitionLocator(usage_id.block_type, module_data['definition']) return DefinitionLocator(usage_id.block_type, module_data.definition)
else: else:
raise ValueError("All non-local blocks should have a definition specified") raise ValueError("All non-local blocks should have a definition specified")
...@@ -10,7 +10,8 @@ from pymongo.errors import DuplicateKeyError # pylint: disable=unused-import ...@@ -10,7 +10,8 @@ from pymongo.errors import DuplicateKeyError # pylint: disable=unused-import
from contracts import check, new_contract from contracts import check, new_contract
from xmodule.exceptions import HeartbeatFailure from xmodule.exceptions import HeartbeatFailure
from xmodule.modulestore.split_mongo import BlockKey, BlockData from xmodule.modulestore import BlockData
from xmodule.modulestore.split_mongo import BlockKey
import datetime import datetime
import pytz import pytz
...@@ -37,7 +38,7 @@ def structure_from_mongo(structure): ...@@ -37,7 +38,7 @@ def structure_from_mongo(structure):
for block in structure['blocks']: for block in structure['blocks']:
if 'children' in block['fields']: if 'children' in block['fields']:
block['fields']['children'] = [BlockKey(*child) for child in block['fields']['children']] block['fields']['children'] = [BlockKey(*child) for child in block['fields']['children']]
new_blocks[BlockKey(block['block_type'], block.pop('block_id'))] = BlockData(block) new_blocks[BlockKey(block['block_type'], block.pop('block_id'))] = BlockData(**block)
structure['blocks'] = new_blocks structure['blocks'] = new_blocks
return structure return structure
...@@ -54,8 +55,8 @@ def structure_to_mongo(structure): ...@@ -54,8 +55,8 @@ def structure_to_mongo(structure):
check('BlockKey', structure['root']) check('BlockKey', structure['root'])
check('dict(BlockKey: BlockData)', structure['blocks']) check('dict(BlockKey: BlockData)', structure['blocks'])
for block in structure['blocks'].itervalues(): for block in structure['blocks'].itervalues():
if 'children' in block['fields']: if 'children' in block.fields:
check('list(BlockKey)', block['fields']['children']) check('list(BlockKey)', block.fields['children'])
new_structure = dict(structure) new_structure = dict(structure)
new_structure['blocks'] = [] new_structure['blocks'] = []
......
...@@ -22,14 +22,15 @@ Representation: ...@@ -22,14 +22,15 @@ Representation:
** 'edited_by': user_id of the user whose change caused the creation of this structure version, ** 'edited_by': user_id of the user whose change caused the creation of this structure version,
** 'edited_on': the datetime for the change causing this creation of this structure version, ** 'edited_on': the datetime for the change causing this creation of this structure version,
** 'blocks': dictionary of xblocks in this structure: ** 'blocks': dictionary of xblocks in this structure:
*** BlockKey: dictionary of block settings and children: *** BlockKey: key mapping to each BlockData:
*** BlockData: object containing the following attributes:
**** 'block_type': the xblock type id **** 'block_type': the xblock type id
**** 'definition': the db id of the record containing the content payload for this xblock **** 'definition': the db id of the record containing the content payload for this xblock
**** 'fields': the Scope.settings and children field values **** 'fields': the Scope.settings and children field values
***** 'children': This is stored as a list of (block_type, block_id) pairs ***** 'children': This is stored as a list of (block_type, block_id) pairs
**** 'defaults': Scope.settings default values copied from a template block (used e.g. when **** 'defaults': Scope.settings default values copied from a template block (used e.g. when
blocks are copied from a library to a course) blocks are copied from a library to a course)
**** 'edit_info': dictionary: **** 'edit_info': EditInfo object:
***** 'edited_on': when was this xblock's fields last changed (will be edited_on value of ***** 'edited_on': when was this xblock's fields last changed (will be edited_on value of
update_version structure) update_version structure)
***** 'edited_by': user_id for who changed this xblock last (will be edited_by value of ***** 'edited_by': user_id for who changed this xblock last (will be edited_by value of
...@@ -72,13 +73,14 @@ from opaque_keys.edx.locator import ( ...@@ -72,13 +73,14 @@ from opaque_keys.edx.locator import (
from xmodule.modulestore.exceptions import InsufficientSpecificationError, VersionConflictError, DuplicateItemError, \ from xmodule.modulestore.exceptions import InsufficientSpecificationError, VersionConflictError, DuplicateItemError, \
DuplicateCourseError DuplicateCourseError
from xmodule.modulestore import ( from xmodule.modulestore import (
inheritance, ModuleStoreWriteBase, ModuleStoreEnum, BulkOpsRecord, BulkOperationsMixin, SortedAssetList inheritance, ModuleStoreWriteBase, ModuleStoreEnum,
BulkOpsRecord, BulkOperationsMixin, SortedAssetList, BlockData
) )
from ..exceptions import ItemNotFoundError from ..exceptions import ItemNotFoundError
from .caching_descriptor_system import CachingDescriptorSystem from .caching_descriptor_system import CachingDescriptorSystem
from xmodule.modulestore.split_mongo.mongo_connection import MongoConnection, DuplicateKeyError from xmodule.modulestore.split_mongo.mongo_connection import MongoConnection, DuplicateKeyError
from xmodule.modulestore.split_mongo import BlockKey, CourseEnvelope, BlockData from xmodule.modulestore.split_mongo import BlockKey, CourseEnvelope
from xmodule.error_module import ErrorDescriptor from xmodule.error_module import ErrorDescriptor
from collections import defaultdict from collections import defaultdict
from types import NoneType from types import NoneType
...@@ -111,6 +113,7 @@ EXCLUDE_ALL = '*' ...@@ -111,6 +113,7 @@ EXCLUDE_ALL = '*'
new_contract('BlockUsageLocator', BlockUsageLocator) new_contract('BlockUsageLocator', BlockUsageLocator)
new_contract('BlockKey', BlockKey) new_contract('BlockKey', BlockKey)
new_contract('XBlock', XBlock)
class SplitBulkWriteRecord(BulkOpsRecord): class SplitBulkWriteRecord(BulkOpsRecord):
...@@ -446,24 +449,22 @@ class SplitBulkWriteMixin(BulkOperationsMixin): ...@@ -446,24 +449,22 @@ class SplitBulkWriteMixin(BulkOperationsMixin):
return new_structure return new_structure
def version_block(self, block_info, user_id, update_version): def version_block(self, block_data, user_id, update_version):
""" """
Update the block_info dictionary based on it having been edited Update the block_data object based on it having been edited.
""" """
if block_info['edit_info'].get('update_version') == update_version: if block_data.edit_info.update_version == update_version:
return return
original_usage = block_info['edit_info'].get('original_usage') original_usage = block_data.edit_info.original_usage
original_usage_version = block_info['edit_info'].get('original_usage_version') original_usage_version = block_data.edit_info.original_usage_version
block_info['edit_info'] = { block_data.edit_info.edited_on = datetime.datetime.now(UTC)
'edited_on': datetime.datetime.now(UTC), block_data.edit_info.edited_by = user_id
'edited_by': user_id, block_data.edit_info.previous_version = block_data.edit_info.update_version
'previous_version': block_info['edit_info']['update_version'], block_data.edit_info.update_version = update_version
'update_version': update_version,
}
if original_usage: if original_usage:
block_info['edit_info']['original_usage'] = original_usage block_data.edit_info.original_usage = original_usage
block_info['edit_info']['original_usage_version'] = original_usage_version block_data.edit_info.original_usage_version = original_usage_version
def find_matching_course_indexes(self, branch=None, search_targets=None): def find_matching_course_indexes(self, branch=None, search_targets=None):
""" """
...@@ -696,11 +697,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -696,11 +697,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
for definition in descendent_definitions} for definition in descendent_definitions}
for block in new_module_data.itervalues(): for block in new_module_data.itervalues():
if block['definition'] in definitions: if block.definition in definitions:
definition = definitions[block['definition']] definition = definitions[block.definition]
# convert_fields was being done here, but it gets done later in the runtime's xblock_from_json # convert_fields was being done here, but it gets done later in the runtime's xblock_from_json
block['fields'].update(definition.get('fields')) block.fields.update(definition.get('fields'))
block['definition_loaded'] = True block.definition_loaded = True
system.module_data.update(new_module_data) system.module_data.update(new_module_data)
return system.module_data return system.module_data
...@@ -973,6 +974,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -973,6 +974,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
return self._get_block_from_structure(course_structure, BlockKey.from_usage_key(usage_key)) is not None return self._get_block_from_structure(course_structure, BlockKey.from_usage_key(usage_key)) is not None
@contract(returns='XBlock')
def get_item(self, usage_key, depth=0, **kwargs): def get_item(self, usage_key, depth=0, **kwargs):
""" """
depth (int): An argument that some module stores may use to prefetch depth (int): An argument that some module stores may use to prefetch
...@@ -1024,18 +1026,18 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1024,18 +1026,18 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
items = [] items = []
qualifiers = qualifiers.copy() if qualifiers else {} # copy the qualifiers (destructively manipulated here) qualifiers = qualifiers.copy() if qualifiers else {} # copy the qualifiers (destructively manipulated here)
def _block_matches_all(block_json): def _block_matches_all(block_data):
""" """
Check that the block matches all the criteria Check that the block matches all the criteria
""" """
# do the checks which don't require loading any additional data # do the checks which don't require loading any additional data
if ( if ( # pylint: disable=bad-continuation
self._block_matches(block_json, qualifiers) and self._block_matches(block_data, qualifiers) and
self._block_matches(block_json.get('fields', {}), settings) self._block_matches(block_data.fields, settings)
): ):
if content: if content:
definition_block = self.get_definition(course_locator, block_json['definition']) definition_block = self.get_definition(course_locator, block_data.definition)
return self._block_matches(definition_block.get('fields', {}), content) return self._block_matches(definition_block['fields'], content)
else: else:
return True return True
...@@ -1104,8 +1106,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1104,8 +1106,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
items.remove(course.structure['root']) items.remove(course.structure['root'])
blocks = course.structure['blocks'] blocks = course.structure['blocks']
for block_id, block_data in blocks.iteritems(): for block_id, block_data in blocks.iteritems():
items.difference_update(BlockKey(*child) for child in block_data.get('fields', {}).get('children', [])) items.difference_update(BlockKey(*child) for child in block_data.fields.get('children', []))
if block_data['block_type'] in detached_categories: if block_data.block_type in detached_categories:
items.discard(block_id) items.discard(block_id)
return [ return [
course_key.make_usage_key(block_type=block_id.type, block_id=block_id.id) course_key.make_usage_key(block_type=block_id.type, block_id=block_id.id)
...@@ -1235,18 +1237,18 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1235,18 +1237,18 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
result = {} result = {}
for version in all_versions_with_block: for version in all_versions_with_block:
block_payload = self._get_block_from_structure(version, block_key) block_payload = self._get_block_from_structure(version, block_key)
if version['_id'] == block_payload['edit_info']['update_version']: if version['_id'] == block_payload.edit_info.update_version:
if block_payload['edit_info'].get('previous_version') is None: if block_payload.edit_info.previous_version is None:
# this was when this block was created # this was when this block was created
possible_roots.append(block_payload['edit_info']['update_version']) possible_roots.append(block_payload.edit_info.update_version)
else: # map previous to {update..} else: # map previous to {update..}
result.setdefault(block_payload['edit_info']['previous_version'], set()).add( result.setdefault(block_payload.edit_info.previous_version, set()).add(
block_payload['edit_info']['update_version']) block_payload.edit_info.update_version)
# more than one possible_root means usage was added and deleted > 1x. # more than one possible_root means usage was added and deleted > 1x.
if len(possible_roots) > 1: if len(possible_roots) > 1:
# find the history segment including block_locator's version # find the history segment including block_locator's version
element_to_find = self._get_block_from_structure(course_struct, block_key)['edit_info']['update_version'] element_to_find = self._get_block_from_structure(course_struct, block_key).edit_info.update_version
if element_to_find in possible_roots: if element_to_find in possible_roots:
possible_roots = [element_to_find] possible_roots = [element_to_find]
for possibility in possible_roots: for possibility in possible_roots:
...@@ -1283,9 +1285,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1283,9 +1285,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
""" """
blocks = self._lookup_course(usage_key.course_key).structure['blocks'] blocks = self._lookup_course(usage_key.course_key).structure['blocks']
block = blocks.get(BlockKey.from_usage_key(usage_key)) block = blocks.get(BlockKey.from_usage_key(usage_key))
if block and 'original_usage' in block['edit_info']: if block and block.edit_info.original_usage is not None:
usage_key = BlockUsageLocator.from_string(block['edit_info']['original_usage']) usage_key = BlockUsageLocator.from_string(block.edit_info.original_usage)
return usage_key, block['edit_info'].get('original_usage_version') return usage_key, block.edit_info.original_usage_version
return None, None return None, None
def create_definition_from_data(self, course_key, new_def_data, category, user_id): def create_definition_from_data(self, course_key, new_def_data, category, user_id):
...@@ -1376,6 +1378,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1376,6 +1378,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
return potential_key return potential_key
serial += 1 serial += 1
@contract(returns='XBlock')
def create_item( def create_item(
self, user_id, course_key, block_type, block_id=None, self, user_id, course_key, block_type, block_id=None,
definition_locator=None, fields=None, definition_locator=None, fields=None,
...@@ -1529,14 +1532,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1529,14 +1532,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# Originally added to support entrance exams (settings.FEATURES.get('ENTRANCE_EXAMS')) # Originally added to support entrance exams (settings.FEATURES.get('ENTRANCE_EXAMS'))
if kwargs.get('position') is None: if kwargs.get('position') is None:
parent['fields'].setdefault('children', []).append(BlockKey.from_usage_key(xblock.location)) parent.fields.setdefault('children', []).append(BlockKey.from_usage_key(xblock.location))
else: else:
parent['fields'].setdefault('children', []).insert( parent.fields.setdefault('children', []).insert(
kwargs.get('position'), kwargs.get('position'),
BlockKey.from_usage_key(xblock.location) BlockKey.from_usage_key(xblock.location)
) )
if parent['edit_info']['update_version'] != new_structure['_id']: if parent.edit_info.update_version != new_structure['_id']:
# if the parent hadn't been previously changed in this bulk transaction, indicate that it's # if the parent hadn't been previously changed in this bulk transaction, indicate that it's
# part of the bulk transaction # part of the bulk transaction
self.version_block(parent, user_id, new_structure['_id']) self.version_block(parent, user_id, new_structure['_id'])
...@@ -1677,19 +1680,17 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1677,19 +1680,17 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
new_id = draft_structure['_id'] new_id = draft_structure['_id']
root_block = draft_structure['blocks'][draft_structure['root']] root_block = draft_structure['blocks'][draft_structure['root']]
if block_fields is not None: if block_fields is not None:
root_block['fields'].update(self._serialize_fields(root_category, block_fields)) root_block.fields.update(self._serialize_fields(root_category, block_fields))
if definition_fields is not None: if definition_fields is not None:
old_def = self.get_definition(locator, root_block['definition']) old_def = self.get_definition(locator, root_block.definition)
new_fields = old_def['fields'] new_fields = old_def['fields']
new_fields.update(definition_fields) new_fields.update(definition_fields)
definition_id = self._update_definition_from_data(locator, old_def, new_fields, user_id).definition_id definition_id = self._update_definition_from_data(locator, old_def, new_fields, user_id).definition_id
root_block['definition'] = definition_id root_block.definition = definition_id
root_block['edit_info'].update({ root_block.edit_info.edited_on = datetime.datetime.now(UTC)
'edited_on': datetime.datetime.now(UTC), root_block.edit_info.edited_by = user_id
'edited_by': user_id, root_block.edit_info.previous_version = root_block.edit_info.update_version
'previous_version': root_block['edit_info'].get('update_version'), root_block.edit_info.update_version = new_id
'update_version': new_id,
})
versions_dict[master_branch] = new_id versions_dict[master_branch] = new_id
else: # Pointing to an existing course structure else: # Pointing to an existing course structure
...@@ -1788,7 +1789,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1788,7 +1789,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
is_updated = False is_updated = False
definition_fields = partitioned_fields[Scope.content] definition_fields = partitioned_fields[Scope.content]
if definition_locator is None: if definition_locator is None:
definition_locator = DefinitionLocator(original_entry['block_type'], original_entry['definition']) definition_locator = DefinitionLocator(original_entry.block_type, original_entry.definition)
if definition_fields: if definition_fields:
definition_locator, is_updated = self.update_definition_from_data( definition_locator, is_updated = self.update_definition_from_data(
course_key, definition_locator, definition_fields, user_id course_key, definition_locator, definition_fields, user_id
...@@ -1798,12 +1799,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1798,12 +1799,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
settings = partitioned_fields[Scope.settings] settings = partitioned_fields[Scope.settings]
settings = self._serialize_fields(block_key.type, settings) settings = self._serialize_fields(block_key.type, settings)
if not is_updated: if not is_updated:
is_updated = self._compare_settings(settings, original_entry['fields']) is_updated = self._compare_settings(settings, original_entry.fields)
# check children # check children
if partitioned_fields.get(Scope.children, {}): # purposely not 'is not None' if partitioned_fields.get(Scope.children, {}): # purposely not 'is not None'
serialized_children = [BlockKey.from_usage_key(child) for child in partitioned_fields[Scope.children]['children']] serialized_children = [BlockKey.from_usage_key(child) for child in partitioned_fields[Scope.children]['children']]
is_updated = is_updated or original_entry['fields'].get('children', []) != serialized_children is_updated = is_updated or original_entry.fields.get('children', []) != serialized_children
if is_updated: if is_updated:
settings['children'] = serialized_children settings['children'] = serialized_children
...@@ -1812,8 +1813,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1812,8 +1813,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
new_structure = self.version_structure(course_key, original_structure, user_id) new_structure = self.version_structure(course_key, original_structure, user_id)
block_data = self._get_block_from_structure(new_structure, block_key) block_data = self._get_block_from_structure(new_structure, block_key)
block_data["definition"] = definition_locator.definition_id block_data.definition = definition_locator.definition_id
block_data["fields"] = settings block_data.fields = settings
new_id = new_structure['_id'] new_id = new_structure['_id']
self.version_block(block_data, user_id, new_id) self.version_block(block_data, user_id, new_id)
...@@ -1889,7 +1890,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1889,7 +1890,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
xblock_class, xblock_class,
course_key, course_key,
BlockKey(block_type, block_id) if block_id else None, BlockKey(block_type, block_id) if block_id else None,
json_data, BlockData(**json_data),
**kwargs **kwargs
) )
for field_name, value in (fields or {}).iteritems(): for field_name, value in (fields or {}).iteritems():
...@@ -1978,12 +1979,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1978,12 +1979,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
children.append(BlockKey.from_usage_key(child_block.location)) children.append(BlockKey.from_usage_key(child_block.location))
else: else:
children.append(BlockKey.from_usage_key(child)) children.append(BlockKey.from_usage_key(child))
is_updated = is_updated or structure_blocks[block_key]['fields']['children'] != children is_updated = is_updated or structure_blocks[block_key].fields['children'] != children
block_fields = partitioned_fields[Scope.settings] block_fields = partitioned_fields[Scope.settings]
block_fields = self._serialize_fields(xblock.category, block_fields) block_fields = self._serialize_fields(xblock.category, block_fields)
if not is_new and not is_updated: if not is_new and not is_updated:
is_updated = self._compare_settings(block_fields, structure_blocks[block_key]['fields']) is_updated = self._compare_settings(block_fields, structure_blocks[block_key].fields)
if children: if children:
block_fields['children'] = children block_fields['children'] = children
...@@ -1999,8 +2000,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1999,8 +2000,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
) )
else: else:
block_info = structure_blocks[block_key] block_info = structure_blocks[block_key]
block_info['fields'] = block_fields block_info.fields = block_fields
block_info['definition'] = xblock.definition_locator.definition_id block_info.definition = xblock.definition_locator.definition_id
self.version_block(block_info, user_id, new_id) self.version_block(block_info, user_id, new_id)
structure_blocks[block_key] = block_info structure_blocks[block_key] = block_info
...@@ -2075,7 +2076,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2075,7 +2076,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
destination_structure = self._new_structure( destination_structure = self._new_structure(
user_id, root_block_key, user_id, root_block_key,
# leave off the fields b/c the children must be filtered # leave off the fields b/c the children must be filtered
definition_id=root_source['definition'], definition_id=root_source.definition,
) )
else: else:
destination_structure = self._lookup_course(destination_course).structure destination_structure = self._lookup_course(destination_course).structure
...@@ -2183,10 +2184,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2183,10 +2184,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
dest_info = dest_structure['blocks'][block_key] dest_info = dest_structure['blocks'][block_key]
# Update the edit_info: # Update the edit_info:
dest_info['edit_info']['previous_version'] = dest_info['edit_info']['update_version'] dest_info.edit_info.previous_version = dest_info.edit_info.update_version
dest_info['edit_info']['update_version'] = old_dest_structure_version dest_info.edit_info.update_version = old_dest_structure_version
dest_info['edit_info']['edited_by'] = user_id dest_info.edit_info.edited_by = user_id
dest_info['edit_info']['edited_on'] = datetime.datetime.now(UTC) dest_info.edit_info.edited_on = datetime.datetime.now(UTC)
orphans = orig_descendants - new_descendants orphans = orig_descendants - new_descendants
for orphan in orphans: for orphan in orphans:
...@@ -2197,7 +2198,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2197,7 +2198,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# Return usage locators for all the new children: # Return usage locators for all the new children:
return [ return [
destination_course.make_usage_key(*k) destination_course.make_usage_key(*k)
for k in dest_structure['blocks'][block_key]['fields']['children'] for k in dest_structure['blocks'][block_key].fields['children']
] ]
def _copy_from_template(self, source_structures, source_keys, dest_structure, new_parent_block_key, user_id): def _copy_from_template(self, source_structures, source_keys, dest_structure, new_parent_block_key, user_id):
...@@ -2233,9 +2234,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2233,9 +2234,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# Now clone block_key to new_block_key: # Now clone block_key to new_block_key:
new_block_info = copy.deepcopy(source_block_info) new_block_info = copy.deepcopy(source_block_info)
# Note that new_block_info now points to the same definition ID entry as source_block_info did # Note that new_block_info now points to the same definition ID entry as source_block_info did
existing_block_info = dest_structure['blocks'].get(new_block_key, {}) existing_block_info = dest_structure['blocks'].get(new_block_key, BlockData())
# Inherit the Scope.settings values from 'fields' to 'defaults' # Inherit the Scope.settings values from 'fields' to 'defaults'
new_block_info['defaults'] = new_block_info['fields'] new_block_info.defaults = new_block_info.fields
# <workaround> # <workaround>
# CAPA modules store their 'markdown' value (an alternate representation of their content) # CAPA modules store their 'markdown' value (an alternate representation of their content)
...@@ -2245,27 +2246,29 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2245,27 +2246,29 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# even if it hasn't changed, which breaks our override system. # even if it hasn't changed, which breaks our override system.
# So until capa modules are fixed, we special-case them and remove their markdown fields, # So until capa modules are fixed, we special-case them and remove their markdown fields,
# forcing the inherited version to use XML only. # forcing the inherited version to use XML only.
if usage_key.block_type == 'problem' and 'markdown' in new_block_info['defaults']: if usage_key.block_type == 'problem' and 'markdown' in new_block_info.defaults:
del new_block_info['defaults']['markdown'] del new_block_info.defaults['markdown']
# </workaround> # </workaround>
new_block_info['fields'] = existing_block_info.get('fields', {}) # Preserve any existing overrides new_block_info.fields = existing_block_info.fields # Preserve any existing overrides
if 'children' in new_block_info['defaults']: if 'children' in new_block_info.defaults:
del new_block_info['defaults']['children'] # Will be set later del new_block_info.defaults['children'] # Will be set later
new_block_info['block_id'] = new_block_key.id # ALERT! Why was this 'block_id' stored here? Nothing else stores block_id
new_block_info['edit_info'] = existing_block_info.get('edit_info', {}) # in the block data. Was this a harmless error?
new_block_info['edit_info']['previous_version'] = new_block_info['edit_info'].get('update_version', None) #new_block_info['block_id'] = new_block_key.id
new_block_info['edit_info']['update_version'] = dest_structure['_id'] new_block_info.edit_info = existing_block_info.edit_info
new_block_info.edit_info.previous_version = new_block_info.edit_info.update_version
new_block_info.edit_info.update_version = dest_structure['_id']
# Note we do not set 'source_version' - it's only used for copying identical blocks # Note we do not set 'source_version' - it's only used for copying identical blocks
# from draft to published as part of publishing workflow. # from draft to published as part of publishing workflow.
# Setting it to the source_block_info structure version here breaks split_draft's has_changes() method. # Setting it to the source_block_info structure version here breaks split_draft's has_changes() method.
new_block_info['edit_info']['edited_by'] = user_id new_block_info.edit_info.edited_by = user_id
new_block_info['edit_info']['edited_on'] = datetime.datetime.now(UTC) new_block_info.edit_info.edited_on = datetime.datetime.now(UTC)
new_block_info['edit_info']['original_usage'] = unicode(usage_key.replace(branch=None, version_guid=None)) new_block_info.edit_info.original_usage = unicode(usage_key.replace(branch=None, version_guid=None))
new_block_info['edit_info']['original_usage_version'] = source_block_info['edit_info'].get('update_version') new_block_info.edit_info.original_usage_version = source_block_info.edit_info.update_version
dest_structure['blocks'][new_block_key] = new_block_info dest_structure['blocks'][new_block_key] = new_block_info
children = source_block_info['fields'].get('children') children = source_block_info.fields.get('children')
if children: if children:
children = [src_course_key.make_usage_key(child.type, child.id) for child in children] children = [src_course_key.make_usage_key(child.type, child.id) for child in children]
new_blocks |= self._copy_from_template( new_blocks |= self._copy_from_template(
...@@ -2277,7 +2280,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2277,7 +2280,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
new_children.append(new_block_key) new_children.append(new_block_key)
# Update the children of new_parent_block_key # Update the children of new_parent_block_key
dest_structure['blocks'][new_parent_block_key]['fields']['children'] = new_children dest_structure['blocks'][new_parent_block_key].fields['children'] = new_children
return new_blocks return new_blocks
...@@ -2314,11 +2317,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2314,11 +2317,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
parent_block_keys = self._get_parents_from_structure(block_key, original_structure) parent_block_keys = self._get_parents_from_structure(block_key, original_structure)
for parent_block_key in parent_block_keys: for parent_block_key in parent_block_keys:
parent_block = new_blocks[parent_block_key] parent_block = new_blocks[parent_block_key]
parent_block['fields']['children'].remove(block_key) parent_block.fields['children'].remove(block_key)
parent_block['edit_info']['edited_on'] = datetime.datetime.now(UTC) parent_block.edit_info.edited_on = datetime.datetime.now(UTC)
parent_block['edit_info']['edited_by'] = user_id parent_block.edit_info.edited_by = user_id
parent_block['edit_info']['previous_version'] = parent_block['edit_info']['update_version'] parent_block.edit_info.previous_version = parent_block.edit_info.update_version
parent_block['edit_info']['update_version'] = new_id parent_block.edit_info.update_version = new_id
self.decache_block(usage_locator.course_key, new_id, parent_block_key) self.decache_block(usage_locator.course_key, new_id, parent_block_key)
self._remove_subtree(BlockKey.from_usage_key(usage_locator), new_blocks) self._remove_subtree(BlockKey.from_usage_key(usage_locator), new_blocks)
...@@ -2340,7 +2343,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2340,7 +2343,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
""" """
Remove the subtree rooted at block_key Remove the subtree rooted at block_key
""" """
for child in blocks[block_key]['fields'].get('children', []): for child in blocks[block_key].fields.get('children', []):
self._remove_subtree(BlockKey(*child), blocks) self._remove_subtree(BlockKey(*child), blocks)
del blocks[block_key] del blocks[block_key]
...@@ -2365,11 +2368,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2365,11 +2368,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
self, block_map, block_key, inherited_settings_map, inheriting_settings=None, inherited_from=None self, block_map, block_key, inherited_settings_map, inheriting_settings=None, inherited_from=None
): ):
""" """
Updates block_json with any inheritable setting set by an ancestor and recurses to children. Updates block_data with any inheritable setting set by an ancestor and recurses to children.
""" """
if block_key not in block_map: if block_key not in block_map:
return return
block_json = block_map[block_key] block_data = block_map[block_key]
if inheriting_settings is None: if inheriting_settings is None:
inheriting_settings = {} inheriting_settings = {}
...@@ -2384,7 +2387,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2384,7 +2387,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# update the inheriting w/ what should pass to children # update the inheriting w/ what should pass to children
inheriting_settings = inherited_settings_map[block_key].copy() inheriting_settings = inherited_settings_map[block_key].copy()
block_fields = block_json['fields'] block_fields = block_data.fields
for field_name in inheritance.InheritanceMixin.fields: for field_name in inheritance.InheritanceMixin.fields:
if field_name in block_fields: if field_name in block_fields:
inheriting_settings[field_name] = block_fields[field_name] inheriting_settings[field_name] = block_fields[field_name]
...@@ -2421,7 +2424,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2421,7 +2424,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
if depth is None or depth > 0: if depth is None or depth > 0:
depth = depth - 1 if depth is not None else None depth = depth - 1 if depth is not None else None
for child in descendent_map[block_id]['fields'].get('children', []): for child in descendent_map[block_id].fields.get('children', []):
descendent_map = self.descendants(block_map, child, depth, descendent_map) descendent_map = self.descendants(block_map, child, depth, descendent_map)
return descendent_map return descendent_map
...@@ -2604,9 +2607,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2604,9 +2607,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
index_entry = self._get_index_if_valid(course_locator) index_entry = self._get_index_if_valid(course_locator)
new_structure = self.version_structure(course_locator, original_structure, user_id) new_structure = self.version_structure(course_locator, original_structure, user_id)
for block in new_structure['blocks'].itervalues(): for block in new_structure['blocks'].itervalues():
if 'fields' in block and 'children' in block['fields']: if 'children' in block.fields:
block['fields']["children"] = [ block.fields['children'] = [
block_id for block_id in block['fields']["children"] block_id for block_id in block.fields['children']
if block_id in new_structure['blocks'] if block_id in new_structure['blocks']
] ]
self.update_structure(course_locator, new_structure) self.update_structure(course_locator, new_structure)
...@@ -2809,7 +2812,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2809,7 +2812,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
return [ return [
parent_block_key parent_block_key
for parent_block_key, value in structure['blocks'].iteritems() for parent_block_key, value in structure['blocks'].iteritems()
if block_key in value['fields'].get('children', []) if block_key in value.fields.get('children', [])
] ]
def _sync_children(self, source_parent, destination_parent, new_child): def _sync_children(self, source_parent, destination_parent, new_child):
...@@ -2818,13 +2821,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2818,13 +2821,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
Return the removed ones as orphans (a set). Return the removed ones as orphans (a set).
""" """
destination_reordered = [] destination_reordered = []
destination_children = set(destination_parent['fields']['children']) destination_children = set(destination_parent.fields['children'])
source_children = source_parent['fields']['children'] source_children = source_parent.fields['children']
orphans = destination_children - set(source_children) orphans = destination_children - set(source_children)
for child in source_children: for child in source_children:
if child == new_child or child in destination_children: if child == new_child or child in destination_children:
destination_reordered.append(child) destination_reordered.append(child)
destination_parent['fields']['children'] = destination_reordered destination_parent.fields['children'] = destination_reordered
return orphans return orphans
@contract( @contract(
...@@ -2847,8 +2850,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2847,8 +2850,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# reorder children to correspond to whatever order holds for source. # reorder children to correspond to whatever order holds for source.
# remove any which source no longer claims (put into orphans) # remove any which source no longer claims (put into orphans)
# add any which are being copied # add any which are being copied
source_children = new_block['fields'].get('children', []) source_children = new_block.fields.get('children', [])
existing_children = destination_block['fields'].get('children', []) existing_children = destination_block.fields.get('children', [])
destination_reordered = SparseList() destination_reordered = SparseList()
for child in existing_children: for child in existing_children:
try: try:
...@@ -2862,28 +2865,28 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2862,28 +2865,28 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
destination_reordered[index] = child destination_reordered[index] = child
# the history of the published leaps between publications and only points to # the history of the published leaps between publications and only points to
# previously published versions. # previously published versions.
previous_version = destination_block['edit_info']['update_version'] previous_version = destination_block.edit_info.update_version
destination_block = copy.deepcopy(new_block) destination_block = copy.deepcopy(new_block)
destination_block['fields']['children'] = destination_reordered.compact_list() destination_block.fields['children'] = destination_reordered.compact_list()
destination_block['edit_info']['previous_version'] = previous_version destination_block.edit_info.previous_version = previous_version
destination_block['edit_info']['update_version'] = destination_version destination_block.edit_info.update_version = destination_version
destination_block['edit_info']['edited_by'] = user_id destination_block.edit_info.edited_by = user_id
destination_block['edit_info']['edited_on'] = datetime.datetime.now(UTC) destination_block.edit_info.edited_on = datetime.datetime.now(UTC)
else: else:
destination_block = self._new_block( destination_block = self._new_block(
user_id, new_block['block_type'], user_id, new_block.block_type,
self._filter_blacklist(copy.copy(new_block['fields']), blacklist), self._filter_blacklist(copy.copy(new_block.fields), blacklist),
new_block['definition'], new_block.definition,
destination_version, destination_version,
raw=True, raw=True,
block_defaults=new_block.get('defaults') block_defaults=new_block.defaults
) )
# introduce new edit info field for tracing where copied/published blocks came # introduce new edit info field for tracing where copied/published blocks came
destination_block['edit_info']['source_version'] = new_block['edit_info']['update_version'] destination_block.edit_info.source_version = new_block.edit_info.update_version
if blacklist != EXCLUDE_ALL: if blacklist != EXCLUDE_ALL:
for child in destination_block['fields'].get('children', []): for child in destination_block.fields.get('children', []):
if child not in blacklist: if child not in blacklist:
orphans.update( orphans.update(
self._copy_subdag( self._copy_subdag(
...@@ -2911,7 +2914,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2911,7 +2914,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
Delete the orphan and any of its descendants which no longer have parents. Delete the orphan and any of its descendants which no longer have parents.
""" """
if len(self._get_parents_from_structure(orphan, structure)) == 0: if len(self._get_parents_from_structure(orphan, structure)) == 0:
for child in structure['blocks'][orphan]['fields'].get('children', []): for child in structure['blocks'][orphan].fields.get('children', []):
self._delete_if_true_orphan(BlockKey(*child), structure) self._delete_if_true_orphan(BlockKey(*child), structure)
del structure['blocks'][orphan] del structure['blocks'][orphan]
...@@ -2940,7 +2943,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2940,7 +2943,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
} }
if block_defaults: if block_defaults:
document['defaults'] = block_defaults document['defaults'] = block_defaults
return BlockData(document) return BlockData(**document)
@contract(block_key=BlockKey, returns='BlockData | None') @contract(block_key=BlockKey, returns='BlockData | None')
def _get_block_from_structure(self, structure, block_key): def _get_block_from_structure(self, structure, block_key):
......
...@@ -324,9 +324,9 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli ...@@ -324,9 +324,9 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
return True return True
# check the children in the draft # check the children in the draft
if 'children' in draft_block.setdefault('fields', {}): if 'children' in draft_block.fields:
return any( return any(
[has_changes_subtree(child_block_id) for child_block_id in draft_block['fields']['children']] [has_changes_subtree(child_block_id) for child_block_id in draft_block.fields['children']]
) )
return False return False
...@@ -410,7 +410,7 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli ...@@ -410,7 +410,7 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
self._get_block_from_structure(published_course_structure, root_block_id) self._get_block_from_structure(published_course_structure, root_block_id)
) )
block = self._get_block_from_structure(new_structure, root_block_id) block = self._get_block_from_structure(new_structure, root_block_id)
for child_block_id in block.setdefault('fields', {}).get('children', []): for child_block_id in block.fields.get('children', []):
copy_from_published(child_block_id) copy_from_published(child_block_id)
copy_from_published(BlockKey.from_usage_key(location)) copy_from_published(BlockKey.from_usage_key(location))
...@@ -472,7 +472,8 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli ...@@ -472,7 +472,8 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
""" """
Return the version of the given database representation of a block. Return the version of the given database representation of a block.
""" """
return block['edit_info'].get('source_version', block['edit_info']['update_version']) source_version = block.edit_info.source_version
return source_version if source_version is not None else block.edit_info.update_version
def import_xblock(self, user_id, course_key, block_type, block_id, fields=None, runtime=None, **kwargs): def import_xblock(self, user_id, course_key, block_type, block_id, fields=None, runtime=None, **kwargs):
""" """
...@@ -505,8 +506,8 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli ...@@ -505,8 +506,8 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
""" """
published_block = self._get_head(xblock, ModuleStoreEnum.BranchName.published) published_block = self._get_head(xblock, ModuleStoreEnum.BranchName.published)
if published_block is not None: if published_block is not None:
setattr(xblock, '_published_by', published_block['edit_info']['edited_by']) setattr(xblock, '_published_by', published_block.edit_info.edited_by)
setattr(xblock, '_published_on', published_block['edit_info']['edited_on']) setattr(xblock, '_published_on', published_block.edit_info.edited_on)
@contract(asset_key='AssetKey') @contract(asset_key='AssetKey')
def find_asset_metadata(self, asset_key, **kwargs): def find_asset_metadata(self, asset_key, **kwargs):
......
...@@ -101,7 +101,7 @@ class MongoModulestoreBuilder(object): ...@@ -101,7 +101,7 @@ class MongoModulestoreBuilder(object):
fs_root = mkdtemp() fs_root = mkdtemp()
# pylint: disable=attribute-defined-outside-init # pylint: disable=attribute-defined-outside-init
self.modulestore = DraftModuleStore( modulestore = DraftModuleStore(
contentstore, contentstore,
doc_store_config, doc_store_config,
fs_root, fs_root,
...@@ -110,13 +110,13 @@ class MongoModulestoreBuilder(object): ...@@ -110,13 +110,13 @@ class MongoModulestoreBuilder(object):
metadata_inheritance_cache_subsystem=MemoryCache(), metadata_inheritance_cache_subsystem=MemoryCache(),
xblock_mixins=XBLOCK_MIXINS, xblock_mixins=XBLOCK_MIXINS,
) )
self.modulestore.ensure_indexes() modulestore.ensure_indexes()
try: try:
yield self.modulestore yield modulestore
finally: finally:
# Delete the created database # Delete the created database
self.modulestore._drop_database() # pylint: disable=protected-access modulestore._drop_database() # pylint: disable=protected-access
# Delete the created directory on the filesystem # Delete the created directory on the filesystem
rmtree(fs_root, ignore_errors=True) rmtree(fs_root, ignore_errors=True)
...@@ -124,12 +124,6 @@ class MongoModulestoreBuilder(object): ...@@ -124,12 +124,6 @@ class MongoModulestoreBuilder(object):
def __repr__(self): def __repr__(self):
return 'MongoModulestoreBuilder()' return 'MongoModulestoreBuilder()'
def asset_collection(self):
"""
Returns the collection storing the asset metadata.
"""
return self.modulestore.asset_collection
class VersioningModulestoreBuilder(object): class VersioningModulestoreBuilder(object):
""" """
...@@ -213,7 +207,7 @@ class MixedModulestoreBuilder(object): ...@@ -213,7 +207,7 @@ class MixedModulestoreBuilder(object):
""" """
self.store_builders = store_builders self.store_builders = store_builders
self.mappings = mappings or {} self.mappings = mappings or {}
self.modulestore = None self.mixed_modulestore = None
@contextmanager @contextmanager
def build(self, contentstore): def build(self, contentstore):
...@@ -235,7 +229,7 @@ class MixedModulestoreBuilder(object): ...@@ -235,7 +229,7 @@ class MixedModulestoreBuilder(object):
# Generate a fake list of stores to give the already generated stores appropriate names # Generate a fake list of stores to give the already generated stores appropriate names
stores = [{'NAME': name, 'ENGINE': 'This space deliberately left blank'} for name in names] stores = [{'NAME': name, 'ENGINE': 'This space deliberately left blank'} for name in names]
self.modulestore = MixedModuleStore( self.mixed_modulestore = MixedModuleStore(
contentstore, contentstore,
self.mappings, self.mappings,
stores, stores,
...@@ -243,7 +237,7 @@ class MixedModulestoreBuilder(object): ...@@ -243,7 +237,7 @@ class MixedModulestoreBuilder(object):
xblock_mixins=XBLOCK_MIXINS, xblock_mixins=XBLOCK_MIXINS,
) )
yield self.modulestore yield self.mixed_modulestore
def __repr__(self): def __repr__(self):
return 'MixedModulestoreBuilder({!r}, {!r})'.format(self.store_builders, self.mappings) return 'MixedModulestoreBuilder({!r}, {!r})'.format(self.store_builders, self.mappings)
...@@ -252,7 +246,7 @@ class MixedModulestoreBuilder(object): ...@@ -252,7 +246,7 @@ class MixedModulestoreBuilder(object):
""" """
Returns the collection storing the asset metadata. Returns the collection storing the asset metadata.
""" """
all_stores = self.modulestore.modulestores all_stores = self.mixed_modulestore.modulestores
if len(all_stores) > 1: if len(all_stores) > 1:
return None return None
......
...@@ -1580,7 +1580,7 @@ class TestCourseCreation(SplitModuleTest): ...@@ -1580,7 +1580,7 @@ class TestCourseCreation(SplitModuleTest):
self.assertIsNotNone(db_structure, "Didn't find course") self.assertIsNotNone(db_structure, "Didn't find course")
self.assertNotIn(BlockKey('course', 'course'), db_structure['blocks']) self.assertNotIn(BlockKey('course', 'course'), db_structure['blocks'])
self.assertIn(BlockKey('chapter', 'top'), db_structure['blocks']) self.assertIn(BlockKey('chapter', 'top'), db_structure['blocks'])
self.assertEqual(db_structure['blocks'][BlockKey('chapter', 'top')]['block_type'], 'chapter') self.assertEqual(db_structure['blocks'][BlockKey('chapter', 'top')].block_type, 'chapter')
def test_create_id_dupe(self): def test_create_id_dupe(self):
""" """
......
...@@ -11,6 +11,7 @@ import json ...@@ -11,6 +11,7 @@ import json
import os import os
import pprint import pprint
import unittest import unittest
import inspect
from contextlib import contextmanager from contextlib import contextmanager
from lazy import lazy from lazy import lazy
...@@ -222,19 +223,12 @@ class BulkAssertionManager(object): ...@@ -222,19 +223,12 @@ class BulkAssertionManager(object):
the failures at once, rather than only seeing single failures. the failures at once, rather than only seeing single failures.
""" """
def __init__(self, test_case): def __init__(self, test_case):
self._equal_expected = [] self._equal_assertions = []
self._equal_actual = []
self._test_case = test_case self._test_case = test_case
def assertEqual(self, expected, actual, description=None):
if description is None:
description = u"{!r} does not equal {!r}".format(expected, actual)
if expected != actual:
self._equal_expected.append((description, expected))
self._equal_actual.append((description, actual))
def run_assertions(self): def run_assertions(self):
super(BulkAssertionTest, self._test_case).assertEqual(self._equal_expected, self._equal_actual) if len(self._equal_assertions) > 0:
raise AssertionError(self._equal_assertions)
class BulkAssertionTest(unittest.TestCase): class BulkAssertionTest(unittest.TestCase):
...@@ -262,7 +256,15 @@ class BulkAssertionTest(unittest.TestCase): ...@@ -262,7 +256,15 @@ class BulkAssertionTest(unittest.TestCase):
def assertEqual(self, expected, actual, message=None): def assertEqual(self, expected, actual, message=None):
if self._manager is not None: if self._manager is not None:
self._manager.assertEqual(expected, actual, message) try:
super(BulkAssertionTest, self).assertEqual(expected, actual, message)
except Exception as error: # pylint: disable=broad-except
exc_stack = inspect.stack()[1]
if message is not None:
msg = '{} -> {}:{} -> {}'.format(message, exc_stack[1], exc_stack[2], unicode(error))
else:
msg = '{}:{} -> {}'.format(exc_stack[1], exc_stack[2], unicode(error))
self._manager._equal_assertions.append(msg) # pylint: disable=protected-access
else: else:
super(BulkAssertionTest, self).assertEqual(expected, actual, message) super(BulkAssertionTest, self).assertEqual(expected, actual, message)
assertEquals = assertEqual assertEquals = assertEqual
......
...@@ -457,6 +457,7 @@ class TestLibraryContentAnalytics(LibraryContentTest): ...@@ -457,6 +457,7 @@ class TestLibraryContentAnalytics(LibraryContentTest):
# except for one of the two already assigned to the student: # except for one of the two already assigned to the student:
keep_block_key = initial_blocks_assigned[0].location keep_block_key = initial_blocks_assigned[0].location
keep_block_lib_usage_key, keep_block_lib_version = self.store.get_block_original_usage(keep_block_key) keep_block_lib_usage_key, keep_block_lib_version = self.store.get_block_original_usage(keep_block_key)
self.assertIsNotNone(keep_block_lib_usage_key)
deleted_block_key = initial_blocks_assigned[1].location deleted_block_key = initial_blocks_assigned[1].location
self.library.children = [keep_block_lib_usage_key] self.library.children = [keep_block_lib_usage_key]
self.store.update_item(self.library, self.user_id) self.store.update_item(self.library, self.user_id)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment