Commit 46f64305 by John Eskew

Merge pull request #6767 from edx/jeskew/split_course_export_slowdown_take_II

Fix performance problem with Split course import/export.
parents b01f7adc 9388ba59
......@@ -37,6 +37,7 @@ log = logging.getLogger('edx.modulestore')
new_contract('CourseKey', CourseKey)
new_contract('AssetKey', AssetKey)
new_contract('AssetMetadata', AssetMetadata)
new_contract('XBlock', XBlock)
class ModuleStoreEnum(object):
......@@ -276,6 +277,122 @@ class BulkOperationsMixin(object):
return self._get_bulk_ops_record(course_key, ignore_case).active
class EditInfo(object):
"""
Encapsulates the editing info of a block.
"""
def __init__(self, **kwargs):
self.from_storable(kwargs)
# For details, see caching_descriptor_system.py get_subtree_edited_by/on.
self._subtree_edited_on = kwargs.get('_subtree_edited_on', None)
self._subtree_edited_by = kwargs.get('_subtree_edited_by', None)
def to_storable(self):
"""
Serialize to a Mongo-storable format.
"""
return {
'previous_version': self.previous_version,
'update_version': self.update_version,
'source_version': self.source_version,
'edited_on': self.edited_on,
'edited_by': self.edited_by,
'original_usage': self.original_usage,
'original_usage_version': self.original_usage_version,
}
def from_storable(self, edit_info):
"""
De-serialize from Mongo-storable format to an object.
"""
# Guid for the structure which previously changed this XBlock.
# (Will be the previous value of 'update_version'.)
self.previous_version = edit_info.get('previous_version', None)
# Guid for the structure where this XBlock got its current field values.
# May point to a structure not in this structure's history (e.g., to a draft
# branch from which this version was published).
self.update_version = edit_info.get('update_version', None)
self.source_version = edit_info.get('source_version', None)
# Datetime when this XBlock's fields last changed.
self.edited_on = edit_info.get('edited_on', None)
# User ID which changed this XBlock last.
self.edited_by = edit_info.get('edited_by', None)
self.original_usage = edit_info.get('original_usage', None)
self.original_usage_version = edit_info.get('original_usage_version', None)
def __str__(self):
return ("EditInfo(previous_version={0.previous_version}, "
"update_version={0.update_version}, "
"source_version={0.source_version}, "
"edited_on={0.edited_on}, "
"edited_by={0.edited_by}, "
"original_usage={0.original_usage}, "
"original_usage_version={0.original_usage_version}, "
"_subtree_edited_on={0._subtree_edited_on}, "
"_subtree_edited_by={0._subtree_edited_by})").format(self)
class BlockData(object):
"""
Wrap the block data in an object instead of using a straight Python dictionary.
Allows the storing of meta-information about a structure that doesn't persist along with
the structure itself.
"""
def __init__(self, **kwargs):
# Has the definition been loaded?
self.definition_loaded = False
self.from_storable(kwargs)
def to_storable(self):
"""
Serialize to a Mongo-storable format.
"""
return {
'fields': self.fields,
'block_type': self.block_type,
'definition': self.definition,
'defaults': self.defaults,
'edit_info': self.edit_info.to_storable()
}
def from_storable(self, block_data):
"""
De-serialize from Mongo-storable format to an object.
"""
# Contains the Scope.settings and 'children' field values.
# 'children' are stored as a list of (block_type, block_id) pairs.
self.fields = block_data.get('fields', {})
# XBlock type ID.
self.block_type = block_data.get('block_type', None)
# DB id of the record containing the content of this XBlock.
self.definition = block_data.get('definition', None)
# Scope.settings default values copied from a template block (used e.g. when
# blocks are copied from a library to a course)
self.defaults = block_data.get('defaults', {})
# EditInfo object containing all versioning/editing data.
self.edit_info = EditInfo(**block_data.get('edit_info', {}))
def __str__(self):
return ("BlockData(fields={0.fields}, "
"block_type={0.block_type}, "
"definition={0.definition}, "
"definition_loaded={0.definition_loaded}, "
"defaults={0.defaults}, "
"edit_info={0.edit_info})").format(self)
new_contract('BlockData', BlockData)
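(For orientation while reading the rest of the diff: a minimal sketch, not part of the change itself, of how the new EditInfo/BlockData objects round-trip through the Mongo-storable dict format defined above. The field values are invented; real blocks store 'children' as BlockKey pairs.)

# Hedged sketch using the EditInfo and BlockData classes added above.
stored = {
    'block_type': 'chapter',
    'definition': 'definition-object-id',
    'fields': {'display_name': 'Week 1', 'children': [('vertical', 'intro')]},
    'defaults': {},
    'edit_info': {'edited_by': 42, 'update_version': 'structure-guid'},
}
block = BlockData(**stored)                # attribute access replaces dict access
assert block.block_type == 'chapter'
assert block.edit_info.edited_by == 42     # nested EditInfo instead of a nested dict
assert block.to_storable()['edit_info']['update_version'] == 'structure-guid'

(The attribute form replaces the dict-style lookups such as block_data['fields'] seen on the removed lines throughout this diff.)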
class IncorrectlySortedList(Exception):
"""
Thrown when calling find() on a SortedAssetList not sorted by filename.
......@@ -615,27 +732,32 @@ class ModuleStoreRead(ModuleStoreAssetBase):
"""
pass
def _block_matches(self, fields_or_xblock, qualifiers):
'''
@contract(block='XBlock | BlockData | dict', qualifiers=dict)
def _block_matches(self, block, qualifiers):
"""
Return True or False depending on whether the field value (block contents)
matches the qualifiers as per get_items. Note, only finds directly set not
inherited nor default value matches.
For substring matching pass a regex object.
for arbitrary function comparison such as date time comparison, pass
the function as in start=lambda x: x < datetime.datetime(2014, 1, 1, 0, tzinfo=pytz.UTC)
matches the qualifiers as per get_items.
NOTE: Method only finds directly set value matches - not inherited nor default value matches.
For substring matching:
pass a regex object.
For arbitrary function comparison such as date time comparison:
pass the function as in start=lambda x: x < datetime.datetime(2014, 1, 1, 0, tzinfo=pytz.UTC)
Args:
fields_or_xblock (dict or XBlock): either the json blob (from the db or get_explicitly_set_fields)
or the xblock.fields() value or the XBlock from which to get those values
qualifiers (dict): field: searchvalue pairs.
'''
if isinstance(fields_or_xblock, XBlock):
fields = fields_or_xblock.fields
xblock = fields_or_xblock
is_xblock = True
block (dict, XBlock, or BlockData): either the BlockData (transformed from the db) -or-
a dict (from BlockData.fields or get_explicitly_set_fields_by_scope) -or-
the xblock.fields() value -or-
the XBlock from which to get the 'fields' value.
qualifiers (dict): {field: value} search pairs.
"""
if isinstance(block, XBlock):
# If an XBlock is passed-in, just match its fields.
xblock, fields = (block, block.fields)
elif isinstance(block, BlockData):
# BlockData is an object - compare its attributes in dict form.
xblock, fields = (None, block.__dict__)
else:
fields = fields_or_xblock
is_xblock = False
xblock, fields = (None, block)
def _is_set_on(key):
"""
......@@ -646,8 +768,8 @@ class ModuleStoreRead(ModuleStoreAssetBase):
if key not in fields:
return False, None
field = fields[key]
if is_xblock:
return field.is_set_on(fields_or_xblock), getattr(xblock, key)
if xblock is not None:
return field.is_set_on(block), getattr(xblock, key)
else:
return True, field
......@@ -660,7 +782,7 @@ class ModuleStoreRead(ModuleStoreAssetBase):
return True
def _value_matches(self, target, criteria):
'''
"""
helper for _block_matches: does the target (field value) match the criteria?
If target is a list, do any of the list elements meet the criteria
......@@ -668,7 +790,7 @@ class ModuleStoreRead(ModuleStoreAssetBase):
If the criteria is a function, does invoking it on the target yield something truthy?
If criteria is a dict {($nin|$in): []}, then do (none|any) of the list elements meet the criteria
Otherwise, is the target == criteria
'''
"""
if isinstance(target, list):
return any(self._value_matches(ele, criteria) for ele in target)
elif isinstance(criteria, re._pattern_type): # pylint: disable=protected-access
......
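(To make the qualifier forms described in the _block_matches/_value_matches docstrings above concrete, a small hedged illustration; the field names are invented and only show the value shapes the matcher accepts.)

import re
import datetime
import pytz

qualifiers = {
    'block_type': 'problem',                      # plain equality
    'display_name': re.compile(r'Exam'),          # substring match via a regex object
    'start': lambda x: x < datetime.datetime(2014, 1, 1, 0, tzinfo=pytz.UTC),  # callable comparison
    'category': {'$in': ['problem', 'video']},    # membership, Mongo-style $in/$nin
}
# Each pair is evaluated per block by _block_matches(block, qualifiers); only
# directly-set field values are considered, never inherited or default values.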
......@@ -21,78 +21,3 @@ class BlockKey(namedtuple('BlockKey', 'type id')):
CourseEnvelope = namedtuple('CourseEnvelope', 'course_key structure')
class BlockData(object):
"""
Wrap the block data in an object instead of using a straight Python dictionary.
Allows the storing of meta-information about a structure that doesn't persist along with
the structure itself.
"""
@contract(block_dict=dict)
def __init__(self, block_dict={}): # pylint: disable=dangerous-default-value
# Has the definition been loaded?
self.definition_loaded = False
self.from_storable(block_dict)
def to_storable(self):
"""
Serialize to a Mongo-storable format.
"""
return {
'fields': self.fields,
'block_type': self.block_type,
'definition': self.definition,
'defaults': self.defaults,
'edit_info': self.edit_info
}
@contract(stored=dict)
def from_storable(self, stored):
"""
De-serialize from Mongo-storable format to an object.
"""
self.fields = stored.get('fields', {})
self.block_type = stored.get('block_type', None)
self.definition = stored.get('definition', None)
self.defaults = stored.get('defaults', {})
self.edit_info = stored.get('edit_info', {})
def get(self, key, *args, **kwargs):
"""
Dict-like 'get' method. Raises AttributeError if requesting non-existent attribute and no default.
"""
if len(args) > 0:
return getattr(self, key, args[0])
elif 'default' in kwargs:
return getattr(self, key, kwargs['default'])
else:
return getattr(self, key)
def __getitem__(self, key):
"""
Dict-like '__getitem__'.
"""
if not hasattr(self, key):
raise KeyError
else:
return getattr(self, key)
def __setitem__(self, key, value):
setattr(self, key, value)
def __delitem__(self, key):
delattr(self, key)
def __iter__(self):
return self.__dict__.iterkeys()
def setdefault(self, key, default=None):
"""
Dict-like 'setdefault'.
"""
try:
return getattr(self, key)
except AttributeError:
setattr(self, key, default)
return default
......@@ -5,11 +5,13 @@ from fs.osfs import OSFS
from lazy import lazy
from xblock.runtime import KvsFieldData
from xblock.fields import ScopeIds
from xblock.core import XBlock
from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator, LibraryLocator, DefinitionLocator
from xmodule.library_tools import LibraryToolsService
from xmodule.mako_module import MakoDescriptorSystem
from xmodule.error_module import ErrorDescriptor
from xmodule.errortracker import exc_info_to_str
from xmodule.modulestore import BlockData
from xmodule.modulestore.edit_info import EditInfoRuntimeMixin
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.inheritance import inheriting_field_data, InheritanceMixin
......@@ -24,7 +26,9 @@ new_contract('BlockUsageLocator', BlockUsageLocator)
new_contract('CourseLocator', CourseLocator)
new_contract('LibraryLocator', LibraryLocator)
new_contract('BlockKey', BlockKey)
new_contract('BlockData', BlockData)
new_contract('CourseEnvelope', CourseEnvelope)
new_contract('XBlock', XBlock)
class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
......@@ -79,7 +83,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
def _parent_map(self):
parent_map = {}
for block_key, block in self.course_entry.structure['blocks'].iteritems():
for child in block['fields'].get('children', []):
for child in block.fields.get('children', []):
parent_map[child] = block_key
return parent_map
......@@ -119,7 +123,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
block_data = self.get_module_data(block_key, course_key)
class_ = self.load_block_type(block_data.get('block_type'))
class_ = self.load_block_type(block_data.block_type)
block = self.xblock_from_json(class_, course_key, block_key, block_data, course_entry_override, **kwargs)
self.modulestore.cache_block(course_key, version_guid, block_key, block)
return block
......@@ -164,17 +168,17 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
# most recent retrieval is most likely the right one for next caller (see comment above fn)
self.course_entry = CourseEnvelope(course_entry_override.course_key, self.course_entry.structure)
definition_id = block_data.get('definition')
definition_id = block_data.definition
# If no usage id is provided, generate an in-memory id
if block_key is None:
block_key = BlockKey(block_data['block_type'], LocalId())
block_key = BlockKey(block_data.block_type, LocalId())
convert_fields = lambda field: self.modulestore.convert_references_to_keys(
course_key, class_, field, self.course_entry.structure['blocks'],
)
if definition_id is not None and not block_data['definition_loaded']:
if definition_id is not None and not block_data.definition_loaded:
definition_loader = DefinitionLazyLoader(
self.modulestore,
course_key,
......@@ -195,8 +199,8 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
block_id=block_key.id,
)
converted_fields = convert_fields(block_data.get('fields', {}))
converted_defaults = convert_fields(block_data.get('defaults', {}))
converted_fields = convert_fields(block_data.fields)
converted_defaults = convert_fields(block_data.defaults)
if block_key in self._parent_map:
parent_key = self._parent_map[block_key]
parent = course_key.make_usage_key(parent_key.type, parent_key.id)
......@@ -221,7 +225,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
ScopeIds(None, block_key.type, definition_id, block_locator),
field_data,
)
except Exception:
except Exception: # pylint: disable=broad-except
log.warning("Failed to load descriptor", exc_info=True)
return ErrorDescriptor.from_json(
block_data,
......@@ -233,12 +237,12 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
error_msg=exc_info_to_str(sys.exc_info())
)
edit_info = block_data.get('edit_info', {})
module._edited_by = edit_info.get('edited_by') # pylint: disable=protected-access
module._edited_on = edit_info.get('edited_on') # pylint: disable=protected-access
module.previous_version = edit_info.get('previous_version')
module.update_version = edit_info.get('update_version')
module.source_version = edit_info.get('source_version', None)
edit_info = block_data.edit_info
module._edited_by = edit_info.edited_by # pylint: disable=protected-access
module._edited_on = edit_info.edited_on # pylint: disable=protected-access
module.previous_version = edit_info.previous_version
module.update_version = edit_info.update_version
module.source_version = edit_info.source_version
module.definition_locator = DefinitionLocator(block_key.type, definition_id)
# decache any pending field settings
module.save()
......@@ -261,31 +265,35 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
"""
return xblock._edited_on
@contract(xblock='XBlock')
def get_subtree_edited_by(self, xblock):
"""
See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin
"""
# pylint: disable=protected-access
if not hasattr(xblock, '_subtree_edited_by'):
json_data = self.module_data[BlockKey.from_usage_key(xblock.location)]
if '_subtree_edited_by' not in json_data.setdefault('edit_info', {}):
block_data = self.module_data[BlockKey.from_usage_key(xblock.location)]
if block_data.edit_info._subtree_edited_by is None:
self._compute_subtree_edited_internal(
xblock.location.block_id, json_data, xblock.location.course_key
block_data, xblock.location.course_key
)
setattr(xblock, '_subtree_edited_by', json_data['edit_info']['_subtree_edited_by'])
setattr(xblock, '_subtree_edited_by', block_data.edit_info._subtree_edited_by)
return getattr(xblock, '_subtree_edited_by')
@contract(xblock='XBlock')
def get_subtree_edited_on(self, xblock):
"""
See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin
"""
# pylint: disable=protected-access
if not hasattr(xblock, '_subtree_edited_on'):
json_data = self.module_data[BlockKey.from_usage_key(xblock.location)]
if '_subtree_edited_on' not in json_data.setdefault('edit_info', {}):
block_data = self.module_data[BlockKey.from_usage_key(xblock.location)]
if block_data.edit_info._subtree_edited_on is None:
self._compute_subtree_edited_internal(
xblock.location.block_id, json_data, xblock.location.course_key
block_data, xblock.location.course_key
)
setattr(xblock, '_subtree_edited_on', json_data['edit_info']['_subtree_edited_on'])
setattr(xblock, '_subtree_edited_on', block_data.edit_info._subtree_edited_on)
return getattr(xblock, '_subtree_edited_on')
......@@ -307,20 +315,22 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
return getattr(xblock, '_published_on', None)
def _compute_subtree_edited_internal(self, block_id, json_data, course_key):
@contract(block_data='BlockData')
def _compute_subtree_edited_internal(self, block_data, course_key):
"""
Recurse the subtree finding the max edited_on date and its concomitant edited_by. Cache it
Recurse the subtree finding the max edited_on date and its corresponding edited_by. Cache it.
"""
max_date = json_data['edit_info']['edited_on']
max_by = json_data['edit_info']['edited_by']
# pylint: disable=protected-access
max_date = block_data.edit_info.edited_on
max_date_by = block_data.edit_info.edited_by
for child in json_data.get('fields', {}).get('children', []):
for child in block_data.fields.get('children', []):
child_data = self.get_module_data(BlockKey(*child), course_key)
if '_subtree_edited_on' not in json_data.setdefault('edit_info', {}):
self._compute_subtree_edited_internal(child, child_data, course_key)
if child_data['edit_info']['_subtree_edited_on'] > max_date:
max_date = child_data['edit_info']['_subtree_edited_on']
max_by = child_data['edit_info']['_subtree_edited_by']
json_data['edit_info']['_subtree_edited_on'] = max_date
json_data['edit_info']['_subtree_edited_by'] = max_by
if block_data.edit_info._subtree_edited_on is None:
self._compute_subtree_edited_internal(child_data, course_key)
if child_data.edit_info._subtree_edited_on > max_date:
max_date = child_data.edit_info._subtree_edited_on
max_date_by = child_data.edit_info._subtree_edited_by
block_data.edit_info._subtree_edited_on = max_date
block_data.edit_info._subtree_edited_by = max_date_by
......@@ -26,7 +26,7 @@ class SplitMongoIdManager(OpaqueKeyReader, AsideKeyGenerator): # pylint: disabl
block_key = BlockKey.from_usage_key(usage_id)
module_data = self._cds.get_module_data(block_key, usage_id.course_key)
if 'definition' in module_data:
return DefinitionLocator(usage_id.block_type, module_data['definition'])
if module_data.definition is not None:
return DefinitionLocator(usage_id.block_type, module_data.definition)
else:
raise ValueError("All non-local blocks should have a definition specified")
......@@ -10,7 +10,8 @@ from pymongo.errors import DuplicateKeyError # pylint: disable=unused-import
from contracts import check, new_contract
from xmodule.exceptions import HeartbeatFailure
from xmodule.modulestore.split_mongo import BlockKey, BlockData
from xmodule.modulestore import BlockData
from xmodule.modulestore.split_mongo import BlockKey
import datetime
import pytz
......@@ -37,7 +38,7 @@ def structure_from_mongo(structure):
for block in structure['blocks']:
if 'children' in block['fields']:
block['fields']['children'] = [BlockKey(*child) for child in block['fields']['children']]
new_blocks[BlockKey(block['block_type'], block.pop('block_id'))] = BlockData(block)
new_blocks[BlockKey(block['block_type'], block.pop('block_id'))] = BlockData(**block)
structure['blocks'] = new_blocks
return structure
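(A hedged walk-through of the per-block conversion structure_from_mongo now performs; the values are invented and the real loop iterates structure['blocks'].)

raw_block = {
    'block_id': 'chapter1',
    'block_type': 'chapter',
    'fields': {'children': [['vertical', 'intro']]},
    'definition': 'definition-object-id',
    'defaults': {},
    'edit_info': {},
}
raw_block['fields']['children'] = [BlockKey(*child) for child in raw_block['fields']['children']]
key = BlockKey(raw_block['block_type'], raw_block.pop('block_id'))
new_blocks = {key: BlockData(**raw_block)}   # was BlockData(block) with the old dict-like wrapper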
......@@ -54,8 +55,8 @@ def structure_to_mongo(structure):
check('BlockKey', structure['root'])
check('dict(BlockKey: BlockData)', structure['blocks'])
for block in structure['blocks'].itervalues():
if 'children' in block['fields']:
check('list(BlockKey)', block['fields']['children'])
if 'children' in block.fields:
check('list(BlockKey)', block.fields['children'])
new_structure = dict(structure)
new_structure['blocks'] = []
......
......@@ -22,14 +22,15 @@ Representation:
** 'edited_by': user_id of the user whose change caused the creation of this structure version,
** 'edited_on': the datetime for the change causing this creation of this structure version,
** 'blocks': dictionary of xblocks in this structure:
*** BlockKey: dictionary of block settings and children:
*** BlockKey: key mapping to each BlockData:
*** BlockData: object containing the following attributes:
**** 'block_type': the xblock type id
**** 'definition': the db id of the record containing the content payload for this xblock
**** 'fields': the Scope.settings and children field values
***** 'children': This is stored as a list of (block_type, block_id) pairs
**** 'defaults': Scope.settings default values copied from a template block (used e.g. when
blocks are copied from a library to a course)
**** 'edit_info': dictionary:
**** 'edit_info': EditInfo object:
***** 'edited_on': when was this xblock's fields last changed (will be edited_on value of
update_version structure)
***** 'edited_by': user_id for who changed this xblock last (will be edited_by value of
......@@ -72,13 +73,14 @@ from opaque_keys.edx.locator import (
from xmodule.modulestore.exceptions import InsufficientSpecificationError, VersionConflictError, DuplicateItemError, \
DuplicateCourseError
from xmodule.modulestore import (
inheritance, ModuleStoreWriteBase, ModuleStoreEnum, BulkOpsRecord, BulkOperationsMixin, SortedAssetList
inheritance, ModuleStoreWriteBase, ModuleStoreEnum,
BulkOpsRecord, BulkOperationsMixin, SortedAssetList, BlockData
)
from ..exceptions import ItemNotFoundError
from .caching_descriptor_system import CachingDescriptorSystem
from xmodule.modulestore.split_mongo.mongo_connection import MongoConnection, DuplicateKeyError
from xmodule.modulestore.split_mongo import BlockKey, CourseEnvelope, BlockData
from xmodule.modulestore.split_mongo import BlockKey, CourseEnvelope
from xmodule.error_module import ErrorDescriptor
from collections import defaultdict
from types import NoneType
......@@ -111,6 +113,7 @@ EXCLUDE_ALL = '*'
new_contract('BlockUsageLocator', BlockUsageLocator)
new_contract('BlockKey', BlockKey)
new_contract('XBlock', XBlock)
class SplitBulkWriteRecord(BulkOpsRecord):
......@@ -446,24 +449,22 @@ class SplitBulkWriteMixin(BulkOperationsMixin):
return new_structure
def version_block(self, block_info, user_id, update_version):
def version_block(self, block_data, user_id, update_version):
"""
Update the block_info dictionary based on it having been edited
Update the block_data object based on it having been edited.
"""
if block_info['edit_info'].get('update_version') == update_version:
if block_data.edit_info.update_version == update_version:
return
original_usage = block_info['edit_info'].get('original_usage')
original_usage_version = block_info['edit_info'].get('original_usage_version')
block_info['edit_info'] = {
'edited_on': datetime.datetime.now(UTC),
'edited_by': user_id,
'previous_version': block_info['edit_info']['update_version'],
'update_version': update_version,
}
original_usage = block_data.edit_info.original_usage
original_usage_version = block_data.edit_info.original_usage_version
block_data.edit_info.edited_on = datetime.datetime.now(UTC)
block_data.edit_info.edited_by = user_id
block_data.edit_info.previous_version = block_data.edit_info.update_version
block_data.edit_info.update_version = update_version
if original_usage:
block_info['edit_info']['original_usage'] = original_usage
block_info['edit_info']['original_usage_version'] = original_usage_version
block_data.edit_info.original_usage = original_usage
block_data.edit_info.original_usage_version = original_usage_version
def find_matching_course_indexes(self, branch=None, search_targets=None):
"""
......@@ -696,11 +697,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
for definition in descendent_definitions}
for block in new_module_data.itervalues():
if block['definition'] in definitions:
definition = definitions[block['definition']]
if block.definition in definitions:
definition = definitions[block.definition]
# convert_fields was being done here, but it gets done later in the runtime's xblock_from_json
block['fields'].update(definition.get('fields'))
block['definition_loaded'] = True
block.fields.update(definition.get('fields'))
block.definition_loaded = True
system.module_data.update(new_module_data)
return system.module_data
......@@ -973,6 +974,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
return self._get_block_from_structure(course_structure, BlockKey.from_usage_key(usage_key)) is not None
@contract(returns='XBlock')
def get_item(self, usage_key, depth=0, **kwargs):
"""
depth (int): An argument that some module stores may use to prefetch
......@@ -1024,18 +1026,18 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
items = []
qualifiers = qualifiers.copy() if qualifiers else {} # copy the qualifiers (destructively manipulated here)
def _block_matches_all(block_json):
def _block_matches_all(block_data):
"""
Check that the block matches all the criteria
"""
# do the checks which don't require loading any additional data
if (
self._block_matches(block_json, qualifiers) and
self._block_matches(block_json.get('fields', {}), settings)
if ( # pylint: disable=bad-continuation
self._block_matches(block_data, qualifiers) and
self._block_matches(block_data.fields, settings)
):
if content:
definition_block = self.get_definition(course_locator, block_json['definition'])
return self._block_matches(definition_block.get('fields', {}), content)
definition_block = self.get_definition(course_locator, block_data.definition)
return self._block_matches(definition_block['fields'], content)
else:
return True
......@@ -1104,8 +1106,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
items.remove(course.structure['root'])
blocks = course.structure['blocks']
for block_id, block_data in blocks.iteritems():
items.difference_update(BlockKey(*child) for child in block_data.get('fields', {}).get('children', []))
if block_data['block_type'] in detached_categories:
items.difference_update(BlockKey(*child) for child in block_data.fields.get('children', []))
if block_data.block_type in detached_categories:
items.discard(block_id)
return [
course_key.make_usage_key(block_type=block_id.type, block_id=block_id.id)
......@@ -1235,18 +1237,18 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
result = {}
for version in all_versions_with_block:
block_payload = self._get_block_from_structure(version, block_key)
if version['_id'] == block_payload['edit_info']['update_version']:
if block_payload['edit_info'].get('previous_version') is None:
if version['_id'] == block_payload.edit_info.update_version:
if block_payload.edit_info.previous_version is None:
# this was when this block was created
possible_roots.append(block_payload['edit_info']['update_version'])
possible_roots.append(block_payload.edit_info.update_version)
else: # map previous to {update..}
result.setdefault(block_payload['edit_info']['previous_version'], set()).add(
block_payload['edit_info']['update_version'])
result.setdefault(block_payload.edit_info.previous_version, set()).add(
block_payload.edit_info.update_version)
# more than one possible_root means usage was added and deleted > 1x.
if len(possible_roots) > 1:
# find the history segment including block_locator's version
element_to_find = self._get_block_from_structure(course_struct, block_key)['edit_info']['update_version']
element_to_find = self._get_block_from_structure(course_struct, block_key).edit_info.update_version
if element_to_find in possible_roots:
possible_roots = [element_to_find]
for possibility in possible_roots:
......@@ -1283,9 +1285,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
"""
blocks = self._lookup_course(usage_key.course_key).structure['blocks']
block = blocks.get(BlockKey.from_usage_key(usage_key))
if block and 'original_usage' in block['edit_info']:
usage_key = BlockUsageLocator.from_string(block['edit_info']['original_usage'])
return usage_key, block['edit_info'].get('original_usage_version')
if block and block.edit_info.original_usage is not None:
usage_key = BlockUsageLocator.from_string(block.edit_info.original_usage)
return usage_key, block.edit_info.original_usage_version
return None, None
def create_definition_from_data(self, course_key, new_def_data, category, user_id):
......@@ -1376,6 +1378,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
return potential_key
serial += 1
@contract(returns='XBlock')
def create_item(
self, user_id, course_key, block_type, block_id=None,
definition_locator=None, fields=None,
......@@ -1529,14 +1532,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# Originally added to support entrance exams (settings.FEATURES.get('ENTRANCE_EXAMS'))
if kwargs.get('position') is None:
parent['fields'].setdefault('children', []).append(BlockKey.from_usage_key(xblock.location))
parent.fields.setdefault('children', []).append(BlockKey.from_usage_key(xblock.location))
else:
parent['fields'].setdefault('children', []).insert(
parent.fields.setdefault('children', []).insert(
kwargs.get('position'),
BlockKey.from_usage_key(xblock.location)
)
if parent['edit_info']['update_version'] != new_structure['_id']:
if parent.edit_info.update_version != new_structure['_id']:
# if the parent hadn't been previously changed in this bulk transaction, indicate that it's
# part of the bulk transaction
self.version_block(parent, user_id, new_structure['_id'])
......@@ -1677,19 +1680,17 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
new_id = draft_structure['_id']
root_block = draft_structure['blocks'][draft_structure['root']]
if block_fields is not None:
root_block['fields'].update(self._serialize_fields(root_category, block_fields))
root_block.fields.update(self._serialize_fields(root_category, block_fields))
if definition_fields is not None:
old_def = self.get_definition(locator, root_block['definition'])
old_def = self.get_definition(locator, root_block.definition)
new_fields = old_def['fields']
new_fields.update(definition_fields)
definition_id = self._update_definition_from_data(locator, old_def, new_fields, user_id).definition_id
root_block['definition'] = definition_id
root_block['edit_info'].update({
'edited_on': datetime.datetime.now(UTC),
'edited_by': user_id,
'previous_version': root_block['edit_info'].get('update_version'),
'update_version': new_id,
})
root_block.definition = definition_id
root_block.edit_info.edited_on = datetime.datetime.now(UTC)
root_block.edit_info.edited_by = user_id
root_block.edit_info.previous_version = root_block.edit_info.update_version
root_block.edit_info.update_version = new_id
versions_dict[master_branch] = new_id
else: # Pointing to an existing course structure
......@@ -1788,7 +1789,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
is_updated = False
definition_fields = partitioned_fields[Scope.content]
if definition_locator is None:
definition_locator = DefinitionLocator(original_entry['block_type'], original_entry['definition'])
definition_locator = DefinitionLocator(original_entry.block_type, original_entry.definition)
if definition_fields:
definition_locator, is_updated = self.update_definition_from_data(
course_key, definition_locator, definition_fields, user_id
......@@ -1798,12 +1799,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
settings = partitioned_fields[Scope.settings]
settings = self._serialize_fields(block_key.type, settings)
if not is_updated:
is_updated = self._compare_settings(settings, original_entry['fields'])
is_updated = self._compare_settings(settings, original_entry.fields)
# check children
if partitioned_fields.get(Scope.children, {}): # purposely not 'is not None'
serialized_children = [BlockKey.from_usage_key(child) for child in partitioned_fields[Scope.children]['children']]
is_updated = is_updated or original_entry['fields'].get('children', []) != serialized_children
is_updated = is_updated or original_entry.fields.get('children', []) != serialized_children
if is_updated:
settings['children'] = serialized_children
......@@ -1812,8 +1813,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
new_structure = self.version_structure(course_key, original_structure, user_id)
block_data = self._get_block_from_structure(new_structure, block_key)
block_data["definition"] = definition_locator.definition_id
block_data["fields"] = settings
block_data.definition = definition_locator.definition_id
block_data.fields = settings
new_id = new_structure['_id']
self.version_block(block_data, user_id, new_id)
......@@ -1889,7 +1890,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
xblock_class,
course_key,
BlockKey(block_type, block_id) if block_id else None,
json_data,
BlockData(**json_data),
**kwargs
)
for field_name, value in (fields or {}).iteritems():
......@@ -1978,12 +1979,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
children.append(BlockKey.from_usage_key(child_block.location))
else:
children.append(BlockKey.from_usage_key(child))
is_updated = is_updated or structure_blocks[block_key]['fields']['children'] != children
is_updated = is_updated or structure_blocks[block_key].fields['children'] != children
block_fields = partitioned_fields[Scope.settings]
block_fields = self._serialize_fields(xblock.category, block_fields)
if not is_new and not is_updated:
is_updated = self._compare_settings(block_fields, structure_blocks[block_key]['fields'])
is_updated = self._compare_settings(block_fields, structure_blocks[block_key].fields)
if children:
block_fields['children'] = children
......@@ -1999,8 +2000,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
)
else:
block_info = structure_blocks[block_key]
block_info['fields'] = block_fields
block_info['definition'] = xblock.definition_locator.definition_id
block_info.fields = block_fields
block_info.definition = xblock.definition_locator.definition_id
self.version_block(block_info, user_id, new_id)
structure_blocks[block_key] = block_info
......@@ -2075,7 +2076,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
destination_structure = self._new_structure(
user_id, root_block_key,
# leave off the fields b/c the children must be filtered
definition_id=root_source['definition'],
definition_id=root_source.definition,
)
else:
destination_structure = self._lookup_course(destination_course).structure
......@@ -2183,10 +2184,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
dest_info = dest_structure['blocks'][block_key]
# Update the edit_info:
dest_info['edit_info']['previous_version'] = dest_info['edit_info']['update_version']
dest_info['edit_info']['update_version'] = old_dest_structure_version
dest_info['edit_info']['edited_by'] = user_id
dest_info['edit_info']['edited_on'] = datetime.datetime.now(UTC)
dest_info.edit_info.previous_version = dest_info.edit_info.update_version
dest_info.edit_info.update_version = old_dest_structure_version
dest_info.edit_info.edited_by = user_id
dest_info.edit_info.edited_on = datetime.datetime.now(UTC)
orphans = orig_descendants - new_descendants
for orphan in orphans:
......@@ -2197,7 +2198,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# Return usage locators for all the new children:
return [
destination_course.make_usage_key(*k)
for k in dest_structure['blocks'][block_key]['fields']['children']
for k in dest_structure['blocks'][block_key].fields['children']
]
def _copy_from_template(self, source_structures, source_keys, dest_structure, new_parent_block_key, user_id):
......@@ -2233,9 +2234,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# Now clone block_key to new_block_key:
new_block_info = copy.deepcopy(source_block_info)
# Note that new_block_info now points to the same definition ID entry as source_block_info did
existing_block_info = dest_structure['blocks'].get(new_block_key, {})
existing_block_info = dest_structure['blocks'].get(new_block_key, BlockData())
# Inherit the Scope.settings values from 'fields' to 'defaults'
new_block_info['defaults'] = new_block_info['fields']
new_block_info.defaults = new_block_info.fields
# <workaround>
# CAPA modules store their 'markdown' value (an alternate representation of their content)
......@@ -2245,27 +2246,29 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# even if it hasn't changed, which breaks our override system.
# So until capa modules are fixed, we special-case them and remove their markdown fields,
# forcing the inherited version to use XML only.
if usage_key.block_type == 'problem' and 'markdown' in new_block_info['defaults']:
del new_block_info['defaults']['markdown']
if usage_key.block_type == 'problem' and 'markdown' in new_block_info.defaults:
del new_block_info.defaults['markdown']
# </workaround>
new_block_info['fields'] = existing_block_info.get('fields', {}) # Preserve any existing overrides
if 'children' in new_block_info['defaults']:
del new_block_info['defaults']['children'] # Will be set later
new_block_info['block_id'] = new_block_key.id
new_block_info['edit_info'] = existing_block_info.get('edit_info', {})
new_block_info['edit_info']['previous_version'] = new_block_info['edit_info'].get('update_version', None)
new_block_info['edit_info']['update_version'] = dest_structure['_id']
new_block_info.fields = existing_block_info.fields # Preserve any existing overrides
if 'children' in new_block_info.defaults:
del new_block_info.defaults['children'] # Will be set later
# ALERT! Why was this 'block_id' stored here? Nothing else stores block_id
# in the block data. Was this a harmless error?
#new_block_info['block_id'] = new_block_key.id
new_block_info.edit_info = existing_block_info.edit_info
new_block_info.edit_info.previous_version = new_block_info.edit_info.update_version
new_block_info.edit_info.update_version = dest_structure['_id']
# Note we do not set 'source_version' - it's only used for copying identical blocks
# from draft to published as part of publishing workflow.
# Setting it to the source_block_info structure version here breaks split_draft's has_changes() method.
new_block_info['edit_info']['edited_by'] = user_id
new_block_info['edit_info']['edited_on'] = datetime.datetime.now(UTC)
new_block_info['edit_info']['original_usage'] = unicode(usage_key.replace(branch=None, version_guid=None))
new_block_info['edit_info']['original_usage_version'] = source_block_info['edit_info'].get('update_version')
new_block_info.edit_info.edited_by = user_id
new_block_info.edit_info.edited_on = datetime.datetime.now(UTC)
new_block_info.edit_info.original_usage = unicode(usage_key.replace(branch=None, version_guid=None))
new_block_info.edit_info.original_usage_version = source_block_info.edit_info.update_version
dest_structure['blocks'][new_block_key] = new_block_info
children = source_block_info['fields'].get('children')
children = source_block_info.fields.get('children')
if children:
children = [src_course_key.make_usage_key(child.type, child.id) for child in children]
new_blocks |= self._copy_from_template(
......@@ -2277,7 +2280,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
new_children.append(new_block_key)
# Update the children of new_parent_block_key
dest_structure['blocks'][new_parent_block_key]['fields']['children'] = new_children
dest_structure['blocks'][new_parent_block_key].fields['children'] = new_children
return new_blocks
......@@ -2314,11 +2317,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
parent_block_keys = self._get_parents_from_structure(block_key, original_structure)
for parent_block_key in parent_block_keys:
parent_block = new_blocks[parent_block_key]
parent_block['fields']['children'].remove(block_key)
parent_block['edit_info']['edited_on'] = datetime.datetime.now(UTC)
parent_block['edit_info']['edited_by'] = user_id
parent_block['edit_info']['previous_version'] = parent_block['edit_info']['update_version']
parent_block['edit_info']['update_version'] = new_id
parent_block.fields['children'].remove(block_key)
parent_block.edit_info.edited_on = datetime.datetime.now(UTC)
parent_block.edit_info.edited_by = user_id
parent_block.edit_info.previous_version = parent_block.edit_info.update_version
parent_block.edit_info.update_version = new_id
self.decache_block(usage_locator.course_key, new_id, parent_block_key)
self._remove_subtree(BlockKey.from_usage_key(usage_locator), new_blocks)
......@@ -2340,7 +2343,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
"""
Remove the subtree rooted at block_key
"""
for child in blocks[block_key]['fields'].get('children', []):
for child in blocks[block_key].fields.get('children', []):
self._remove_subtree(BlockKey(*child), blocks)
del blocks[block_key]
......@@ -2365,11 +2368,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
self, block_map, block_key, inherited_settings_map, inheriting_settings=None, inherited_from=None
):
"""
Updates block_json with any inheritable setting set by an ancestor and recurses to children.
Updates block_data with any inheritable setting set by an ancestor and recurses to children.
"""
if block_key not in block_map:
return
block_json = block_map[block_key]
block_data = block_map[block_key]
if inheriting_settings is None:
inheriting_settings = {}
......@@ -2384,7 +2387,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# update the inheriting w/ what should pass to children
inheriting_settings = inherited_settings_map[block_key].copy()
block_fields = block_json['fields']
block_fields = block_data.fields
for field_name in inheritance.InheritanceMixin.fields:
if field_name in block_fields:
inheriting_settings[field_name] = block_fields[field_name]
......@@ -2421,7 +2424,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
if depth is None or depth > 0:
depth = depth - 1 if depth is not None else None
for child in descendent_map[block_id]['fields'].get('children', []):
for child in descendent_map[block_id].fields.get('children', []):
descendent_map = self.descendants(block_map, child, depth, descendent_map)
return descendent_map
......@@ -2604,9 +2607,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
index_entry = self._get_index_if_valid(course_locator)
new_structure = self.version_structure(course_locator, original_structure, user_id)
for block in new_structure['blocks'].itervalues():
if 'fields' in block and 'children' in block['fields']:
block['fields']["children"] = [
block_id for block_id in block['fields']["children"]
if 'children' in block.fields:
block.fields['children'] = [
block_id for block_id in block.fields['children']
if block_id in new_structure['blocks']
]
self.update_structure(course_locator, new_structure)
......@@ -2809,7 +2812,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
return [
parent_block_key
for parent_block_key, value in structure['blocks'].iteritems()
if block_key in value['fields'].get('children', [])
if block_key in value.fields.get('children', [])
]
def _sync_children(self, source_parent, destination_parent, new_child):
......@@ -2818,13 +2821,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
Return the removed ones as orphans (a set).
"""
destination_reordered = []
destination_children = set(destination_parent['fields']['children'])
source_children = source_parent['fields']['children']
destination_children = set(destination_parent.fields['children'])
source_children = source_parent.fields['children']
orphans = destination_children - set(source_children)
for child in source_children:
if child == new_child or child in destination_children:
destination_reordered.append(child)
destination_parent['fields']['children'] = destination_reordered
destination_parent.fields['children'] = destination_reordered
return orphans
@contract(
......@@ -2847,8 +2850,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# reorder children to correspond to whatever order holds for source.
# remove any which source no longer claims (put into orphans)
# add any which are being copied
source_children = new_block['fields'].get('children', [])
existing_children = destination_block['fields'].get('children', [])
source_children = new_block.fields.get('children', [])
existing_children = destination_block.fields.get('children', [])
destination_reordered = SparseList()
for child in existing_children:
try:
......@@ -2862,28 +2865,28 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
destination_reordered[index] = child
# the history of the published leaps between publications and only points to
# previously published versions.
previous_version = destination_block['edit_info']['update_version']
previous_version = destination_block.edit_info.update_version
destination_block = copy.deepcopy(new_block)
destination_block['fields']['children'] = destination_reordered.compact_list()
destination_block['edit_info']['previous_version'] = previous_version
destination_block['edit_info']['update_version'] = destination_version
destination_block['edit_info']['edited_by'] = user_id
destination_block['edit_info']['edited_on'] = datetime.datetime.now(UTC)
destination_block.fields['children'] = destination_reordered.compact_list()
destination_block.edit_info.previous_version = previous_version
destination_block.edit_info.update_version = destination_version
destination_block.edit_info.edited_by = user_id
destination_block.edit_info.edited_on = datetime.datetime.now(UTC)
else:
destination_block = self._new_block(
user_id, new_block['block_type'],
self._filter_blacklist(copy.copy(new_block['fields']), blacklist),
new_block['definition'],
user_id, new_block.block_type,
self._filter_blacklist(copy.copy(new_block.fields), blacklist),
new_block.definition,
destination_version,
raw=True,
block_defaults=new_block.get('defaults')
block_defaults=new_block.defaults
)
# introduce new edit info field for tracing where copied/published blocks came
destination_block['edit_info']['source_version'] = new_block['edit_info']['update_version']
destination_block.edit_info.source_version = new_block.edit_info.update_version
if blacklist != EXCLUDE_ALL:
for child in destination_block['fields'].get('children', []):
for child in destination_block.fields.get('children', []):
if child not in blacklist:
orphans.update(
self._copy_subdag(
......@@ -2911,7 +2914,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
Delete the orphan and any of its descendants which no longer have parents.
"""
if len(self._get_parents_from_structure(orphan, structure)) == 0:
for child in structure['blocks'][orphan]['fields'].get('children', []):
for child in structure['blocks'][orphan].fields.get('children', []):
self._delete_if_true_orphan(BlockKey(*child), structure)
del structure['blocks'][orphan]
......@@ -2940,7 +2943,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
}
if block_defaults:
document['defaults'] = block_defaults
return BlockData(document)
return BlockData(**document)
@contract(block_key=BlockKey, returns='BlockData | None')
def _get_block_from_structure(self, structure, block_key):
......
......@@ -324,9 +324,9 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
return True
# check the children in the draft
if 'children' in draft_block.setdefault('fields', {}):
if 'children' in draft_block.fields:
return any(
[has_changes_subtree(child_block_id) for child_block_id in draft_block['fields']['children']]
[has_changes_subtree(child_block_id) for child_block_id in draft_block.fields['children']]
)
return False
......@@ -410,7 +410,7 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
self._get_block_from_structure(published_course_structure, root_block_id)
)
block = self._get_block_from_structure(new_structure, root_block_id)
for child_block_id in block.setdefault('fields', {}).get('children', []):
for child_block_id in block.fields.get('children', []):
copy_from_published(child_block_id)
copy_from_published(BlockKey.from_usage_key(location))
......@@ -472,7 +472,8 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
"""
Return the version of the given database representation of a block.
"""
return block['edit_info'].get('source_version', block['edit_info']['update_version'])
source_version = block.edit_info.source_version
return source_version if source_version is not None else block.edit_info.update_version
def import_xblock(self, user_id, course_key, block_type, block_id, fields=None, runtime=None, **kwargs):
"""
......@@ -505,8 +506,8 @@ class DraftVersioningModuleStore(SplitMongoModuleStore, ModuleStoreDraftAndPubli
"""
published_block = self._get_head(xblock, ModuleStoreEnum.BranchName.published)
if published_block is not None:
setattr(xblock, '_published_by', published_block['edit_info']['edited_by'])
setattr(xblock, '_published_on', published_block['edit_info']['edited_on'])
setattr(xblock, '_published_by', published_block.edit_info.edited_by)
setattr(xblock, '_published_on', published_block.edit_info.edited_on)
@contract(asset_key='AssetKey')
def find_asset_metadata(self, asset_key, **kwargs):
......
......@@ -101,7 +101,7 @@ class MongoModulestoreBuilder(object):
fs_root = mkdtemp()
# pylint: disable=attribute-defined-outside-init
self.modulestore = DraftModuleStore(
modulestore = DraftModuleStore(
contentstore,
doc_store_config,
fs_root,
......@@ -110,13 +110,13 @@ class MongoModulestoreBuilder(object):
metadata_inheritance_cache_subsystem=MemoryCache(),
xblock_mixins=XBLOCK_MIXINS,
)
self.modulestore.ensure_indexes()
modulestore.ensure_indexes()
try:
yield self.modulestore
yield modulestore
finally:
# Delete the created database
self.modulestore._drop_database() # pylint: disable=protected-access
modulestore._drop_database() # pylint: disable=protected-access
# Delete the created directory on the filesystem
rmtree(fs_root, ignore_errors=True)
......@@ -124,12 +124,6 @@ class MongoModulestoreBuilder(object):
def __repr__(self):
return 'MongoModulestoreBuilder()'
def asset_collection(self):
"""
Returns the collection storing the asset metadata.
"""
return self.modulestore.asset_collection
class VersioningModulestoreBuilder(object):
"""
......@@ -213,7 +207,7 @@ class MixedModulestoreBuilder(object):
"""
self.store_builders = store_builders
self.mappings = mappings or {}
self.modulestore = None
self.mixed_modulestore = None
@contextmanager
def build(self, contentstore):
......@@ -235,7 +229,7 @@ class MixedModulestoreBuilder(object):
# Generate a fake list of stores to give the already generated stores appropriate names
stores = [{'NAME': name, 'ENGINE': 'This space deliberately left blank'} for name in names]
self.modulestore = MixedModuleStore(
self.mixed_modulestore = MixedModuleStore(
contentstore,
self.mappings,
stores,
......@@ -243,7 +237,7 @@ class MixedModulestoreBuilder(object):
xblock_mixins=XBLOCK_MIXINS,
)
yield self.modulestore
yield self.mixed_modulestore
def __repr__(self):
return 'MixedModulestoreBuilder({!r}, {!r})'.format(self.store_builders, self.mappings)
......@@ -252,7 +246,7 @@ class MixedModulestoreBuilder(object):
"""
Returns the collection storing the asset metadata.
"""
all_stores = self.modulestore.modulestores
all_stores = self.mixed_modulestore.modulestores
if len(all_stores) > 1:
return None
......
......@@ -1580,7 +1580,7 @@ class TestCourseCreation(SplitModuleTest):
self.assertIsNotNone(db_structure, "Didn't find course")
self.assertNotIn(BlockKey('course', 'course'), db_structure['blocks'])
self.assertIn(BlockKey('chapter', 'top'), db_structure['blocks'])
self.assertEqual(db_structure['blocks'][BlockKey('chapter', 'top')]['block_type'], 'chapter')
self.assertEqual(db_structure['blocks'][BlockKey('chapter', 'top')].block_type, 'chapter')
def test_create_id_dupe(self):
"""
......
......@@ -11,6 +11,7 @@ import json
import os
import pprint
import unittest
import inspect
from contextlib import contextmanager
from lazy import lazy
......@@ -222,19 +223,12 @@ class BulkAssertionManager(object):
the failures at once, rather than only seeing single failures.
"""
def __init__(self, test_case):
self._equal_expected = []
self._equal_actual = []
self._equal_assertions = []
self._test_case = test_case
def assertEqual(self, expected, actual, description=None):
if description is None:
description = u"{!r} does not equal {!r}".format(expected, actual)
if expected != actual:
self._equal_expected.append((description, expected))
self._equal_actual.append((description, actual))
def run_assertions(self):
super(BulkAssertionTest, self._test_case).assertEqual(self._equal_expected, self._equal_actual)
if len(self._equal_assertions) > 0:
raise AssertionError(self._equal_assertions)
class BulkAssertionTest(unittest.TestCase):
......@@ -262,7 +256,15 @@ class BulkAssertionTest(unittest.TestCase):
def assertEqual(self, expected, actual, message=None):
if self._manager is not None:
self._manager.assertEqual(expected, actual, message)
try:
super(BulkAssertionTest, self).assertEqual(expected, actual, message)
except Exception as error: # pylint: disable=broad-except
exc_stack = inspect.stack()[1]
if message is not None:
msg = '{} -> {}:{} -> {}'.format(message, exc_stack[1], exc_stack[2], unicode(error))
else:
msg = '{}:{} -> {}'.format(exc_stack[1], exc_stack[2], unicode(error))
self._manager._equal_assertions.append(msg) # pylint: disable=protected-access
else:
super(BulkAssertionTest, self).assertEqual(expected, actual, message)
assertEquals = assertEqual
......
......@@ -457,6 +457,7 @@ class TestLibraryContentAnalytics(LibraryContentTest):
# except for one of the two already assigned to the student:
keep_block_key = initial_blocks_assigned[0].location
keep_block_lib_usage_key, keep_block_lib_version = self.store.get_block_original_usage(keep_block_key)
self.assertIsNotNone(keep_block_lib_usage_key)
deleted_block_key = initial_blocks_assigned[1].location
self.library.children = [keep_block_lib_usage_key]
self.store.update_item(self.library, self.user_id)
......