Commit b033a214 by Calen Pennington

Merge pull request #5131 from cpennington/split/prod-import-export-tests

Split-mongo block uniqueness constraint [LMS-11364]
parents 4ebe2669 7b81dcc3
...@@ -198,9 +198,7 @@ class TemplateTests(unittest.TestCase): ...@@ -198,9 +198,7 @@ class TemplateTests(unittest.TestCase):
second_problem = persistent_factories.ItemFactory.create( second_problem = persistent_factories.ItemFactory.create(
display_name='problem 2', display_name='problem 2',
parent_location=BlockUsageLocator.make_relative( parent_location=sub.location.version_agnostic(),
test_course.location.version_agnostic(), block_type='problem', block_id=sub.location.block_id
),
user_id='testbot', category='problem', user_id='testbot', category='problem',
data="<problem></problem>" data="<problem></problem>"
) )
...@@ -208,6 +206,7 @@ class TemplateTests(unittest.TestCase): ...@@ -208,6 +206,7 @@ class TemplateTests(unittest.TestCase):
# The draft course root has 2 revisions: the published revision, and then the subsequent # The draft course root has 2 revisions: the published revision, and then the subsequent
# changes to the draft revision # changes to the draft revision
version_history = self.split_store.get_block_generations(test_course.location) version_history = self.split_store.get_block_generations(test_course.location)
self.assertIsNotNone(version_history)
self.assertEqual(version_history.locator.version_guid, test_course.location.version_guid) self.assertEqual(version_history.locator.version_guid, test_course.location.version_guid)
self.assertEqual(len(version_history.children), 1) self.assertEqual(len(version_history.children), 1)
self.assertEqual(version_history.children[0].children, []) self.assertEqual(version_history.children[0].children, [])
......
# Disable PyContract contract checking when running as a webserver
import contracts
contracts.disable_all()
import os import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cms.envs.aws") os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cms.envs.aws")
......
""" """
General utilities General utilities
""" """
import urllib
from collections import namedtuple
from contracts import contract, check
from opaque_keys.edx.locator import BlockUsageLocator
def encode_key_for_mongo(fieldname):
    """
    Return ``fieldname`` with characters mongo forbids in field names
    ('.' and '$') percent-encoded (e.g. '.' -> '%2e', '$' -> '%24').

    NOTE(review): the encoding is not injective for names that already
    contain a literal '%2e'/'%24' sequence -- decoding would turn those
    back into '.'/'$'. Presumably callers never pass such names; confirm.

    :param fieldname: an atomic field name. Don't pass structured paths;
        they would be flattened.
    """
    encoded = fieldname
    for forbidden in (".", "$"):
        escape = '%{:02x}'.format(ord(forbidden))
        encoded = encoded.replace(forbidden, escape)
    return encoded
class BlockKey(namedtuple('BlockKey', 'type id')):
    """
    An immutable (type, id) pair identifying a single block within a
    course structure: ``type`` is the xblock type (e.g. 'problem') and
    ``id`` is the block's id within the structure.
    """
    __slots__ = ()

    @contract(type="string[>0]")
    def __new__(cls, type, id):
        # The contract above enforces that `type` is a non-empty string;
        # `id` is unconstrained (it may be a LocalId for in-memory blocks).
        return super(BlockKey, cls).__new__(cls, type, id)

    @classmethod
    @contract(usage_key=BlockUsageLocator)
    def from_usage_key(cls, usage_key):
        """Build a BlockKey from the block_type/block_id of ``usage_key``."""
        return cls(usage_key.block_type, usage_key.block_id)
def decode_key_from_mongo(fieldname):
    """
    Invert :func:`encode_key_for_mongo`: turn the percent-escaped
    '.'/'$' sequences in ``fieldname`` back into their literal characters.

    :param fieldname: a field name with periods and dollar signs escaped
    """
    decoded = urllib.unquote(fieldname)
    return decoded
import sys import sys
import logging import logging
from contracts import contract, new_contract
from xblock.runtime import KvsFieldData from xblock.runtime import KvsFieldData
from xblock.fields import ScopeIds from xblock.fields import ScopeIds
from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator, DefinitionLocator from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator, DefinitionLocator
from xmodule.mako_module import MakoDescriptorSystem from xmodule.mako_module import MakoDescriptorSystem
from xmodule.error_module import ErrorDescriptor from xmodule.error_module import ErrorDescriptor
from xmodule.errortracker import exc_info_to_str from xmodule.errortracker import exc_info_to_str
from xmodule.modulestore.split_mongo import encode_key_for_mongo
from ..exceptions import ItemNotFoundError from ..exceptions import ItemNotFoundError
from .split_mongo_kvs import SplitMongoKVS from .split_mongo_kvs import SplitMongoKVS
from fs.osfs import OSFS from fs.osfs import OSFS
from .definition_lazy_loader import DefinitionLazyLoader from .definition_lazy_loader import DefinitionLazyLoader
from xmodule.modulestore.edit_info import EditInfoRuntimeMixin from xmodule.modulestore.edit_info import EditInfoRuntimeMixin
from xmodule.modulestore.split_mongo import BlockKey
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
new_contract('BlockUsageLocator', BlockUsageLocator)
new_contract('BlockKey', BlockKey)
class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
""" """
...@@ -58,30 +62,31 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -58,30 +62,31 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
# Compute inheritance # Compute inheritance
modulestore.inherit_settings( modulestore.inherit_settings(
course_entry['structure'].get('blocks', {}), course_entry['structure'].get('blocks', {}),
encode_key_for_mongo(course_entry['structure'].get('root')), course_entry['structure'].get('root'),
course_entry.setdefault('inherited_settings', {}), course_entry.setdefault('inherited_settings', {}),
) )
self.default_class = default_class self.default_class = default_class
self.local_modules = {} self.local_modules = {}
@contract(usage_key="BlockUsageLocator | BlockKey")
def _load_item(self, usage_key, course_entry_override=None, **kwargs): def _load_item(self, usage_key, course_entry_override=None, **kwargs):
# usage_key is either a UsageKey or just the block_id. if a usage_key, # usage_key is either a UsageKey or just the block_key. if a usage_key,
if isinstance(usage_key, BlockUsageLocator): if isinstance(usage_key, BlockUsageLocator):
# trust the passed in key to know the caller's expectations of which fields are filled in.
# particularly useful for strip_keys so may go away when we're version aware
course_key = usage_key.course_key
if isinstance(usage_key.block_id, LocalId): if isinstance(usage_key.block_id, LocalId):
try: try:
return self.local_modules[usage_key] return self.local_modules[usage_key]
except KeyError: except KeyError:
raise ItemNotFoundError raise ItemNotFoundError
else: else:
block_id = usage_key.block_id block_key = BlockKey.from_usage_key(usage_key)
else: else:
block_id = usage_key block_key = usage_key
if isinstance(usage_key, BlockUsageLocator):
# trust the passed in key to know the caller's expectations of which fields are filled in.
# particularly useful for strip_keys so may go away when we're version aware
course_key = usage_key.course_key
else:
course_info = course_entry_override or self.course_entry course_info = course_entry_override or self.course_entry
course_key = CourseLocator( course_key = CourseLocator(
version_guid=course_info['structure']['_id'], version_guid=course_info['structure']['_id'],
...@@ -90,27 +95,29 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -90,27 +95,29 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
run=course_info.get('run'), run=course_info.get('run'),
branch=course_info.get('branch'), branch=course_info.get('branch'),
) )
json_data = self.get_module_data(block_id, course_key)
class_ = self.load_block_type(json_data.get('category')) json_data = self.get_module_data(block_key, course_key)
class_ = self.load_block_type(json_data.get('block_type'))
# pass None for inherited_settings to signal that it should get the settings from cache # pass None for inherited_settings to signal that it should get the settings from cache
new_item = self.xblock_from_json(class_, course_key, block_id, json_data, None, course_entry_override, **kwargs) new_item = self.xblock_from_json(class_, course_key, block_key, json_data, None, course_entry_override, **kwargs)
return new_item return new_item
def get_module_data(self, block_id, course_key): @contract(block_key=BlockKey, course_key=CourseLocator)
def get_module_data(self, block_key, course_key):
""" """
Get block from module_data adding it to module_data if it's not already there but is in the structure Get block from module_data adding it to module_data if it's not already there but is in the structure
Raises: Raises:
ItemNotFoundError if block is not in the structure ItemNotFoundError if block is not in the structure
""" """
json_data = self.module_data.get(block_id) json_data = self.module_data.get(block_key)
if json_data is None: if json_data is None:
# deeper than initial descendant fetch or doesn't exist # deeper than initial descendant fetch or doesn't exist
self.modulestore.cache_items(self, [block_id], course_key, lazy=self.lazy) self.modulestore.cache_items(self, [block_key], course_key, lazy=self.lazy)
json_data = self.module_data.get(block_id) json_data = self.module_data.get(block_key)
if json_data is None: if json_data is None:
raise ItemNotFoundError(block_id) raise ItemNotFoundError(block_key)
return json_data return json_data
...@@ -125,8 +132,9 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -125,8 +132,9 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
# low; thus, the course_entry is most likely correct. If the thread is looking at > 1 named container # low; thus, the course_entry is most likely correct. If the thread is looking at > 1 named container
# pointing to the same structure, the access is likely to be chunky enough that the last known container # pointing to the same structure, the access is likely to be chunky enough that the last known container
# is the intended one when not given a course_entry_override; thus, the caching of the last branch/course id. # is the intended one when not given a course_entry_override; thus, the caching of the last branch/course id.
@contract(block_key="BlockKey | None")
def xblock_from_json( def xblock_from_json(
self, class_, course_key, block_id, json_data, inherited_settings, course_entry_override=None, **kwargs self, class_, course_key, block_key, json_data, inherited_settings, course_entry_override=None, **kwargs
): ):
if course_entry_override is None: if course_entry_override is None:
course_entry_override = self.course_entry course_entry_override = self.course_entry
...@@ -138,20 +146,23 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -138,20 +146,23 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
self.course_entry['run'] = course_entry_override['run'] self.course_entry['run'] = course_entry_override['run']
definition_id = json_data.get('definition') definition_id = json_data.get('definition')
block_type = json_data['category']
if block_id is not None: # If no usage id is provided, generate an in-memory id
if block_key is None:
block_key = BlockKey(json_data['block_type'], LocalId())
else:
if inherited_settings is None: if inherited_settings is None:
# see if there's a value in course_entry # see if there's a value in course_entry
if (block_type, block_id) in self.course_entry['inherited_settings']: if block_key in self.course_entry['inherited_settings']:
inherited_settings = self.course_entry['inherited_settings'][(block_type, block_id)] inherited_settings = self.course_entry['inherited_settings'][block_key]
elif (block_type, block_id) not in self.course_entry['inherited_settings']: elif block_key not in self.course_entry['inherited_settings']:
self.course_entry['inherited_settings'][(block_type, block_id)] = inherited_settings self.course_entry['inherited_settings'][block_key] = inherited_settings
if definition_id is not None and not json_data.get('definition_loaded', False): if definition_id is not None and not json_data.get('definition_loaded', False):
definition_loader = DefinitionLazyLoader( definition_loader = DefinitionLazyLoader(
self.modulestore, block_type, definition_id, self.modulestore, block_key.type, definition_id,
lambda fields: self.modulestore.convert_references_to_keys( lambda fields: self.modulestore.convert_references_to_keys(
course_key, self.load_block_type(block_type), course_key, self.load_block_type(block_key.type),
fields, self.course_entry['structure']['blocks'], fields, self.course_entry['structure']['blocks'],
) )
) )
...@@ -162,14 +173,10 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -162,14 +173,10 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
if definition_id is None: if definition_id is None:
definition_id = LocalId() definition_id = LocalId()
# If no usage id is provided, generate an in-memory id
if block_id is None:
block_id = LocalId()
block_locator = BlockUsageLocator( block_locator = BlockUsageLocator(
course_key, course_key,
block_type=block_type, block_type=block_key.type,
block_id=block_id, block_id=block_key.id,
) )
converted_fields = self.modulestore.convert_references_to_keys( converted_fields = self.modulestore.convert_references_to_keys(
...@@ -186,7 +193,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -186,7 +193,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
try: try:
module = self.construct_xblock_from_class( module = self.construct_xblock_from_class(
class_, class_,
ScopeIds(None, block_type, definition_id, block_locator), ScopeIds(None, block_key.type, definition_id, block_locator),
field_data, field_data,
) )
except Exception: except Exception:
...@@ -197,7 +204,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -197,7 +204,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
BlockUsageLocator( BlockUsageLocator(
CourseLocator(version_guid=course_entry_override['structure']['_id']), CourseLocator(version_guid=course_entry_override['structure']['_id']),
block_type='error', block_type='error',
block_id=block_id block_id=block_key.id
), ),
error_msg=exc_info_to_str(sys.exc_info()) error_msg=exc_info_to_str(sys.exc_info())
) )
...@@ -208,7 +215,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -208,7 +215,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
module.previous_version = edit_info.get('previous_version') module.previous_version = edit_info.get('previous_version')
module.update_version = edit_info.get('update_version') module.update_version = edit_info.get('update_version')
module.source_version = edit_info.get('source_version', None) module.source_version = edit_info.get('source_version', None)
module.definition_locator = DefinitionLocator(block_type, definition_id) module.definition_locator = DefinitionLocator(block_key.type, definition_id)
# decache any pending field settings # decache any pending field settings
module.save() module.save()
...@@ -235,7 +242,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -235,7 +242,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin
""" """
if not hasattr(xblock, '_subtree_edited_by'): if not hasattr(xblock, '_subtree_edited_by'):
json_data = self.module_data[xblock.location.block_id] json_data = self.module_data[BlockKey.from_usage_key(xblock.location)]
if '_subtree_edited_by' not in json_data.setdefault('edit_info', {}): if '_subtree_edited_by' not in json_data.setdefault('edit_info', {}):
self._compute_subtree_edited_internal( self._compute_subtree_edited_internal(
xblock.location.block_id, json_data, xblock.location.course_key xblock.location.block_id, json_data, xblock.location.course_key
...@@ -249,7 +256,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -249,7 +256,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin See :class: cms.lib.xblock.runtime.EditInfoRuntimeMixin
""" """
if not hasattr(xblock, '_subtree_edited_on'): if not hasattr(xblock, '_subtree_edited_on'):
json_data = self.module_data[xblock.location.block_id] json_data = self.module_data[BlockKey.from_usage_key(xblock.location)]
if '_subtree_edited_on' not in json_data.setdefault('edit_info', {}): if '_subtree_edited_on' not in json_data.setdefault('edit_info', {}):
self._compute_subtree_edited_internal( self._compute_subtree_edited_internal(
xblock.location.block_id, json_data, xblock.location.course_key xblock.location.block_id, json_data, xblock.location.course_key
...@@ -284,7 +291,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin): ...@@ -284,7 +291,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
max_by = json_data['edit_info']['edited_by'] max_by = json_data['edit_info']['edited_by']
for child in json_data.get('fields', {}).get('children', []): for child in json_data.get('fields', {}).get('children', []):
child_data = self.get_module_data(child, course_key) child_data = self.get_module_data(BlockKey(*child), course_key)
if '_subtree_edited_on' not in json_data.setdefault('edit_info', {}): if '_subtree_edited_on' not in json_data.setdefault('edit_info', {}):
self._compute_subtree_edited_internal(child, child_data, course_key) self._compute_subtree_edited_internal(child, child_data, course_key)
if child_data['edit_info']['_subtree_edited_on'] > max_date: if child_data['edit_info']['_subtree_edited_on'] > max_date:
......
...@@ -28,6 +28,6 @@ class DefinitionLazyLoader(object): ...@@ -28,6 +28,6 @@ class DefinitionLazyLoader(object):
def as_son(self): def as_son(self):
return SON(( return SON((
('category', self.definition_locator.block_type), ('block_type', self.definition_locator.block_type),
('definition', self.definition_locator.definition_id) ('definition', self.definition_locator.definition_id)
)) ))
...@@ -4,7 +4,68 @@ Segregation of pymongo functions from the data modeling mechanisms for split mod ...@@ -4,7 +4,68 @@ Segregation of pymongo functions from the data modeling mechanisms for split mod
import re import re
import pymongo import pymongo
from bson import son from bson import son
from contracts import check
from xmodule.exceptions import HeartbeatFailure from xmodule.exceptions import HeartbeatFailure
from xmodule.modulestore.split_mongo import BlockKey
def structure_from_mongo(structure):
    """
    Convert a structure document as stored in mongo into its in-memory form.

    * 'root' becomes a BlockKey (stored as a [block_type, block_id] pair).
    * 'blocks' becomes a {BlockKey: block_data} map (stored as a list,
      with each block's 'block_id' popped out of the block document).
    * each block's 'fields.children' entry becomes a list of BlockKeys.

    N.B. Other ReferenceFields are left untouched (we can't identify them
    at this level). The input document is modified in place and returned.
    """
    # Validate the raw document shape up front, before mutating anything.
    check('seq[2]', structure['root'])
    check('list(dict)', structure['blocks'])
    for block_doc in structure['blocks']:
        if 'children' in block_doc['fields']:
            check('list(list[2])', block_doc['fields']['children'])

    structure['root'] = BlockKey(*structure['root'])
    keyed_blocks = {}
    for block_doc in structure['blocks']:
        fields = block_doc['fields']
        if 'children' in fields:
            fields['children'] = [BlockKey(*pair) for pair in fields['children']]
        block_id = block_doc.pop('block_id')
        keyed_blocks[BlockKey(block_doc['block_type'], block_id)] = block_doc
    structure['blocks'] = keyed_blocks
    return structure
def structure_to_mongo(structure):
    """
    Convert an in-memory structure into its mongo-storable form.

    'blocks' ({BlockKey: block_data}) becomes a list of block documents,
    each carrying its key's type as 'block_type' (only if not already set)
    and its key's id as 'block_id'.

    'root' is left as-is: a BlockKey is a namedtuple, which mongo can
    store directly.

    The input structure is not modified; a shallow copy is returned.
    """
    # Validate the in-memory shape before converting.
    check('BlockKey', structure['root'])
    check('dict(BlockKey: dict)', structure['blocks'])
    for block in structure['blocks'].itervalues():
        if 'children' in block['fields']:
            check('list(BlockKey)', block['fields']['children'])

    flattened_blocks = []
    for block_key, block in structure['blocks'].iteritems():
        block_doc = dict(block)
        block_doc.setdefault('block_type', block_key.type)
        block_doc['block_id'] = block_key.id
        flattened_blocks.append(block_doc)

    mongo_structure = dict(structure)
    mongo_structure['blocks'] = flattened_blocks
    return mongo_structure
def definition_from_mongo(definition):
    """
    Converts 'fields.children' from a list [[block_type, block_id]] to [BlockKey]
    """
    # NOTE(review): as written this only shallow-copies the definition --
    # it never converts 'fields.children' and never returns the copy, so
    # the function returns None. Looks truncated/unfinished; confirm
    # against callers before relying on it.
    new_def = dict(definition)
class MongoConnection(object): class MongoConnection(object):
""" """
...@@ -55,7 +116,7 @@ class MongoConnection(object): ...@@ -55,7 +116,7 @@ class MongoConnection(object):
""" """
Get the structure from the persistence mechanism whose id is the given key Get the structure from the persistence mechanism whose id is the given key
""" """
return self.structures.find_one({'_id': key}) return structure_from_mongo(self.structures.find_one({'_id': key}))
def find_structures_by_id(self, ids): def find_structures_by_id(self, ids):
""" """
...@@ -64,7 +125,7 @@ class MongoConnection(object): ...@@ -64,7 +125,7 @@ class MongoConnection(object):
Arguments: Arguments:
ids (list): A list of structure ids ids (list): A list of structure ids
""" """
return self.structures.find({'_id': {'$in': ids}}) return [structure_from_mongo(structure) for structure in self.structures.find({'_id': {'$in': ids}})]
def find_structures_derived_from(self, ids): def find_structures_derived_from(self, ids):
""" """
...@@ -73,26 +134,32 @@ class MongoConnection(object): ...@@ -73,26 +134,32 @@ class MongoConnection(object):
Arguments: Arguments:
ids (list): A list of structure ids ids (list): A list of structure ids
""" """
return self.structures.find({'previous_version': {'$in': ids}}) return [structure_from_mongo(structure) for structure in self.structures.find({'previous_version': {'$in': ids}})]
def find_ancestor_structures(self, original_version, block_id): def find_ancestor_structures(self, original_version, block_key):
""" """
Find all structures that originated from ``original_version`` that contain ``block_id``. Find all structures that originated from ``original_version`` that contain ``block_key``.
Arguments: Arguments:
original_version (str or ObjectID): The id of a structure original_version (str or ObjectID): The id of a structure
block_id (str): The id of the block in question block_key (BlockKey): The id of the block in question
""" """
return self.structures.find({ return [structure_from_mongo(structure) for structure in self.structures.find({
'original_version': original_version, 'original_version': original_version,
'blocks.{}.edit_info.update_version'.format(block_id): {'$exists': True} 'blocks': {
}) '$elemMatch': {
'block_id': block_key.id,
'block_type': block_key.type,
'edit_info.update_version': {'$exists': True},
}
}
})]
def upsert_structure(self, structure): def upsert_structure(self, structure):
""" """
Update the db record for structure, creating that record if it doesn't already exist Update the db record for structure, creating that record if it doesn't already exist
""" """
self.structures.update({'_id': structure['_id']}, structure, upsert=True) self.structures.update({'_id': structure['_id']}, structure_to_mongo(structure), upsert=True)
def get_course_index(self, key, ignore_case=False): def get_course_index(self, key, ignore_case=False):
""" """
......
...@@ -14,7 +14,7 @@ Representation: ...@@ -14,7 +14,7 @@ Representation:
should change the search targets to SplitMongoModuleStore.SEARCH_TARGET dict should change the search targets to SplitMongoModuleStore.SEARCH_TARGET dict
* structure: * structure:
** '_id': an ObjectId (guid), ** '_id': an ObjectId (guid),
** 'root': root_block_id (string of key in 'blocks' for the root of this structure, ** 'root': BlockKey (the block_type and block_id of the root block in the 'blocks' dictionary)
** 'previous_version': the structure from which this one was derived. For published courses, this ** 'previous_version': the structure from which this one was derived. For published courses, this
points to the previously published version of the structure not the draft published to this. points to the previously published version of the structure not the draft published to this.
** 'original_version': the original structure id in the previous_version relation. Is a pseudo object ** 'original_version': the original structure id in the previous_version relation. Is a pseudo object
...@@ -22,10 +22,11 @@ Representation: ...@@ -22,10 +22,11 @@ Representation:
** 'edited_by': user_id of the user whose change caused the creation of this structure version, ** 'edited_by': user_id of the user whose change caused the creation of this structure version,
** 'edited_on': the datetime for the change causing this creation of this structure version, ** 'edited_on': the datetime for the change causing this creation of this structure version,
** 'blocks': dictionary of xblocks in this structure: ** 'blocks': dictionary of xblocks in this structure:
*** block_id: dictionary of block settings and children: *** BlockKey: dictionary of block settings and children:
**** 'category': the xblock type id **** 'block_type': the xblock type id
**** 'definition': the db id of the record containing the content payload for this xblock **** 'definition': the db id of the record containing the content payload for this xblock
**** 'fields': the Scope.settings and children field values **** 'fields': the Scope.settings and children field values
***** 'children': This is stored as a list of (block_type, block_id) pairs
**** 'edit_info': dictionary: **** 'edit_info': dictionary:
***** 'edited_on': when was this xblock's fields last changed (will be edited_on value of ***** 'edited_on': when was this xblock's fields last changed (will be edited_on value of
update_version structure) update_version structure)
...@@ -40,7 +41,7 @@ Representation: ...@@ -40,7 +41,7 @@ Representation:
***** 'source_version': the guid for the structure was copied/published into this block ***** 'source_version': the guid for the structure was copied/published into this block
* definition: shared content with revision history for xblock content fields * definition: shared content with revision history for xblock content fields
** '_id': definition_id (guid), ** '_id': definition_id (guid),
** 'category': xblock type id ** 'block_type': xblock type id
** 'fields': scope.content (and possibly other) field values. ** 'fields': scope.content (and possibly other) field values.
** 'edit_info': dictionary: ** 'edit_info': dictionary:
*** 'edited_by': user_id whose edit caused this version of the definition, *** 'edited_by': user_id whose edit caused this version of the definition,
...@@ -49,12 +50,13 @@ Representation: ...@@ -49,12 +50,13 @@ Representation:
*** 'original_version': definition_id of the root of the previous version relation on this *** 'original_version': definition_id of the root of the previous version relation on this
definition. Acts as a pseudo-object identifier. definition. Acts as a pseudo-object identifier.
""" """
import copy
import threading import threading
import datetime import datetime
import logging import logging
from contracts import contract, new_contract
from importlib import import_module from importlib import import_module
from path import path from path import path
import copy
from pytz import UTC from pytz import UTC
from bson.objectid import ObjectId from bson.objectid import ObjectId
...@@ -73,9 +75,8 @@ from xmodule.modulestore import ( ...@@ -73,9 +75,8 @@ from xmodule.modulestore import (
from ..exceptions import ItemNotFoundError from ..exceptions import ItemNotFoundError
from .caching_descriptor_system import CachingDescriptorSystem from .caching_descriptor_system import CachingDescriptorSystem
from xmodule.modulestore.split_mongo.mongo_connection import MongoConnection from xmodule.modulestore.split_mongo.mongo_connection import MongoConnection, BlockKey
from xmodule.error_module import ErrorDescriptor from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore.split_mongo import encode_key_for_mongo, decode_key_from_mongo
from _collections import defaultdict from _collections import defaultdict
from types import NoneType from types import NoneType
...@@ -104,6 +105,10 @@ log = logging.getLogger(__name__) ...@@ -104,6 +105,10 @@ log = logging.getLogger(__name__)
EXCLUDE_ALL = '*' EXCLUDE_ALL = '*'
new_contract('BlockUsageLocator', BlockUsageLocator)
new_contract('BlockKey', BlockKey)
class SplitBulkWriteRecord(BulkOpsRecord): class SplitBulkWriteRecord(BulkOpsRecord):
def __init__(self): def __init__(self):
super(SplitBulkWriteRecord, self).__init__() super(SplitBulkWriteRecord, self).__init__()
...@@ -397,15 +402,15 @@ class SplitBulkWriteMixin(BulkOperationsMixin): ...@@ -397,15 +402,15 @@ class SplitBulkWriteMixin(BulkOperationsMixin):
) )
return structures return structures
def find_ancestor_structures(self, original_version, block_id): def find_ancestor_structures(self, original_version, block_key):
""" """
Find all structures that originated from ``original_version`` that contain ``block_id``. Find all structures that originated from ``original_version`` that contain ``block_key``.
Any structure found in the cache will be preferred to a structure with the same id from the database. Any structure found in the cache will be preferred to a structure with the same id from the database.
Arguments: Arguments:
original_version (str or ObjectID): The id of a structure original_version (str or ObjectID): The id of a structure
block_id (str): The id of the block in question block_key (BlockKey): The id of the block in question
""" """
found_structure_ids = set() found_structure_ids = set()
structures = [] structures = []
...@@ -418,10 +423,10 @@ class SplitBulkWriteMixin(BulkOperationsMixin): ...@@ -418,10 +423,10 @@ class SplitBulkWriteMixin(BulkOperationsMixin):
if structure['original_version'] != original_version: if structure['original_version'] != original_version:
continue continue
if block_id not in structure.get('blocks', {}): if block_key not in structure.get('blocks', {}):
continue continue
if 'update_version' not in structure['blocks'][block_id].get('edit_info', {}): if 'update_version' not in structure['blocks'][block_key].get('edit_info', {}):
continue continue
structures.append(structure) structures.append(structure)
...@@ -429,7 +434,7 @@ class SplitBulkWriteMixin(BulkOperationsMixin): ...@@ -429,7 +434,7 @@ class SplitBulkWriteMixin(BulkOperationsMixin):
structures.extend( structures.extend(
structure structure
for structure in self.db_connection.find_ancestor_structures(original_version, block_id) for structure in self.db_connection.find_ancestor_structures(original_version, block_key)
if structure['_id'] not in found_structure_ids if structure['_id'] not in found_structure_ids
) )
return structures return structures
...@@ -507,7 +512,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -507,7 +512,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
Arguments: Arguments:
system: a CachingDescriptorSystem system: a CachingDescriptorSystem
base_block_ids: list of block_ids to fetch base_block_ids: list of BlockIds to fetch
course_key: the destination course providing the context course_key: the destination course providing the context
depth: how deep below these to prefetch depth: how deep below these to prefetch
lazy: whether to fetch definitions or use placeholders lazy: whether to fetch definitions or use placeholders
...@@ -534,7 +539,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -534,7 +539,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
for block in new_module_data.itervalues(): for block in new_module_data.itervalues():
if block['definition'] in definitions: if block['definition'] in definitions:
converted_fields = self.convert_references_to_keys( converted_fields = self.convert_references_to_keys(
course_key, system.load_block_type(block['category']), course_key, system.load_block_type(block['block_type']),
definitions[block['definition']].get('fields'), definitions[block['definition']].get('fields'),
system.course_entry['structure']['blocks'], system.course_entry['structure']['blocks'],
) )
...@@ -544,7 +549,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -544,7 +549,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
system.module_data.update(new_module_data) system.module_data.update(new_module_data)
return system.module_data return system.module_data
def _load_items(self, course_entry, block_ids, depth=0, lazy=True, **kwargs): def _load_items(self, course_entry, block_keys, depth=0, lazy=True, **kwargs):
''' '''
Load & cache the given blocks from the course. Prefetch down to the Load & cache the given blocks from the course. Prefetch down to the
given depth. Load the definitions into each block if lazy is False; given depth. Load the definitions into each block if lazy is False;
...@@ -561,8 +566,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -561,8 +566,8 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
run=course_entry.get('run'), run=course_entry.get('run'),
branch=course_entry.get('branch'), branch=course_entry.get('branch'),
) )
self.cache_items(runtime, block_ids, course_key, depth, lazy) self.cache_items(runtime, block_keys, course_key, depth, lazy)
return [runtime.load_item(block_id, course_entry, **kwargs) for block_id in block_ids] return [runtime.load_item(block_key, course_entry, **kwargs) for block_key in block_keys]
def _get_cache(self, course_version_guid): def _get_cache(self, course_version_guid):
""" """
...@@ -747,7 +752,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -747,7 +752,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# this error only occurs if the course does not exist # this error only occurs if the course does not exist
return False return False
return self._get_block_from_structure(course_structure, usage_key.block_id) is not None return self._get_block_from_structure(course_structure, BlockKey.from_usage_key(usage_key)) is not None
def get_item(self, usage_key, depth=0, **kwargs): def get_item(self, usage_key, depth=0, **kwargs):
""" """
...@@ -763,7 +768,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -763,7 +768,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
raise ItemNotFoundError(usage_key) raise ItemNotFoundError(usage_key)
course = self._lookup_course(usage_key.course_key) course = self._lookup_course(usage_key.course_key)
items = self._load_items(course, [usage_key.block_id], depth, lazy=True, **kwargs) items = self._load_items(course, [BlockKey.from_usage_key(usage_key)], depth, lazy=True, **kwargs)
if len(items) == 0: if len(items) == 0:
raise ItemNotFoundError(usage_key) raise ItemNotFoundError(usage_key)
elif len(items) > 1: elif len(items) > 1:
...@@ -814,12 +819,17 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -814,12 +819,17 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
settings = {} settings = {}
if 'name' in qualifiers: if 'name' in qualifiers:
# odd case where we don't search just confirm # odd case where we don't search just confirm
block_id = qualifiers.pop('name') block_name = qualifiers.pop('name')
block = course['structure']['blocks'].get(block_id) block_ids = []
if _block_matches_all(block): for block_id, block in course['structure']['blocks'].iteritems():
return self._load_items(course, [block_id], lazy=True, **kwargs) if block_name == block_id.id and _block_matches_all(block):
else: block_ids.append(block_id)
return []
return self._load_items(course, block_ids, lazy=True, **kwargs)
if 'category' in qualifiers:
qualifiers['block_type'] = qualifiers.pop('category')
# don't expect caller to know that children are in fields # don't expect caller to know that children are in fields
if 'children' in qualifiers: if 'children' in qualifiers:
settings['children'] = qualifiers.pop('children') settings['children'] = qualifiers.pop('children')
...@@ -841,13 +851,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -841,13 +851,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
:param locator: BlockUsageLocator restricting search scope :param locator: BlockUsageLocator restricting search scope
''' '''
course = self._lookup_course(locator.course_key) course = self._lookup_course(locator.course_key)
parent_id = self._get_parent_from_structure(locator.block_id, course['structure']) parent_id = self._get_parent_from_structure(BlockKey.from_usage_key(locator), course['structure'])
if parent_id is None: if parent_id is None:
return None return None
return BlockUsageLocator.make_relative( return BlockUsageLocator.make_relative(
locator, locator,
block_type=course['structure']['blocks'][parent_id].get('category'), block_type=parent_id.type,
block_id=decode_key_from_mongo(parent_id), block_id=parent_id.id,
) )
def get_orphans(self, course_key, **kwargs): def get_orphans(self, course_key, **kwargs):
...@@ -856,17 +866,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -856,17 +866,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
""" """
detached_categories = [name for name, __ in XBlock.load_tagged_classes("detached")] detached_categories = [name for name, __ in XBlock.load_tagged_classes("detached")]
course = self._lookup_course(course_key) course = self._lookup_course(course_key)
items = {decode_key_from_mongo(block_id) for block_id in course['structure']['blocks'].keys()} items = set(course['structure']['blocks'].keys())
items.remove(course['structure']['root']) items.remove(course['structure']['root'])
blocks = course['structure']['blocks'] blocks = course['structure']['blocks']
for block_id, block_data in blocks.iteritems(): for block_id, block_data in blocks.iteritems():
items.difference_update(block_data.get('fields', {}).get('children', [])) items.difference_update(BlockKey(*child) for child in block_data.get('fields', {}).get('children', []))
if block_data['category'] in detached_categories: if block_data['block_type'] in detached_categories:
items.discard(decode_key_from_mongo(block_id)) items.discard(block_id)
return [ return [
BlockUsageLocator( course_key.make_usage_key(block_type=block_id.type, block_id=block_id.id)
course_key=course_key, block_type=blocks[block_id]['category'], block_id=block_id
)
for block_id in items for block_id in items
] ]
...@@ -967,16 +975,16 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -967,16 +975,16 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
''' '''
# course_agnostic means we don't care if the head and version don't align, trust the version # course_agnostic means we don't care if the head and version don't align, trust the version
course_struct = self._lookup_course(block_locator.course_key.course_agnostic())['structure'] course_struct = self._lookup_course(block_locator.course_key.course_agnostic())['structure']
block_id = block_locator.block_id block_key = BlockKey.from_usage_key(block_locator)
all_versions_with_block = self.find_ancestor_structures( all_versions_with_block = self.find_ancestor_structures(
original_version=course_struct['original_version'], original_version=course_struct['original_version'],
block_id=block_id block_key=block_key
) )
# find (all) root versions and build map {previous: {successors}..} # find (all) root versions and build map {previous: {successors}..}
possible_roots = [] possible_roots = []
result = {} result = {}
for version in all_versions_with_block: for version in all_versions_with_block:
block_payload = self._get_block_from_structure(version, block_id) block_payload = self._get_block_from_structure(version, block_key)
if version['_id'] == block_payload['edit_info']['update_version']: if version['_id'] == block_payload['edit_info']['update_version']:
if block_payload['edit_info'].get('previous_version') is None: if block_payload['edit_info'].get('previous_version') is None:
# this was when this block was created # this was when this block was created
...@@ -988,7 +996,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -988,7 +996,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# more than one possible_root means usage was added and deleted > 1x. # more than one possible_root means usage was added and deleted > 1x.
if len(possible_roots) > 1: if len(possible_roots) > 1:
# find the history segment including block_locator's version # find the history segment including block_locator's version
element_to_find = self._get_block_from_structure(course_struct, block_id)['edit_info']['update_version'] element_to_find = self._get_block_from_structure(course_struct, block_key)['edit_info']['update_version']
if element_to_find in possible_roots: if element_to_find in possible_roots:
possible_roots = [element_to_find] possible_roots = [element_to_find]
for possibility in possible_roots: for possibility in possible_roots:
...@@ -1026,7 +1034,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1026,7 +1034,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
new_id = ObjectId() new_id = ObjectId()
document = { document = {
'_id': new_id, '_id': new_id,
"category": category, "block_type": category,
"fields": new_def_data, "fields": new_def_data,
"edit_info": { "edit_info": {
"edited_by": user_id, "edited_by": user_id,
...@@ -1061,7 +1069,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1061,7 +1069,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
if old_definition is None: if old_definition is None:
raise ItemNotFoundError(definition_locator) raise ItemNotFoundError(definition_locator)
new_def_data = self._serialize_fields(old_definition['category'], new_def_data) new_def_data = self._serialize_fields(old_definition['block_type'], new_def_data)
if needs_saved(): if needs_saved():
# new id to create new version # new id to create new version
old_definition['_id'] = ObjectId() old_definition['_id'] = ObjectId()
...@@ -1072,11 +1080,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1072,11 +1080,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
old_definition['edit_info']['previous_version'] = definition_locator.definition_id old_definition['edit_info']['previous_version'] = definition_locator.definition_id
old_definition['schema_version'] = self.SCHEMA_VERSION old_definition['schema_version'] = self.SCHEMA_VERSION
self.db_connection.insert_definition(old_definition) self.db_connection.insert_definition(old_definition)
return DefinitionLocator(old_definition['category'], old_definition['_id']), True return DefinitionLocator(old_definition['block_type'], old_definition['_id']), True
else: else:
return definition_locator, False return definition_locator, False
def _generate_block_id(self, course_blocks, category): def _generate_block_key(self, course_blocks, category):
""" """
Generate a somewhat readable block id unique w/in this course using the category Generate a somewhat readable block id unique w/in this course using the category
:param course_blocks: the current list of blocks. :param course_blocks: the current list of blocks.
...@@ -1087,11 +1095,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1087,11 +1095,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# {category: last_serial...} # {category: last_serial...}
# A potential confusion is if the name incorporates the parent's name, then if the child # A potential confusion is if the name incorporates the parent's name, then if the child
# moves, its id won't change and will be confusing # moves, its id won't change and will be confusing
# NOTE2: this assumes category will never contain a $ nor a period.
serial = 1 serial = 1
while category + str(serial) in course_blocks: while True:
potential_key = BlockKey(category, "{}{}".format(category, serial))
if potential_key not in course_blocks:
return potential_key
serial += 1 serial += 1
return category + str(serial)
def create_item( def create_item(
self, user_id, course_key, block_type, block_id=None, self, user_id, course_key, block_type, block_id=None,
...@@ -1169,17 +1178,16 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1169,17 +1178,16 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# generate usage id # generate usage id
if block_id is not None: if block_id is not None:
if encode_key_for_mongo(block_id) in new_structure['blocks']: block_key = BlockKey(block_type, block_id)
if block_key in new_structure['blocks']:
raise DuplicateItemError(block_id, self, 'structures') raise DuplicateItemError(block_id, self, 'structures')
else:
new_block_id = block_id
else: else:
new_block_id = self._generate_block_id(new_structure['blocks'], block_type) block_key = self._generate_block_key(new_structure['blocks'], block_type)
block_fields = partitioned_fields.get(Scope.settings, {}) block_fields = partitioned_fields.get(Scope.settings, {})
if Scope.children in partitioned_fields: if Scope.children in partitioned_fields:
block_fields.update(partitioned_fields[Scope.children]) block_fields.update(partitioned_fields[Scope.children])
self._update_block_in_structure(new_structure, new_block_id, self._new_block( self._update_block_in_structure(new_structure, block_key, self._new_block(
user_id, user_id,
block_type, block_type,
block_fields, block_fields,
...@@ -1198,13 +1206,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1198,13 +1206,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
item_loc = BlockUsageLocator( item_loc = BlockUsageLocator(
course_key.version_agnostic(), course_key.version_agnostic(),
block_type=block_type, block_type=block_type,
block_id=new_block_id, block_id=block_key.id,
) )
else: else:
item_loc = BlockUsageLocator( item_loc = BlockUsageLocator(
CourseLocator(version_guid=new_id), CourseLocator(version_guid=new_id),
block_type=block_type, block_type=block_type,
block_id=new_block_id, block_id=block_key.id,
) )
# reconstruct the new_item from the cache # reconstruct the new_item from the cache
...@@ -1235,12 +1243,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1235,12 +1243,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
new_structure = self._lookup_course(xblock.location.course_key)['structure'] new_structure = self._lookup_course(xblock.location.course_key)['structure']
# add new block as child and update parent's version # add new block as child and update parent's version
encoded_block_id = encode_key_for_mongo(parent_usage_key.block_id) block_id = BlockKey.from_usage_key(parent_usage_key)
if encoded_block_id not in new_structure['blocks']: if block_id not in new_structure['blocks']:
raise ItemNotFoundError(parent_usage_key) raise ItemNotFoundError(parent_usage_key)
parent = new_structure['blocks'][encoded_block_id] parent = new_structure['blocks'][block_id]
parent['fields'].setdefault('children', []).append(xblock.location.block_id) parent['fields'].setdefault('children', []).append(BlockKey.from_usage_key(xblock.location))
if parent['edit_info']['update_version'] != new_structure['_id']: if parent['edit_info']['update_version'] != new_structure['_id']:
# if the parent hadn't been previously changed in this bulk transaction, indicate that it's # if the parent hadn't been previously changed in this bulk transaction, indicate that it's
# part of the bulk transaction # part of the bulk transaction
...@@ -1344,7 +1352,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1344,7 +1352,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
definition_id = ObjectId() definition_id = ObjectId()
definition_entry = { definition_entry = {
'_id': definition_id, '_id': definition_id,
'category': root_category, 'block_type': root_category,
'fields': definition_fields, 'fields': definition_fields,
'edit_info': { 'edit_info': {
'edited_by': user_id, 'edited_by': user_id,
...@@ -1358,8 +1366,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1358,8 +1366,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
draft_structure = self._new_structure( draft_structure = self._new_structure(
user_id, user_id,
root_block_id or SplitMongoModuleStore.DEFAULT_ROOT_BLOCK_ID, BlockKey(
root_category, root_category,
root_block_id or SplitMongoModuleStore.DEFAULT_ROOT_BLOCK_ID,
),
block_fields, block_fields,
definition_id definition_id
) )
...@@ -1376,8 +1386,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1376,8 +1386,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
draft_structure = self._lookup_course(draft_version)['structure'] draft_structure = self._lookup_course(draft_version)['structure']
draft_structure = self.version_structure(locator, draft_structure, user_id) draft_structure = self.version_structure(locator, draft_structure, user_id)
new_id = draft_structure['_id'] new_id = draft_structure['_id']
encoded_block_id = encode_key_for_mongo(draft_structure['root']) root_block = draft_structure['blocks'][draft_structure['root']]
root_block = draft_structure['blocks'][encoded_block_id]
if block_fields is not None: if block_fields is not None:
root_block['fields'].update(self._serialize_fields(root_category, block_fields)) root_block['fields'].update(self._serialize_fields(root_category, block_fields))
if definition_fields is not None: if definition_fields is not None:
...@@ -1440,45 +1449,45 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1440,45 +1449,45 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
""" """
partitioned_fields = self.partition_xblock_fields_by_scope(descriptor) partitioned_fields = self.partition_xblock_fields_by_scope(descriptor)
return self._update_item_from_fields( return self._update_item_from_fields(
user_id, descriptor.location.course_key, descriptor.location.block_type, descriptor.location.block_id, user_id, descriptor.location.course_key, BlockKey.from_usage_key(descriptor.location),
partitioned_fields, descriptor.definition_locator, allow_not_found, force, **kwargs partitioned_fields, descriptor.definition_locator, allow_not_found, force, **kwargs
) or descriptor ) or descriptor
def _update_item_from_fields( def _update_item_from_fields(
self, user_id, course_key, block_type, block_id, partitioned_fields, self, user_id, course_key, block_key, partitioned_fields,
definition_locator, allow_not_found, force, **kwargs definition_locator, allow_not_found, force, **kwargs
): ):
""" """
Broke out guts of update_item for short-circuited internal use only Broke out guts of update_item for short-circuited internal use only
""" """
with self.bulk_operations(course_key): with self.bulk_operations(course_key):
if allow_not_found and isinstance(block_id, (LocalId, NoneType)): if allow_not_found and isinstance(block_key.id, (LocalId, NoneType)):
fields = {} fields = {}
for subfields in partitioned_fields.itervalues(): for subfields in partitioned_fields.itervalues():
fields.update(subfields) fields.update(subfields)
return self.create_item( return self.create_item(
user_id, course_key, block_type, fields=fields, force=force user_id, course_key, block_key.type, fields=fields, force=force
) )
original_structure = self._lookup_course(course_key)['structure'] original_structure = self._lookup_course(course_key)['structure']
index_entry = self._get_index_if_valid(course_key, force) index_entry = self._get_index_if_valid(course_key, force)
original_entry = self._get_block_from_structure(original_structure, block_id) original_entry = self._get_block_from_structure(original_structure, block_key)
if original_entry is None: if original_entry is None:
if allow_not_found: if allow_not_found:
fields = {} fields = {}
for subfields in partitioned_fields.itervalues(): for subfields in partitioned_fields.itervalues():
fields.update(subfields) fields.update(subfields)
return self.create_item( return self.create_item(
user_id, course_key, block_type, block_id=block_id, fields=fields, force=force, user_id, course_key, block_key.type, block_id=block_key.id, fields=fields, force=force,
) )
else: else:
raise ItemNotFoundError(course_key.make_usage_key(block_type, block_id)) raise ItemNotFoundError(course_key.make_usage_key(block_key.type, block_key.id))
is_updated = False is_updated = False
definition_fields = partitioned_fields[Scope.content] definition_fields = partitioned_fields[Scope.content]
if definition_locator is None: if definition_locator is None:
definition_locator = DefinitionLocator(original_entry['category'], original_entry['definition']) definition_locator = DefinitionLocator(original_entry['block_type'], original_entry['definition'])
if definition_fields: if definition_fields:
definition_locator, is_updated = self.update_definition_from_data( definition_locator, is_updated = self.update_definition_from_data(
definition_locator, definition_fields, user_id definition_locator, definition_fields, user_id
...@@ -1486,13 +1495,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1486,13 +1495,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# check metadata # check metadata
settings = partitioned_fields[Scope.settings] settings = partitioned_fields[Scope.settings]
settings = self._serialize_fields(block_type, settings) settings = self._serialize_fields(block_key.type, settings)
if not is_updated: if not is_updated:
is_updated = self._compare_settings(settings, original_entry['fields']) is_updated = self._compare_settings(settings, original_entry['fields'])
# check children # check children
if partitioned_fields.get(Scope.children, {}): # purposely not 'is not None' if partitioned_fields.get(Scope.children, {}): # purposely not 'is not None'
serialized_children = [child.block_id for child in partitioned_fields[Scope.children]['children']] serialized_children = [BlockKey.from_usage_key(child) for child in partitioned_fields[Scope.children]['children']]
is_updated = is_updated or original_entry['fields'].get('children', []) != serialized_children is_updated = is_updated or original_entry['fields'].get('children', []) != serialized_children
if is_updated: if is_updated:
settings['children'] = serialized_children settings['children'] = serialized_children
...@@ -1500,7 +1509,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1500,7 +1509,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# if updated, rev the structure # if updated, rev the structure
if is_updated: if is_updated:
new_structure = self.version_structure(course_key, original_structure, user_id) new_structure = self.version_structure(course_key, original_structure, user_id)
block_data = self._get_block_from_structure(new_structure, block_id) block_data = self._get_block_from_structure(new_structure, block_key)
block_data["definition"] = definition_locator.definition_id block_data["definition"] = definition_locator.definition_id
block_data["fields"] = settings block_data["fields"] = settings
...@@ -1524,7 +1533,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1524,7 +1533,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
course_key = CourseLocator(version_guid=new_id) course_key = CourseLocator(version_guid=new_id)
# fetch and return the new item--fetching is unnecessary but a good qc step # fetch and return the new item--fetching is unnecessary but a good qc step
new_locator = course_key.make_usage_key(block_type, block_id) new_locator = course_key.make_usage_key(block_key.type, block_key.id)
return self.get_item(new_locator, **kwargs) return self.get_item(new_locator, **kwargs)
else: else:
return None return None
...@@ -1550,7 +1559,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1550,7 +1559,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
xblock_class = runtime.load_block_type(block_type) xblock_class = runtime.load_block_type(block_type)
json_data = { json_data = {
'category': block_type, 'block_type': block_type,
'fields': {}, 'fields': {},
} }
if definition_id is not None: if definition_id is not None:
...@@ -1566,7 +1575,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1566,7 +1575,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
inherited_settings[field_name] = fields[field_name] inherited_settings[field_name] = fields[field_name]
new_block = runtime.xblock_from_json( new_block = runtime.xblock_from_json(
xblock_class, course_key, block_id, json_data, inherited_settings, **kwargs xblock_class,
course_key,
BlockKey(block_type, block_id) if block_id else None,
json_data,
inherited_settings,
**kwargs
) )
for field_name, value in (fields or {}).iteritems(): for field_name, value in (fields or {}).iteritems():
setattr(new_block, field_name, value) setattr(new_block, field_name, value)
...@@ -1634,13 +1648,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1634,13 +1648,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
is_updated = True is_updated = True
block_id = getattr(xblock.scope_ids.usage_id.block_id, 'block_id', None) block_id = getattr(xblock.scope_ids.usage_id.block_id, 'block_id', None)
if block_id is None: if block_id is None:
block_id = self._generate_block_id(structure_blocks, xblock.category) block_key = self._generate_block_key(structure_blocks, xblock.scope_ids.block_type)
encoded_block_id = encode_key_for_mongo(block_id) else:
new_usage_id = xblock.scope_ids.usage_id.replace(block_id=block_id) block_key = BlockKey(xblock.scope_ids.block_type, block_id)
new_usage_id = xblock.scope_ids.usage_id.replace(block_id=block_key.id)
xblock.scope_ids = xblock.scope_ids._replace(usage_id=new_usage_id) # pylint: disable=protected-access xblock.scope_ids = xblock.scope_ids._replace(usage_id=new_usage_id) # pylint: disable=protected-access
else: else:
is_new = False is_new = False
encoded_block_id = encode_key_for_mongo(xblock.location.block_id) block_key = BlockKey(xblock.scope_ids.block_type, xblock.scope_ids.usage_id.block_id)
children = [] children = []
if xblock.has_children: if xblock.has_children:
...@@ -1648,15 +1663,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1648,15 +1663,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
if isinstance(child.block_id, LocalId): if isinstance(child.block_id, LocalId):
child_block = xblock.system.get_block(child) child_block = xblock.system.get_block(child)
is_updated = self._persist_subdag(child_block, user_id, structure_blocks, new_id) or is_updated is_updated = self._persist_subdag(child_block, user_id, structure_blocks, new_id) or is_updated
children.append(child_block.location.block_id) children.append(BlockKey.from_usage_key(child_block.location))
else: else:
children.append(child.block_id) children.append(BlockKey.from_usage_key(child))
is_updated = is_updated or structure_blocks[encoded_block_id]['fields']['children'] != children is_updated = is_updated or structure_blocks[block_key]['fields']['children'] != children
block_fields = partitioned_fields[Scope.settings] block_fields = partitioned_fields[Scope.settings]
block_fields = self._serialize_fields(xblock.category, block_fields) block_fields = self._serialize_fields(xblock.category, block_fields)
if not is_new and not is_updated: if not is_new and not is_updated:
is_updated = self._compare_settings(block_fields, structure_blocks[encoded_block_id]['fields']) is_updated = self._compare_settings(block_fields, structure_blocks[block_key]['fields'])
if children: if children:
block_fields['children'] = children block_fields['children'] = children
...@@ -1671,12 +1686,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1671,12 +1686,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
raw=True raw=True
) )
else: else:
block_info = structure_blocks[encoded_block_id] block_info = structure_blocks[block_key]
block_info['fields'] = block_fields block_info['fields'] = block_fields
block_info['definition'] = xblock.definition_locator.definition_id block_info['definition'] = xblock.definition_locator.definition_id
self.version_block(block_info, user_id, new_id) self.version_block(block_info, user_id, new_id)
structure_blocks[encoded_block_id] = block_info structure_blocks[block_key] = block_info
return is_updated return is_updated
...@@ -1739,13 +1754,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1739,13 +1754,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
raise ItemNotFoundError(destination_course) raise ItemNotFoundError(destination_course)
if destination_course.branch not in index_entry['versions']: if destination_course.branch not in index_entry['versions']:
# must be copying the dag root if there's no current dag # must be copying the dag root if there's no current dag
root_block_id = source_structure['root'] root_block_key = source_structure['root']
if not any(root_block_id == subtree.block_id for subtree in subtree_list): if not any(root_block_key == BlockKey.from_usage_key(subtree) for subtree in subtree_list):
raise ItemNotFoundError(u'Must publish course root {}'.format(root_block_id)) raise ItemNotFoundError(u'Must publish course root {}'.format(root_block_key))
root_source = source_structure['blocks'][root_block_id] root_source = source_structure['blocks'][root_block_key]
# create branch # create branch
destination_structure = self._new_structure( destination_structure = self._new_structure(
user_id, root_block_id, root_category=root_source['category'], user_id, root_block_key,
# leave off the fields b/c the children must be filtered # leave off the fields b/c the children must be filtered
definition_id=root_source['definition'], definition_id=root_source['definition'],
) )
...@@ -1754,14 +1769,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1754,14 +1769,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
destination_structure = self.version_structure(destination_course, destination_structure, user_id) destination_structure = self.version_structure(destination_course, destination_structure, user_id)
if blacklist != EXCLUDE_ALL: if blacklist != EXCLUDE_ALL:
blacklist = [shunned.block_id for shunned in blacklist or []] blacklist = [BlockKey.from_usage_key(shunned) for shunned in blacklist or []]
# iterate over subtree list filtering out blacklist. # iterate over subtree list filtering out blacklist.
orphans = set() orphans = set()
destination_blocks = destination_structure['blocks'] destination_blocks = destination_structure['blocks']
for subtree_root in subtree_list: for subtree_root in subtree_list:
if subtree_root.block_id != source_structure['root']: if BlockKey.from_usage_key(subtree_root) != source_structure['root']:
# find the parents and put root in the right sequence # find the parents and put root in the right sequence
parent = self._get_parent_from_structure(subtree_root.block_id, source_structure) parent = self._get_parent_from_structure(BlockKey.from_usage_key(subtree_root), source_structure)
if parent is not None: # may be a detached category xblock if parent is not None: # may be a detached category xblock
if not parent in destination_blocks: if not parent in destination_blocks:
raise ItemNotFoundError(parent) raise ItemNotFoundError(parent)
...@@ -1769,14 +1784,17 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1769,14 +1784,17 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
self._sync_children( self._sync_children(
source_structure['blocks'][parent], source_structure['blocks'][parent],
destination_blocks[parent], destination_blocks[parent],
subtree_root.block_id BlockKey.from_usage_key(subtree_root)
) )
) )
# update/create the subtree and its children in destination (skipping blacklist) # update/create the subtree and its children in destination (skipping blacklist)
orphans.update( orphans.update(
self._copy_subdag( self._copy_subdag(
user_id, destination_structure['_id'], user_id, destination_structure['_id'],
subtree_root.block_id, source_structure['blocks'], destination_blocks, blacklist BlockKey.from_usage_key(subtree_root),
source_structure['blocks'],
destination_blocks,
blacklist
) )
) )
# remove any remaining orphans # remove any remaining orphans
...@@ -1809,24 +1827,25 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1809,24 +1827,25 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
with self.bulk_operations(usage_locator.course_key): with self.bulk_operations(usage_locator.course_key):
original_structure = self._lookup_course(usage_locator.course_key)['structure'] original_structure = self._lookup_course(usage_locator.course_key)['structure']
if original_structure['root'] == usage_locator.block_id: block_key = BlockKey.from_usage_key(usage_locator)
if original_structure['root'] == block_key:
raise ValueError("Cannot delete the root of a course") raise ValueError("Cannot delete the root of a course")
if encode_key_for_mongo(usage_locator.block_id) not in original_structure['blocks']: if block_key not in original_structure['blocks']:
raise ValueError("Cannot delete a block that does not exist") raise ValueError("Cannot delete a block that does not exist")
index_entry = self._get_index_if_valid(usage_locator.course_key, force) index_entry = self._get_index_if_valid(usage_locator.course_key, force)
new_structure = self.version_structure(usage_locator.course_key, original_structure, user_id) new_structure = self.version_structure(usage_locator.course_key, original_structure, user_id)
new_blocks = new_structure['blocks'] new_blocks = new_structure['blocks']
new_id = new_structure['_id'] new_id = new_structure['_id']
encoded_block_id = self._get_parent_from_structure(usage_locator.block_id, original_structure) parent_block_key = self._get_parent_from_structure(block_key, original_structure)
if encoded_block_id: if parent_block_key:
parent_block = new_blocks[encoded_block_id] parent_block = new_blocks[parent_block_key]
parent_block['fields']['children'].remove(usage_locator.block_id) parent_block['fields']['children'].remove(block_key)
parent_block['edit_info']['edited_on'] = datetime.datetime.now(UTC) parent_block['edit_info']['edited_on'] = datetime.datetime.now(UTC)
parent_block['edit_info']['edited_by'] = user_id parent_block['edit_info']['edited_by'] = user_id
parent_block['edit_info']['previous_version'] = parent_block['edit_info']['update_version'] parent_block['edit_info']['previous_version'] = parent_block['edit_info']['update_version']
parent_block['edit_info']['update_version'] = new_id parent_block['edit_info']['update_version'] = new_id
self._remove_subtree(usage_locator.block_id, new_blocks) self._remove_subtree(BlockKey.from_usage_key(usage_locator), new_blocks)
# update index if appropriate and structures # update index if appropriate and structures
self.update_structure(usage_locator.course_key, new_structure) self.update_structure(usage_locator.course_key, new_structure)
...@@ -1840,14 +1859,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1840,14 +1859,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
return result return result
def _remove_subtree(self, block_id, blocks): @contract(block_key=BlockKey, blocks='dict(BlockKey: dict)')
def _remove_subtree(self, block_key, blocks):
""" """
Remove the subtree rooted at block_id Remove the subtree rooted at block_key
""" """
encoded_block_id = encode_key_for_mongo(block_id) for child in blocks[block_key]['fields'].get('children', []):
for child in blocks[encoded_block_id]['fields'].get('children', []): self._remove_subtree(BlockKey(*child), blocks)
self._remove_subtree(child, blocks) del blocks[block_key]
del blocks[encoded_block_id]
def delete_course(self, course_key, user_id): def delete_course(self, course_key, user_id):
""" """
...@@ -1868,25 +1887,30 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1868,25 +1887,30 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# in case the course is later restored. # in case the course is later restored.
# super(SplitMongoModuleStore, self).delete_course(course_key, user_id) # super(SplitMongoModuleStore, self).delete_course(course_key, user_id)
def inherit_settings(self, block_map, block_id, inherited_settings_map, inheriting_settings=None): @contract(block_map="dict(BlockKey: dict)", block_key=BlockKey)
def inherit_settings(
self, block_map, block_key, inherited_settings_map, inheriting_settings=None, inherited_from=None
):
""" """
Updates block_json with any inheritable setting set by an ancestor and recurses to children. Updates block_json with any inheritable setting set by an ancestor and recurses to children.
""" """
encoded_key = encode_key_for_mongo(block_id) if block_key not in block_map:
if encoded_key not in block_map:
return return
block_json = block_map[encoded_key] block_json = block_map[block_key]
if inheriting_settings is None: if inheriting_settings is None:
inheriting_settings = {} inheriting_settings = {}
if inherited_from is None:
inherited_from = []
# the currently passed down values take precedence over any previously cached ones # the currently passed down values take precedence over any previously cached ones
# NOTE: this should show the values which all fields would have if inherited: i.e., # NOTE: this should show the values which all fields would have if inherited: i.e.,
# not set to the locally defined value but to value set by nearest ancestor who sets it # not set to the locally defined value but to value set by nearest ancestor who sets it
inherited_settings_map.setdefault((block_json['category'], block_id), {}).update(inheriting_settings) inherited_settings_map.setdefault(block_key, {}).update(inheriting_settings)
# update the inheriting w/ what should pass to children # update the inheriting w/ what should pass to children
inheriting_settings = inherited_settings_map[(block_json['category'], block_id)].copy() inheriting_settings = inherited_settings_map[block_key].copy()
block_fields = block_json['fields'] block_fields = block_json['fields']
for field_name in inheritance.InheritanceMixin.fields: for field_name in inheritance.InheritanceMixin.fields:
if field_name in block_fields: if field_name in block_fields:
...@@ -1894,8 +1918,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1894,8 +1918,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
for child in block_fields.get('children', []): for child in block_fields.get('children', []):
try: try:
child = encode_key_for_mongo(child) if child in inherited_from:
self.inherit_settings(block_map, child, inherited_settings_map, inheriting_settings) raise Exception(u'Infinite loop detected when inheriting to {}, having already inherited from {}'.format(child, inherited_from))
self.inherit_settings(
block_map,
BlockKey(*child),
inherited_settings_map,
inheriting_settings,
inherited_from + [child]
)
except KeyError: except KeyError:
# here's where we need logic for looking up in other structures when we allow cross pointers # here's where we need logic for looking up in other structures when we allow cross pointers
# but it's also getting this during course creation if creating top down w/ children set or # but it's also getting this during course creation if creating top down w/ children set or
...@@ -1909,12 +1940,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1909,12 +1940,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
(0 => this usage only, 1 => this usage and its children, etc...) (0 => this usage only, 1 => this usage and its children, etc...)
A depth of None returns all descendants A depth of None returns all descendants
""" """
encoded_block_id = encode_key_for_mongo(block_id) if block_id not in block_map:
if encoded_block_id not in block_map:
return descendent_map return descendent_map
if block_id not in descendent_map: if block_id not in descendent_map:
descendent_map[block_id] = block_map[encoded_block_id] descendent_map[block_id] = block_map[block_id]
if depth is None or depth > 0: if depth is None or depth > 0:
depth = depth - 1 if depth is not None else None depth = depth - 1 if depth is not None else None
...@@ -1945,7 +1975,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1945,7 +1975,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
if 'fields' in block and 'children' in block['fields']: if 'fields' in block and 'children' in block['fields']:
block['fields']["children"] = [ block['fields']["children"] = [
block_id for block_id in block['fields']["children"] block_id for block_id in block['fields']["children"]
if encode_key_for_mongo(block_id) in original_structure['blocks'] if block_id in original_structure['blocks']
] ]
self.update_structure(course_locator, original_structure) self.update_structure(course_locator, original_structure)
...@@ -1955,9 +1985,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1955,9 +1985,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
and converting them. and converting them.
:param jsonfields: the serialized copy of the xblock's fields :param jsonfields: the serialized copy of the xblock's fields
""" """
def robust_usage_key(block_id): @contract(block_key="BlockUsageLocator | seq[2]")
def robust_usage_key(block_key):
""" """
create a course_key relative usage key for the block_id. If the block_id is in blocks, create a course_key relative usage key for the block_key. If the block_key is in blocks,
use its correct category; otherwise, use 'unknown'. use its correct category; otherwise, use 'unknown'.
The purpose for this is that some operations add pointers as they build up the The purpose for this is that some operations add pointers as they build up the
structure without worrying about order of creation. Because the category of the structure without worrying about order of creation. Because the category of the
...@@ -1965,14 +1996,17 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1965,14 +1996,17 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
out a dependency graph algorithm for those functions which may prereference blocks. out a dependency graph algorithm for those functions which may prereference blocks.
""" """
# if this was taken from cache, then its fields are already converted # if this was taken from cache, then its fields are already converted
if isinstance(block_id, BlockUsageLocator): if isinstance(block_key, BlockUsageLocator):
return block_id.map_into_course(course_key) return block_key.map_into_course(course_key)
elif not isinstance(block_key, BlockKey):
block_key = BlockKey(*block_key)
try: try:
return course_key.make_usage_key( return course_key.make_usage_key(
blocks[encode_key_for_mongo(block_id)]['category'], block_id block_key.type, block_key.id
) )
except KeyError: except KeyError:
return course_key.make_usage_key('unknown', block_id) return course_key.make_usage_key('unknown', block_key.id)
xblock_class = self.mixologist.mix(xblock_class) xblock_class = self.mixologist.mix(xblock_class)
# Make a shallow copy, so that we aren't manipulating a cached field dictionary # Make a shallow copy, so that we aren't manipulating a cached field dictionary
...@@ -1988,7 +2022,6 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1988,7 +2022,6 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
output_fields[field_name] = [robust_usage_key(ele) for ele in value] output_fields[field_name] = [robust_usage_key(ele) for ele in value]
elif isinstance(field, ReferenceValueDict): elif isinstance(field, ReferenceValueDict):
for key, subvalue in value.iteritems(): for key, subvalue in value.iteritems():
assert isinstance(subvalue, basestring)
value[key] = robust_usage_key(subvalue) value[key] = robust_usage_key(subvalue)
return output_fields return output_fields
...@@ -2081,14 +2114,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2081,14 +2114,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
for field_name, value in fields.iteritems(): for field_name, value in fields.iteritems():
if value is not None: if value is not None:
if isinstance(xblock_class.fields[field_name], Reference): if isinstance(xblock_class.fields[field_name], Reference):
fields[field_name] = value.block_id fields[field_name] = BlockKey.from_usage_key(value)
elif isinstance(xblock_class.fields[field_name], ReferenceList): elif isinstance(xblock_class.fields[field_name], ReferenceList):
fields[field_name] = [ fields[field_name] = [
ele.block_id for ele in value BlockKey.from_usage_key(ele) for ele in value
] ]
elif isinstance(xblock_class.fields[field_name], ReferenceValueDict): elif isinstance(xblock_class.fields[field_name], ReferenceValueDict):
for key, subvalue in value.iteritems(): for key, subvalue in value.iteritems():
value[key] = subvalue.block_id value[key] = BlockKey.from_usage_key(subvalue)
# should this recurse down dicts and lists just in case they contain datetime? # should this recurse down dicts and lists just in case they contain datetime?
elif not isinstance(value, datetime.datetime): # don't convert datetimes! elif not isinstance(value, datetime.datetime): # don't convert datetimes!
fields[field_name] = xblock_class.fields[field_name].to_json(value) fields[field_name] = xblock_class.fields[field_name].to_json(value)
...@@ -2097,33 +2130,31 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2097,33 +2130,31 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
if 'location' in fields: if 'location' in fields:
log.warn('attempt to persist location') log.warn('attempt to persist location')
del fields['location'] del fields['location']
if 'category' in fields: if 'block_type' in fields:
log.warn('attempt to persist category') log.warn('attempt to persist category')
del fields['category'] del fields['block_type']
return fields return fields
def _new_structure(self, user_id, root_block_id, def _new_structure(self, user_id, root_block_key, block_fields=None, definition_id=None):
root_category=None, block_fields=None, definition_id=None):
""" """
Internal function: create a structure element with no previous version. Must provide the root id Internal function: create a structure element with no previous version. Must provide the root id
but not necessarily the info needed to create it (for the use case of publishing). If providing but not necessarily the info needed to create it (for the use case of publishing). If providing
root_category, must also provide block_fields and definition_id root_category, must also provide block_fields and definition_id
""" """
new_id = ObjectId() new_id = ObjectId()
if root_category is not None: if root_block_key is not None:
encoded_root = encode_key_for_mongo(root_block_id)
if block_fields is None: if block_fields is None:
block_fields = {} block_fields = {}
blocks = { blocks = {
encoded_root: self._new_block( root_block_key: self._new_block(
user_id, root_category, block_fields, definition_id, new_id user_id, root_block_key.type, block_fields, definition_id, new_id
) )
} }
else: else:
blocks = {} blocks = {}
return { return {
'_id': new_id, '_id': new_id,
'root': root_block_id, 'root': root_block_key,
'previous_version': None, 'previous_version': None,
'original_version': new_id, 'original_version': new_id,
'edited_by': user_id, 'edited_by': user_id,
...@@ -2132,15 +2163,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2132,15 +2163,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
'schema_version': self.SCHEMA_VERSION, 'schema_version': self.SCHEMA_VERSION,
} }
def _get_parent_from_structure(self, block_id, structure): @contract(block_key=BlockKey)
def _get_parent_from_structure(self, block_key, structure):
""" """
Given a structure, find block_id's parent in that structure. Note returns Given a structure, find block_key's parent in that structure. Note returns
the encoded format for parent the encoded format for parent
""" """
for parent_id, value in structure['blocks'].iteritems(): for parent_block_key, value in structure['blocks'].iteritems():
for child_id in value['fields'].get('children', []): if block_key in value['fields'].get('children', []):
if block_id == child_id: return parent_block_key
return parent_id
return None return None
def _sync_children(self, source_parent, destination_parent, new_child): def _sync_children(self, source_parent, destination_parent, new_child):
...@@ -2149,31 +2180,31 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2149,31 +2180,31 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
Return the removed ones as orphans (a set). Return the removed ones as orphans (a set).
""" """
destination_reordered = [] destination_reordered = []
destination_children = destination_parent['fields']['children'] destination_children = set(destination_parent['fields']['children'])
source_children = source_parent['fields']['children'] source_children = source_parent['fields']['children']
orphans = set() orphans = destination_children - set(source_children)
for child in destination_children:
try:
source_children.index(child)
except ValueError:
orphans.add(child)
for child in source_children: for child in source_children:
if child == new_child or child in destination_children: if child == new_child or child in destination_children:
destination_reordered.append(child) destination_reordered.append(child)
destination_parent['fields']['children'] = destination_reordered destination_parent['fields']['children'] = destination_reordered
return orphans return orphans
def _copy_subdag(self, user_id, destination_version, block_id, source_blocks, destination_blocks, blacklist): @contract(
block_key=BlockKey,
source_blocks="dict(BlockKey: *)",
destination_blocks="dict(BlockKey: *)",
blacklist="list(BlockKey) | str",
)
def _copy_subdag(self, user_id, destination_version, block_key, source_blocks, destination_blocks, blacklist):
""" """
Update destination_blocks for the sub-dag rooted at block_id to be like the one in Update destination_blocks for the sub-dag rooted at block_key to be like the one in
source_blocks excluding blacklist. source_blocks excluding blacklist.
Return any newly discovered orphans (as a set) Return any newly discovered orphans (as a set)
""" """
orphans = set() orphans = set()
encoded_block_id = encode_key_for_mongo(block_id) destination_block = destination_blocks.get(block_key)
destination_block = destination_blocks.get(encoded_block_id) new_block = source_blocks[block_key]
new_block = source_blocks[encoded_block_id]
if destination_block: if destination_block:
# reorder children to correspond to whatever order holds for source. # reorder children to correspond to whatever order holds for source.
# remove any which source no longer claims (put into orphans) # remove any which source no longer claims (put into orphans)
...@@ -2186,7 +2217,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2186,7 +2217,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
index = source_children.index(child) index = source_children.index(child)
destination_reordered[index] = child destination_reordered[index] = child
except ValueError: except ValueError:
orphans.add(child) orphans.add(BlockKey(*child))
if blacklist != EXCLUDE_ALL: if blacklist != EXCLUDE_ALL:
for index, child in enumerate(source_children): for index, child in enumerate(source_children):
if child not in blacklist: if child not in blacklist:
...@@ -2202,7 +2233,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2202,7 +2233,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
destination_block['edit_info']['edited_on'] = datetime.datetime.now(UTC) destination_block['edit_info']['edited_on'] = datetime.datetime.now(UTC)
else: else:
destination_block = self._new_block( destination_block = self._new_block(
user_id, new_block['category'], user_id, new_block['block_type'],
self._filter_blacklist(copy.copy(new_block['fields']), blacklist), self._filter_blacklist(copy.copy(new_block['fields']), blacklist),
new_block['definition'], new_block['definition'],
destination_version, destination_version,
...@@ -2217,12 +2248,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2217,12 +2248,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
if child not in blacklist: if child not in blacklist:
orphans.update( orphans.update(
self._copy_subdag( self._copy_subdag(
user_id, destination_version, child, source_blocks, destination_blocks, blacklist user_id, destination_version, BlockKey(*child), source_blocks, destination_blocks, blacklist
) )
) )
destination_blocks[encoded_block_id] = destination_block destination_blocks[block_key] = destination_block
return orphans return orphans
@contract(blacklist='list(BlockKey) | str')
def _filter_blacklist(self, fields, blacklist): def _filter_blacklist(self, fields, blacklist):
""" """
Filter out blacklist from the children field in fields. Will construct a new list for children; Filter out blacklist from the children field in fields. Will construct a new list for children;
...@@ -2231,18 +2263,18 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2231,18 +2263,18 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
if blacklist == EXCLUDE_ALL: if blacklist == EXCLUDE_ALL:
fields['children'] = [] fields['children'] = []
else: else:
fields['children'] = [child for child in fields.get('children', []) if child not in blacklist] fields['children'] = [child for child in fields.get('children', []) if BlockKey(*child) not in blacklist]
return fields return fields
@contract(orphan=BlockKey)
def _delete_if_true_orphan(self, orphan, structure): def _delete_if_true_orphan(self, orphan, structure):
""" """
Delete the orphan and any of its descendants which no longer have parents. Delete the orphan and any of its descendants which no longer have parents.
""" """
if self._get_parent_from_structure(orphan, structure) is None: if self._get_parent_from_structure(orphan, structure) is None:
encoded_block_id = encode_key_for_mongo(orphan) for child in structure['blocks'][orphan]['fields'].get('children', []):
for child in structure['blocks'][encoded_block_id]['fields'].get('children', []): self._delete_if_true_orphan(BlockKey(*child), structure)
self._delete_if_true_orphan(child, structure) del structure['blocks'][orphan]
del structure['blocks'][encoded_block_id]
def _new_block(self, user_id, category, block_fields, definition_id, new_id, raw=False): def _new_block(self, user_id, category, block_fields, definition_id, new_id, raw=False):
""" """
...@@ -2256,7 +2288,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2256,7 +2288,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
if not raw: if not raw:
block_fields = self._serialize_fields(category, block_fields) block_fields = self._serialize_fields(category, block_fields)
return { return {
'category': category, 'block_type': category,
'definition': definition_id, 'definition': definition_id,
'fields': block_fields, 'fields': block_fields,
'edit_info': { 'edit_info': {
...@@ -2267,19 +2299,21 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2267,19 +2299,21 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
} }
} }
def _get_block_from_structure(self, structure, block_id): @contract(block_key=BlockKey)
def _get_block_from_structure(self, structure, block_key):
""" """
Encodes the block id before retrieving it from the structure to ensure it can Encodes the block id before retrieving it from the structure to ensure it can
be a json dict key. be a json dict key.
""" """
return structure['blocks'].get(encode_key_for_mongo(block_id)) return structure['blocks'].get(block_key)
def _update_block_in_structure(self, structure, block_id, content): @contract(block_key=BlockKey)
def _update_block_in_structure(self, structure, block_key, content):
""" """
Encodes the block id before accessing it in the structure to ensure it can Encodes the block id before accessing it in the structure to ensure it can
be a json dict key. be a json dict key.
""" """
structure['blocks'][encode_key_for_mongo(block_id)] = content structure['blocks'][block_key] = content
def find_courses_by_search_target(self, field_name, field_value): def find_courses_by_search_target(self, field_name, field_value):
""" """
......
...@@ -10,6 +10,7 @@ from xmodule.modulestore.draft_and_published import ( ...@@ -10,6 +10,7 @@ from xmodule.modulestore.draft_and_published import (
ModuleStoreDraftAndPublished, DIRECT_ONLY_CATEGORIES, UnsupportedRevisionError ModuleStoreDraftAndPublished, DIRECT_ONLY_CATEGORIES, UnsupportedRevisionError
) )
from opaque_keys.edx.locator import CourseLocator from opaque_keys.edx.locator import CourseLocator
from xmodule.modulestore.split_mongo import BlockKey
class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleStore): class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleStore):
...@@ -129,6 +130,8 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -129,6 +130,8 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
user_id, parent_usage_key, block_type, block_id=block_id, user_id, parent_usage_key, block_type, block_id=block_id,
fields=fields, **kwargs fields=fields, **kwargs
) )
# Publish both the child and the parent, if the child is a direct-only category
self._auto_publish_no_children(item.location, item.location.category, user_id, **kwargs)
self._auto_publish_no_children(parent_usage_key, item.location.category, user_id, **kwargs) self._auto_publish_no_children(parent_usage_key, item.location.category, user_id, **kwargs)
return item return item
...@@ -239,15 +242,15 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -239,15 +242,15 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
def get_course(branch_name): def get_course(branch_name):
return self._lookup_course(xblock.location.course_key.for_branch(branch_name))['structure'] return self._lookup_course(xblock.location.course_key.for_branch(branch_name))['structure']
def get_block(course_structure, block_id): def get_block(course_structure, block_key):
return self._get_block_from_structure(course_structure, block_id) return self._get_block_from_structure(course_structure, block_key)
draft_course = get_course(ModuleStoreEnum.BranchName.draft) draft_course = get_course(ModuleStoreEnum.BranchName.draft)
published_course = get_course(ModuleStoreEnum.BranchName.published) published_course = get_course(ModuleStoreEnum.BranchName.published)
def has_changes_subtree(block_id): def has_changes_subtree(block_key):
draft_block = get_block(draft_course, block_id) draft_block = get_block(draft_course, block_key)
published_block = get_block(published_course, block_id) published_block = get_block(published_course, block_key)
if not published_block: if not published_block:
return True return True
...@@ -263,7 +266,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -263,7 +266,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
return False return False
return has_changes_subtree(xblock.location.block_id) return has_changes_subtree(BlockKey.from_usage_key(xblock.location))
def publish(self, location, user_id, blacklist=None, **kwargs): def publish(self, location, user_id, blacklist=None, **kwargs):
""" """
...@@ -314,7 +317,10 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -314,7 +317,10 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
published_course_structure = self._lookup_course( published_course_structure = self._lookup_course(
location.course_key.for_branch(ModuleStoreEnum.BranchName.published) location.course_key.for_branch(ModuleStoreEnum.BranchName.published)
)['structure'] )['structure']
published_block = self._get_block_from_structure(published_course_structure, location.block_id) published_block = self._get_block_from_structure(
published_course_structure,
BlockKey.from_usage_key(location)
)
if published_block is None: if published_block is None:
raise InvalidVersionError(location) raise InvalidVersionError(location)
...@@ -323,7 +329,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -323,7 +329,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
new_structure = self.version_structure(draft_course_key, draft_course_structure, user_id) new_structure = self.version_structure(draft_course_key, draft_course_structure, user_id)
# remove the block and its descendants from the new structure # remove the block and its descendants from the new structure
self._remove_subtree(location.block_id, new_structure['blocks']) self._remove_subtree(BlockKey.from_usage_key(location), new_structure['blocks'])
# copy over the block and its descendants from the published branch # copy over the block and its descendants from the published branch
def copy_from_published(root_block_id): def copy_from_published(root_block_id):
...@@ -339,7 +345,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -339,7 +345,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
for child_block_id in block.setdefault('fields', {}).get('children', []): for child_block_id in block.setdefault('fields', {}).get('children', []):
copy_from_published(child_block_id) copy_from_published(child_block_id)
copy_from_published(location.block_id) copy_from_published(BlockKey.from_usage_key(location))
# update course structure and index # update course structure and index
self.update_structure(draft_course_key, new_structure) self.update_structure(draft_course_key, new_structure)
...@@ -387,7 +393,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -387,7 +393,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
def _get_head(self, xblock, branch): def _get_head(self, xblock, branch):
course_structure = self._lookup_course(xblock.location.course_key.for_branch(branch))['structure'] course_structure = self._lookup_course(xblock.location.course_key.for_branch(branch))['structure']
return self._get_block_from_structure(course_structure, xblock.location.block_id) return self._get_block_from_structure(course_structure, BlockKey.from_usage_key(xblock.location))
def _get_version(self, block): def _get_version(self, block):
""" """
...@@ -427,7 +433,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -427,7 +433,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
partitioned_fields = self.partition_fields_by_scope(block_type, fields) partitioned_fields = self.partition_fields_by_scope(block_type, fields)
course_key = self._map_revision_to_branch(course_key) # cast to branch_setting course_key = self._map_revision_to_branch(course_key) # cast to branch_setting
return self._update_item_from_fields( return self._update_item_from_fields(
user_id, course_key, block_type, block_id, partitioned_fields, None, allow_not_found=True, force=True user_id, course_key, BlockKey(block_type, block_id), partitioned_fields, None, allow_not_found=True, force=True
) or self.get_item(new_usage_key) ) or self.get_item(new_usage_key)
def compute_published_info_internal(self, xblock): def compute_published_info_internal(self, xblock):
......
...@@ -1482,13 +1482,23 @@ class TestMixedModuleStore(unittest.TestCase): ...@@ -1482,13 +1482,23 @@ class TestMixedModuleStore(unittest.TestCase):
test_course_key = test_course.id test_course_key = test_course.id
# test create_item of direct-only category to make sure we are autopublishing # test create_item of direct-only category to make sure we are autopublishing
chapter = self.store.create_item(self.user_id, test_course_key, 'chapter', 'Overview') chapter = self.store.create_child(self.user_id, test_course.location, 'chapter', 'Overview')
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
self.assertIn(
chapter.location,
self.store.get_item(test_course.location).children,
)
self.assertTrue(self.store.has_published_version(chapter)) self.assertTrue(self.store.has_published_version(chapter))
chapter_location = chapter.location chapter_location = chapter.location
# test create_child of direct-only category to make sure we are autopublishing # test create_child of direct-only category to make sure we are autopublishing
sequential = self.store.create_child(self.user_id, chapter_location, 'sequential', 'Sequence') sequential = self.store.create_child(self.user_id, chapter_location, 'sequential', 'Sequence')
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
self.assertIn(
sequential.location,
self.store.get_item(chapter_location).children,
)
self.assertTrue(self.store.has_published_version(sequential)) self.assertTrue(self.store.has_published_version(sequential))
# test update_item of direct-only category to make sure we are autopublishing # test update_item of direct-only category to make sure we are autopublishing
...@@ -1498,6 +1508,11 @@ class TestMixedModuleStore(unittest.TestCase): ...@@ -1498,6 +1508,11 @@ class TestMixedModuleStore(unittest.TestCase):
# test delete_item of direct-only category to make sure we are autopublishing # test delete_item of direct-only category to make sure we are autopublishing
self.store.delete_item(sequential.location, self.user_id, revision=ModuleStoreEnum.RevisionOption.all) self.store.delete_item(sequential.location, self.user_id, revision=ModuleStoreEnum.RevisionOption.all)
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
self.assertNotIn(
sequential.location,
self.store.get_item(chapter_location).children,
)
chapter = self.store.get_item(chapter.location.for_branch(None)) chapter = self.store.get_item(chapter.location.for_branch(None))
self.assertTrue(self.store.has_published_version(chapter)) self.assertTrue(self.store.has_published_version(chapter))
......
...@@ -6,6 +6,7 @@ import random ...@@ -6,6 +6,7 @@ import random
import re import re
import unittest import unittest
import uuid import uuid
from contracts import contract
from importlib import import_module from importlib import import_module
from path import path from path import path
...@@ -22,6 +23,7 @@ from xmodule.x_module import XModuleMixin ...@@ -22,6 +23,7 @@ from xmodule.x_module import XModuleMixin
from xmodule.fields import Date, Timedelta from xmodule.fields import Date, Timedelta
from xmodule.modulestore.split_mongo.split import SplitMongoModuleStore from xmodule.modulestore.split_mongo.split import SplitMongoModuleStore
from xmodule.modulestore.tests.test_modulestore import check_has_course_method from xmodule.modulestore.tests.test_modulestore import check_has_course_method
from xmodule.modulestore.split_mongo import BlockKey
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
from xmodule.modulestore.edit_info import EditInfoMixin from xmodule.modulestore.edit_info import EditInfoMixin
...@@ -129,7 +131,7 @@ class SplitModuleTest(unittest.TestCase): ...@@ -129,7 +131,7 @@ class SplitModuleTest(unittest.TestCase):
"revisions": [{ "revisions": [{
"user_id": "testassist@edx.org", "user_id": "testassist@edx.org",
"update": { "update": {
"head12345": { ("course", "head12345"): {
"end": _date_field.from_json("2013-04-13T04:30"), "end": _date_field.from_json("2013-04-13T04:30"),
"tabs": [ "tabs": [
{ {
...@@ -198,7 +200,7 @@ class SplitModuleTest(unittest.TestCase): ...@@ -198,7 +200,7 @@ class SplitModuleTest(unittest.TestCase):
}, },
{"user_id": "testassist@edx.org", {"user_id": "testassist@edx.org",
"update": "update":
{"head12345": { {("course", "head12345"): {
"end": _date_field.from_json("2013-06-13T04:30"), "end": _date_field.from_json("2013-06-13T04:30"),
"grading_policy": { "grading_policy": {
"GRADER": [ "GRADER": [
...@@ -243,6 +245,7 @@ class SplitModuleTest(unittest.TestCase): ...@@ -243,6 +245,7 @@ class SplitModuleTest(unittest.TestCase):
{ {
"id": "chapter1", "id": "chapter1",
"parent": "head12345", "parent": "head12345",
"parent_type": "course",
"category": "chapter", "category": "chapter",
"fields": { "fields": {
"display_name": "Hercules" "display_name": "Hercules"
...@@ -251,6 +254,7 @@ class SplitModuleTest(unittest.TestCase): ...@@ -251,6 +254,7 @@ class SplitModuleTest(unittest.TestCase):
{ {
"id": "chapter2", "id": "chapter2",
"parent": "head12345", "parent": "head12345",
"parent_type": "course",
"category": "chapter", "category": "chapter",
"fields": { "fields": {
"display_name": "Hera heckles Hercules" "display_name": "Hera heckles Hercules"
...@@ -259,6 +263,7 @@ class SplitModuleTest(unittest.TestCase): ...@@ -259,6 +263,7 @@ class SplitModuleTest(unittest.TestCase):
{ {
"id": "chapter3", "id": "chapter3",
"parent": "head12345", "parent": "head12345",
"parent_type": "course",
"category": "chapter", "category": "chapter",
"fields": { "fields": {
"display_name": "Hera cuckolds Zeus" "display_name": "Hera cuckolds Zeus"
...@@ -267,6 +272,7 @@ class SplitModuleTest(unittest.TestCase): ...@@ -267,6 +272,7 @@ class SplitModuleTest(unittest.TestCase):
{ {
"id": "problem1", "id": "problem1",
"parent": "chapter3", "parent": "chapter3",
"parent_type": "chapter",
"category": "problem", "category": "problem",
"fields": { "fields": {
"display_name": "Problem 3.1", "display_name": "Problem 3.1",
...@@ -276,6 +282,7 @@ class SplitModuleTest(unittest.TestCase): ...@@ -276,6 +282,7 @@ class SplitModuleTest(unittest.TestCase):
{ {
"id": "problem3_2", "id": "problem3_2",
"parent": "chapter3", "parent": "chapter3",
"parent_type": "chapter",
"category": "problem", "category": "problem",
"fields": { "fields": {
"display_name": "Problem 3.2" "display_name": "Problem 3.2"
...@@ -350,7 +357,7 @@ class SplitModuleTest(unittest.TestCase): ...@@ -350,7 +357,7 @@ class SplitModuleTest(unittest.TestCase):
"revisions": [{ "revisions": [{
"user_id": "test@edx.org", "user_id": "test@edx.org",
"update": { "update": {
"head23456": { ("course", "head23456"): {
"display_name": "The most wonderful course", "display_name": "The most wonderful course",
"grading_policy": { "grading_policy": {
"GRADER": [ "GRADER": [
...@@ -466,13 +473,13 @@ class SplitModuleTest(unittest.TestCase): ...@@ -466,13 +473,13 @@ class SplitModuleTest(unittest.TestCase):
root_block_id=course_spec['root_block_id'] root_block_id=course_spec['root_block_id']
) )
for revision in course_spec.get('revisions', []): for revision in course_spec.get('revisions', []):
for block_id, fields in revision.get('update', {}).iteritems(): for (block_type, block_id), fields in revision.get('update', {}).iteritems():
# cheat since course is most frequent # cheat since course is most frequent
if course.location.block_id == block_id: if course.location.block_id == block_id:
block = course block = course
else: else:
# not easy to figure out the category but get_item won't care # not easy to figure out the category but get_item won't care
block_usage = BlockUsageLocator.make_relative(course.location, '', block_id) block_usage = BlockUsageLocator.make_relative(course.location, block_type, block_id)
block = split_store.get_item(block_usage) block = split_store.get_item(block_usage)
for key, value in fields.iteritems(): for key, value in fields.iteritems():
setattr(block, key, value) setattr(block, key, value)
...@@ -484,7 +491,7 @@ class SplitModuleTest(unittest.TestCase): ...@@ -484,7 +491,7 @@ class SplitModuleTest(unittest.TestCase):
elif spec['parent'] == course.location.block_id: elif spec['parent'] == course.location.block_id:
parent = course parent = course
else: else:
block_usage = BlockUsageLocator.make_relative(course.location, '', spec['parent']) block_usage = BlockUsageLocator.make_relative(course.location, spec['parent_type'], spec['parent'])
parent = split_store.get_item(block_usage) parent = split_store.get_item(block_usage)
block_id = LocalId(spec['id']) block_id = LocalId(spec['id'])
child = split_store.create_xblock( child = split_store.create_xblock(
...@@ -680,10 +687,10 @@ class SplitModuleCourseTests(SplitModuleTest): ...@@ -680,10 +687,10 @@ class SplitModuleCourseTests(SplitModuleTest):
locator = CourseLocator(org='testx', course='GreekHero', run="run", branch=BRANCH_NAME_DRAFT) locator = CourseLocator(org='testx', course='GreekHero', run="run", branch=BRANCH_NAME_DRAFT)
course = modulestore().get_course(locator) course = modulestore().get_course(locator)
block_map = modulestore().cache_items( block_map = modulestore().cache_items(
course.system, [child.block_id for child in course.children], course.id, depth=3 course.system, [BlockKey.from_usage_key(child) for child in course.children], course.id, depth=3
) )
self.assertIn('chapter1', block_map) self.assertIn(BlockKey('chapter', 'chapter1'), block_map)
self.assertIn('problem3_2', block_map) self.assertIn(BlockKey('problem', 'problem3_2'), block_map)
def test_course_successors(self): def test_course_successors(self):
""" """
...@@ -903,10 +910,6 @@ class SplitModuleItemTests(SplitModuleTest): ...@@ -903,10 +910,6 @@ class SplitModuleItemTests(SplitModuleTest):
) )
self.assertEqual(len(matches), 2) self.assertEqual(len(matches), 2)
matches = modulestore().get_items(locator, qualifiers={'children': 'chapter2'})
self.assertEqual(len(matches), 1)
self.assertEqual(matches[0].location.block_id, 'head12345')
def test_get_parents(self): def test_get_parents(self):
''' '''
get_parent_location(locator): BlockUsageLocator get_parent_location(locator): BlockUsageLocator
...@@ -920,8 +923,9 @@ class SplitModuleItemTests(SplitModuleTest): ...@@ -920,8 +923,9 @@ class SplitModuleItemTests(SplitModuleTest):
self.assertEqual(parent.block_id, 'head12345') self.assertEqual(parent.block_id, 'head12345')
self.assertEqual(parent.org, "testx") self.assertEqual(parent.org, "testx")
self.assertEqual(parent.course, "GreekHero") self.assertEqual(parent.course, "GreekHero")
locator = locator.course_key.make_usage_key('Chapter', 'chapter2') locator = locator.course_key.make_usage_key('chapter', 'chapter2')
parent = modulestore().get_parent_location(locator) parent = modulestore().get_parent_location(locator)
self.assertIsNotNone(parent)
self.assertEqual(parent.block_id, 'head12345') self.assertEqual(parent.block_id, 'head12345')
locator = locator.course_key.make_usage_key('garbage', 'nosuchblock') locator = locator.course_key.make_usage_key('garbage', 'nosuchblock')
parent = modulestore().get_parent_location(locator) parent = modulestore().get_parent_location(locator)
...@@ -1243,7 +1247,7 @@ class TestItemCrud(SplitModuleTest): ...@@ -1243,7 +1247,7 @@ class TestItemCrud(SplitModuleTest):
self.assertNotEqual(updated_problem.location.version_guid, pre_version_guid) self.assertNotEqual(updated_problem.location.version_guid, pre_version_guid)
self.assertEqual(version_agnostic(updated_problem.children), version_agnostic(block.children)) self.assertEqual(version_agnostic(updated_problem.children), version_agnostic(block.children))
self.assertNotIn(moved_child, version_agnostic(updated_problem.children)) self.assertNotIn(moved_child, version_agnostic(updated_problem.children))
locator = locator.course_key.make_usage_key('Chapter', "chapter1") locator = locator.course_key.make_usage_key('chapter', "chapter1")
other_block = modulestore().get_item(locator) other_block = modulestore().get_item(locator)
other_block.children.append(moved_child) other_block.children.append(moved_child)
other_updated = modulestore().update_item(other_block, self.user_id) other_updated = modulestore().update_item(other_block, self.user_id)
...@@ -1524,9 +1528,9 @@ class TestCourseCreation(SplitModuleTest): ...@@ -1524,9 +1528,9 @@ class TestCourseCreation(SplitModuleTest):
new_course.location.as_object_id(new_course.location.version_guid) new_course.location.as_object_id(new_course.location.version_guid)
) )
self.assertIsNotNone(db_structure, "Didn't find course") self.assertIsNotNone(db_structure, "Didn't find course")
self.assertNotIn('course', db_structure['blocks']) self.assertNotIn(BlockKey('course', 'course'), db_structure['blocks'])
self.assertIn('top', db_structure['blocks']) self.assertIn(BlockKey('chapter', 'top'), db_structure['blocks'])
self.assertEqual(db_structure['blocks']['top']['category'], 'chapter') self.assertEqual(db_structure['blocks'][BlockKey('chapter', 'top')]['block_type'], 'chapter')
def test_create_id_dupe(self): def test_create_id_dupe(self):
""" """
...@@ -1636,23 +1640,27 @@ class TestPublish(SplitModuleTest): ...@@ -1636,23 +1640,27 @@ class TestPublish(SplitModuleTest):
chapter2 = source_course.make_usage_key('chapter', 'chapter2') chapter2 = source_course.make_usage_key('chapter', 'chapter2')
chapter3 = source_course.make_usage_key('chapter', 'chapter3') chapter3 = source_course.make_usage_key('chapter', 'chapter3')
modulestore().copy(self.user_id, source_course, dest_course, [head], [chapter2, chapter3]) modulestore().copy(self.user_id, source_course, dest_course, [head], [chapter2, chapter3])
expected = [head.block_id, chapter1.block_id] expected = [BlockKey.from_usage_key(head), BlockKey.from_usage_key(chapter1)]
self._check_course( unexpected = [
source_course, dest_course, expected, [chapter2.block_id, chapter3.block_id, "problem1", "problem3_2"] BlockKey.from_usage_key(chapter2),
) BlockKey.from_usage_key(chapter3),
BlockKey("problem", "problem1"),
BlockKey("problem", "problem3_2")
]
self._check_course(source_course, dest_course, expected, unexpected)
# add a child under chapter1 # add a child under chapter1
new_module = modulestore().create_child( new_module = modulestore().create_child(
self.user_id, chapter1, "sequential", self.user_id, chapter1, "sequential",
fields={'display_name': 'new sequential'}, fields={'display_name': 'new sequential'},
) )
# remove chapter1 from expected b/c its pub'd version != the source anymore since source changed # remove chapter1 from expected b/c its pub'd version != the source anymore since source changed
expected.remove(chapter1.block_id) expected.remove(BlockKey.from_usage_key(chapter1))
# check that it's not in published course # check that it's not in published course
with self.assertRaises(ItemNotFoundError): with self.assertRaises(ItemNotFoundError):
modulestore().get_item(new_module.location.map_into_course(dest_course)) modulestore().get_item(new_module.location.map_into_course(dest_course))
# publish it # publish it
modulestore().copy(self.user_id, source_course, dest_course, [new_module.location], None) modulestore().copy(self.user_id, source_course, dest_course, [new_module.location], None)
expected.append(new_module.location.block_id) expected.append(BlockKey.from_usage_key(new_module.location))
# check that it is in the published course and that its parent is the chapter # check that it is in the published course and that its parent is the chapter
pub_module = modulestore().get_item(new_module.location.map_into_course(dest_course)) pub_module = modulestore().get_item(new_module.location.map_into_course(dest_course))
self.assertEqual( self.assertEqual(
...@@ -1664,12 +1672,10 @@ class TestPublish(SplitModuleTest): ...@@ -1664,12 +1672,10 @@ class TestPublish(SplitModuleTest):
) )
# publish it # publish it
modulestore().copy(self.user_id, source_course, dest_course, [new_module.location], None) modulestore().copy(self.user_id, source_course, dest_course, [new_module.location], None)
expected.append(new_module.location.block_id) expected.append(BlockKey.from_usage_key(new_module.location))
# check that it is in the published course (no error means it worked) # check that it is in the published course (no error means it worked)
pub_module = modulestore().get_item(new_module.location.map_into_course(dest_course)) pub_module = modulestore().get_item(new_module.location.map_into_course(dest_course))
self._check_course( self._check_course(source_course, dest_course, expected, unexpected)
source_course, dest_course, expected, [chapter2.block_id, chapter3.block_id, "problem1", "problem3_2"]
)
def test_exceptions(self): def test_exceptions(self):
""" """
...@@ -1702,8 +1708,14 @@ class TestPublish(SplitModuleTest): ...@@ -1702,8 +1708,14 @@ class TestPublish(SplitModuleTest):
chapter2 = source_course.make_usage_key('chapter', 'chapter2') chapter2 = source_course.make_usage_key('chapter', 'chapter2')
problem1 = source_course.make_usage_key('problem', 'problem1') problem1 = source_course.make_usage_key('problem', 'problem1')
modulestore().copy(self.user_id, source_course, dest_course, [head], [chapter2]) modulestore().copy(self.user_id, source_course, dest_course, [head], [chapter2])
expected = ["head12345", "chapter1", "chapter3", "problem1", "problem3_2"] expected = [
self._check_course(source_course, dest_course, expected, ["chapter2"]) BlockKey("course", "head12345"),
BlockKey("chapter", "chapter1"),
BlockKey("chapter", "chapter3"),
BlockKey("problem", "problem1"),
BlockKey("problem", "problem3_2"),
]
self._check_course(source_course, dest_course, expected, [BlockKey("chapter", "chapter2")])
# now move problem1 and delete problem3_2 # now move problem1 and delete problem3_2
chapter1 = modulestore().get_item(source_course.make_usage_key("chapter", "chapter1")) chapter1 = modulestore().get_item(source_course.make_usage_key("chapter", "chapter1"))
chapter3 = modulestore().get_item(source_course.make_usage_key("chapter", "chapter3")) chapter3 = modulestore().get_item(source_course.make_usage_key("chapter", "chapter3"))
...@@ -1711,9 +1723,15 @@ class TestPublish(SplitModuleTest): ...@@ -1711,9 +1723,15 @@ class TestPublish(SplitModuleTest):
chapter3.children.remove(problem1.map_into_course(chapter3.location.course_key)) chapter3.children.remove(problem1.map_into_course(chapter3.location.course_key))
modulestore().delete_item(source_course.make_usage_key("problem", "problem3_2"), self.user_id) modulestore().delete_item(source_course.make_usage_key("problem", "problem3_2"), self.user_id)
modulestore().copy(self.user_id, source_course, dest_course, [head], [chapter2]) modulestore().copy(self.user_id, source_course, dest_course, [head], [chapter2])
expected = ["head12345", "chapter1", "chapter3", "problem1"] expected = [
self._check_course(source_course, dest_course, expected, ["chapter2", "problem3_2"]) BlockKey("course", "head12345"),
BlockKey("chapter", "chapter1"),
BlockKey("chapter", "chapter3"),
BlockKey("problem", "problem1")
]
self._check_course(source_course, dest_course, expected, [BlockKey("chapter", "chapter2"), BlockKey("problem", "problem3_2")])
@contract(expected_blocks="list(BlockKey)", unexpected_blocks="list(BlockKey)")
def _check_course(self, source_course_loc, dest_course_loc, expected_blocks, unexpected_blocks): def _check_course(self, source_course_loc, dest_course_loc, expected_blocks, unexpected_blocks):
""" """
Check that the course has the expected blocks and does not have the unexpected blocks Check that the course has the expected blocks and does not have the unexpected blocks
...@@ -1721,9 +1739,8 @@ class TestPublish(SplitModuleTest): ...@@ -1721,9 +1739,8 @@ class TestPublish(SplitModuleTest):
history_info = modulestore().get_course_history_info(dest_course_loc) history_info = modulestore().get_course_history_info(dest_course_loc)
self.assertEqual(history_info['edited_by'], self.user_id) self.assertEqual(history_info['edited_by'], self.user_id)
for expected in expected_blocks: for expected in expected_blocks:
# since block_type has no impact on identity, we can just provide an empty string source = modulestore().get_item(source_course_loc.make_usage_key(expected.type, expected.id))
source = modulestore().get_item(source_course_loc.make_usage_key("", expected)) pub_copy = modulestore().get_item(dest_course_loc.make_usage_key(expected.type, expected.id))
pub_copy = modulestore().get_item(dest_course_loc.make_usage_key("", expected))
# everything except previous_version & children should be the same # everything except previous_version & children should be the same
self.assertEqual(source.category, pub_copy.category) self.assertEqual(source.category, pub_copy.category)
self.assertEqual( self.assertEqual(
...@@ -1743,21 +1760,28 @@ class TestPublish(SplitModuleTest): ...@@ -1743,21 +1760,28 @@ class TestPublish(SplitModuleTest):
self.assertEqual(field.read_from(source), field.read_from(pub_copy)) self.assertEqual(field.read_from(source), field.read_from(pub_copy))
for unexp in unexpected_blocks: for unexp in unexpected_blocks:
with self.assertRaises(ItemNotFoundError): with self.assertRaises(ItemNotFoundError):
modulestore().get_item(dest_course_loc.make_usage_key("", unexp)) modulestore().get_item(dest_course_loc.make_usage_key(unexp.type, unexp.id))
@contract(
source_children="list(BlockUsageLocator)",
dest_children="list(BlockUsageLocator)",
unexpected="list(BlockKey)"
)
def _compare_children(self, source_children, dest_children, unexpected): def _compare_children(self, source_children, dest_children, unexpected):
""" """
Ensure dest_children == source_children minus unexpected Ensure dest_children == source_children minus unexpected
""" """
dest_cursor = 0 source_block_keys = [
for child in source_children: src_key
child = child.version_agnostic() for src_key
if child.block_id in unexpected: in (BlockKey.from_usage_key(src) for src in source_children)
self.assertNotIn(child.block_id, [dest.block_id for dest in dest_children]) if src_key not in unexpected
else: ]
self.assertEqual(child.block_id, dest_children[dest_cursor].block_id) dest_block_keys = [BlockKey.from_usage_key(dest) for dest in dest_children]
dest_cursor += 1 for unexp in unexpected:
self.assertEqual(dest_cursor, len(dest_children)) self.assertNotIn(unexp, dest_block_keys)
self.assertEqual(source_block_keys, dest_block_keys)
class TestSchema(SplitModuleTest): class TestSchema(SplitModuleTest):
......
...@@ -12,6 +12,7 @@ import os ...@@ -12,6 +12,7 @@ import os
import pprint import pprint
import unittest import unittest
from contextlib import contextmanager
from mock import Mock from mock import Mock
from path import path from path import path
...@@ -174,12 +175,64 @@ def map_references(value, field, actual_course_key): ...@@ -174,12 +175,64 @@ def map_references(value, field, actual_course_key):
return value return value
class CourseComparisonTest(unittest.TestCase): class BulkAssertionManager(object):
"""
This provides a facility for making a large number of assertions, and seeing all of
the failures at once, rather than only seeing single failures.
"""
def __init__(self, test_case):
self._equal_expected = []
self._equal_actual = []
self._test_case = test_case
def assertEqual(self, expected, actual, description=None):
if description is None:
description = u"{!r} does not equal {!r}".format(expected, actual)
if expected != actual:
self._equal_expected.append((description, expected))
self._equal_actual.append((description, actual))
def run_assertions(self):
self._test_case.assertEqual(self._equal_expected, self._equal_actual)
class BulkAssertionTest(unittest.TestCase):
"""
This context manager provides a BulkAssertionManager to assert with,
and then calls `run_assertions` at the end of the block to validate all
of the assertions.
"""
def setUp(self, *args, **kwargs):
super(BulkAssertionTest, self).setUp(*args, **kwargs)
self._manager = None
@contextmanager
def bulk_assertions(self):
if self._manager:
yield
else:
try:
self._manager = BulkAssertionManager(self)
yield
finally:
self._manager.run_assertions()
self._manager = None
def assertEqual(self, expected, actual, message=None):
if self._manager is not None:
self._manager.assertEqual(expected, actual, message)
else:
super(BulkAssertionTest, self).assertEqual(expected, actual, message)
class CourseComparisonTest(BulkAssertionTest):
""" """
Mixin that has methods for comparing courses for equality. Mixin that has methods for comparing courses for equality.
""" """
def setUp(self): def setUp(self):
super(CourseComparisonTest, self).setUp()
self.field_exclusions = set() self.field_exclusions = set()
self.ignored_asset_keys = set() self.ignored_asset_keys = set()
...@@ -235,68 +288,72 @@ class CourseComparisonTest(unittest.TestCase): ...@@ -235,68 +288,72 @@ class CourseComparisonTest(unittest.TestCase):
self._assertCoursesEqual(expected_items, actual_items, actual_course_key, expect_drafts=True) self._assertCoursesEqual(expected_items, actual_items, actual_course_key, expect_drafts=True)
def _assertCoursesEqual(self, expected_items, actual_items, actual_course_key, expect_drafts=False): def _assertCoursesEqual(self, expected_items, actual_items, actual_course_key, expect_drafts=False):
self.assertEqual(len(expected_items), len(actual_items)) with self.bulk_assertions():
self.assertEqual(len(expected_items), len(actual_items))
actual_item_map = {
item.location.block_id: item def map_key(usage_key):
for item in actual_items return (usage_key.block_type, usage_key.block_id)
}
actual_item_map = {
for expected_item in expected_items: map_key(item.location): item
actual_item_location = actual_course_key.make_usage_key(expected_item.category, expected_item.location.block_id) for item in actual_items
# split and old mongo use different names for the course root but we don't know which }
# modulestore actual's come from here; so, assume old mongo and if that fails, assume split
if expected_item.location.category == 'course': for expected_item in expected_items:
actual_item_location = actual_item_location.replace(name=actual_item_location.run) actual_item_location = actual_course_key.make_usage_key(expected_item.category, expected_item.location.block_id)
actual_item = actual_item_map.get(actual_item_location.block_id) # split and old mongo use different names for the course root but we don't know which
# must be split # modulestore actual's come from here; so, assume old mongo and if that fails, assume split
if actual_item is None and expected_item.location.category == 'course': if expected_item.location.category == 'course':
actual_item_location = actual_item_location.replace(name='course') actual_item_location = actual_item_location.replace(name=actual_item_location.run)
actual_item = actual_item_map.get(actual_item_location.block_id) actual_item = actual_item_map.get(map_key(actual_item_location))
self.assertIsNotNone(actual_item, u'cannot find {} in {}'.format(actual_item_location, actual_item_map)) # must be split
if actual_item is None and expected_item.location.category == 'course':
# compare fields actual_item_location = actual_item_location.replace(name='course')
self.assertEqual(expected_item.fields, actual_item.fields) actual_item = actual_item_map.get(map_key(actual_item_location))
self.assertIsNotNone(actual_item, u'cannot find {} in {}'.format(map_key(actual_item_location), actual_item_map))
for field_name, field in expected_item.fields.iteritems():
if (expected_item.scope_ids.usage_id, field_name) in self.field_exclusions: # compare fields
continue self.assertEqual(expected_item.fields, actual_item.fields)
if (None, field_name) in self.field_exclusions: for field_name, field in expected_item.fields.iteritems():
continue if (expected_item.scope_ids.usage_id, field_name) in self.field_exclusions:
continue
# Children are handled specially
if field_name == 'children': if (None, field_name) in self.field_exclusions:
continue continue
exp_value = map_references(field.read_from(expected_item), field, actual_course_key) # Children are handled specially
actual_value = field.read_from(actual_item) if field_name == 'children':
self.assertEqual( continue
exp_value,
actual_value, exp_value = map_references(field.read_from(expected_item), field, actual_course_key)
"Field {!r} doesn't match between usages {} and {}: {!r} != {!r}".format( actual_value = field.read_from(actual_item)
field_name, self.assertEqual(
expected_item.scope_ids.usage_id,
actual_item.scope_ids.usage_id,
exp_value, exp_value,
actual_value, actual_value,
"Field {!r} doesn't match between usages {} and {}: {!r} != {!r}".format(
field_name,
expected_item.scope_ids.usage_id,
actual_item.scope_ids.usage_id,
exp_value,
actual_value,
)
) )
)
# compare children
# compare children self.assertEqual(expected_item.has_children, actual_item.has_children)
self.assertEqual(expected_item.has_children, actual_item.has_children) if expected_item.has_children:
if expected_item.has_children: expected_children = [
expected_children = [ (expected_item_child.location.block_type, expected_item_child.location.block_id)
(expected_item_child.location.block_type, expected_item_child.location.block_id) # get_children() rather than children to strip privates from public parents
# get_children() rather than children to strip privates from public parents for expected_item_child in expected_item.get_children()
for expected_item_child in expected_item.get_children() ]
] actual_children = [
actual_children = [ (item_child.location.block_type, item_child.location.block_id)
(item_child.location.block_type, item_child.location.block_id) # get_children() rather than children to strip privates from public parents
# get_children() rather than children to strip privates from public parents for item_child in actual_item.get_children()
for item_child in actual_item.get_children() ]
] self.assertEqual(expected_children, actual_children)
self.assertEqual(expected_children, actual_children)
def assertAssetEqual(self, expected_course_key, expected_asset, actual_course_key, actual_asset): def assertAssetEqual(self, expected_course_key, expected_asset, actual_course_key, actual_asset):
""" """
...@@ -339,10 +396,12 @@ class CourseComparisonTest(unittest.TestCase): ...@@ -339,10 +396,12 @@ class CourseComparisonTest(unittest.TestCase):
expected_content, expected_count = expected_store.get_all_content_for_course(expected_course_key) expected_content, expected_count = expected_store.get_all_content_for_course(expected_course_key)
actual_content, actual_count = actual_store.get_all_content_for_course(actual_course_key) actual_content, actual_count = actual_store.get_all_content_for_course(actual_course_key)
self.assertEqual(expected_count, actual_count) with self.bulk_assertions():
self._assertAssetsEqual(expected_course_key, expected_content, actual_course_key, actual_content)
self.assertEqual(expected_count, actual_count)
self._assertAssetsEqual(expected_course_key, expected_content, actual_course_key, actual_content)
expected_thumbs = expected_store.get_all_content_thumbnails_for_course(expected_course_key) expected_thumbs = expected_store.get_all_content_thumbnails_for_course(expected_course_key)
actual_thumbs = actual_store.get_all_content_thumbnails_for_course(actual_course_key) actual_thumbs = actual_store.get_all_content_thumbnails_for_course(actual_course_key)
self._assertAssetsEqual(expected_course_key, expected_thumbs, actual_course_key, actual_thumbs) self._assertAssetsEqual(expected_course_key, expected_thumbs, actual_course_key, actual_thumbs)
<html url_name="Duplicate_URL_Name">
This is test html.
</html>
<sequential display_name="New Subsection 10.20"> <sequential display_name="New Subsection 10.20">
<vertical url_name="3a7305c59c254ce9814093f98b913e8a"/> <vertical url_name="3a7305c59c254ce9814093f98b913e8a"/>
<vertical url_name="Duplicate_URL_Name"/>
</sequential> </sequential>
<vertical display_name="Duplicate URL Name">
<html url_name="Duplicate_URL_Name"/>
</vertical>
# Disable PyContract contract checking when running as a webserver
import contracts
contracts.disable_all()
import os import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lms.envs.aws") os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lms.envs.aws")
......
...@@ -127,6 +127,7 @@ rednose==0.3 ...@@ -127,6 +127,7 @@ rednose==0.3
selenium==2.42.1 selenium==2.42.1
splinter==0.5.4 splinter==0.5.4
testtools==0.9.34 testtools==0.9.34
PyContracts==1.6.4
# Used for Segment.io analytics # Used for Segment.io analytics
analytics-python==0.4.4 analytics-python==0.4.4
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment