Commit 62bccd4e by Don Mitchell

Split mongo correctly serializes and deserializes references

STUD-1592, STUD-1603, LMS-2642
parent 4944cc76
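In broad terms, the change makes the split mongo store translate reference fields both ways: on write, references are serialized through the location mapper to plain block ids; on read, stored block ids are rebuilt into full locators before they reach the XBlock field data. The snippet below is a minimal, self-contained sketch of that round trip; SimpleLocator, serialize_reference, and deserialize_reference are illustrative stand-ins, not the actual edx-platform locator API.

# Illustrative sketch only: stand-in types, not the edx-platform locator API.
from collections import namedtuple

# A locator pairs a course context with a block id (hypothetical simplified shape).
SimpleLocator = namedtuple('SimpleLocator', ['org', 'offering', 'branch', 'block_id'])

def serialize_reference(locator):
    """On write, store only the block id; the course context is implicit."""
    return locator.block_id

def deserialize_reference(course_entry, block_id):
    """On read, rebuild a full locator from the course entry plus the stored id."""
    return SimpleLocator(
        course_entry['org'], course_entry['offering'], course_entry['branch'], block_id
    )

course_entry = {'org': 'edX', 'offering': 'demo.2013', 'branch': 'draft'}
loc = deserialize_reference(course_entry, 'chapter1')
assert serialize_reference(loc) == 'chapter1'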
@@ -68,7 +68,7 @@ class TemplateTests(unittest.TestCase):
         self.assertIsInstance(test_chapter, SequenceDescriptor)
         # refetch parent which should now point to child
         test_course = modulestore('split').get_course(test_course.id.version_agnostic())
-        self.assertIn(test_chapter.location.block_id, test_course.children)
+        self.assertIn(test_chapter.location, test_course.children)
         with self.assertRaises(DuplicateCourseError):
             persistent_factories.PersistentCourseFactory.create(
...
@@ -127,15 +127,15 @@ class SplitMigrator(object):
                     block_id=new_locator.block_id,
                     fields=self._get_json_fields_translate_references(module, course_key, True)
                 )
-                awaiting_adoption[module.location] = new_locator.block_id
+                awaiting_adoption[module.location] = new_locator
-        for draft_location, new_block_id in awaiting_adoption.iteritems():
+        for draft_location, new_locator in awaiting_adoption.iteritems():
             for parent_loc in self.draft_modulestore.get_parent_locations(draft_location):
                 old_parent = self.draft_modulestore.get_item(parent_loc)
                 new_parent = self.split_modulestore.get_item(
                     self.loc_mapper.translate_location(old_parent.location, False)
                 )
                 # this only occurs if the parent was also awaiting adoption
-                if new_block_id in new_parent.children:
+                if any(new_locator == child.version_agnostic() for child in new_parent.children):
                     break
                 # find index for module: new_parent may be missing quite a few of old_parent's children
                 new_parent_cursor = 0
@@ -145,36 +145,36 @@ class SplitMigrator(object):
                     sibling_loc = self.loc_mapper.translate_location(old_child_loc, False)
                     # sibling may move cursor
                     for idx in range(new_parent_cursor, len(new_parent.children)):
-                        if new_parent.children[idx] == sibling_loc.block_id:
+                        if new_parent.children[idx].version_agnostic() == sibling_loc:
                             new_parent_cursor = idx + 1
                             break
-                new_parent.children.insert(new_parent_cursor, new_block_id)
+                new_parent.children.insert(new_parent_cursor, new_locator)
                 new_parent = self.split_modulestore.update_item(new_parent, user.id)

     def _get_json_fields_translate_references(self, xblock, old_course_id, published):
         """
         Return the json repr for explicitly set fields but convert all references to their block_id's
         """
-        # FIXME change split to take field values as pythonic values not json values
+        def get_translation(location):
+            """
+            Convert the location and add to loc mapper
+            """
+            return self.loc_mapper.translate_location(location, published, add_entry_if_missing=True)
+
         result = {}
         for field_name, field in xblock.fields.iteritems():
             if field.is_set_on(xblock):
-                if isinstance(field, Reference):
-                    result[field_name] = unicode(self.loc_mapper.translate_location(
-                        getattr(xblock, field_name), published, add_entry_if_missing=True
-                    ))
+                field_value = getattr(xblock, field_name)
+                if isinstance(field, Reference) and field_value is not None:
+                    result[field_name] = get_translation(field_value)
                 elif isinstance(field, ReferenceList):
                     result[field_name] = [
-                        unicode(self.loc_mapper.translate_location(
-                            ele, published, add_entry_if_missing=True
-                        )) for ele in getattr(xblock, field_name)
+                        get_translation(ele) for ele in field_value
                     ]
                 elif isinstance(field, ReferenceValueDict):
                     result[field_name] = {
-                        key: unicode(self.loc_mapper.translate_location(
-                            subvalue, published, add_entry_if_missing=True
-                        ))
-                        for key, subvalue in getattr(xblock, field_name).iteritems()
+                        key: get_translation(subvalue)
+                        for key, subvalue in field_value.iteritems()
                     }
                 else:
                     result[field_name] = field.read_json(xblock)
...
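The rewritten helper above covers the three reference field shapes (Reference, ReferenceList, ReferenceValueDict) through one local get_translation closure and skips a single Reference whose value is None. A rough, framework-free sketch of the same traversal pattern, with plain Python containers standing in for the XBlock field classes and an arbitrary convert callable standing in for the loc mapper:

# Rough sketch of the traversal pattern; plain types stand in for
# xblock.fields.Reference / ReferenceList / ReferenceValueDict.
def translate_explicit_fields(fields, is_reference, convert):
    """
    fields: {name: value} of explicitly set fields.
    is_reference(name): predicate marking which fields hold references.
    convert(ref): maps one reference to its serialized form.
    """
    result = {}
    for name, value in fields.items():
        if not is_reference(name):
            result[name] = value
        elif value is None:            # unset single reference: nothing to translate
            continue
        elif isinstance(value, dict):  # ReferenceValueDict-like shape
            result[name] = {key: convert(ref) for key, ref in value.items()}
        elif isinstance(value, list):  # ReferenceList-like shape
            result[name] = [convert(ref) for ref in value]
        else:                          # single Reference
            result[name] = convert(value)
    return result

fields = {'display_name': 'Week 1', 'children': ['html1', 'video1'], 'prereq': None}
print(translate_explicit_fields(fields, lambda n: n in ('children', 'prereq'), str.upper))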
@@ -66,7 +66,12 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
         json_data = self.module_data.get(block_id)
         if json_data is None:
             # deeper than initial descendant fetch or doesn't exist
-            self.modulestore.cache_items(self, [block_id], lazy=self.lazy)
+            course_info = course_entry_override or self.course_entry
+            course_key = CourseLocator(
+                course_info.get('org'), course_info.get('offering'), course_info.get('branch'),
+                course_info['structure']['_id']
+            )
+            self.modulestore.cache_items(self, [block_id], course_key, lazy=self.lazy)
             json_data = self.module_data.get(block_id)
             if json_data is None:
                 raise ItemNotFoundError(block_id)
@@ -112,9 +117,12 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
             block_id=block_id,
         )
+        converted_fields = self.modulestore.convert_references_to_keys(
+            block_locator.course_key, class_, json_data.get('fields', {}), self.course_entry['structure']['blocks'],
+        )
         kvs = SplitMongoKVS(
             definition,
-            json_data.get('fields', {}),
+            converted_fields,
             json_data.get('_inherited_settings'),
         )
         field_data = KvsFieldData(kvs)
...
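On the read path, the descriptor system now asks the modulestore to convert stored references back into usage keys before the key-value store is built. The exact signature of convert_references_to_keys belongs to the split modulestore itself; the sketch below only illustrates the assumed idea of walking a field dict and mapping raw block ids through the structure's block table, using hypothetical names throughout.

# Assumed behaviour only: a toy version of turning stored block ids back into
# (block_type, block_id) keys using the structure's block table.
def convert_block_ids_to_keys(fields, blocks, reference_fields):
    """
    fields: raw field dict from the stored structure.
    blocks: {block_id: {'category': ...}} table from the course structure.
    reference_fields: names of fields whose values are block ids.
    """
    def to_key(block_id):
        return (blocks[block_id]['category'], block_id)

    converted = dict(fields)
    for name in reference_fields:
        if name not in converted:
            continue
        value = converted[name]
        if isinstance(value, list):
            converted[name] = [to_key(bid) for bid in value]
        elif isinstance(value, dict):
            converted[name] = {key: to_key(bid) for key, bid in value.items()}
        else:
            converted[name] = to_key(value)
    return converted

blocks = {'chap1': {'category': 'chapter'}, 'seq1': {'category': 'sequential'}}
print(convert_block_ids_to_keys({'children': ['chap1', 'seq1']}, blocks, ['children']))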
@@ -8,7 +8,7 @@ class DefinitionLazyLoader(object):
     object doesn't force access during init but waits until client wants the
     definition. Only works if the modulestore is a split mongo store.
     """
-    def __init__(self, modulestore, block_type, definition_id):
+    def __init__(self, modulestore, block_type, definition_id, field_converter):
         """
         Simple placeholder for yet-to-be-fetched data
         :param modulestore: the pymongo db connection with the definitions
@@ -16,6 +16,7 @@ class DefinitionLazyLoader(object):
         """
         self.modulestore = modulestore
         self.definition_locator = DefinitionLocator(block_type, definition_id)
+        self.field_converter = field_converter

     def fetch(self):
         """
...
@@ -110,6 +110,7 @@ class SplitMongoKVS(InheritanceKeyValueStore):
         if isinstance(self._definition, DefinitionLazyLoader):
             persisted_definition = self._definition.fetch()
             if persisted_definition is not None:
-                self._fields.update(persisted_definition.get('fields'))
+                fields = self._definition.field_converter(persisted_definition.get('fields'))
+                self._fields.update(fields)
                 # do we want to cache any of the edit_info?
                 self._definition = None  # already loaded
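Taken together, the two hunks above carry the reference converter into the lazy-loading path: DefinitionLazyLoader keeps a field_converter callable, and SplitMongoKVS applies it when the deferred definition is finally fetched, so lazily loaded fields get the same treatment as eagerly cached ones. A minimal sketch of that idea, with illustrative names and the converter applied inside fetch for brevity:

# Minimal sketch: carry a field converter into a lazy loader and apply it when
# the deferred payload is finally fetched. Names here are illustrative only.
class LazyDefinition(object):
    def __init__(self, fetch_raw, field_converter):
        self._fetch_raw = fetch_raw          # callable returning the stored definition
        self.field_converter = field_converter

    def fetch(self):
        raw = self._fetch_raw()
        # convert stored references in the definition's fields before handing them back
        raw['fields'] = self.field_converter(raw.get('fields', {}))
        return raw

loader = LazyDefinition(
    lambda: {'fields': {'children': ['html1']}},
    lambda fields: {name: [ref.upper() for ref in refs] for name, refs in fields.items()},
)
print(loader.fetch())   # {'fields': {'children': ['HTML1']}}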
@@ -10,10 +10,9 @@ from xmodule.modulestore.split_migrator import SplitMigrator
 from xmodule.modulestore.mongo import draft
 from xmodule.modulestore.tests import test_location_mapper
 from xmodule.modulestore.tests.test_split_w_old_mongo import SplitWMongoCourseBoostrapper
-from nose.tools import nottest
+from xblock.fields import Reference, ReferenceList, ReferenceValueDict


-@nottest
 class TestMigration(SplitWMongoCourseBoostrapper):
     """
     Test the split migrator
@@ -181,8 +180,8 @@ class TestMigration(SplitWMongoCourseBoostrapper):
             self.loc_mapper.translate_locator_to_location(split_dag_root.location).replace(revision=None)
         )
         # compare all fields but children
-        for name in presplit_dag_root.fields.iterkeys():
-            if name != 'children':
+        for name, field in presplit_dag_root.fields.iteritems():
+            if not isinstance(field, (Reference, ReferenceList, ReferenceValueDict)):
                 self.assertEqual(
                     getattr(presplit_dag_root, name),
                     getattr(split_dag_root, name),
@@ -190,19 +189,7 @@ class TestMigration(SplitWMongoCourseBoostrapper):
                         split_dag_root.location, name, getattr(presplit_dag_root, name), getattr(split_dag_root, name)
                     )
                 )
-        # test split get_item using old Location: old draft store didn't set revision for things above vertical
-        # but split does distinguish these; so, set revision if not published
-        if not published:
-            location = draft.as_draft(presplit_dag_root.location)
-        else:
-            location = presplit_dag_root.location
-        refetched = self.split_mongo.get_item(location)
-        self.assertEqual(
-            refetched.location, split_dag_root.location,
-            "Fetch from split via old Location {} not same as new {}".format(
-                refetched.location, split_dag_root.location
-            )
-        )
         # compare children
         if presplit_dag_root.has_children:
             self.assertEqual(
...
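With references now stored as locators rather than raw strings, the migration test skips reference-typed fields in the generic field-by-field comparison and compares children separately. When a reference field does need to be checked across the two stores, one option (assuming a translation function from old to new references is available) is to map the old value first and then assert equality; the helper below is hypothetical, not part of the test suite.

# Hypothetical helper: compare a reference field across stores by translating
# the old-style value through a provided mapping before asserting equality.
def assert_reference_equal(old_value, new_value, translate):
    """translate(old_ref) -> expected new-style reference."""
    if isinstance(old_value, list):
        assert [translate(ref) for ref in old_value] == list(new_value)
    elif isinstance(old_value, dict):
        assert {key: translate(ref) for key, ref in old_value.items()} == dict(new_value)
    else:
        assert translate(old_value) == new_value

assert_reference_equal(['old_chap'], ['NEW:old_chap'], lambda ref: 'NEW:' + ref)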