Commit 62bccd4e by Don Mitchell

Split mongo correctly serializes and deserializes references

STUD-1592, STUD-1603, LMS-2642
parent 4944cc76
......@@ -68,7 +68,7 @@ class TemplateTests(unittest.TestCase):
self.assertIsInstance(test_chapter, SequenceDescriptor)
# refetch parent which should now point to child
test_course = modulestore('split').get_course(test_course.id.version_agnostic())
self.assertIn(test_chapter.location.block_id, test_course.children)
self.assertIn(test_chapter.location, test_course.children)
with self.assertRaises(DuplicateCourseError):
persistent_factories.PersistentCourseFactory.create(
......
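Note: the test change above is the heart of the commit. children now holds full usage keys rather than bare block_id strings, so membership checks compare locations and normalize away version guids. A minimal sketch, using a hypothetical helper name:

def contains_child(parent, child_location):
    # version guids can differ between a freshly created child and a
    # refetched parent, so compare the version-agnostic forms
    return any(
        child.version_agnostic() == child_location.version_agnostic()
        for child in parent.children
    )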
......@@ -127,15 +127,15 @@ class SplitMigrator(object):
block_id=new_locator.block_id,
fields=self._get_json_fields_translate_references(module, course_key, True)
)
awaiting_adoption[module.location] = new_locator.block_id
for draft_location, new_block_id in awaiting_adoption.iteritems():
awaiting_adoption[module.location] = new_locator
for draft_location, new_locator in awaiting_adoption.iteritems():
for parent_loc in self.draft_modulestore.get_parent_locations(draft_location):
old_parent = self.draft_modulestore.get_item(parent_loc)
new_parent = self.split_modulestore.get_item(
self.loc_mapper.translate_location(old_parent.location, False)
)
# this only occurs if the parent was also awaiting adoption
if new_block_id in new_parent.children:
if any(new_locator == child.version_agnostic() for child in new_parent.children):
break
# find index for module: new_parent may be missing quite a few of old_parent's children
new_parent_cursor = 0
......@@ -145,36 +145,36 @@ class SplitMigrator(object):
sibling_loc = self.loc_mapper.translate_location(old_child_loc, False)
# sibling may move cursor
for idx in range(new_parent_cursor, len(new_parent.children)):
if new_parent.children[idx] == sibling_loc.block_id:
if new_parent.children[idx].version_agnostic() == sibling_loc:
new_parent_cursor = idx + 1
break
new_parent.children.insert(new_parent_cursor, new_block_id)
new_parent.children.insert(new_parent_cursor, new_locator)
new_parent = self.split_modulestore.update_item(new_parent, user.id)
def _get_json_fields_translate_references(self, xblock, old_course_id, published):
"""
Return the json repr for explicitly set fields, converting all references to their new locators
"""
# FIXME change split to take field values as pythonic values not json values
def get_translation(location):
"""
Convert the location and add to loc mapper
"""
return self.loc_mapper.translate_location(location, published, add_entry_if_missing=True)
result = {}
for field_name, field in xblock.fields.iteritems():
if field.is_set_on(xblock):
if isinstance(field, Reference):
result[field_name] = unicode(self.loc_mapper.translate_location(
getattr(xblock, field_name), published, add_entry_if_missing=True
))
field_value = getattr(xblock, field_name)
if isinstance(field, Reference) and field_value is not None:
result[field_name] = get_translation(field_value)
elif isinstance(field, ReferenceList):
result[field_name] = [
unicode(self.loc_mapper.translate_location(
ele, published, add_entry_if_missing=True
)) for ele in getattr(xblock, field_name)
get_translation(ele) for ele in field_value
]
elif isinstance(field, ReferenceValueDict):
result[field_name] = {
key: unicode(self.loc_mapper.translate_location(
subvalue, published, add_entry_if_missing=True
))
for key, subvalue in getattr(xblock, field_name).iteritems()
key: get_translation(subvalue)
for key, subvalue in field_value.iteritems()
}
else:
result[field_name] = field.read_json(xblock)
......
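The migrator hunk above folds the repeated translate_location calls into a get_translation helper and dispatches on the three reference field types. Condensed into one hypothetical function (Python 2 idioms, matching the codebase):

def translate_reference_fields(xblock, translate):
    """Serialize explicitly set fields, translating any reference values."""
    result = {}
    for field_name, field in xblock.fields.iteritems():
        if not field.is_set_on(xblock):
            continue
        value = getattr(xblock, field_name)
        if isinstance(field, Reference) and value is not None:
            result[field_name] = translate(value)
        elif isinstance(field, ReferenceList):
            result[field_name] = [translate(ele) for ele in value]
        elif isinstance(field, ReferenceValueDict):
            result[field_name] = dict(
                (key, translate(subvalue))
                for key, subvalue in value.iteritems()
            )
        else:
            result[field_name] = field.read_json(xblock)
    return result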
......@@ -66,7 +66,12 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
json_data = self.module_data.get(block_id)
if json_data is None:
# deeper than initial descendant fetch or doesn't exist
self.modulestore.cache_items(self, [block_id], lazy=self.lazy)
course_info = course_entry_override or self.course_entry
course_key = CourseLocator(
course_info.get('org'), course_info.get('offering'), course_info.get('branch'),
course_info['structure']['_id']
)
self.modulestore.cache_items(self, [block_id], course_key, lazy=self.lazy)
json_data = self.module_data.get(block_id)
if json_data is None:
raise ItemNotFoundError(block_id)
......@@ -112,9 +117,12 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
block_id=block_id,
)
converted_fields = self.modulestore.convert_references_to_keys(
block_locator.course_key, class_, json_data.get('fields', {}), self.course_entry['structure']['blocks'],
)
kvs = SplitMongoKVS(
definition,
json_data.get('fields', {}),
converted_fields,
json_data.get('_inherited_settings'),
)
field_data = KvsFieldData(kvs)
......
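Both changes to CachingDescriptorSystem serve one goal: a block can only be deserialized relative to a course, so the system now derives a CourseLocator from its course entry before caching items or converting fields. The construction mirrors what the tests in this commit do explicitly:

# illustration only; in the real code the values come from course_entry
course_key = CourseLocator(org='testx', offering='GreekHero', branch='draft')
# a bare block_id from a structure document becomes a course-relative key
usage_key = course_key.make_usage_key('chapter', 'chapter1')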
......@@ -8,7 +8,7 @@ class DefinitionLazyLoader(object):
object doesn't force access during init but waits until the client wants the
definition. Only works if the modulestore is a split mongo store.
"""
def __init__(self, modulestore, block_type, definition_id):
def __init__(self, modulestore, block_type, definition_id, field_converter):
"""
Simple placeholder for yet-to-be-fetched data
:param modulestore: the pymongo db connection with the definitions
......@@ -16,6 +16,7 @@ class DefinitionLazyLoader(object):
"""
self.modulestore = modulestore
self.definition_locator = DefinitionLocator(block_type, definition_id)
self.field_converter = field_converter
def fetch(self):
"""
......
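The loader cannot convert references at construction time because conversion needs course context that only the modulestore has, so the converter arrives as an injected callback and runs when the definition is finally fetched. The shape of the idea, sketched under assumed names:

class LazyDefinition(object):
    """Placeholder that defers both the db read and reference conversion."""
    def __init__(self, fetch_raw, field_converter):
        self.fetch_raw = fetch_raw              # deferred pymongo lookup
        self.field_converter = field_converter  # block_id -> usage key mapper

    def fetch(self):
        raw = self.fetch_raw()
        raw['fields'] = self.field_converter(raw.get('fields', {}))
        return raw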
......@@ -60,12 +60,12 @@ from xmodule.modulestore.locator import (
)
from xmodule.modulestore.exceptions import InsufficientSpecificationError, VersionConflictError, DuplicateItemError, \
DuplicateCourseError
from xmodule.modulestore import inheritance, ModuleStoreWriteBase, Location, SPLIT_MONGO_MODULESTORE_TYPE
from xmodule.modulestore import inheritance, ModuleStoreWriteBase, SPLIT_MONGO_MODULESTORE_TYPE
from ..exceptions import ItemNotFoundError
from .definition_lazy_loader import DefinitionLazyLoader
from .caching_descriptor_system import CachingDescriptorSystem
from xblock.fields import Scope
from xblock.fields import Scope, Reference, ReferenceList, ReferenceValueDict
from bson.objectid import ObjectId
from xmodule.modulestore.split_mongo.mongo_connection import MongoConnection
from xblock.core import XBlock
......@@ -132,12 +132,13 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
self.render_template = render_template
self.i18n_service = i18n_service
def cache_items(self, system, base_block_ids, depth=0, lazy=True):
def cache_items(self, system, base_block_ids, course_key, depth=0, lazy=True):
'''
Handles caching of items once inheritance and any other one-time,
per-course-per-fetch operations are done.
:param system: a CachingDescriptorSystem
:param base_block_ids: list of block_ids to fetch
:param course_key: the destination course providing the context
:param depth: how deep below these to prefetch
:param lazy: whether to fetch definitions or use placeholders
'''
......@@ -152,7 +153,13 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
if lazy:
for block in new_module_data.itervalues():
block['definition'] = DefinitionLazyLoader(self, block['category'], block['definition'])
block['definition'] = DefinitionLazyLoader(
self, block['category'], block['definition'],
# bind block via a default argument: a bare closure would late-bind the loop variable
lambda fields, block=block: self.convert_references_to_keys(
course_key, system.load_block_type(block['category']),
fields, system.course_entry['structure']['blocks'],
)
)
else:
# Load all descendants by id
descendent_definitions = self.db_connection.find_matching_definitions({
......@@ -164,7 +171,12 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
for block in new_module_data.itervalues():
if block['definition'] in definitions:
block['fields'].update(definitions[block['definition']].get('fields'))
converted_fields = self.convert_references_to_keys(
course_key, system.load_block_type(block['category']),
definitions[block['definition']].get('fields'),
system.course_entry['structure']['blocks'],
)
block['fields'].update(converted_fields)
system.module_data.update(new_module_data)
return system.module_data
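One subtlety in the lazy branch above: the converter lambda is created inside a loop, and Python closures late-bind loop variables, so block must be captured through a default argument or every definition would be converted with the last block's type. The pitfall in isolation:

makers = [lambda: i for i in range(3)]
print [f() for f in makers]        # [2, 2, 2] -- every closure sees the final i

makers = [lambda i=i: i for i in range(3)]
print [f() for f in makers]        # [0, 1, 2] -- value captured per iteration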
......@@ -195,7 +207,13 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
services=services,
)
self._add_cache(course_entry['structure']['_id'], system)
self.cache_items(system, block_ids, depth, lazy)
course_key = CourseLocator(
version_guid=course_entry['structure']['_id'],
org=course_entry.get('org'),
offering=course_entry.get('offering'),
branch=course_entry.get('branch'),
)
self.cache_items(system, block_ids, course_key, depth, lazy)
return [system.load_item(block_id, course_entry) for block_id in block_ids]
def _get_cache(self, course_version_guid):
......@@ -359,16 +377,6 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
descendants.
raises InsufficientSpecificationError or ItemNotFoundError
"""
# intended for temporary support of some pointers being old-style
if isinstance(usage_key, Location):
if self.loc_mapper is None:
raise InsufficientSpecificationError('No location mapper configured')
else:
usage_key = self.loc_mapper.translate_location(
usage_key,
usage_key.revision is None,
add_entry_if_missing=False
)
assert isinstance(usage_key, BlockUsageLocator)
course = self._lookup_course(usage_key)
items = self._load_items(course, [usage_key.block_id], depth, lazy=True)
......@@ -631,7 +639,7 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
:param user_id: request.user object
"""
new_def_data = self._filter_special_fields(new_def_data)
new_def_data = self._serialize_fields(category, new_def_data)
new_id = ObjectId()
document = {
'_id': new_id,
......@@ -656,7 +664,6 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
:param user_id: request.user
"""
new_def_data = self._filter_special_fields(new_def_data)
def needs_saved():
for key, value in new_def_data.iteritems():
if key not in old_definition['fields'] or value != old_definition['fields'][key]:
......@@ -669,8 +676,9 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
# actual change b/c the descriptor and cache probably point to the same objects
old_definition = self.db_connection.get_definition(definition_locator.definition_id)
if old_definition is None:
raise ItemNotFoundError(definition_locator.to_deprecated_string())
raise ItemNotFoundError(definition_locator)
new_def_data = self._serialize_fields(old_definition['category'], new_def_data)
if needs_saved():
# new id to create new version
old_definition['_id'] = ObjectId()
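For context on the ObjectId() line above: split never mutates a stored definition. An update inserts a new document under a fresh _id, leaving every historical version addressable. A hedged sketch of that copy-on-write step (the insert call and edit_info layout here are assumptions, not the module's API):

from bson.objectid import ObjectId

def version_definition(db_connection, old_definition, new_fields, user_id):
    new_version = dict(old_definition)
    new_version['_id'] = ObjectId()  # fresh id == new immutable version
    new_version['fields'] = new_fields
    new_version['edit_info'] = {
        'edited_by': user_id,
        'previous_version': old_definition['_id'],
    }
    db_connection.insert(new_version)  # hypothetical call; old doc untouched
    return new_version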
......@@ -791,7 +799,7 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
self._update_block_in_structure(new_structure, new_block_id, {
"category": category,
"definition": definition_locator.definition_id,
"fields": block_fields,
"fields": self._serialize_fields(category, block_fields),
'edit_info': {
'edited_on': datetime.datetime.now(UTC),
'edited_by': user_id,
......@@ -891,7 +899,7 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
block_fields = partitioned_fields.setdefault(Scope.settings, {})
if Scope.children in partitioned_fields:
block_fields.update(partitioned_fields[Scope.children])
definition_fields = self._filter_special_fields(partitioned_fields.get(Scope.content, {}))
definition_fields = self._serialize_fields(root_category, partitioned_fields.get(Scope.content, {}))
# build from inside out: definition, structure, index entry
# if building a wholly new structure
......@@ -934,7 +942,7 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
encoded_block_id = LocMapperStore.encode_key_for_mongo(draft_structure['root'])
root_block = draft_structure['blocks'][encoded_block_id]
if block_fields is not None:
root_block['fields'].update(block_fields)
root_block['fields'].update(self._serialize_fields(root_category, block_fields))
if definition_fields is not None:
definition = self.db_connection.get_definition(root_block['definition'])
definition['fields'].update(definition_fields)
......@@ -985,17 +993,20 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
descriptor.definition_locator, is_updated = self.update_definition_from_data(
descriptor.definition_locator, descriptor.get_explicitly_set_fields_by_scope(Scope.content), user_id)
# check children
original_entry = self._get_block_from_structure(original_structure, descriptor.location.block_id)
is_updated = is_updated or (
descriptor.has_children and original_entry['fields'].get('children', []) != descriptor.children
)
# check metadata
settings = descriptor.get_explicitly_set_fields_by_scope(Scope.settings)
settings = self._serialize_fields(descriptor.category, settings)
if not is_updated:
is_updated = self._compare_settings(
descriptor.get_explicitly_set_fields_by_scope(Scope.settings),
original_entry['fields']
)
is_updated = self._compare_settings(settings, original_entry['fields'])
# check children
if descriptor.has_children:
serialized_children = [child.block_id for child in descriptor.children]
is_updated = is_updated or original_entry['fields'].get('children', []) != serialized_children
if is_updated:
settings['children'] = serialized_children
# if updated, rev the structure
if is_updated:
......@@ -1003,9 +1014,7 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
block_data = self._get_block_from_structure(new_structure, descriptor.location.block_id)
block_data["definition"] = descriptor.definition_locator.definition_id
block_data["fields"] = descriptor.get_explicitly_set_fields_by_scope(Scope.settings)
if descriptor.has_children:
block_data['fields']["children"] = descriptor.children
block_data["fields"] = settings
new_id = new_structure['_id']
block_data['edit_info'] = {
......@@ -1105,7 +1114,7 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
def _persist_subdag(self, xblock, user_id, structure_blocks, new_id):
# persist the definition if persisted != passed
new_def_data = self._filter_special_fields(xblock.get_explicitly_set_fields_by_scope(Scope.content))
new_def_data = self._serialize_fields(xblock.category, xblock.get_explicitly_set_fields_by_scope(Scope.content))
is_updated = False
if xblock.definition_locator is None or isinstance(xblock.definition_locator.definition_id, LocalId):
xblock.definition_locator = self.create_definition_from_data(
......@@ -1137,10 +1146,11 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
is_updated = self._persist_subdag(child_block, user_id, structure_blocks, new_id) or is_updated
children.append(child_block.location.block_id)
else:
children.append(child)
children.append(child.block_id)
is_updated = is_updated or structure_blocks[encoded_block_id]['fields']['children'] != children
block_fields = xblock.get_explicitly_set_fields_by_scope(Scope.settings)
block_fields = self._serialize_fields(xblock.category, block_fields)
if not is_new and not is_updated:
is_updated = self._compare_settings(block_fields, structure_blocks[encoded_block_id]['fields'])
if children:
......@@ -1204,12 +1214,14 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
Note, if the branch doesn't exist, then the source_course structure's root must be in subtree_list;
otherwise, the publish will violate the parents-must-exist rule.
:param subtree_list: a list of xblock ids whose subtrees to publish. The ids can be the block
id or BlockUsageLocator. If a Locator and if the Locator's course id != source_course, this will
raise InvalidLocationError(locator)
:param subtree_list: a list of usage keys whose subtrees to publish.
:param blacklist: a list of xblock ids to not change in the destination: i.e., don't add
:param blacklist: a list of usage keys to not change in the destination: i.e., don't add
if not there, don't update if there.
Raises:
ItemNotFoundError: if it cannot find the course, or if the request is to publish a
subtree whose ancestors up to and including the course root are not published.
"""
# get the destination's index, and source and destination structures.
source_structure = self._lookup_course(source_course)['structure']
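As the revised docstring says, subtree_list and blacklist now take usage keys. A call shaped like the tests later in this commit:

source = CourseLocator(org='testx', offering='GreekHero', branch='draft')
dest = CourseLocator(org='testx', offering='GreekHero', branch='published')
head = source.make_usage_key('course', 'head12345')
chapter2 = source.make_usage_key('chapter', 'chapter2')
store.xblock_publish('test@edx.org', source, dest, [head], [chapter2])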
......@@ -1219,20 +1231,22 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
raise ItemNotFoundError(destination_course)
if destination_course.branch not in index_entry['versions']:
# must be publishing the dag root if there's no current dag
if source_structure['root'] not in subtree_list:
raise ItemNotFoundError(source_structure['root'])
root_block_id = source_structure['root']
if not any(root_block_id == subtree.block_id for subtree in subtree_list):
raise ItemNotFoundError(u'Must publish course root {}'.format(root_block_id))
# create branch
destination_structure = self._new_structure(user_id, source_structure['root'])
destination_structure = self._new_structure(user_id, root_block_id)
else:
destination_structure = self._lookup_course(destination_course)['structure']
destination_structure = self._version_structure(destination_structure, user_id)
blacklist = [shunned.block_id for shunned in blacklist or []]
# iterate over subtree list filtering out blacklist.
orphans = set()
destination_blocks = destination_structure['blocks']
for subtree_root in subtree_list:
# find the parents and put root in the right sequence
parents = self._get_parents_from_structure(subtree_root, source_structure)
parents = self._get_parents_from_structure(subtree_root.block_id, source_structure)
if not all(parent in destination_blocks for parent in parents):
raise ItemNotFoundError(parents)
for parent_loc in parents:
......@@ -1240,12 +1254,12 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
self._sync_children(
source_structure['blocks'][parent_loc],
destination_blocks[parent_loc],
subtree_root
subtree_root.block_id
))
# update/create the subtree and its children in destination (skipping blacklist)
orphans.update(
self._publish_subdag(
user_id, subtree_root, source_structure['blocks'], destination_blocks, blacklist or []
user_id, subtree_root.block_id, source_structure['blocks'], destination_blocks, blacklist
)
)
# remove any remaining orphans
......@@ -1444,6 +1458,47 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
# clear cache again b/c inheritance may be wrong over orphans
self._clear_cache(original_structure['_id'])
def convert_references_to_keys(self, course_key, xblock_class, jsonfields, blocks):
"""
Convert the given serialized fields to the deserialized values by finding all references
and converting them.
:param jsonfields: the serialized copy of the xblock's fields
"""
def robust_usage_key(block_id):
"""
create a course_key relative usage key for the block_id. If the block_id is in blocks,
use its correct category; otherwise, use 'unknown'.
The purpose for this is that some operations add pointers as they build up the
structure without worrying about order of creation. Because the category of the
usage_key is for the most part inert, it's better to hack a value than to work
out a dependency graph algorithm for those functions which may prereference blocks.
"""
# if this was taken from cache, then its fields are already converted
if isinstance(block_id, BlockUsageLocator):
return block_id
try:
return course_key.make_usage_key(
blocks[LocMapperStore.encode_key_for_mongo(block_id)]['category'], block_id
)
except KeyError:
return course_key.make_usage_key('unknown', block_id)
xblock_class = self.mixologist.mix(xblock_class)
for field_name, value in jsonfields.iteritems():
if value:
field = xblock_class.fields.get(field_name)
if field is None:
continue
elif isinstance(field, Reference):
jsonfields[field_name] = robust_usage_key(value)
elif isinstance(field, ReferenceList):
jsonfields[field_name] = [robust_usage_key(ele) for ele in value]
elif isinstance(field, ReferenceValueDict):
for key, subvalue in value.iteritems():
assert isinstance(subvalue, basestring)
value[key] = robust_usage_key(subvalue)
return jsonfields
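# A hedged usage sketch of the method above (sample data invented; the
# descriptor class normally comes from system.load_block_type):
#   course_key = CourseLocator(org='testx', offering='GreekHero', branch='draft')
#   fields = store.convert_references_to_keys(
#       course_key, SequenceDescriptor, {'children': ['chapter1']},
#       structure['blocks'],
#   )
#   # fields['children'][0] == course_key.make_usage_key('chapter', 'chapter1')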
def _get_index_if_valid(self, locator, force=False, continue_version=False):
"""
If the locator identifies a course and points to its draft (or plausibly its draft),
......@@ -1514,15 +1569,36 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
index_entry['versions'][branch] = new_id
self.db_connection.update_course_index(index_entry)
def _filter_special_fields(self, fields):
def _serialize_fields(self, category, fields):
"""
Convert any references to their serialized form.
Remove any fields which split or its kvs computes or adds but does not want persisted.
:param fields: a dict of fields
"""
assert isinstance(fields, dict)
xblock_class = XBlock.load_class(category, self.default_class)
xblock_class = self.mixologist.mix(xblock_class)
for field_name, value in fields.iteritems():
if value:
if isinstance(xblock_class.fields[field_name], Reference):
fields[field_name] = value.block_id
elif isinstance(xblock_class.fields[field_name], ReferenceList):
fields[field_name] = [
ele.block_id for ele in value
]
elif isinstance(xblock_class.fields[field_name], ReferenceValueDict):
for key, subvalue in value.iteritems():
assert isinstance(subvalue, Location)
value[key] = subvalue.block_id
# I think these are obsolete conditions; the warnings below are here to confirm that
if 'location' in fields:
log.warn('attempt to persist location')
del fields['location']
if 'category' in fields:
log.warn('attempt to persist category')
del fields['category']
return fields
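_serialize_fields is the write-side inverse of convert_references_to_keys: usage keys collapse back to bare block_ids before the document reaches mongo. A hedged round-trip illustration:

course_key = CourseLocator(org='testx', offering='GreekHero', branch='draft')
fields = {'children': [course_key.make_usage_key('problem', 'problem1')]}
fields = store._serialize_fields('sequential', fields)
assert fields == {'children': ['problem1']}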
......@@ -1616,7 +1692,8 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
user_id, new_block['category'],
self._filter_blacklist(copy.copy(new_block['fields']), blacklist),
new_block['definition'],
new_block['edit_info']['update_version']
new_block['edit_info']['update_version'],
raw=True
)
for child in destination_block['fields'].get('children', []):
if child not in blacklist:
......@@ -1642,7 +1719,17 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
self._delete_if_true_orphan(child, structure)
del structure['blocks'][encoded_block_id]
def _new_block(self, user_id, category, block_fields, definition_id, new_id):
def _new_block(self, user_id, category, block_fields, definition_id, new_id, raw=False):
"""
Create the core document structure for a block.
:param block_fields: the settings- and children-scoped fields, as a dict or SON
:param definition_id: the pointer to the content scoped fields
:param new_id: the structure's version id
:param raw: true if this block already has all references serialized
"""
if not raw:
block_fields = self._serialize_fields(category, block_fields)
return {
'category': category,
'definition': definition_id,
......
......@@ -110,6 +110,7 @@ class SplitMongoKVS(InheritanceKeyValueStore):
if isinstance(self._definition, DefinitionLazyLoader):
persisted_definition = self._definition.fetch()
if persisted_definition is not None:
self._fields.update(persisted_definition.get('fields'))
fields = self._definition.field_converter(persisted_definition.get('fields'))
self._fields.update(fields)
# do we want to cache any of the edit_info?
self._definition = None # already loaded
......@@ -10,10 +10,9 @@ from xmodule.modulestore.split_migrator import SplitMigrator
from xmodule.modulestore.mongo import draft
from xmodule.modulestore.tests import test_location_mapper
from xmodule.modulestore.tests.test_split_w_old_mongo import SplitWMongoCourseBoostrapper
from nose.tools import nottest
from xblock.fields import Reference, ReferenceList, ReferenceValueDict
@nottest
class TestMigration(SplitWMongoCourseBoostrapper):
"""
Test the split migrator
......@@ -181,8 +180,8 @@ class TestMigration(SplitWMongoCourseBoostrapper):
self.loc_mapper.translate_locator_to_location(split_dag_root.location).replace(revision=None)
)
# compare all fields but children
for name in presplit_dag_root.fields.iterkeys():
if name != 'children':
for name, field in presplit_dag_root.fields.iteritems():
if not isinstance(field, (Reference, ReferenceList, ReferenceValueDict)):
self.assertEqual(
getattr(presplit_dag_root, name),
getattr(split_dag_root, name),
......@@ -190,19 +189,7 @@ class TestMigration(SplitWMongoCourseBoostrapper):
split_dag_root.location, name, getattr(presplit_dag_root, name), getattr(split_dag_root, name)
)
)
# test split get_item using old Location: old draft store didn't set revision for things above vertical
# but split does distinguish these; so, set revision if not published
if not published:
location = draft.as_draft(presplit_dag_root.location)
else:
location = presplit_dag_root.location
refetched = self.split_mongo.get_item(location)
self.assertEqual(
refetched.location, split_dag_root.location,
"Fetch from split via old Location {} not same as new {}".format(
refetched.location, split_dag_root.location
)
)
# compare children
if presplit_dag_root.has_children:
self.assertEqual(
......
......@@ -482,7 +482,7 @@ class SplitModuleTest(unittest.TestCase):
block_id="head23456"
)
destination = CourseLocator(org="testx", offering="wonderful", branch="published")
split_store.xblock_publish("test@edx.org", to_publish, destination, [to_publish.block_id], None)
split_store.xblock_publish("test@edx.org", to_publish, destination, [to_publish], None)
def tearDown(self):
"""
......@@ -628,7 +628,9 @@ class SplitModuleCourseTests(SplitModuleTest):
"""
locator = CourseLocator(org='testx', offering='GreekHero', branch='draft')
course = modulestore().get_course(locator)
block_map = modulestore().cache_items(course.system, course.children, depth=3)
block_map = modulestore().cache_items(
course.system, [child.block_id for child in course.children], course.id, depth=3
)
self.assertIn('chapter1', block_map)
self.assertIn('problem3_2', block_map)
......@@ -891,6 +893,10 @@ class SplitModuleItemTests(SplitModuleTest):
self.assertEqual(len(expected_ids), 0)
def version_agnostic(children):
return [child.version_agnostic() for child in children]
class TestItemCrud(SplitModuleTest):
"""
Test create update and delete of items
......@@ -974,9 +980,10 @@ class TestItemCrud(SplitModuleTest):
# check that course version changed and course's previous is the other one
self.assertNotEqual(new_module.location.version_guid, premod_course.location.version_guid)
parent = modulestore().get_item(locator)
self.assertIn(new_module.location.block_id, parent.children)
self.assertIn(new_module.location.version_agnostic(), version_agnostic(parent.children))
self.assertEqual(new_module.definition_locator.definition_id, original.definition_locator.definition_id)
def test_unique_naming(self):
"""
Check that 2 modules of same type get unique block_ids. Also check that if creation provides
......@@ -1007,8 +1014,8 @@ class TestItemCrud(SplitModuleTest):
# check that course version changed and course's previous is the other one
parent = modulestore().get_item(locator)
self.assertNotEqual(new_module.location.block_id, another_module.location.block_id)
self.assertIn(new_module.location.block_id, parent.children)
self.assertIn(another_module.location.block_id, parent.children)
self.assertIn(new_module.location.version_agnostic(), version_agnostic(parent.children))
self.assertIn(another_module.location.version_agnostic(), version_agnostic(parent.children))
self.assertEqual(new_module.data, new_payload)
self.assertEqual(another_module.data, another_payload)
# check definition histories
......@@ -1046,7 +1053,7 @@ class TestItemCrud(SplitModuleTest):
new_module = modulestore().get_item(problem_locator)
self.assertEqual(new_module.location.block_id, problem_locator.block_id)
chapter = modulestore().get_item(chapter_locator)
self.assertIn(problem_locator.block_id, chapter.children)
self.assertIn(problem_locator, version_agnostic(chapter.children))
def test_create_continue_version(self):
"""
......@@ -1077,7 +1084,7 @@ class TestItemCrud(SplitModuleTest):
self.assertEqual(refetch_course.update_version, course_block_update_version)
refetch_index_history_info = modulestore().get_course_history_info(refetch_course.location)
self.assertEqual(refetch_index_history_info, index_history_info)
self.assertIn(new_ele.location.block_id, refetch_course.children)
self.assertIn(new_ele.location.version_agnostic(), version_agnostic(refetch_course.children))
# try to create existing item
with self.assertRaises(DuplicateItemError):
......@@ -1123,7 +1130,7 @@ class TestItemCrud(SplitModuleTest):
# check children, previous_version
refetch_course = modulestore().get_course(versionless_course_locator)
self.assertIn(new_ele.location.block_id, refetch_course.children)
self.assertIn(new_ele.location.version_agnostic(), version_agnostic(refetch_course.children))
self.assertEqual(refetch_course.previous_version, course_block_update_version)
self.assertEqual(refetch_course.update_version, transaction_guid)
......@@ -1180,13 +1187,13 @@ class TestItemCrud(SplitModuleTest):
# check that course version changed and course's previous is the other one
self.assertEqual(updated_problem.definition_locator.definition_id, pre_def_id)
self.assertNotEqual(updated_problem.location.version_guid, pre_version_guid)
self.assertEqual(updated_problem.children, block.children)
self.assertNotIn(moved_child, updated_problem.children)
self.assertEqual(version_agnostic(updated_problem.children), version_agnostic(block.children))
self.assertNotIn(moved_child, version_agnostic(updated_problem.children))
locator = locator.course_key.make_usage_key('Chapter', "chapter1")
other_block = modulestore().get_item(locator)
other_block.children.append(moved_child)
other_updated = modulestore().update_item(other_block, '**replace_user**')
self.assertIn(moved_child, other_updated.children)
self.assertIn(moved_child.version_agnostic(), version_agnostic(other_updated.children))
def test_update_definition(self):
"""
......@@ -1251,7 +1258,7 @@ class TestItemCrud(SplitModuleTest):
self.assertNotEqual(updated_block.definition_locator.definition_id, pre_def_id)
self.assertNotEqual(updated_block.location.version_guid, pre_version_guid)
self.assertEqual(updated_block.grading_policy['GRADER'][0]['min_count'], 13)
self.assertEqual(updated_block.children[0], block.children[0])
self.assertEqual(updated_block.children[0].version_agnostic(), block.children[0].version_agnostic())
self.assertEqual(updated_block.advertised_start, "Soon")
def test_delete_item(self):
......@@ -1520,40 +1527,44 @@ class TestPublish(SplitModuleTest):
"""
source_course = CourseLocator(org='testx', offering='GreekHero', branch='draft')
dest_course = CourseLocator(org='testx', offering='GreekHero', branch="published")
modulestore().xblock_publish(self.user, source_course, dest_course, ["head12345"], ["chapter2", "chapter3"])
expected = ["head12345", "chapter1"]
head = source_course.make_usage_key('course', "head12345")
chapter1 = source_course.make_usage_key('chapter', 'chapter1')
chapter2 = source_course.make_usage_key('chapter', 'chapter2')
chapter3 = source_course.make_usage_key('chapter', 'chapter3')
modulestore().xblock_publish(self.user, source_course, dest_course, [head], [chapter2, chapter3])
expected = [head.block_id, chapter1.block_id]
self._check_course(
source_course, dest_course, expected, ["chapter2", "chapter3", "problem1", "problem3_2"]
source_course, dest_course, expected, [chapter2.block_id, chapter3.block_id, "problem1", "problem3_2"]
)
# add a child under chapter1
new_module = modulestore().create_item(
BlockUsageLocator.make_relative(source_course, "chapter", "chapter1"), "sequential", self.user,
chapter1, "sequential", self.user,
fields={'display_name': 'new sequential'},
)
# remove chapter1 from expected b/c its pub'd version != the source anymore since source changed
expected.remove("chapter1")
expected.remove(chapter1.block_id)
# check that it's not in published course
with self.assertRaises(ItemNotFoundError):
modulestore().get_item(new_module.location.map_into_course(dest_course))
# publish it
modulestore().xblock_publish(self.user, source_course, dest_course, [new_module.location.block_id], None)
modulestore().xblock_publish(self.user, source_course, dest_course, [new_module.location], None)
expected.append(new_module.location.block_id)
# check that it is in the published course and that its parent is the chapter
pub_module = modulestore().get_item(new_module.location.map_into_course(dest_course))
self.assertEqual(
modulestore().get_parent_locations(pub_module.location)[0].block_id, "chapter1"
modulestore().get_parent_locations(pub_module.location)[0].block_id, chapter1.block_id
)
# ensure intentionally orphaned blocks work (e.g., course_info)
new_module = modulestore().create_item(
source_course, "course_info", self.user, block_id="handouts"
)
# publish it
modulestore().xblock_publish(self.user, source_course, dest_course, [new_module.location.block_id], None)
modulestore().xblock_publish(self.user, source_course, dest_course, [new_module.location], None)
expected.append(new_module.location.block_id)
# check that it is in the published course (no error means it worked)
pub_module = modulestore().get_item(new_module.location.map_into_course(dest_course))
self._check_course(
source_course, dest_course, expected, ["chapter2", "chapter3", "problem1", "problem3_2"]
source_course, dest_course, expected, [chapter2.block_id, chapter3.block_id, "problem1", "problem3_2"]
)
def test_exceptions(self):
......@@ -1563,16 +1574,19 @@ class TestPublish(SplitModuleTest):
source_course = CourseLocator(org='testx', offering='GreekHero', branch='draft')
# destination does not exist
destination_course = CourseLocator(org='fake', offering='Unknown', branch="published")
head = source_course.make_usage_key('course', "head12345")
chapter3 = source_course.make_usage_key('chapter', 'chapter3')
problem1 = source_course.make_usage_key('problem', 'problem1')
with self.assertRaises(ItemNotFoundError):
modulestore().xblock_publish(self.user, source_course, destination_course, ["chapter3"], None)
modulestore().xblock_publish(self.user, source_course, destination_course, [chapter3], None)
# publishing into a new branch w/o publishing the root
destination_course = CourseLocator(org='testx', offering='GreekHero', branch="published")
with self.assertRaises(ItemNotFoundError):
modulestore().xblock_publish(self.user, source_course, destination_course, ["chapter3"], None)
modulestore().xblock_publish(self.user, source_course, destination_course, [chapter3], None)
# publishing a subdag w/o the parent already in course
modulestore().xblock_publish(self.user, source_course, destination_course, ["head12345"], ["chapter3"])
modulestore().xblock_publish(self.user, source_course, destination_course, [head], [chapter3])
with self.assertRaises(ItemNotFoundError):
modulestore().xblock_publish(self.user, source_course, destination_course, ["problem1"], [])
modulestore().xblock_publish(self.user, source_course, destination_course, [problem1], [])
def test_move_delete(self):
"""
......@@ -1580,16 +1594,19 @@ class TestPublish(SplitModuleTest):
"""
source_course = CourseLocator(org='testx', offering='GreekHero', branch='draft')
dest_course = CourseLocator(org='testx', offering='GreekHero', branch="published")
modulestore().xblock_publish(self.user, source_course, dest_course, ["head12345"], ["chapter2"])
head = source_course.make_usage_key('course', "head12345")
chapter2 = source_course.make_usage_key('chapter', 'chapter2')
problem1 = source_course.make_usage_key('problem', 'problem1')
modulestore().xblock_publish(self.user, source_course, dest_course, [head], [chapter2])
expected = ["head12345", "chapter1", "chapter3", "problem1", "problem3_2"]
self._check_course(source_course, dest_course, expected, ["chapter2"])
# now move problem1 and delete problem3_2
chapter1 = modulestore().get_item(source_course.make_usage_key("chapter", "chapter1"))
chapter3 = modulestore().get_item(source_course.make_usage_key("chapter", "chapter3"))
chapter1.children.append("problem1")
chapter3.children.remove("problem1")
chapter1.children.append(problem1)
chapter3.children.remove(problem1.map_into_course(chapter3.location.course_key))
modulestore().delete_item(source_course.make_usage_key("problem", "problem3_2"), self.user)
modulestore().xblock_publish(self.user, source_course, dest_course, ["head12345"], ["chapter2"])
modulestore().xblock_publish(self.user, source_course, dest_course, [head], [chapter2])
expected = ["head12345", "chapter1", "chapter3", "problem1"]
self._check_course(source_course, dest_course, expected, ["chapter2", "problem3_2"])
......@@ -1625,10 +1642,11 @@ class TestPublish(SplitModuleTest):
"""
dest_cursor = 0
for child in source_children:
if child in unexpected:
self.assertNotIn(child, dest_children)
child = child.version_agnostic()
if child.block_id in unexpected:
self.assertNotIn(child.block_id, [dest.block_id for dest in dest_children])
else:
self.assertEqual(child, dest_children[dest_cursor])
self.assertEqual(child.block_id, dest_children[dest_cursor].block_id)
dest_cursor += 1
self.assertEqual(dest_cursor, len(dest_children))
......