Commit 39f4cb80 by Don Mitchell

Merge pull request #1832 from edx/dhm/publish

SplitMongo publish method
parents f747c762 43789a92
......@@ -34,7 +34,6 @@ from xblock.fields import Scope, ScopeIds
from xmodule.modulestore import ModuleStoreWriteBase, Location, MONGO_MODULESTORE_TYPE
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.inheritance import own_metadata, InheritanceMixin, inherit_metadata, InheritanceKeyValueStore
import re
log = logging.getLogger(__name__)
......
"""
Provides full versioning CRUD and representation for collections of xblocks (e.g., courses, modules, etc).
Representation:
* course_index: a dictionary:
** '_id': course_id (e.g., myu.mydept.mycourse.myrun),
** 'org': the org's id. Only used for searching not identity,
** 'prettyid': a vague, to-be-determined field, probably more useful for storing searchable tags,
** 'edited_by': user_id of user who created the original entry,
** 'edited_on': the datetime of the original creation,
** 'versions': versions_dict: {branch_id: structure_id, ...}
* structure:
** '_id': an ObjectId (guid),
** 'root': root_usage_id (the string key in 'blocks' for the root of this structure),
** 'previous_version': the structure from which this one was derived. For published courses, this
points to the previously published version of the structure, not the draft published to this one.
** 'original_version': the original structure id in the previous_version relation. Acts as a pseudo-object
identifier enabling quick determination of whether two structures share any history,
** 'edited_by': user_id of the user whose change caused the creation of this structure version,
** 'edited_on': the datetime of the change that caused the creation of this structure version,
** 'blocks': dictionary of xblocks in this structure:
*** block_id: dictionary of block settings and children:
**** 'category': the xblock type id
**** 'definition': the db id of the record containing the content payload for this xblock
**** 'fields': the Scope.settings and children field values
**** 'edit_info': dictionary:
***** 'edited_on': when this xblock's fields were last changed (will be the edited_on value of the
update_version structure)
***** 'edited_by': user_id of whoever last changed this xblock (will be the edited_by value of the
update_version structure)
***** 'update_version': the guid for the structure where this xblock got its current field
values. This may point to a structure not in this structure's history (e.g., to a draft
branch from which this version was published.)
***** 'previous_version': the guid for the structure which previously changed this xblock
(will be the previous value of update_version; so, may point to a structure not in this
structure's history.)
* definition: shared content with revision history for xblock content fields
** '_id': definition_id (guid),
** 'category': xblock type id
** 'fields': Scope.content (and possibly other) field values.
** 'edit_info': dictionary:
*** 'edited_by': user_id whose edit caused this version of the definition,
*** 'edited_on': datetime of the change causing this version
*** 'previous_version': the definition_id of the previous version of this definition
*** 'original_version': definition_id of the root of the previous version relation on this
definition. Acts as a pseudo-object identifier.
"""
import threading
import datetime
import logging
......@@ -382,12 +429,9 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
:param usage_id: ignored. Only included for API compatibility. Specify the usage_id within the locator.
'''
course = self._lookup_course(locator)
items = []
for parent_id, value in course['structure']['blocks'].iteritems():
for child_id in value['fields'].get('children', []):
if locator.usage_id == child_id:
items.append(BlockUsageLocator(url=locator.as_course_locator(), usage_id=parent_id))
return items
items = self._get_parents_from_structure(locator.usage_id, course['structure'])
return [BlockUsageLocator(url=locator.as_course_locator(), usage_id=parent_id)
for parent_id in items]
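As a usage sketch mirroring the call in the tests below ('GreekHero' and the block ids come from the tests' sample data; store is assumed to be a configured SplitMongoModuleStore):

# Find the parents of a block in the draft branch; one locator per parent found.
locator = BlockUsageLocator(course_id='GreekHero', branch='draft', usage_id='problem1')
parent_locators = store.get_parent_locations(locator)
parent_ids = [parent.usage_id for parent in parent_locators]  # e.g. ['chapter3']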
def get_orphans(self, course_id, detached_categories, branch):
"""
......@@ -557,13 +601,12 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
"category" : category,
"fields": new_def_data,
"edit_info": {
"_id": new_id,
"edited_by": user_id,
"edited_on": datetime.datetime.now(UTC),
"previous_version": None,
"original_version": new_id,
}
}
}
self.db_connection.insert_definition(document)
definition_locator = DefinitionLocator(new_id)
return definition_locator
......@@ -826,28 +869,11 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
}
self.db_connection.insert_definition(definition_entry)
new_id = ObjectId()
draft_structure = {
'_id': new_id,
'root': root_usage_id,
'previous_version': None,
'original_version': new_id,
'edited_by': user_id,
'edited_on': datetime.datetime.now(UTC),
'blocks': {
root_usage_id: {
'category': root_category,
'definition': definition_id,
'fields': block_fields,
'edit_info': {
'edited_on': datetime.datetime.now(UTC),
'edited_by': user_id,
'previous_version': None,
'update_version': new_id,
}
}
}
}
draft_structure = self._new_structure(
user_id, root_usage_id, root_category, block_fields, definition_id
)
new_id = draft_structure['_id']
self.db_connection.insert_structure(draft_structure)
if versions_dict is None:
......@@ -1083,6 +1109,84 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
'''Deprecated, use update_item.'''
raise NotImplementedError('use update_item')
def xblock_publish(self, user_id, source_course, destination_course, subtree_list, blacklist):
"""
Publishes each xblock in subtree_list and those blocks' descendants, excluding blacklist,
from source_course to destination_course.
To delete a block, publish its parent. You can blacklist the other siblings to keep them from
being refreshed. You can also just call delete_item on the destination.
To unpublish a block, call delete_item on the destination.
Ensures that each subtree occurs in the same place in destination as it does in source. If any
of the source's subtree parents are missing from destination, it raises ItemNotFoundError(parent_ids).
To determine the same relative order vis-a-vis published siblings,
publishing may involve changing the order of previously published siblings. For example,
if publishing `[c, d]` and source parent has children `[a, b, c, d, e]` and destination parent
currently has children `[e, b]`, there's no obviously correct resulting order; thus, publish will
reorder destination to `[b, c, d, e]` to make it conform with the source.
:param source_course: a CourseLocator (can be a version or course w/ branch)
:param destination_course: a CourseLocator which must be an existing course but branch doesn't have
to exist yet. (The course must exist b/c Locator doesn't have everything necessary to create it).
Note, if the branch doesn't exist, then the source_course structure's root must be in subtree_list;
otherwise, the publish will violate the parents must exist rule.
:param subtree_list: a list of xblock ids whose subtrees to publish. The ids can be the block
id or BlockUsageLocator. If a Locator and if the Locator's course id != source_course, this will
raise InvalidLocationError(locator)
:param blacklist: a list of xblock ids to not change in the destination: i.e., don't add
if not there, don't update if there.
"""
# get the destination's index, and source and destination structures.
source_structure = self._lookup_course(source_course)['structure']
index_entry = self.db_connection.get_course_index(destination_course.course_id)
if index_entry is None:
# brand new course
raise ItemNotFoundError(destination_course)
if destination_course.branch not in index_entry['versions']:
# must be publishing the dag root if there's no current dag
if source_structure['root'] not in subtree_list:
raise ItemNotFoundError(source_structure['root'])
# create branch
destination_structure = self._new_structure(user_id, source_structure['root'])
else:
destination_structure = self._lookup_course(destination_course)['structure']
destination_structure = self._version_structure(destination_structure, user_id)
# iterate over subtree list filtering out blacklist.
orphans = set()
destination_blocks = destination_structure['blocks']
for subtree_root in subtree_list:
# find the parents and put root in the right sequence
parents = self._get_parents_from_structure(subtree_root, source_structure)
if not all(parent in destination_blocks for parent in parents):
raise ItemNotFoundError(parents)
for parent_loc in parents:
orphans.update(
self._sync_children(
source_structure['blocks'][parent_loc],
destination_blocks[parent_loc],
subtree_root
))
# update/create the subtree and its children in destination (skipping blacklist)
orphans.update(
self._publish_subdag(
user_id, subtree_root, source_structure['blocks'], destination_blocks, blacklist or []
)
)
# remove any remaining orphans
for orphan in orphans:
# orphans will include moved as well as deleted xblocks. Only delete the deleted ones.
self._delete_if_true_orphan(orphan, destination_structure)
# update the db
self.db_connection.insert_structure(destination_structure)
self._update_head(index_entry, destination_course.branch, destination_structure['_id'])
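A usage sketch following the pattern in TestPublish below (store is assumed to be a SplitMongoModuleStore such as the tests' modulestore(), and user_id any valid user id):

# Publish the course root's subtree from draft to published, skipping chapter2.
source = CourseLocator(course_id='GreekHero', branch='draft')
dest = CourseLocator(course_id='GreekHero', branch='published')
store.xblock_publish(user_id, source, dest, ['head12345'], ['chapter2'])
# Later, publish just a revised chapter without touching its published siblings.
store.xblock_publish(user_id, source, dest, ['chapter1'], None)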
def update_course_index(self, updated_index_entry):
"""
Change the given course's index entry.
......@@ -1091,9 +1195,6 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
Does not return anything useful.
"""
# TODO how should this log the change? edited_on and edited_by for this entry
# has the semantic of who created the course and when; so, changing those will lose
# that information.
self.db_connection.update_course_index(updated_index_entry)
def delete_item(self, usage_locator, user_id, delete_children=False, force=False):
......@@ -1135,6 +1236,8 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
del new_blocks[usage_id]
if delete_children:
remove_subtree(usage_locator.usage_id)
else:
del new_blocks[usage_locator.usage_id]
# update index if appropriate and structures
self.db_connection.insert_structure(new_structure)
......@@ -1419,3 +1522,125 @@ class SplitMongoModuleStore(ModuleStoreWriteBase):
"split" for new-style split MongoDB backed courses.
"""
return SPLIT_MONGO_MODULESTORE_TYPE
def _new_structure(self, user_id, root_usage_id,
root_category=None, block_fields=None, definition_id=None):
"""
Internal function: create a structure element with no previous version. The root id must be
provided, but not necessarily the info needed to create the root block (for the publishing use
case). If providing root_category, block_fields and definition_id must also be provided.
"""
new_id = ObjectId()
if root_category is not None:
blocks = {
root_usage_id: self._new_block(user_id, root_category, block_fields, definition_id, new_id)
}
else:
blocks = {}
return {
'_id': new_id,
'root': root_usage_id,
'previous_version': None,
'original_version': new_id,
'edited_by': user_id,
'edited_on': datetime.datetime.now(UTC),
'blocks': blocks
}
def _get_parents_from_structure(self, usage_id, structure):
"""
Given a structure, find all of usage_id's parents in that structure
"""
items = []
for parent_id, value in structure['blocks'].iteritems():
for child_id in value['fields'].get('children', []):
if usage_id == child_id:
items.append(parent_id)
return items
def _sync_children(self, source_parent, destination_parent, new_child):
"""
Reorder destination's children to the same as source's and remove any no longer in source.
Return the removed ones as orphans (a set).
"""
destination_reordered = []
destination_children = destination_parent['fields']['children']
source_children = source_parent['fields']['children']
orphans = set()
for child in destination_children:
try:
source_children.index(child)
except ValueError:
orphans.add(child)
for child in source_children:
if child == new_child or child in destination_children:
destination_reordered.append(child)
destination_parent['fields']['children'] = destination_reordered
return orphans
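A concrete walk-through of the reordering, using the same dict shapes the method expects (store is assumed to be any SplitMongoModuleStore instance; the ids are invented):

# Publishing subtree root 'c': the source parent orders its children [a, b, c, d, e]
# while the destination parent currently has [e, b].
src_parent = {'fields': {'children': ['a', 'b', 'c', 'd', 'e']}}
dst_parent = {'fields': {'children': ['e', 'b']}}
orphans = store._sync_children(src_parent, dst_parent, 'c')
# dst_parent['fields']['children'] is now ['b', 'c', 'e'] and orphans is set():
# 'a' and 'd' are neither in the destination nor the block being published, so they are
# skipped; nothing in the destination is missing from the source, so nothing is orphaned.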
def _publish_subdag(self, user_id, block_id, source_blocks, destination_blocks, blacklist):
"""
Update destination_blocks for the sub-dag rooted at block_id to be like the one in
source_blocks excluding blacklist.
Return any newly discovered orphans (as a set)
"""
orphans = set()
destination_block = destination_blocks.get(block_id)
new_block = source_blocks[block_id]
if destination_block:
if destination_block['edit_info']['update_version'] != new_block['edit_info']['update_version']:
source_children = new_block['fields']['children']
for child in destination_block['fields']['children']:
try:
source_children.index(child)
except ValueError:
orphans.add(child)
previous_version = destination_block['edit_info']['update_version']
destination_block = copy.deepcopy(new_block)
destination_block['fields'] = self._filter_blacklist(destination_block['fields'], blacklist)
destination_block['edit_info']['previous_version'] = previous_version
destination_block['edit_info']['edited_by'] = user_id
else:
destination_block = self._new_block(
user_id, new_block['category'],
self._filter_blacklist(copy.copy(new_block['fields']), blacklist),
new_block['definition'],
new_block['edit_info']['update_version']
)
for child in destination_block['fields'].get('children', []):
if child not in blacklist:
orphans.update(self._publish_subdag(user_id, child, source_blocks, destination_blocks, blacklist))
destination_blocks[block_id] = destination_block
return orphans
def _filter_blacklist(self, fields, blacklist):
"""
Filter out blacklist from the children field in fields. Constructs a new list for children,
so there is no need to copy the children field; note, however, that it modifies fields in place.
"""
fields['children'] = [child for child in fields.get('children', []) if child not in blacklist]
return fields
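For example (store assumed as above; the field values are invented):

# The children list is rebuilt without blacklisted ids; the dict itself is mutated.
fields = {'display_name': 'Week 1', 'children': ['intro', 'quiz', 'extra']}
store._filter_blacklist(fields, ['quiz'])
# fields is now {'display_name': 'Week 1', 'children': ['intro', 'extra']}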
def _delete_if_true_orphan(self, orphan, structure):
"""
Delete the orphan and any of its descendants which no longer have parents.
"""
if not self._get_parents_from_structure(orphan, structure):
for child in structure['blocks'][orphan]['fields'].get('children', []):
self._delete_if_true_orphan(child, structure)
del structure['blocks'][orphan]
def _new_block(self, user_id, category, block_fields, definition_id, new_id):
return {
'category': category,
'definition': definition_id,
'fields': block_fields,
'edit_info': {
'edited_on': datetime.datetime.now(UTC),
'edited_by': user_id,
'previous_version': None,
'update_version': new_id
}
}
......@@ -829,7 +829,7 @@ class TestItemCrud(SplitModuleTest):
# delete a leaf
problems = modulestore().get_items(reusable_location, {'category': 'problem'})
locn_to_del = problems[0].location
new_course_loc = modulestore().delete_item(locn_to_del, 'deleting_user', delete_children=True)
new_course_loc = modulestore().delete_item(locn_to_del, 'deleting_user', delete_children=False)
deleted = BlockUsageLocator(course_id=reusable_location.course_id,
branch=reusable_location.branch,
usage_id=locn_to_del.usage_id)
......@@ -1085,7 +1085,135 @@ class TestInheritance(SplitModuleTest):
# overridden
self.assertEqual(node.graceperiod, datetime.timedelta(hours=4))
# TODO test inheritance after set and delete of attrs
class TestPublish(SplitModuleTest):
"""
Test the publishing api
"""
def setUp(self):
SplitModuleTest.setUp(self)
self.user = random.getrandbits(32)
def tearDown(self):
SplitModuleTest.tearDownClass()
def test_publish_safe(self):
"""
Test the standard patterns: publish to new branch, revise and publish
"""
source_course = CourseLocator(course_id="GreekHero", branch='draft')
dest_course = CourseLocator(course_id="GreekHero", branch="published")
modulestore().xblock_publish(self.user, source_course, dest_course, ["head12345"], ["chapter2", "chapter3"])
expected = ["head12345", "chapter1"]
self._check_course(
source_course, dest_course, expected, ["chapter2", "chapter3", "problem1", "problem3_2"]
)
# add a child under chapter1
new_module = modulestore().create_item(
self._usage(source_course, "chapter1"), "sequential", self.user,
fields={'display_name': 'new sequential'},
)
# remove chapter1 from expected b/c its pub'd version != the source anymore since source changed
expected.remove("chapter1")
# check that it's not in published course
with self.assertRaises(ItemNotFoundError):
modulestore().get_item(self._usage(dest_course, new_module.location.usage_id))
# publish it
modulestore().xblock_publish(self.user, source_course, dest_course, [new_module.location.usage_id], None)
expected.append(new_module.location.usage_id)
# check that it is in the published course and that its parent is the chapter
pub_module = modulestore().get_item(self._usage(dest_course, new_module.location.usage_id))
self.assertEqual(
modulestore().get_parent_locations(pub_module.location)[0].usage_id, "chapter1"
)
# ensure intentionally orphaned blocks work (e.g., course_info)
new_module = modulestore().create_item(
source_course, "course_info", self.user, usage_id="handouts"
)
# publish it
modulestore().xblock_publish(self.user, source_course, dest_course, [new_module.location.usage_id], None)
expected.append(new_module.location.usage_id)
# check that it is in the published course (no error means it worked)
pub_module = modulestore().get_item(self._usage(dest_course, new_module.location.usage_id))
self._check_course(
source_course, dest_course, expected, ["chapter2", "chapter3", "problem1", "problem3_2"]
)
def test_exceptions(self):
"""
Test the exceptions which preclude successful publication
"""
source_course = CourseLocator(course_id="GreekHero", branch='draft')
# destination does not exist
destination_course = CourseLocator(course_id="Unknown", branch="published")
with self.assertRaises(ItemNotFoundError):
modulestore().xblock_publish(self.user, source_course, destination_course, ["chapter3"], None)
# publishing into a new branch w/o publishing the root
destination_course = CourseLocator(course_id="GreekHero", branch="published")
with self.assertRaises(ItemNotFoundError):
modulestore().xblock_publish(self.user, source_course, destination_course, ["chapter3"], None)
# publishing a subdag w/o the parent already in course
modulestore().xblock_publish(self.user, source_course, destination_course, ["head12345"], ["chapter3"])
with self.assertRaises(ItemNotFoundError):
modulestore().xblock_publish(self.user, source_course, destination_course, ["problem1"], [])
def test_move_delete(self):
"""
Test publishing moves and deletes.
"""
source_course = CourseLocator(course_id="GreekHero", branch='draft')
dest_course = CourseLocator(course_id="GreekHero", branch="published")
modulestore().xblock_publish(self.user, source_course, dest_course, ["head12345"], ["chapter2"])
expected = ["head12345", "chapter1", "chapter3", "problem1", "problem3_2"]
self._check_course(source_course, dest_course, expected, ["chapter2"])
# now move problem1 and delete problem3_2
chapter1 = modulestore().get_item(self._usage(source_course, "chapter1"))
chapter3 = modulestore().get_item(self._usage(source_course, "chapter3"))
chapter1.children.append("problem1")
chapter3.children.remove("problem1")
modulestore().delete_item(self._usage(source_course, "problem3_2"), self.user)
modulestore().xblock_publish(self.user, source_course, dest_course, ["head12345"], ["chapter2"])
expected = ["head12345", "chapter1", "chapter3", "problem1"]
self._check_course(source_course, dest_course, expected, ["chapter2", "problem3_2"])
def _check_course(self, source_course_loc, dest_course_loc, expected_blocks, unexpected_blocks):
"""
Check that the course has the expected blocks and does not have the unexpected blocks
"""
for expected in expected_blocks:
source = modulestore().get_item(self._usage(source_course_loc, expected))
pub_copy = modulestore().get_item(self._usage(dest_course_loc, expected))
# everything except previous_version & children should be the same
self.assertEqual(source.category, pub_copy.category)
self.assertEqual(source.update_version, pub_copy.update_version)
self.assertEqual(self.user, pub_copy.edited_by)
for field in source.fields.values():
if field.name == 'children':
self._compare_children(field.read_from(source), field.read_from(pub_copy), unexpected_blocks)
else:
self.assertEqual(field.read_from(source), field.read_from(pub_copy))
for unexp in unexpected_blocks:
with self.assertRaises(ItemNotFoundError):
modulestore().get_item(self._usage(dest_course_loc, unexp))
def _usage(self, course_loc, block_id):
"""
Generate a BlockUsageLocator for the combo of the course location and block id
"""
return BlockUsageLocator(course_id=course_loc.course_id, branch=course_loc.branch, usage_id=block_id)
def _compare_children(self, source_children, dest_children, unexpected):
"""
Ensure dest_children == source_children minus unexpected
"""
dest_cursor = 0
for child in source_children:
if child in unexpected:
self.assertNotIn(child, dest_children)
else:
self.assertEqual(child, dest_children[dest_cursor])
dest_cursor += 1
self.assertEqual(dest_cursor, len(dest_children))
#===========================================
......