Commit 23d78fe7 by Don Mitchell

Merge pull request #4803 from edx/dhm/import_split_test

Dhm/import split test
parents b6f82a94 3b43f34b
@@ -981,7 +981,7 @@ class TestEditSplitModule(ItemTest):
         # group_id_to_child and children have not changed yet.
         split_test = self._assert_children(2)
-        group_id_to_child = split_test.group_id_to_child
+        group_id_to_child = split_test.group_id_to_child.copy()
         self.assertEqual(2, len(group_id_to_child))
         # Test environment and Studio use different module systems
......
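
The one change above swaps a live reference to the mutable `group_id_to_child` field value for a snapshot, so later edits to the split_test block cannot retroactively change what the test compares against. A minimal, self-contained illustration of the difference (the dict contents are invented):

    # aliasing vs. copying a mutable field value
    group_id_to_child = {'0': 'vertical_a', '1': 'vertical_b'}

    alias = group_id_to_child            # same object: later edits show through
    snapshot = group_id_to_child.copy()  # independent shallow copy

    group_id_to_child['2'] = 'vertical_c'
    assert len(alias) == 3      # the alias reflects the mutation
    assert len(snapshot) == 2   # the snapshot keeps the original state
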
@@ -707,8 +707,8 @@ class EdxJSONEncoder(json.JSONEncoder):
     ISO date strings
     """
     def default(self, obj):
-        if isinstance(obj, Location):
-            return obj.to_deprecated_string()
+        if isinstance(obj, (CourseKey, UsageKey)):
+            return unicode(obj)
         elif isinstance(obj, datetime.datetime):
             if obj.tzinfo is not None:
                 if obj.utcoffset() is None:
......
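
The encoder now accepts any CourseKey or UsageKey, not just the legacy Location class, and serializes it via `unicode()`. A sketch of the same `JSONEncoder.default` pattern with a stand-in key class (FakeUsageKey and KeyAwareEncoder are illustrative, not the opaque-keys API):

    import json

    class FakeUsageKey(object):
        """Stand-in for an opaque key; only its string form matters here."""
        def __init__(self, text):
            self.text = text
        def __unicode__(self):
            return self.text
        __str__ = __unicode__

    class KeyAwareEncoder(json.JSONEncoder):
        def default(self, obj):
            # Stringify key objects; defer everything else to the base class.
            if isinstance(obj, FakeUsageKey):
                return unicode(obj)  # Python 2, matching the diff; str(obj) on Python 3
            return super(KeyAwareEncoder, self).default(obj)

    print(json.dumps({'child': FakeUsageKey('i4x://Org/Course/vertical/intro')}, cls=KeyAwareEncoder))
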
@@ -170,15 +170,6 @@ class MixedModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
             # return the default store
             return self.default_modulestore
-        # return the first store, as the default
-        return self.default_modulestore
-
-    @property
-    def default_modulestore(self):
-        """
-        Return the default modulestore
-        """
-        return self.modulestores[0]

     def _get_modulestore_by_type(self, modulestore_type):
         """
......
@@ -229,7 +229,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
                     # Convert the serialized fields values in self.cached_metadata
                     # to python values
-                    metadata_to_inherit = self.cached_metadata.get(non_draft_loc.to_deprecated_string(), {})
+                    metadata_to_inherit = self.cached_metadata.get(unicode(non_draft_loc), {})
                     inherit_metadata(module, metadata_to_inherit)

                 edit_info = json_data.get('edit_info')
@@ -238,10 +238,11 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
                 if not edit_info:
                     module.edited_by = module.edited_on = module.subtree_edited_on = \
                         module.subtree_edited_by = module.published_date = None
+                    raw_metadata = json_data.get('metadata', {})
                     # published_date was previously stored as a list of time components instead of a datetime
-                    if metadata.get('published_date'):
-                        module.published_date = datetime(*metadata.get('published_date')[0:6]).replace(tzinfo=UTC)
-                    module.published_by = metadata.get('published_by')
+                    if raw_metadata.get('published_date'):
+                        module.published_date = datetime(*raw_metadata.get('published_date')[0:6]).replace(tzinfo=UTC)
+                    module.published_by = raw_metadata.get('published_by')
                 # otherwise restore the stored editing information
                 else:
                     module.edited_by = edit_info.get('edited_by')
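
The fix above re-reads the raw JSON metadata under its own name (`raw_metadata`), so the legacy `published_date`, stored as a list of calendar components, is decoded from the stored document rather than from whatever `metadata` happened to refer to at that point. A small sketch of that list-to-datetime conversion with an invented stored value:

    from datetime import datetime
    from pytz import UTC  # any UTC tzinfo works; pytz provides one

    raw_metadata = {'published_date': [2014, 7, 21, 15, 4, 30, 0, 202, 0]}

    if raw_metadata.get('published_date'):
        published_date = datetime(*raw_metadata['published_date'][0:6]).replace(tzinfo=UTC)
        print(published_date.isoformat())  # 2014-07-21T15:04:30+00:00
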
@@ -267,7 +268,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
         """
         Convert a single serialized UsageKey string in a ReferenceField into a UsageKey.
         """
-        key = Location.from_deprecated_string(ref_string)
+        key = Location.from_string(ref_string)
         return key.replace(run=self.modulestore.fill_in_run(key.course_key).run)

     def __setattr__(self, name, value):
@@ -280,22 +281,26 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
         :param course_key: a CourseKey object for the given course
         :param jsonfields: a dict of the jsonified version of the fields
         """
+        result = {}
         for field_name, value in jsonfields.iteritems():
-            if value:
-                field = class_.fields.get(field_name)
-                if field is None:
-                    continue
-                elif isinstance(field, Reference):
-                    jsonfields[field_name] = self._convert_reference_to_key(value)
-                elif isinstance(field, ReferenceList):
-                    jsonfields[field_name] = [
-                        self._convert_reference_to_key(ele) for ele in value
-                    ]
-                elif isinstance(field, ReferenceValueDict):
-                    for key, subvalue in value.iteritems():
-                        assert isinstance(subvalue, basestring)
-                        value[key] = self._convert_reference_to_key(subvalue)
-        return jsonfields
+            field = class_.fields.get(field_name)
+            if field is None:
+                continue
+            elif value is None:
+                result[field_name] = value
+            elif isinstance(field, Reference):
+                result[field_name] = self._convert_reference_to_key(value)
+            elif isinstance(field, ReferenceList):
+                result[field_name] = [
+                    self._convert_reference_to_key(ele) for ele in value
+                ]
+            elif isinstance(field, ReferenceValueDict):
+                result[field_name] = {
+                    key: self._convert_reference_to_key(subvalue) for key, subvalue in value.iteritems()
+                }
+            else:
+                result[field_name] = value
+        return result

     def lookup_item(self, location):
         """
@@ -520,7 +525,7 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
             # manually pick it apart b/c the db has tag and we want as_published revision regardless
             location = as_published(Location._from_deprecated_son(result['_id'], course_id.run))
-            location_url = location.to_deprecated_string()
+            location_url = unicode(location)

             if location_url in results_by_url:
                 # found either draft or live to complement the other revision
                 existing_children = results_by_url[location_url].get('definition', {}).get('children', [])
@@ -1125,14 +1130,11 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
         therefore propagate subtree edit info up the tree
         """
         try:
-            definition_data = self._convert_reference_fields_to_strings(
-                xblock,
-                xblock.get_explicitly_set_fields_by_scope()
-            )
+            definition_data = self._serialize_scope(xblock, Scope.content)
             now = datetime.now(UTC)
             payload = {
                 'definition.data': definition_data,
-                'metadata': self._convert_reference_fields_to_strings(xblock, own_metadata(xblock)),
+                'metadata': self._serialize_scope(xblock, Scope.settings),
                 'edit_info.edited_on': now,
                 'edit_info.edited_by': user_id,
                 'edit_info.subtree_edited_on': now,
@@ -1144,7 +1146,7 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
                 payload['edit_info.published_by'] = user_id

             if xblock.has_children:
-                children = self._convert_reference_fields_to_strings(xblock, {'children': xblock.children})
+                children = self._serialize_scope(xblock, Scope.children)
                 payload.update({'definition.children': children['children']})

             self._update_single_item(xblock.scope_ids.usage_id, payload)
@@ -1185,25 +1187,27 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
         return xblock

-    def _convert_reference_fields_to_strings(self, xblock, jsonfields):
+    def _serialize_scope(self, xblock, scope):
         """
         Find all fields of type reference and convert the payload from UsageKeys to deprecated strings
         :param xblock: the XBlock class
         :param jsonfields: a dict of the jsonified version of the fields
         """
-        assert isinstance(jsonfields, dict)
-        for field_name, value in jsonfields.iteritems():
-            if value:
-                if isinstance(xblock.fields[field_name], Reference):
-                    jsonfields[field_name] = value.to_deprecated_string()
-                elif isinstance(xblock.fields[field_name], ReferenceList):
+        jsonfields = {}
+        for field_name, field in xblock.fields.iteritems():
+            if (field.scope == scope and field.is_set_on(xblock)):
+                if isinstance(field, Reference):
+                    jsonfields[field_name] = unicode(field.read_from(xblock))
+                elif isinstance(field, ReferenceList):
                     jsonfields[field_name] = [
-                        ele.to_deprecated_string() for ele in value
+                        unicode(ele) for ele in field.read_from(xblock)
                     ]
-                elif isinstance(xblock.fields[field_name], ReferenceValueDict):
-                    for key, subvalue in value.iteritems():
-                        assert isinstance(subvalue, UsageKey)
-                        value[key] = subvalue.to_deprecated_string()
+                elif isinstance(field, ReferenceValueDict):
+                    jsonfields[field_name] = {
+                        key: unicode(subvalue) for key, subvalue in field.read_from(xblock).iteritems()
+                    }
+                else:
+                    jsonfields[field_name] = field.read_json(xblock)
         return jsonfields

     def _get_raw_parent_location(self, location, revision=ModuleStoreEnum.RevisionOption.published_only):
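
`_serialize_scope` replaces the old helper that post-processed an already-serialized dict: it walks the block's fields, keeps only the ones in the requested scope that are explicitly set, and renders reference-typed values through `unicode()`. A stripped-down sketch of the scope-filtering idea with toy stand-ins (FakeField and serialize_scope are illustrative, not the XBlock API):

    class FakeField(object):
        """Toy field: knows its scope and whether a value was explicitly set."""
        def __init__(self, scope, value=None):
            self.scope = scope
            self.value = value
        def is_set(self):
            return self.value is not None

    def serialize_scope(fields, scope):
        """Collect only the fields that belong to 'scope' and are explicitly set."""
        return {
            name: unicode(field.value)
            for name, field in fields.iteritems()
            if field.scope == scope and field.is_set()
        }

    fields = {
        'display_name': FakeField('settings', u'Week 1'),
        'data': FakeField('content', u'<html/>'),
        'due': FakeField('settings'),           # never set, so it is skipped
    }
    print(serialize_scope(fields, 'settings'))  # {u'display_name': u'Week 1'}
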
@@ -1217,7 +1221,7 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
         # create a query with tag, org, course, and the children field set to the given location
         query = self._course_key_to_son(location.course_key)
-        query['definition.children'] = location.to_deprecated_string()
+        query['definition.children'] = unicode(location)

         # if only looking for the PUBLISHED parent, set the revision in the query to None
         if revision == ModuleStoreEnum.RevisionOption.published_only:
@@ -1292,7 +1296,7 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
             if item['_id']['category'] != 'course':
                 # It would be nice to change this method to return UsageKeys instead of the deprecated string.
                 item_locs.add(
-                    as_published(Location._from_deprecated_son(item['_id'], course_key.run)).to_deprecated_string()
+                    unicode(as_published(Location._from_deprecated_son(item['_id'], course_key.run)))
                 )
             all_reachable = all_reachable.union(item.get('definition', {}).get('children', []))
         item_locs -= all_reachable
......
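
For orientation, the orphan computation this hunk touches boils down to a set difference: every non-course item id goes into one set, every id referenced by some `definition.children` list goes into another, and whatever is never referenced is an orphan. A tiny sketch with invented ids (the real code compares location strings):

    items = [
        {'_id': {'name': 'course1', 'category': 'course'}, 'definition': {'children': ['chapter1']}},
        {'_id': {'name': 'chapter1', 'category': 'chapter'}, 'definition': {'children': ['seq1']}},
        {'_id': {'name': 'seq1', 'category': 'sequential'}, 'definition': {'children': []}},
        {'_id': {'name': 'old_seq', 'category': 'sequential'}, 'definition': {'children': []}},
    ]
    item_locs = set()
    all_reachable = set()
    for item in items:
        if item['_id']['category'] != 'course':
            item_locs.add(item['_id']['name'])
        all_reachable = all_reachable.union(item.get('definition', {}).get('children', []))
    item_locs -= all_reachable
    print(item_locs)  # the orphan: set(['old_seq'])
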
@@ -223,7 +223,7 @@ class MongoContentstoreBuilder(object):
 MODULESTORE_SETUPS = (
     MongoModulestoreBuilder(),
-    VersioningModulestoreBuilder(),
+    # VersioningModulestoreBuilder(),  # FIXME LMS-11227
     MixedModulestoreBuilder([('draft', MongoModulestoreBuilder())]),
     MixedModulestoreBuilder([('split', VersioningModulestoreBuilder())]),
 )
@@ -231,6 +231,8 @@ CONTENTSTORE_SETUPS = (MongoContentstoreBuilder(),)
 COURSE_DATA_NAMES = (
     'toy',
     'manual-testing-complete',
+    'split_test_module',
+    'split_test_module_draft',
 )
......
@@ -4,7 +4,7 @@ Methods for exporting course data to XML
 import logging
 import lxml.etree
-from xblock.fields import Scope
+from xblock.fields import Scope, Reference, ReferenceList, ReferenceValueDict
 from xmodule.contentstore.content import StaticContent
 from xmodule.exceptions import NotFoundError
 from xmodule.modulestore import EdxJSONEncoder, ModuleStoreEnum
@@ -16,6 +16,7 @@ import os
 from path import path
 import shutil
 from xmodule.modulestore.draft_and_published import DIRECT_ONLY_CATEGORIES
+from opaque_keys.edx.locator import CourseLocator

 DRAFT_DIR = "drafts"
 PUBLISHED_DIR = "published"
@@ -36,8 +37,7 @@ def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir):
     `course_dir`: The name of the directory inside `root_dir` to write the course content to
     """

-    course = modulestore.get_course(course_key)
+    course = modulestore.get_course(course_key, depth=None)  # None means infinite

     fsm = OSFS(root_dir)
     export_fs = course.runtime.export_fs = fsm.makeopendir(course_dir)
@@ -45,6 +45,10 @@ def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir):
     # export only the published content
     with modulestore.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
+        # change all of the references inside the course to use the xml expected key type w/o version & branch
+        xml_centric_course_key = CourseLocator(course_key.org, course_key.course, course_key.run, deprecated=True)
+        adapt_references(course, xml_centric_course_key, export_fs)
+
         course.add_xml_to_node(root)
         with export_fs.open('course.xml', 'w') as course_xml:
@@ -79,16 +83,16 @@ def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir):
             course_image_file.write(course_image.data)

     # export the static tabs
-    export_extra_content(export_fs, modulestore, course_key, 'static_tab', 'tabs', '.html')
+    export_extra_content(export_fs, modulestore, xml_centric_course_key, 'static_tab', 'tabs', '.html')

     # export the custom tags
-    export_extra_content(export_fs, modulestore, course_key, 'custom_tag_template', 'custom_tags')
+    export_extra_content(export_fs, modulestore, xml_centric_course_key, 'custom_tag_template', 'custom_tags')

     # export the course updates
-    export_extra_content(export_fs, modulestore, course_key, 'course_info', 'info', '.html')
+    export_extra_content(export_fs, modulestore, xml_centric_course_key, 'course_info', 'info', '.html')

     # export the 'about' data (e.g. overview, etc.)
-    export_extra_content(export_fs, modulestore, course_key, 'about', 'about', '.html')
+    export_extra_content(export_fs, modulestore, xml_centric_course_key, 'about', 'about', '.html')

     # export the grading policy
     course_run_policy_dir = policies_dir.makeopendir(course.location.name)
@@ -125,10 +129,39 @@ def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir):
                         index = sequential.children.index(draft_vertical.location)
                         draft_vertical.xml_attributes['index_in_children_list'] = str(index)
                         draft_vertical.runtime.export_fs = draft_course_dir
+                        adapt_references(draft_vertical, xml_centric_course_key, draft_course_dir)
                         node = lxml.etree.Element('unknown')
                         draft_vertical.add_xml_to_node(node)


+def adapt_references(subtree, destination_course_key, export_fs):
+    """
+    Map every reference in the subtree into destination_course_key and set it back into the xblock fields.
+    Make sure every runtime knows where the export_fs is.
+    """
+    subtree.runtime.export_fs = export_fs  # ensure everything knows where it's going!
+    for field_name, field in subtree.fields.iteritems():
+        if field.is_set_on(subtree):
+            if isinstance(field, Reference):
+                value = field.read_from(subtree)
+                if value is not None:
+                    field.write_to(subtree, field.read_from(subtree).map_into_course(destination_course_key))
+            elif field_name == 'children':
+                # don't change the children field but do recurse over the children
+                [adapt_references(child, destination_course_key, export_fs) for child in subtree.get_children()]
+            elif isinstance(field, ReferenceList):
+                field.write_to(
+                    subtree,
+                    [ele.map_into_course(destination_course_key) for ele in field.read_from(subtree)]
+                )
+            elif isinstance(field, ReferenceValueDict):
+                field.write_to(
+                    subtree, {
+                        key: ele.map_into_course(destination_course_key) for key, ele in field.read_from(subtree).iteritems()
+                    }
+                )
+
+
 def _export_field_content(xblock_item, item_dir):
     """
     Export all fields related to 'xblock_item' other than 'metadata' and 'data' to json file in provided directory
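
The new `adapt_references` re-homes every Reference, ReferenceList and ReferenceValueDict value into the version/branch-free course key before export. A small sketch of the underlying `map_into_course` call, assuming the opaque-keys API of this era (CourseLocator, BlockUsageLocator); the org/course/run values are invented:

    from opaque_keys.edx.locator import CourseLocator, BlockUsageLocator

    source_course = CourseLocator('edX', 'Demo', '2014', branch='draft')
    usage = BlockUsageLocator(source_course, 'vertical', 'intro')

    # Re-home the reference into a branch- and version-free course key, the way
    # adapt_references rewrites each reference-typed field before serialization.
    plain_course = CourseLocator('edX', 'Demo', '2014')
    print(usage.map_into_course(plain_course))
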
@@ -149,6 +182,7 @@ def export_extra_content(export_fs, modulestore, course_key, category_type, dirn
     if len(items) > 0:
         item_dir = export_fs.makeopendir(dirname)
         for item in items:
+            adapt_references(item, course_key, export_fs)
             with item_dir.open(item.location.name + file_suffix, 'w') as item_file:
                 item_file.write(item.data.encode('utf8'))
......
@@ -431,7 +431,7 @@ def _import_module_and_update_references(
                 fields[field_name] = {
                     key: _convert_reference_fields_to_new_namespace(reference)
                     for key, reference
-                    in reference_dict.items()
+                    in reference_dict.iteritems()
                 }
             elif field_name == 'xml_attributes':
                 value = field.read_from(module)
......
@@ -2,7 +2,6 @@ from lxml import etree
 from xmodule.editing_module import XMLEditingDescriptor
 from xmodule.xml_module import XmlDescriptor
 import logging
-import sys
 from xblock.fields import String, Scope
 from exceptions import SerializationError
......
@@ -16,7 +16,7 @@ from mock import Mock
 from path import path
 from xblock.field_data import DictFieldData
-from xblock.fields import ScopeIds, Scope
+from xblock.fields import ScopeIds, Scope, Reference, ReferenceList, ReferenceValueDict
 from xmodule.x_module import ModuleSystem, XModuleDescriptor, XModuleMixin
 from xmodule.modulestore.inheritance import InheritanceMixin, own_metadata
@@ -159,6 +159,21 @@ class LogicTest(unittest.TestCase):
         return json.loads(self.xmodule.handle_ajax(dispatch, data))


+def map_references(value, field, actual_course_key):
+    """
+    Map the references in value to actual_course_key and return value
+    """
+    if not value:  # if falsey
+        return value
+    if isinstance(field, Reference):
+        return value.map_into_course(actual_course_key)
+    if isinstance(field, ReferenceList):
+        return [sub.map_into_course(actual_course_key) for sub in value]
+    if isinstance(field, ReferenceValueDict):
+        return {key: ele.map_into_course(actual_course_key) for key, ele in value.iteritems()}
+    return value
+
+
 class CourseComparisonTest(unittest.TestCase):
     """
     Mixin that has methods for comparing courses for equality.
@@ -239,7 +254,7 @@ class CourseComparisonTest(unittest.TestCase):
             # compare fields
             self.assertEqual(expected_item.fields, actual_item.fields)
-            for field_name in expected_item.fields:
+            for field_name, field in expected_item.fields.iteritems():
                 if (expected_item.scope_ids.usage_id, field_name) in self.field_exclusions:
                     continue
@@ -250,8 +265,8 @@ class CourseComparisonTest(unittest.TestCase):
                 if field_name == 'children':
                     continue

-                exp_value = getattr(expected_item, field_name)
-                actual_value = getattr(actual_item, field_name)
+                exp_value = map_references(field.read_from(expected_item), field, actual_course_key)
+                actual_value = field.read_from(actual_item)
                 self.assertEqual(
                     exp_value,
                     actual_value,
......
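
The comparison now reads each field through its descriptor and maps the expected value into the actual course's key space before asserting equality, since the same block carries different usage keys in the source and destination courses. A toy illustration of why the mapping is needed (FakeKey is invented):

    class FakeKey(object):
        """Minimal stand-in for a usage key that can be re-homed into another course."""
        def __init__(self, course, block):
            self.course, self.block = course, block
        def map_into_course(self, course):
            return FakeKey(course, self.block)
        def __eq__(self, other):
            return (self.course, self.block) == (other.course, other.block)

    expected = FakeKey('source-course', 'intro')
    actual = FakeKey('dest-course', 'intro')

    assert not (expected == actual)                           # raw values differ...
    assert expected.map_into_course('dest-course') == actual  # ...but match once mapped
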
@@ -388,8 +388,8 @@ class XmlDescriptor(XModuleDescriptor):
         url_path = name_to_pathname(self.url_name)
         filepath = self._format_filepath(self.category, url_path)
         resource_fs.makedir(os.path.dirname(filepath), recursive=True, allow_recreate=True)
-        with resource_fs.open(filepath, 'w') as file:
-            file.write(etree.tostring(xml_object, pretty_print=True, encoding='utf-8'))
+        with resource_fs.open(filepath, 'w') as fileobj:
+            fileobj.write(etree.tostring(xml_object, pretty_print=True, encoding='utf-8'))

         # And return just a pointer with the category and filename.
         record_object = etree.Element(self.category)
......