Commit 87558c2c by Calen Pennington

Merge pull request #5212 from cpennington/split/import-export-performance

Improve performance of split-mongo import/export
parents cd1a4d1d 1a17b31a
@@ -56,7 +56,7 @@ class ContentStoreImportTest(ModuleStoreTestCase):
             target_course_id=target_course_id,
             create_new_course_if_not_present=create_new_course_if_not_present,
         )
-        course_id = SlashSeparatedCourseKey('edX', 'test_import_course', '2012_Fall')
+        course_id = module_store.make_course_key('edX', 'test_import_course', '2012_Fall')
         course = module_store.get_course(course_id)
         self.assertIsNotNone(course)
...
@@ -5,6 +5,9 @@ source = common/lib/xmodule
 [report]
 ignore_errors = True
+exclude_lines =
+    pragma: no cover
+    raise NotImplementedError
 [html]
 title = XModule Python Test Coverage Report
...
@@ -167,7 +167,7 @@ class MongoContentStore(ContentStore):
                 policy.setdefault(asset['asset_key'].name, {})[attr] = value
         with open(assets_policy_file, 'w') as f:
-            json.dump(policy, f)
+            json.dump(policy, f, sort_keys=True, indent=4)

     def get_all_content_thumbnails_for_course(self, course_key):
         return self._get_all_content_for_course(course_key, get_thumbnails=True)[0]
...
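Sorting keys and indenting makes the exported assets policy deterministic, so two exports of the same course produce byte-identical, human-diffable files. A quick standalone illustration (sample data, not the store code); without sort_keys the key order follows the dict's internal ordering, which is meaningless when comparing two export runs:

    import json

    policy = {'b.png': {'locked': False}, 'a.png': {'locked': True}}

    # Stable, human-diffable serialization of the assets policy
    with open('assets.json', 'w') as f:
        json.dump(policy, f, sort_keys=True, indent=4)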
@@ -2,7 +2,7 @@ import time
 import logging
 import re

-from xblock.fields import Field
+from xblock.fields import JSONField
 import datetime
 import dateutil.parser
@@ -11,7 +11,7 @@ from pytz import UTC
 log = logging.getLogger(__name__)


-class Date(Field):
+class Date(JSONField):
     '''
     Date fields know how to parse and produce json (iso) compatible formats. Converts to tz aware datetimes.
     '''
@@ -85,7 +85,7 @@ class Date(Field):
 TIMEDELTA_REGEX = re.compile(r'^((?P<days>\d+?) day(?:s?))?(\s)?((?P<hours>\d+?) hour(?:s?))?(\s)?((?P<minutes>\d+?) minute(?:s)?)?(\s)?((?P<seconds>\d+?) second(?:s)?)?$')


-class Timedelta(Field):
+class Timedelta(JSONField):
     # Timedeltas are immutable, see http://docs.python.org/2/library/datetime.html#available-types
     MUTABLE = False
@@ -101,6 +101,10 @@ class Timedelta(Field):
         """
         if time_str is None:
             return None
+
+        if isinstance(time_str, datetime.timedelta):
+            return time_str
+
         parts = TIMEDELTA_REGEX.match(time_str)
         if not parts:
             return
@@ -112,6 +116,9 @@ class Timedelta(Field):
         return datetime.timedelta(**time_params)

     def to_json(self, value):
+        if value is None:
+            return None
+
         values = []
         for attr in ('days', 'hours', 'minutes', 'seconds'):
             cur_value = getattr(value, attr, 0)
@@ -129,7 +136,7 @@ class Timedelta(Field):
         return self.from_json(value)


-class RelativeTime(Field):
+class RelativeTime(JSONField):
     """
     Field for start_time and end_time video module properties.
@@ -182,6 +189,9 @@ class RelativeTime(Field):
         if not value:
             return datetime.timedelta(seconds=0)
+        if isinstance(value, datetime.timedelta):
+            return value
+
         # We've seen serialized versions of float in this field
         if isinstance(value, float):
             return datetime.timedelta(seconds=value)
...
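The new `isinstance` guards make `from_json` idempotent: a value that was already deserialized (as can happen when import hands field values around) passes through unchanged instead of crashing the regex match. A standalone sketch of the behavior, using the same regex as the field class:

    import datetime
    import re

    TIMEDELTA_REGEX = re.compile(
        r'^((?P<days>\d+?) day(?:s?))?(\s)?((?P<hours>\d+?) hour(?:s?))?(\s)?'
        r'((?P<minutes>\d+?) minute(?:s)?)?(\s)?((?P<seconds>\d+?) second(?:s)?)?$'
    )

    def timedelta_from_json(time_str):
        if time_str is None:
            return None
        # Already a timedelta: calling from_json twice is now harmless.
        if isinstance(time_str, datetime.timedelta):
            return time_str
        parts = TIMEDELTA_REGEX.match(time_str)
        if not parts:
            return None
        time_params = {name: int(value) for name, value in parts.groupdict().iteritems() if value}
        return datetime.timedelta(**time_params)

    once = timedelta_from_json('1 day 12 hours')
    assert timedelta_from_json(once) == once  # idempotent round-trip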
@@ -634,7 +634,7 @@ class ModuleStoreReadBase(BulkOperationsMixin, ModuleStoreRead):
         '''
         Set up the error-tracking logic.
         '''
-        super(ModuleStoreReadBase, self).__init__()
+        super(ModuleStoreReadBase, self).__init__(**kwargs)
         self._course_errors = defaultdict(make_error_tracker)  # location -> ErrorLog
         # TODO move the inheritance_cache_subsystem to classes which use it
         self.metadata_inheritance_cache_subsystem = metadata_inheritance_cache_subsystem
...
@@ -18,6 +18,8 @@ import threading

 from xmodule.util.django import get_current_request_hostname
 import xmodule.modulestore  # pylint: disable=unused-import
+from xmodule.modulestore.mixed import MixedModuleStore
+from xmodule.modulestore.draft_and_published import BranchSettingMixin
 from xmodule.contentstore.django import contentstore
 import xblock.reference.plugins
@@ -66,6 +68,12 @@ def create_modulestore_instance(engine, content_store, doc_store_config, options):
     except InvalidCacheBackendError:
         metadata_inheritance_cache = get_cache('default')

+    if issubclass(class_, MixedModuleStore):
+        _options['create_modulestore_instance'] = create_modulestore_instance
+
+    if issubclass(class_, BranchSettingMixin):
+        _options['branch_setting_func'] = _get_modulestore_branch_setting
+
     return class_(
         contentstore=content_store,
         metadata_inheritance_cache_subsystem=metadata_inheritance_cache,
@@ -75,8 +83,6 @@ def create_modulestore_instance(engine, content_store, doc_store_config, options):
         doc_store_config=doc_store_config,
         i18n_service=i18n_service or ModuleI18nService(),
         fs_service=fs_service or xblock.reference.plugins.FSService(),
-        branch_setting_func=_get_modulestore_branch_setting,
-        create_modulestore_instance=create_modulestore_instance,
         **_options
     )
...
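Hoisting these kwargs behind `issubclass` checks means a store class only receives the collaborators its inheritance chain actually declares; previously every store's `__init__` chain had to tolerate (or pop) `branch_setting_func` and `create_modulestore_instance` whether or not it used them. The pattern in miniature, with made-up class names:

    class BranchAware(object):
        def __init__(self, branch_setting_func=None, **kwargs):
            super(BranchAware, self).__init__(**kwargs)
            self.branch_setting_func = branch_setting_func

    class PlainStore(object):
        def __init__(self, **kwargs):
            super(PlainStore, self).__init__(**kwargs)

    class BranchyStore(BranchAware):
        pass

    def build_store(class_, _options):
        # Inject the dependency only for classes that opt in via inheritance,
        # so PlainStore never sees a kwarg it would have to ignore.
        if issubclass(class_, BranchAware):
            _options['branch_setting_func'] = lambda: 'published-only'
        return class_(**_options)

    assert build_store(BranchyStore, {}).branch_setting_func() == 'published-only'
    build_store(PlainStore, {})  # constructed without the extra kwarg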
@@ -25,11 +25,11 @@ class BranchSettingMixin(object):
         :param branch_setting_func: a function that returns the default branch setting for this object.
             If not specified, ModuleStoreEnum.Branch.published_only is used as the default setting.
         """
-        super(BranchSettingMixin, self).__init__(*args, **kwargs)
         self.default_branch_setting_func = kwargs.pop(
             'branch_setting_func',
             lambda: ModuleStoreEnum.Branch.published_only
         )
+        super(BranchSettingMixin, self).__init__(*args, **kwargs)

         # cache the branch setting on a local thread to support a multi-threaded environment
         self.thread_cache = threading.local()
@@ -69,9 +69,6 @@ class ModuleStoreDraftAndPublished(BranchSettingMixin):
     """
     __metaclass__ = ABCMeta

-    def __init__(self, *args, **kwargs):
-        super(ModuleStoreDraftAndPublished, self).__init__(*args, **kwargs)
-
     @abstractmethod
     def delete_item(self, location, user_id, revision=None, **kwargs):
         raise NotImplementedError
@@ -116,7 +113,7 @@ class ModuleStoreDraftAndPublished(BranchSettingMixin):
         raise NotImplementedError

 class UnsupportedRevisionError(ValueError):
     """
     This error is raised if a method is called with an unsupported revision parameter.
     """
...
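Reordering the mixin's `__init__` is load-bearing: `kwargs.pop('branch_setting_func', ...)` must run before `super().__init__(*args, **kwargs)` so the kwarg is consumed before the cooperative chain reaches a constructor (ultimately `object.__init__`) that rejects unexpected keywords. A toy reproduction of the failure mode this ordering avoids:

    class Mixin(object):
        def __init__(self, *args, **kwargs):
            # Consume our kwarg first, THEN delegate. If super() were called
            # first, 'branch_setting_func' would still be in kwargs when the
            # chain reached object.__init__, raising TypeError.
            self.default_branch_setting_func = kwargs.pop(
                'branch_setting_func',
                lambda: 'published-only'
            )
            super(Mixin, self).__init__(*args, **kwargs)

    class Store(Mixin, object):
        pass

    store = Store(branch_setting_func=lambda: 'draft-preferred')
    assert store.default_branch_setting_func() == 'draft-preferred'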
@@ -74,7 +74,9 @@ class DuplicateCourseError(Exception):
         """
         existing_entry will have the who, when, and other properties of the existing entry
         """
-        super(DuplicateCourseError, self).__init__()
+        super(DuplicateCourseError, self).__init__(
+            u'Cannot create course {}, which duplicates {}'.format(course_id, existing_entry)
+        )
         self.course_id = course_id
         self.existing_entry = existing_entry
@@ -84,9 +86,6 @@ class InvalidBranchSetting(Exception):
     Raised when the process' branch setting did not match the required setting for the attempted operation on a store.
     """
     def __init__(self, expected_setting, actual_setting):
-        super(InvalidBranchSetting, self).__init__()
+        super(InvalidBranchSetting, self).__init__(u"Invalid branch: expected {} but got {}".format(expected_setting, actual_setting))
         self.expected_setting = expected_setting
         self.actual_setting = actual_setting
-
-    def __unicode__(self, *args, **kwargs):
-        return u"Invalid branch: expected {} but got {}".format(self.expected_setting, self.actual_setting)
@@ -214,11 +214,19 @@ class InheritingFieldData(KvsFieldData):
         """
         The default for an inheritable name is found on a parent.
         """
-        if name in self.inheritable_names and block.parent is not None:
-            parent = block.get_parent()
-            if parent:
-                return getattr(parent, name)
-        super(InheritingFieldData, self).default(block, name)
+        if name in self.inheritable_names:
+            # Walk up the content tree to find the first ancestor
+            # that this field is set on. Use the field from the current
+            # block so that if it has a different default than the root
+            # node of the tree, the block's default will be used.
+            field = block.fields[name]
+            ancestor = block.get_parent()
+            while ancestor is not None:
+                if field.is_set_on(ancestor):
+                    return field.read_json(ancestor)
+                else:
+                    ancestor = ancestor.get_parent()
+        return super(InheritingFieldData, self).default(block, name)


 def inheriting_field_data(kvs):
...
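The old lookup only ever consulted the immediate parent (and, due to the missing `return`, discarded the `super()` default entirely); the new loop honors a value set anywhere up the ancestor chain. A toy model of the resolution order, with `explicit` standing in for the XBlock machinery's `field.is_set_on`:

    class Node(object):
        def __init__(self, parent=None, **explicit):
            self.parent = parent
            self.explicit = explicit  # fields explicitly set on this node

        def get_parent(self):
            return self.parent

    def resolve(block, name, default=None):
        # Nearest ancestor with the field explicitly set wins; otherwise
        # fall back to the block's own default.
        ancestor = block.get_parent()
        while ancestor is not None:
            if name in ancestor.explicit:       # stands in for field.is_set_on(ancestor)
                return ancestor.explicit[name]  # stands in for field.read_json(ancestor)
            ancestor = ancestor.get_parent()
        return default

    course = Node(due='2014-09-01')
    chapter = Node(parent=course)               # nothing set at this level
    sequential = Node(parent=chapter)
    assert resolve(sequential, 'due') == '2014-09-01'  # found two levels up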
@@ -20,3 +20,5 @@ class BlockKey(namedtuple('BlockKey', 'type id')):
     def from_usage_key(cls, usage_key):
         return cls(usage_key.block_type, usage_key.block_id)
+
+CourseEnvelope = namedtuple('CourseEnvelope', 'course_key structure')
...
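`CourseEnvelope` replaces the ad-hoc `{'org', 'course', 'run', 'branch', 'structure'}` dict the split store previously threaded around: the course identity now travels as a single `CourseLocator` next to the structure document. A sketch of the shape (sample ids, structure fields abbreviated):

    from collections import namedtuple
    from opaque_keys.edx.locator import CourseLocator

    CourseEnvelope = namedtuple('CourseEnvelope', 'course_key structure')

    envelope = CourseEnvelope(
        CourseLocator(org='edX', course='demo', run='2014'),
        {'_id': 'version-guid', 'blocks': {}},
    )
    envelope.course_key.org     # replaces envelope['org']
    envelope.structure['_id']   # replaces envelope['structure']['_id']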
 import sys
 import logging
 from contracts import contract, new_contract
+from lazy import lazy
 from xblock.runtime import KvsFieldData
 from xblock.fields import ScopeIds
 from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator, DefinitionLocator
@@ -12,12 +13,14 @@ from .split_mongo_kvs import SplitMongoKVS
 from fs.osfs import OSFS
 from .definition_lazy_loader import DefinitionLazyLoader
 from xmodule.modulestore.edit_info import EditInfoRuntimeMixin
-from xmodule.modulestore.split_mongo import BlockKey
+from xmodule.modulestore.inheritance import inheriting_field_data, InheritanceMixin
+from xmodule.modulestore.split_mongo import BlockKey, CourseEnvelope

 log = logging.getLogger(__name__)

 new_contract('BlockUsageLocator', BlockUsageLocator)
 new_contract('BlockKey', BlockKey)
+new_contract('CourseEnvelope', CourseEnvelope)


 class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
@@ -27,6 +30,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
     Computes the settings (nee 'metadata') inheritance upon creation.
     """
+    @contract(course_entry=CourseEnvelope)
     def __init__(self, modulestore, course_entry, default_class, module_data, lazy, **kwargs):
         """
         Computes the settings inheritance and sets up the cache.
@@ -34,8 +38,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
             modulestore: the module store that can be used to retrieve additional
             modules

-            course_entry: the originally fetched enveloped course_structure w/ branch and course id info
-                plus a dictionary of cached inherited_settings indexed by (block_type, block_id) tuple.
+            course_entry: the originally fetched enveloped course_structure w/ branch and course id info.
                 Callers to _load_item provide an override but that function ignores the provided structure and
                 only looks at the branch and course id
@@ -43,10 +46,10 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
                 underlying modulestore
         """
         # needed by capa_problem (as runtime.filestore via this.resources_fs)
-        if 'course' in course_entry:
-            root = modulestore.fs_root / course_entry['org'] / course_entry['course'] / course_entry['run']
+        if course_entry.course_key.course:
+            root = modulestore.fs_root / course_entry.course_key.org / course_entry.course_key.course / course_entry.course_key.run
         else:
-            root = modulestore.fs_root / course_entry['structure']['_id']
+            root = modulestore.fs_root / course_entry.structure['_id']
         root.makedirs_p()  # create directory if it doesn't exist

         super(CachingDescriptorSystem, self).__init__(
@@ -59,16 +62,19 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
         self.course_entry = course_entry
         self.lazy = lazy
         self.module_data = module_data
-        # Compute inheritance
-        modulestore.inherit_settings(
-            course_entry['structure'].get('blocks', {}),
-            course_entry['structure'].get('root'),
-            course_entry.setdefault('inherited_settings', {}),
-        )
         self.default_class = default_class
         self.local_modules = {}

-    @contract(usage_key="BlockUsageLocator | BlockKey")
+    @lazy
+    @contract(returns="dict(BlockKey: BlockKey)")
+    def _parent_map(self):
+        parent_map = {}
+        for block_key, block in self.course_entry.structure['blocks'].iteritems():
+            for child in block['fields'].get('children', []):
+                parent_map[child] = block_key
+        return parent_map
+
+    @contract(usage_key="BlockUsageLocator | BlockKey", course_entry_override="CourseEnvelope | None")
     def _load_item(self, usage_key, course_entry_override=None, **kwargs):
         # usage_key is either a UsageKey or just the block_key. if a usage_key,
         if isinstance(usage_key, BlockUsageLocator):
@@ -88,20 +94,17 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
             block_key = usage_key

         course_info = course_entry_override or self.course_entry
-        course_key = CourseLocator(
-            version_guid=course_info['structure']['_id'],
-            org=course_info.get('org'),
-            course=course_info.get('course'),
-            run=course_info.get('run'),
-            branch=course_info.get('branch'),
-        )
+        course_key = course_info.course_key
+
+        if course_entry_override:
+            structure_id = course_entry_override.structure.get('_id')
+        else:
+            structure_id = self.course_entry.structure.get('_id')

         json_data = self.get_module_data(block_key, course_key)

         class_ = self.load_block_type(json_data.get('block_type'))
-        # pass None for inherited_settings to signal that it should get the settings from cache
-        new_item = self.xblock_from_json(class_, course_key, block_key, json_data, None, course_entry_override, **kwargs)
-        return new_item
+        return self.xblock_from_json(class_, course_key, block_key, json_data, course_entry_override, **kwargs)

     @contract(block_key=BlockKey, course_key=CourseLocator)
     def get_module_data(self, block_key, course_key):
@@ -134,36 +137,29 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
     # is the intended one when not given a course_entry_override; thus, the caching of the last branch/course id.
     @contract(block_key="BlockKey | None")
     def xblock_from_json(
-            self, class_, course_key, block_key, json_data, inherited_settings, course_entry_override=None, **kwargs
+            self, class_, course_key, block_key, json_data, course_entry_override=None, **kwargs
     ):
         if course_entry_override is None:
             course_entry_override = self.course_entry
         else:
             # most recent retrieval is most likely the right one for next caller (see comment above fn)
-            self.course_entry['branch'] = course_entry_override['branch']
-            self.course_entry['org'] = course_entry_override['org']
-            self.course_entry['course'] = course_entry_override['course']
-            self.course_entry['run'] = course_entry_override['run']
+            self.course_entry = CourseEnvelope(course_entry_override.course_key, self.course_entry.structure)

         definition_id = json_data.get('definition')

         # If no usage id is provided, generate an in-memory id
         if block_key is None:
             block_key = BlockKey(json_data['block_type'], LocalId())
-        else:
-            if inherited_settings is None:
-                # see if there's a value in course_entry
-                if block_key in self.course_entry['inherited_settings']:
-                    inherited_settings = self.course_entry['inherited_settings'][block_key]
-            elif block_key not in self.course_entry['inherited_settings']:
-                self.course_entry['inherited_settings'][block_key] = inherited_settings

         if definition_id is not None and not json_data.get('definition_loaded', False):
             definition_loader = DefinitionLazyLoader(
-                self.modulestore, block_key.type, definition_id,
+                self.modulestore,
+                course_key,
+                block_key.type,
+                definition_id,
                 lambda fields: self.modulestore.convert_references_to_keys(
                     course_key, self.load_block_type(block_key.type),
-                    fields, self.course_entry['structure']['blocks'],
+                    fields, self.course_entry.structure['blocks'],
                 )
             )
         else:
@@ -180,15 +176,24 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
             )

         converted_fields = self.modulestore.convert_references_to_keys(
-            block_locator.course_key, class_, json_data.get('fields', {}), self.course_entry['structure']['blocks'],
+            block_locator.course_key, class_, json_data.get('fields', {}), self.course_entry.structure['blocks'],
         )

+        if block_key in self._parent_map:
+            parent_key = self._parent_map[block_key]
+            parent = course_key.make_usage_key(parent_key.type, parent_key.id)
+        else:
+            parent = None
+
         kvs = SplitMongoKVS(
             definition_loader,
             converted_fields,
-            inherited_settings,
-            **kwargs
+            parent=parent,
+            field_decorator=kwargs.get('field_decorator')
         )
-        field_data = KvsFieldData(kvs)
+
+        if InheritanceMixin in self.modulestore.xblock_mixins:
+            field_data = inheriting_field_data(kvs)
+        else:
+            field_data = KvsFieldData(kvs)

         try:
             module = self.construct_xblock_from_class(
@@ -201,8 +206,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem, EditInfoRuntimeMixin):
             return ErrorDescriptor.from_json(
                 json_data,
                 self,
-                BlockUsageLocator(
-                    CourseLocator(version_guid=course_entry_override['structure']['_id']),
+                course_entry_override.course_key.make_usage_key(
                     block_type='error',
                     block_id=block_key.id
                 ),
...
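`_parent_map` is the replacement for the eager `inherit_settings` pass: instead of computing inherited settings for every block when the runtime is built, the runtime lazily builds one child-to-parent index per structure and lets `InheritingFieldData` walk it on demand. The `lazy` decorator memoizes the property, as in this sketch (toy structure, not the real runtime):

    from lazy import lazy  # the 'lazy' package: a memoizing property decorator

    class Structure(object):
        def __init__(self, blocks):
            # blocks: {block_key: {'fields': {'children': [block_key, ...]}}}
            self.blocks = blocks

        @lazy
        def parent_map(self):
            # Built at most once per instance, and only if a parent is ever
            # requested, so bulk import no longer pays for a whole-tree
            # inheritance computation up front.
            pmap = {}
            for block_key, block in self.blocks.iteritems():
                for child in block['fields'].get('children', []):
                    pmap[child] = block_key
            return pmap

    s = Structure({
        'course': {'fields': {'children': ['chapter1']}},
        'chapter1': {'fields': {'children': []}},
    })
    assert s.parent_map['chapter1'] == 'course'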
@@ -8,13 +8,14 @@ class DefinitionLazyLoader(object):
     object doesn't force access during init but waits until client wants the
     definition. Only works if the modulestore is a split mongo store.
     """
-    def __init__(self, modulestore, block_type, definition_id, field_converter):
+    def __init__(self, modulestore, course_key, block_type, definition_id, field_converter):
         """
         Simple placeholder for yet-to-be-fetched data
         :param modulestore: the pymongo db connection with the definitions
         :param definition_locator: the id of the record in the above to fetch
         """
         self.modulestore = modulestore
+        self.course_key = course_key
         self.definition_locator = DefinitionLocator(block_type, definition_id)
         self.field_converter = field_converter
@@ -23,4 +24,4 @@ class DefinitionLazyLoader(object):
         Fetch the definition. Note, the caller should replace this lazy
         loader pointer with the result so as not to fetch more than once
         """
-        return self.modulestore.db_connection.get_definition(self.definition_locator.definition_id)
+        return self.modulestore.get_definition(self.course_key, self.definition_locator.definition_id)
...
@@ -3,7 +3,14 @@ Segregation of pymongo functions from the data modeling mechanisms for split modulestore.
 """
 import re
 import pymongo
+import time
+
+# Import this just to export it
+from pymongo.errors import DuplicateKeyError  # pylint: disable=unused-import
+
 from contracts import check
+from functools import wraps
+from pymongo.errors import AutoReconnect
 from xmodule.exceptions import HeartbeatFailure
 from xmodule.modulestore.split_mongo import BlockKey
 from datetime import tzinfo
@@ -62,6 +69,32 @@ def structure_to_mongo(structure):
     return new_structure


+def autoretry_read(wait=0.1, retries=5):
+    """
+    Automatically retry a read-only method in the case of a pymongo
+    AutoReconnect exception.
+
+    See http://emptysqua.re/blog/save-the-monkey-reliably-writing-to-mongodb/
+    for a discussion of this technique.
+    """
+    def decorate(fn):
+        @wraps(fn)
+        def wrapper(*args, **kwargs):
+            for attempt in xrange(retries):
+                try:
+                    return fn(*args, **kwargs)
+                except AutoReconnect:
+                    # Reraise if we failed on our last attempt
+                    if attempt == retries - 1:
+                        raise
+                    if wait:
+                        time.sleep(wait)
+        return wrapper
+    return decorate
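A usage sketch for the retry decorator with a deliberately flaky reader (the counter and function are invented for the example, and the decorator defined above is assumed in scope). Only pymongo's transient AutoReconnect is retried, and the final failure is re-raised so real outages still surface:

    from pymongo.errors import AutoReconnect

    calls = {'count': 0}

    @autoretry_read(wait=0)  # wait=0 keeps the example instant
    def flaky_read():
        calls['count'] += 1
        if calls['count'] < 3:
            raise AutoReconnect('replica set failover in progress')
        return 'structure-document'

    assert flaky_read() == 'structure-document'  # succeeds on the third attempt
    assert calls['count'] == 3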
 class MongoConnection(object):
     """
     Segregation of pymongo functions from the data modeling mechanisms for split modulestore.
@@ -106,12 +139,14 @@ class MongoConnection(object):
         else:
             raise HeartbeatFailure("Can't connect to {}".format(self.database.name))

+    @autoretry_read()
     def get_structure(self, key):
         """
         Get the structure from the persistence mechanism whose id is the given key
         """
         return structure_from_mongo(self.structures.find_one({'_id': key}))

+    @autoretry_read()
     def find_structures_by_id(self, ids):
         """
         Return all structures that are specified in ``ids``.
@@ -121,6 +156,7 @@ class MongoConnection(object):
         """
         return [structure_from_mongo(structure) for structure in self.structures.find({'_id': {'$in': ids}})]

+    @autoretry_read()
     def find_structures_derived_from(self, ids):
         """
         Return all structures that were immediately derived from a structure listed in ``ids``.
@@ -130,6 +166,7 @@ class MongoConnection(object):
         """
         return [structure_from_mongo(structure) for structure in self.structures.find({'previous_version': {'$in': ids}})]

+    @autoretry_read()
     def find_ancestor_structures(self, original_version, block_key):
         """
         Find all structures that originated from ``original_version`` that contain ``block_key``.
@@ -149,12 +186,13 @@ class MongoConnection(object):
             }
         })]

-    def upsert_structure(self, structure):
+    def insert_structure(self, structure):
         """
-        Update the db record for structure, creating that record if it doesn't already exist
+        Insert a new structure into the database.
         """
-        self.structures.update({'_id': structure['_id']}, structure_to_mongo(structure), upsert=True)
+        self.structures.insert(structure_to_mongo(structure))

+    @autoretry_read()
     def get_course_index(self, key, ignore_case=False):
         """
         Get the course_index from the persistence mechanism whose id is the given key
@@ -171,6 +209,7 @@ class MongoConnection(object):
             }
         return self.course_index.find_one(query)

+    @autoretry_read()
     def find_matching_course_indexes(self, branch=None, search_targets=None):
         """
         Find the course_index matching particular conditions.
@@ -229,18 +268,19 @@ class MongoConnection(object):
             'run': course_index['run'],
         })

+    @autoretry_read()
     def get_definition(self, key):
         """
         Get the definition from the persistence mechanism whose id is the given key
         """
         return self.definitions.find_one({'_id': key})
-    def find_matching_definitions(self, query):
+    @autoretry_read()
+    def get_definitions(self, definitions):
         """
-        Find the definitions matching the query. Right now the query must be a legal mongo query
-        :param query: a mongo-style query of {key: [value|{$in ..}|..], ..}
+        Retrieve all definitions listed in `definitions`.
         """
-        return self.definitions.find(query)
+        return self.definitions.find({'_id': {'$in': definitions}})

     def insert_definition(self, definition):
         """
...
@@ -75,9 +75,10 @@ from xmodule.modulestore import (
 from ..exceptions import ItemNotFoundError
 from .caching_descriptor_system import CachingDescriptorSystem
-from xmodule.modulestore.split_mongo.mongo_connection import MongoConnection, BlockKey
+from xmodule.modulestore.split_mongo.mongo_connection import MongoConnection, DuplicateKeyError
+from xmodule.modulestore.split_mongo import BlockKey, CourseEnvelope
 from xmodule.error_module import ErrorDescriptor
-from _collections import defaultdict
+from collections import defaultdict
 from types import NoneType
@@ -116,6 +117,8 @@ class SplitBulkWriteRecord(BulkOpsRecord):
         self.index = None
         self.structures = {}
         self.structures_in_db = set()
+        self.definitions = {}
+        self.definitions_in_db = set()

     # TODO: This needs to track which branches have actually been modified/versioned,
     # so that copying one branch to another doesn't update the original branch.
@@ -223,7 +226,22 @@ class SplitBulkWriteMixin(BulkOperationsMixin):
         """
         # If the content is dirty, then update the database
         for _id in bulk_write_record.structures.viewkeys() - bulk_write_record.structures_in_db:
-            self.db_connection.upsert_structure(bulk_write_record.structures[_id])
+            try:
+                self.db_connection.insert_structure(bulk_write_record.structures[_id])
+            except DuplicateKeyError:
+                # We may not have looked up this structure inside this bulk operation, and thus
+                # didn't realize that it was already in the database. That's OK, the store is
+                # append only, so if it's already been written, we can just keep going.
+                log.debug("Attempted to insert duplicate structure %s", _id)
+
+        for _id in bulk_write_record.definitions.viewkeys() - bulk_write_record.definitions_in_db:
+            try:
+                self.db_connection.insert_definition(bulk_write_record.definitions[_id])
+            except DuplicateKeyError:
+                # We may not have looked up this definition inside this bulk operation, and thus
+                # didn't realize that it was already in the database. That's OK, the store is
+                # append only, so if it's already been written, we can just keep going.
+                log.debug("Attempted to insert duplicate definition %s", _id)

         if bulk_write_record.index is not None and bulk_write_record.index != bulk_write_record.initial_index:
             if bulk_write_record.initial_index is None:
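The switch from `upsert_structure` to `insert_structure` leans on the append-only nature of structures and definitions: their `_id` is a version guid, so a `DuplicateKeyError` proves an identical document is already persisted and the write can simply be skipped, instead of paying for an unconditional upsert round-trip. The idiom in isolation (collection names hypothetical, pymongo 2.x API as used in this codebase):

    from pymongo import MongoClient
    from pymongo.errors import DuplicateKeyError

    structures = MongoClient().modulestore.structures  # hypothetical collection

    def write_once(doc):
        # Documents are immutable once written, so a duplicate key means
        # "already saved", not "conflict": swallow it rather than upserting.
        try:
            structures.insert(doc)  # pymongo 2.x insert()
        except DuplicateKeyError:
            pass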
@@ -289,7 +307,71 @@ class SplitBulkWriteMixin(BulkOperationsMixin):
         if bulk_write_record.active:
             bulk_write_record.structures[structure['_id']] = structure
         else:
-            self.db_connection.upsert_structure(structure)
+            self.db_connection.insert_structure(structure)
+
+    def get_definition(self, course_key, definition_guid):
+        """
+        Retrieve a single definition by id, respecting the active bulk operation
+        on course_key.
+
+        Args:
+            course_key (:class:`.CourseKey`): The course being operated on
+            definition_guid (str or ObjectID): The id of the definition to load
+        """
+        bulk_write_record = self._get_bulk_ops_record(course_key)
+        if bulk_write_record.active:
+            definition = bulk_write_record.definitions.get(definition_guid)
+
+            # The definition hasn't been loaded from the db yet, so load it
+            if definition is None:
+                definition = self.db_connection.get_definition(definition_guid)
+                bulk_write_record.definitions[definition_guid] = definition
+                if definition is not None:
+                    bulk_write_record.definitions_in_db.add(definition_guid)
+
+            return definition
+        else:
+            # cast string to ObjectId if necessary
+            definition_guid = course_key.as_object_id(definition_guid)
+            return self.db_connection.get_definition(definition_guid)
+
+    def get_definitions(self, course_key, ids):
+        """
+        Return all definitions that are specified in ``ids``.
+
+        If a definition with the same id is in both the cache and the database,
+        the cached version will be preferred.
+
+        Arguments:
+            course_key (:class:`.CourseKey`): The course that these definitions are being loaded
+                for (to respect bulk operations).
+            ids (list): A list of definition ids
+        """
+        definitions = []
+        ids = set(ids)
+
+        bulk_write_record = self._get_bulk_ops_record(course_key)
+        if bulk_write_record.active:
+            for definition in bulk_write_record.definitions.values():
+                definition_id = definition.get('_id')
+                if definition_id in ids:
+                    ids.remove(definition_id)
+                    definitions.append(definition)
+
+        definitions.extend(self.db_connection.get_definitions(list(ids)))
+        return definitions
+
+    def update_definition(self, course_key, definition):
+        """
+        Update a definition, respecting the current bulk operation status
+        (no data will be written to the database if a bulk operation is active.)
+        """
+        bulk_write_record = self._get_bulk_ops_record(course_key)
+        if bulk_write_record.active:
+            bulk_write_record.definitions[definition['_id']] = definition
+        else:
+            self.db_connection.insert_definition(definition)
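Within an active bulk operation the record behaves like a small write-back cache: reads prefer the in-memory copy and remember which ids were already persisted (`definitions_in_db`), so the final flush inserts only what is genuinely new. The read path stripped to its core (helper names invented for the sketch):

    def cached_get(record, db_fetch, guid):
        # record.definitions: definitions touched during this bulk operation
        # record.definitions_in_db: ids the flush may skip re-inserting
        definition = record.definitions.get(guid)
        if definition is None:
            definition = db_fetch(guid)
            record.definitions[guid] = definition
            if definition is not None:
                record.definitions_in_db.add(guid)
        return definition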
     def version_structure(self, course_key, structure, user_id):
         """
@@ -521,7 +603,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
         new_module_data = {}
         for block_id in base_block_ids:
             new_module_data = self.descendants(
-                system.course_entry['structure']['blocks'],
+                system.course_entry.structure['blocks'],
                 block_id,
                 depth,
                 new_module_data
@@ -529,9 +611,13 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
         if not lazy:
             # Load all descendants by id
-            descendent_definitions = self.db_connection.find_matching_definitions({
-                '_id': {'$in': [block['definition']
-                                for block in new_module_data.itervalues()]}})
+            descendent_definitions = self.get_definitions(
+                course_key,
+                [
+                    block['definition']
+                    for block in new_module_data.itervalues()
+                ]
+            )
             # turn into a map
             definitions = {definition['_id']: definition
                            for definition in descendent_definitions}
@@ -541,7 +627,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
                 converted_fields = self.convert_references_to_keys(
                     course_key, system.load_block_type(block['block_type']),
                     definitions[block['definition']].get('fields'),
-                    system.course_entry['structure']['blocks'],
+                    system.course_entry.structure['blocks'],
                 )
                 block['fields'].update(converted_fields)
                 block['definition_loaded'] = True
@@ -549,24 +635,18 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
         system.module_data.update(new_module_data)
         return system.module_data

+    @contract(course_entry=CourseEnvelope, block_keys="list(BlockKey)", depth="int | None")
     def _load_items(self, course_entry, block_keys, depth=0, lazy=True, **kwargs):
         '''
         Load & cache the given blocks from the course. Prefetch down to the
         given depth. Load the definitions into each block if lazy is False;
         otherwise, use the lazy definition placeholder.
         '''
-        runtime = self._get_cache(course_entry['structure']['_id'])
+        runtime = self._get_cache(course_entry.structure['_id'])
         if runtime is None:
             runtime = self.create_runtime(course_entry, lazy)
-            self._add_cache(course_entry['structure']['_id'], runtime)
-            course_key = CourseLocator(
-                version_guid=course_entry['structure']['_id'],
-                org=course_entry.get('org'),
-                course=course_entry.get('course'),
-                run=course_entry.get('run'),
-                branch=course_entry.get('branch'),
-            )
-            self.cache_items(runtime, block_keys, course_key, depth, lazy)
+            self._add_cache(course_entry.structure['_id'], runtime)
+            self.cache_items(runtime, block_keys, course_entry.course_key, depth, lazy)
         return [runtime.load_item(block_key, course_entry, **kwargs) for block_key in block_keys]

     def _get_cache(self, course_version_guid):
def _get_cache(self, course_version_guid): def _get_cache(self, course_version_guid):
...@@ -650,14 +730,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -650,14 +730,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# 'run', and 'branch' are not intrinsic to structure # 'run', and 'branch' are not intrinsic to structure
# and the one assoc'd w/ it by another fetch may not be the one relevant to this fetch; so, # and the one assoc'd w/ it by another fetch may not be the one relevant to this fetch; so,
# add it in the envelope for the structure. # add it in the envelope for the structure.
envelope = { return CourseEnvelope(course_key.replace(version_guid=version_guid), entry)
'org': course_key.org,
'course': course_key.course,
'run': course_key.run,
'branch': course_key.branch,
'structure': entry,
}
return envelope
     def get_courses(self, branch, **kwargs):
         '''
@@ -690,13 +763,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
         result = []
         for entry in matching_structures:
             course_info = id_version_map[entry['_id']]
-            envelope = {
-                'org': course_info['org'],
-                'course': course_info['course'],
-                'run': course_info['run'],
-                'branch': branch,
-                'structure': entry,
-            }
+            envelope = CourseEnvelope(
+                CourseLocator(
+                    org=course_info['org'],
+                    course=course_info['course'],
+                    run=course_info['run'],
+                    branch=branch,
+                ),
+                entry
+            )
             root = entry['root']
             course_list = self._load_items(envelope, [root], 0, lazy=True, **kwargs)
             if not isinstance(course_list[0], ErrorDescriptor):
@@ -716,12 +791,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
         '''
         Gets the course descriptor for the course identified by the locator
         '''
-        if not isinstance(course_id, CourseLocator):
+        if not isinstance(course_id, CourseLocator) or course_id.deprecated:
             # The supplied CourseKey is of the wrong type, so it can't possibly be stored in this modulestore.
             raise ItemNotFoundError(course_id)

         course_entry = self._lookup_course(course_id)
-        root = course_entry['structure']['root']
+        root = course_entry.structure['root']
         result = self._load_items(course_entry, [root], depth, lazy=True, **kwargs)
         return result[0]
@@ -734,7 +809,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
         Note: we return the course_id instead of a boolean here since the found course may have
             a different id than the given course_id when ignore_case is True.
         '''
-        if not isinstance(course_id, CourseLocator):
+        if not isinstance(course_id, CourseLocator) or course_id.deprecated:
             # The supplied CourseKey is of the wrong type, so it can't possibly be stored in this modulestore.
             return False
@@ -747,10 +822,14 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
             the course or the block w/in the course do not exist for the given version.
             raises InsufficientSpecificationError if the usage_key does not id a block
         """
+        if not isinstance(usage_key, BlockUsageLocator) or usage_key.deprecated:
+            # The supplied UsageKey is of the wrong type, so it can't possibly be stored in this modulestore.
+            return False
+
         if usage_key.block_id is None:
             raise InsufficientSpecificationError(usage_key)
         try:
-            course_structure = self._lookup_course(usage_key.course_key)['structure']
+            course_structure = self._lookup_course(usage_key.course_key).structure
         except ItemNotFoundError:
             # this error only occurs if the course does not exist
             return False
@@ -766,7 +845,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
             descendants.
         raises InsufficientSpecificationError or ItemNotFoundError
         """
-        if not isinstance(usage_key, BlockUsageLocator):
+        if not isinstance(usage_key, BlockUsageLocator) or usage_key.deprecated:
             # The supplied UsageKey is of the wrong type, so it can't possibly be stored in this modulestore.
             raise ItemNotFoundError(usage_key)
@@ -799,6 +878,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
         For split,
         you can search by ``edited_by``, ``edited_on`` providing a function testing limits.
         """
+        if not isinstance(course_locator, CourseLocator) or course_locator.deprecated:
+            # The supplied CourseKey is of the wrong type, so it can't possibly be stored in this modulestore.
+            return []
+
         course = self._lookup_course(course_locator)
         items = []
         qualifiers = qualifiers.copy() if qualifiers else {}  # copy the qualifiers (destructively manipulated here)
@@ -813,7 +896,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
                     self._block_matches(block_json.get('fields', {}), settings)
             ):
                 if content:
-                    definition_block = self.db_connection.get_definition(block_json['definition'])
+                    definition_block = self.get_definition(course_locator, block_json['definition'])
                     return self._block_matches(definition_block.get('fields', {}), content)
                 else:
                     return True
@@ -824,7 +907,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
             # odd case where we don't search just confirm
             block_name = qualifiers.pop('name')
             block_ids = []
-            for block_id, block in course['structure']['blocks'].iteritems():
+            for block_id, block in course.structure['blocks'].iteritems():
                 if block_name == block_id.id and _block_matches_all(block):
                     block_ids.append(block_id)
@@ -836,7 +919,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
         # don't expect caller to know that children are in fields
         if 'children' in qualifiers:
             settings['children'] = qualifiers.pop('children')
-        for block_id, value in course['structure']['blocks'].iteritems():
+        for block_id, value in course.structure['blocks'].iteritems():
             if _block_matches_all(value):
                 items.append(block_id)
:param locator: BlockUsageLocator restricting search scope :param locator: BlockUsageLocator restricting search scope
''' '''
if not isinstance(locator, BlockUsageLocator) or locator.deprecated:
# The supplied locator is of the wrong type, so it can't possibly be stored in this modulestore.
raise ItemNotFoundError(locator)
course = self._lookup_course(locator.course_key) course = self._lookup_course(locator.course_key)
parent_id = self._get_parent_from_structure(BlockKey.from_usage_key(locator), course['structure']) parent_id = self._get_parent_from_structure(BlockKey.from_usage_key(locator), course.structure)
if parent_id is None: if parent_id is None:
return None return None
return BlockUsageLocator.make_relative( return BlockUsageLocator.make_relative(
...@@ -867,11 +954,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -867,11 +954,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
""" """
Return an array of all of the orphans in the course. Return an array of all of the orphans in the course.
""" """
if not isinstance(course_key, CourseLocator) or course_key.deprecated:
# The supplied CourseKey is of the wrong type, so it can't possibly be stored in this modulestore.
raise ItemNotFoundError(course_key)
detached_categories = [name for name, __ in XBlock.load_tagged_classes("detached")] detached_categories = [name for name, __ in XBlock.load_tagged_classes("detached")]
course = self._lookup_course(course_key) course = self._lookup_course(course_key)
items = set(course['structure']['blocks'].keys()) items = set(course.structure['blocks'].keys())
items.remove(course['structure']['root']) items.remove(course.structure['root'])
blocks = course['structure']['blocks'] blocks = course.structure['blocks']
for block_id, block_data in blocks.iteritems(): for block_id, block_data in blocks.iteritems():
items.difference_update(BlockKey(*child) for child in block_data.get('fields', {}).get('children', [])) items.difference_update(BlockKey(*child) for child in block_data.get('fields', {}).get('children', []))
if block_data['block_type'] in detached_categories: if block_data['block_type'] in detached_categories:
...@@ -895,6 +986,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -895,6 +986,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
'edited_on': when the course was originally created 'edited_on': when the course was originally created
} }
""" """
if not isinstance(course_key, CourseLocator) or course_key.deprecated:
# The supplied CourseKey is of the wrong type, so it can't possibly be stored in this modulestore.
raise ItemNotFoundError(course_key)
if not (course_key.course and course_key.run and course_key.org): if not (course_key.course and course_key.run and course_key.org):
return None return None
index = self.get_course_index(course_key) index = self.get_course_index(course_key)
...@@ -912,7 +1007,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -912,7 +1007,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
'edited_on': when the change was made 'edited_on': when the change was made
} }
""" """
course = self._lookup_course(course_key)['structure'] if not isinstance(course_key, CourseLocator) or course_key.deprecated:
# The supplied CourseKey is of the wrong type, so it can't possibly be stored in this modulestore.
raise ItemNotFoundError(course_key)
course = self._lookup_course(course_key).structure
return { return {
'original_version': course['original_version'], 'original_version': course['original_version'],
'previous_version': course['previous_version'], 'previous_version': course['previous_version'],
...@@ -930,6 +1029,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -930,6 +1029,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
'edited_on': when the change was made 'edited_on': when the change was made
} }
""" """
if not isinstance(definition_locator, DefinitionLocator) or definition_locator.deprecated:
# The supplied locator is of the wrong type, so it can't possibly be stored in this modulestore.
raise ItemNotFoundError(definition_locator)
definition = self.db_connection.get_definition(definition_locator.definition_id) definition = self.db_connection.get_definition(definition_locator.definition_id)
if definition is None: if definition is None:
return None return None
...@@ -942,11 +1045,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -942,11 +1045,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
next versions, these do include those created for other courses. next versions, these do include those created for other courses.
:param course_locator: :param course_locator:
''' '''
if not isinstance(course_locator, CourseLocator) or course_locator.deprecated:
# The supplied CourseKey is of the wrong type, so it can't possibly be stored in this modulestore.
raise ItemNotFoundError(course_locator)
if version_history_depth < 1: if version_history_depth < 1:
return None return None
if course_locator.version_guid is None: if course_locator.version_guid is None:
course = self._lookup_course(course_locator) course = self._lookup_course(course_locator)
version_guid = course['structure']['_id'] version_guid = course.structure['_id']
course_locator = course_locator.for_version(version_guid) course_locator = course_locator.for_version(version_guid)
else: else:
version_guid = course_locator.version_guid version_guid = course_locator.version_guid
...@@ -977,7 +1084,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -977,7 +1084,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
''' '''
# course_agnostic means we don't care if the head and version don't align, trust the version # course_agnostic means we don't care if the head and version don't align, trust the version
course_struct = self._lookup_course(block_locator.course_key.course_agnostic())['structure'] course_struct = self._lookup_course(block_locator.course_key.course_agnostic()).structure
block_key = BlockKey.from_usage_key(block_locator) block_key = BlockKey.from_usage_key(block_locator)
all_versions_with_block = self.find_ancestor_structures( all_versions_with_block = self.find_ancestor_structures(
original_version=course_struct['original_version'], original_version=course_struct['original_version'],
...@@ -1026,7 +1133,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1026,7 +1133,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# TODO implement # TODO implement
raise NotImplementedError() raise NotImplementedError()
def create_definition_from_data(self, new_def_data, category, user_id): def create_definition_from_data(self, course_key, new_def_data, category, user_id):
""" """
Pull the definition fields out of descriptor and save to the db as a new definition Pull the definition fields out of descriptor and save to the db as a new definition
w/o a predecessor and return the new id. w/o a predecessor and return the new id.
...@@ -1047,11 +1154,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1047,11 +1154,11 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
}, },
'schema_version': self.SCHEMA_VERSION, 'schema_version': self.SCHEMA_VERSION,
} }
self.db_connection.insert_definition(document) self.update_definition(course_key, document)
definition_locator = DefinitionLocator(category, new_id) definition_locator = DefinitionLocator(category, new_id)
return definition_locator return definition_locator
def update_definition_from_data(self, definition_locator, new_def_data, user_id): def update_definition_from_data(self, course_key, definition_locator, new_def_data, user_id):
""" """
See if new_def_data differs from the persisted version. If so, update See if new_def_data differs from the persisted version. If so, update
the persisted version and return the new id. the persisted version and return the new id.
...@@ -1068,22 +1175,23 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1068,22 +1175,23 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# if this looks in cache rather than fresh fetches, then it will probably not detect # if this looks in cache rather than fresh fetches, then it will probably not detect
# actual change b/c the descriptor and cache probably point to the same objects # actual change b/c the descriptor and cache probably point to the same objects
old_definition = self.db_connection.get_definition(definition_locator.definition_id) old_definition = self.get_definition(course_key, definition_locator.definition_id)
if old_definition is None: if old_definition is None:
raise ItemNotFoundError(definition_locator) raise ItemNotFoundError(definition_locator)
new_def_data = self._serialize_fields(old_definition['block_type'], new_def_data) new_def_data = self._serialize_fields(old_definition['block_type'], new_def_data)
if needs_saved(): if needs_saved():
# new id to create new version # Do a deep copy so that we don't corrupt the cached version of the definition
old_definition['_id'] = ObjectId() new_definition = copy.deepcopy(old_definition)
old_definition['fields'] = new_def_data new_definition['_id'] = ObjectId()
old_definition['edit_info']['edited_by'] = user_id new_definition['fields'] = new_def_data
old_definition['edit_info']['edited_on'] = datetime.datetime.now(UTC) new_definition['edit_info']['edited_by'] = user_id
new_definition['edit_info']['edited_on'] = datetime.datetime.now(UTC)
# previous version id # previous version id
old_definition['edit_info']['previous_version'] = definition_locator.definition_id new_definition['edit_info']['previous_version'] = definition_locator.definition_id
old_definition['schema_version'] = self.SCHEMA_VERSION new_definition['schema_version'] = self.SCHEMA_VERSION
self.db_connection.insert_definition(old_definition) self.update_definition(course_key, new_definition)
return DefinitionLocator(old_definition['block_type'], old_definition['_id']), True return DefinitionLocator(new_definition['block_type'], new_definition['_id']), True
else: else:
return definition_locator, False return definition_locator, False
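The substantive fix in this hunk is the deep copy: once bulk operations cache documents, the dict returned by get_definition may be the cached object itself, so versioning it in place would silently rewrite the cached "old" version. A sketch of the copy-then-version step under that assumption, with names mirroring the diff:

import copy
import datetime

from bson.objectid import ObjectId
from pytz import UTC

def version_definition(old_definition, new_fields, user_id):
    # deep copy so the (possibly cached) old document is never mutated
    new_definition = copy.deepcopy(old_definition)
    new_definition['_id'] = ObjectId()  # a fresh id is what makes this a new version
    new_definition['fields'] = new_fields
    new_definition['edit_info']['edited_by'] = user_id
    new_definition['edit_info']['edited_on'] = datetime.datetime.now(UTC)
    new_definition['edit_info']['previous_version'] = old_definition['_id']
    return new_definition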
...@@ -1164,15 +1272,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1164,15 +1272,15 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
# find course_index entry if applicable and structures entry # find course_index entry if applicable and structures entry
index_entry = self._get_index_if_valid(course_key, force) index_entry = self._get_index_if_valid(course_key, force)
structure = self._lookup_course(course_key)['structure'] structure = self._lookup_course(course_key).structure
partitioned_fields = self.partition_fields_by_scope(block_type, fields) partitioned_fields = self.partition_fields_by_scope(block_type, fields)
new_def_data = partitioned_fields.get(Scope.content, {}) new_def_data = partitioned_fields.get(Scope.content, {})
# persist the definition if persisted != passed # persist the definition if persisted != passed
if (definition_locator is None or isinstance(definition_locator.definition_id, LocalId)): if (definition_locator is None or isinstance(definition_locator.definition_id, LocalId)):
definition_locator = self.create_definition_from_data(new_def_data, block_type, user_id) definition_locator = self.create_definition_from_data(course_key, new_def_data, block_type, user_id)
elif new_def_data is not None: elif new_def_data is not None:
definition_locator, _ = self.update_definition_from_data(definition_locator, new_def_data, user_id) definition_locator, _ = self.update_definition_from_data(course_key, definition_locator, new_def_data, user_id)
# copy the structure and modify the new one # copy the structure and modify the new one
new_structure = self.version_structure(course_key, structure, user_id) new_structure = self.version_structure(course_key, structure, user_id)
...@@ -1243,7 +1351,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1243,7 +1351,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
**kwargs) **kwargs)
# don't version the structure as create_item handled that already. # don't version the structure as create_item handled that already.
new_structure = self._lookup_course(xblock.location.course_key)['structure'] new_structure = self._lookup_course(xblock.location.course_key).structure
# add new block as child and update parent's version # add new block as child and update parent's version
block_id = BlockKey.from_usage_key(parent_usage_key) block_id = BlockKey.from_usage_key(parent_usage_key)
...@@ -1365,7 +1473,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1365,7 +1473,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
}, },
'schema_version': self.SCHEMA_VERSION, 'schema_version': self.SCHEMA_VERSION,
} }
self.db_connection.insert_definition(definition_entry) self.update_definition(locator, definition_entry)
draft_structure = self._new_structure( draft_structure = self._new_structure(
user_id, user_id,
...@@ -1386,21 +1494,21 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1386,21 +1494,21 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
elif block_fields or definition_fields: # pointing to existing course w/ some overrides elif block_fields or definition_fields: # pointing to existing course w/ some overrides
# just get the draft_version structure # just get the draft_version structure
draft_version = CourseLocator(version_guid=versions_dict[master_branch]) draft_version = CourseLocator(version_guid=versions_dict[master_branch])
draft_structure = self._lookup_course(draft_version)['structure'] draft_structure = self._lookup_course(draft_version).structure
draft_structure = self.version_structure(locator, draft_structure, user_id) draft_structure = self.version_structure(locator, draft_structure, user_id)
new_id = draft_structure['_id'] new_id = draft_structure['_id']
root_block = draft_structure['blocks'][draft_structure['root']] root_block = draft_structure['blocks'][draft_structure['root']]
if block_fields is not None: if block_fields is not None:
root_block['fields'].update(self._serialize_fields(root_category, block_fields)) root_block['fields'].update(self._serialize_fields(root_category, block_fields))
if definition_fields is not None: if definition_fields is not None:
definition = self.db_connection.get_definition(root_block['definition']) definition = copy.deepcopy(self.get_definition(locator, root_block['definition']))
definition['fields'].update(definition_fields) definition['fields'].update(definition_fields)
definition['edit_info']['previous_version'] = definition['_id'] definition['edit_info']['previous_version'] = definition['_id']
definition['edit_info']['edited_by'] = user_id definition['edit_info']['edited_by'] = user_id
definition['edit_info']['edited_on'] = datetime.datetime.now(UTC) definition['edit_info']['edited_on'] = datetime.datetime.now(UTC)
definition['_id'] = ObjectId() definition['_id'] = ObjectId()
definition['schema_version'] = self.SCHEMA_VERSION definition['schema_version'] = self.SCHEMA_VERSION
self.db_connection.insert_definition(definition) self.update_definition(locator, definition)
root_block['definition'] = definition['_id'] root_block['definition'] = definition['_id']
root_block['edit_info']['edited_on'] = datetime.datetime.now(UTC) root_block['edit_info']['edited_on'] = datetime.datetime.now(UTC)
root_block['edit_info']['edited_by'] = user_id root_block['edit_info']['edited_by'] = user_id
...@@ -1411,7 +1519,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1411,7 +1519,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
else: # Pointing to an existing course structure else: # Pointing to an existing course structure
new_id = versions_dict[master_branch] new_id = versions_dict[master_branch]
draft_version = CourseLocator(version_guid=new_id) draft_version = CourseLocator(version_guid=new_id)
draft_structure = self._lookup_course(draft_version)['structure'] draft_structure = self._lookup_course(draft_version).structure
locator = locator.replace(version_guid=new_id) locator = locator.replace(version_guid=new_id)
with self.bulk_operations(locator): with self.bulk_operations(locator):
...@@ -1472,7 +1580,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1472,7 +1580,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
user_id, course_key, block_key.type, fields=fields, force=force user_id, course_key, block_key.type, fields=fields, force=force
) )
original_structure = self._lookup_course(course_key)['structure'] original_structure = self._lookup_course(course_key).structure
index_entry = self._get_index_if_valid(course_key, force) index_entry = self._get_index_if_valid(course_key, force)
original_entry = self._get_block_from_structure(original_structure, block_key) original_entry = self._get_block_from_structure(original_structure, block_key)
...@@ -1493,7 +1601,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1493,7 +1601,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
definition_locator = DefinitionLocator(original_entry['block_type'], original_entry['definition']) definition_locator = DefinitionLocator(original_entry['block_type'], original_entry['definition'])
if definition_fields: if definition_fields:
definition_locator, is_updated = self.update_definition_from_data( definition_locator, is_updated = self.update_definition_from_data(
definition_locator, definition_fields, user_id course_key, definition_locator, definition_fields, user_id
) )
# check metadata # check metadata
...@@ -1582,7 +1690,6 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1582,7 +1690,6 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
course_key, course_key,
BlockKey(block_type, block_id) if block_id else None, BlockKey(block_type, block_id) if block_id else None,
json_data, json_data,
inherited_settings,
**kwargs **kwargs
) )
for field_name, value in (fields or {}).iteritems(): for field_name, value in (fields or {}).iteritems():
...@@ -1615,10 +1722,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1615,10 +1722,10 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
course_key = xblock.location.course_key course_key = xblock.location.course_key
with self.bulk_operations(course_key): with self.bulk_operations(course_key):
index_entry = self._get_index_if_valid(course_key, force) index_entry = self._get_index_if_valid(course_key, force)
structure = self._lookup_course(course_key)['structure'] structure = self._lookup_course(course_key).structure
new_structure = self.version_structure(course_key, structure, user_id) new_structure = self.version_structure(course_key, structure, user_id)
new_id = new_structure['_id'] new_id = new_structure['_id']
is_updated = self._persist_subdag(xblock, user_id, new_structure['blocks'], new_id) is_updated = self._persist_subdag(course_key, xblock, user_id, new_structure['blocks'], new_id)
if is_updated: if is_updated:
self.update_structure(course_key, new_structure) self.update_structure(course_key, new_structure)
...@@ -1632,18 +1739,20 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1632,18 +1739,20 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
else: else:
return xblock return xblock
def _persist_subdag(self, xblock, user_id, structure_blocks, new_id): def _persist_subdag(self, course_key, xblock, user_id, structure_blocks, new_id):
# persist the definition if persisted != passed # persist the definition if persisted != passed
partitioned_fields = self.partition_xblock_fields_by_scope(xblock) partitioned_fields = self.partition_xblock_fields_by_scope(xblock)
new_def_data = self._serialize_fields(xblock.category, partitioned_fields[Scope.content]) new_def_data = self._serialize_fields(xblock.category, partitioned_fields[Scope.content])
is_updated = False is_updated = False
if xblock.definition_locator is None or isinstance(xblock.definition_locator.definition_id, LocalId): if xblock.definition_locator is None or isinstance(xblock.definition_locator.definition_id, LocalId):
xblock.definition_locator = self.create_definition_from_data( xblock.definition_locator = self.create_definition_from_data(
new_def_data, xblock.category, user_id) course_key, new_def_data, xblock.category, user_id
)
is_updated = True is_updated = True
elif new_def_data: elif new_def_data:
xblock.definition_locator, is_updated = self.update_definition_from_data( xblock.definition_locator, is_updated = self.update_definition_from_data(
xblock.definition_locator, new_def_data, user_id) course_key, xblock.definition_locator, new_def_data, user_id
)
if isinstance(xblock.scope_ids.usage_id.block_id, LocalId): if isinstance(xblock.scope_ids.usage_id.block_id, LocalId):
# generate an id # generate an id
...@@ -1665,7 +1774,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1665,7 +1774,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
for child in xblock.children: for child in xblock.children:
if isinstance(child.block_id, LocalId): if isinstance(child.block_id, LocalId):
child_block = xblock.system.get_block(child) child_block = xblock.system.get_block(child)
is_updated = self._persist_subdag(child_block, user_id, structure_blocks, new_id) or is_updated is_updated = self._persist_subdag(course_key, child_block, user_id, structure_blocks, new_id) or is_updated
children.append(BlockKey.from_usage_key(child_block.location)) children.append(BlockKey.from_usage_key(child_block.location))
else: else:
children.append(BlockKey.from_usage_key(child)) children.append(BlockKey.from_usage_key(child))
...@@ -1749,65 +1858,66 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1749,65 +1858,66 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
""" """
# get the destination's index, and source and destination structures. # get the destination's index, and source and destination structures.
with self.bulk_operations(source_course): with self.bulk_operations(source_course):
with self.bulk_operations(destination_course): source_structure = self._lookup_course(source_course).structure
source_structure = self._lookup_course(source_course)['structure']
index_entry = self.get_course_index(destination_course) with self.bulk_operations(destination_course):
if index_entry is None: index_entry = self.get_course_index(destination_course)
# brand new course if index_entry is None:
raise ItemNotFoundError(destination_course) # brand new course
if destination_course.branch not in index_entry['versions']: raise ItemNotFoundError(destination_course)
# must be copying the dag root if there's no current dag if destination_course.branch not in index_entry['versions']:
root_block_key = source_structure['root'] # must be copying the dag root if there's no current dag
if not any(root_block_key == BlockKey.from_usage_key(subtree) for subtree in subtree_list): root_block_key = source_structure['root']
raise ItemNotFoundError(u'Must publish course root {}'.format(root_block_key)) if not any(root_block_key == BlockKey.from_usage_key(subtree) for subtree in subtree_list):
root_source = source_structure['blocks'][root_block_key] raise ItemNotFoundError(u'Must publish course root {}'.format(root_block_key))
# create branch root_source = source_structure['blocks'][root_block_key]
destination_structure = self._new_structure( # create branch
user_id, root_block_key, destination_structure = self._new_structure(
# leave off the fields b/c the children must be filtered user_id, root_block_key,
definition_id=root_source['definition'], # leave off the fields b/c the children must be filtered
) definition_id=root_source['definition'],
else: )
destination_structure = self._lookup_course(destination_course)['structure'] else:
destination_structure = self.version_structure(destination_course, destination_structure, user_id) destination_structure = self._lookup_course(destination_course).structure
destination_structure = self.version_structure(destination_course, destination_structure, user_id)
if blacklist != EXCLUDE_ALL:
blacklist = [BlockKey.from_usage_key(shunned) for shunned in blacklist or []] if blacklist != EXCLUDE_ALL:
# iterate over subtree list filtering out blacklist. blacklist = [BlockKey.from_usage_key(shunned) for shunned in blacklist or []]
orphans = set() # iterate over subtree list filtering out blacklist.
destination_blocks = destination_structure['blocks'] orphans = set()
for subtree_root in subtree_list: destination_blocks = destination_structure['blocks']
if BlockKey.from_usage_key(subtree_root) != source_structure['root']: for subtree_root in subtree_list:
# find the parents and put root in the right sequence if BlockKey.from_usage_key(subtree_root) != source_structure['root']:
parent = self._get_parent_from_structure(BlockKey.from_usage_key(subtree_root), source_structure) # find the parents and put root in the right sequence
if parent is not None: # may be a detached category xblock parent = self._get_parent_from_structure(BlockKey.from_usage_key(subtree_root), source_structure)
if not parent in destination_blocks: if parent is not None: # may be a detached category xblock
raise ItemNotFoundError(parent) if not parent in destination_blocks:
orphans.update( raise ItemNotFoundError(parent)
self._sync_children( orphans.update(
source_structure['blocks'][parent], self._sync_children(
destination_blocks[parent], source_structure['blocks'][parent],
BlockKey.from_usage_key(subtree_root) destination_blocks[parent],
) BlockKey.from_usage_key(subtree_root)
) )
# update/create the subtree and its children in destination (skipping blacklist)
orphans.update(
self._copy_subdag(
user_id, destination_structure['_id'],
BlockKey.from_usage_key(subtree_root),
source_structure['blocks'],
destination_blocks,
blacklist
) )
# update/create the subtree and its children in destination (skipping blacklist)
orphans.update(
self._copy_subdag(
user_id, destination_structure['_id'],
BlockKey.from_usage_key(subtree_root),
source_structure['blocks'],
destination_blocks,
blacklist
) )
# remove any remaining orphans )
for orphan in orphans: # remove any remaining orphans
# orphans will include moved as well as deleted xblocks. Only delete the deleted ones. for orphan in orphans:
self._delete_if_true_orphan(orphan, destination_structure) # orphans will include moved as well as deleted xblocks. Only delete the deleted ones.
self._delete_if_true_orphan(orphan, destination_structure)
# update the db # update the db
self.update_structure(destination_course, destination_structure) self.update_structure(destination_course, destination_structure)
self._update_head(destination_course, index_entry, destination_course.branch, destination_structure['_id']) self._update_head(destination_course, index_entry, destination_course.branch, destination_structure['_id'])
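The restructuring above is mostly a re-nesting of the two bulk-operation contexts: the source structure is now read as soon as the source course's bulk operation opens, and everything that touches the destination happens inside the inner context. Schematically, leaving out the subtree copying itself:

def publish_sketch(store, user_id, source_course, destination_course):
    with store.bulk_operations(source_course):
        # read the source structure before entering the destination context,
        # so the source bulk operation alone governs this read
        source_structure = store._lookup_course(source_course).structure
        with store.bulk_operations(destination_course):
            index_entry = store.get_course_index(destination_course)
            # ... build or version destination_structure from source_structure,
            # copy subtrees, prune orphans (elided) ...
            destination_structure = store._lookup_course(destination_course).structure
            store.update_structure(destination_course, destination_structure)
            store._update_head(
                destination_course, index_entry,
                destination_course.branch, destination_structure['_id']
            )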
def delete_item(self, usage_locator, user_id, force=False): def delete_item(self, usage_locator, user_id, force=False):
""" """
...@@ -1824,12 +1934,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1824,12 +1934,12 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
change to this item, it raises a VersionConflictError unless force is True. In the force case, it forks change to this item, it raises a VersionConflictError unless force is True. In the force case, it forks
the course but leaves the head pointer where it is (this change will not be in the course head). the course but leaves the head pointer where it is (this change will not be in the course head).
""" """
if not isinstance(usage_locator, BlockUsageLocator): if not isinstance(usage_locator, BlockUsageLocator) or usage_locator.deprecated:
# The supplied UsageKey is of the wrong type, so it can't possibly be stored in this modulestore. # The supplied UsageKey is of the wrong type, so it can't possibly be stored in this modulestore.
raise ItemNotFoundError(usage_locator) raise ItemNotFoundError(usage_locator)
with self.bulk_operations(usage_locator.course_key): with self.bulk_operations(usage_locator.course_key):
original_structure = self._lookup_course(usage_locator.course_key)['structure'] original_structure = self._lookup_course(usage_locator.course_key).structure
block_key = BlockKey.from_usage_key(usage_locator) block_key = BlockKey.from_usage_key(usage_locator)
if original_structure['root'] == block_key: if original_structure['root'] == block_key:
raise ValueError("Cannot delete the root of a course") raise ValueError("Cannot delete the root of a course")
...@@ -1973,7 +2083,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1973,7 +2083,7 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
:param course_locator: the course to clean :param course_locator: the course to clean
""" """
original_structure = self._lookup_course(course_locator)['structure'] original_structure = self._lookup_course(course_locator).structure
for block in original_structure['blocks'].itervalues(): for block in original_structure['blocks'].itervalues():
if 'fields' in block and 'children' in block['fields']: if 'fields' in block and 'children' in block['fields']:
block['fields']["children"] = [ block['fields']["children"] = [
......
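Throughout this file, `self._lookup_course(key)['structure']` becomes `self._lookup_course(key).structure`, which implies that `_lookup_course` now returns a small envelope object rather than a bare structure dict. Something like the following namedtuple would explain every call site in the diff; the actual class may carry more fields:

from collections import namedtuple

# hypothetical minimal envelope; field names inferred from the call sites
CourseEnvelope = namedtuple('CourseEnvelope', 'course_key structure')

def demo(course_key):
    envelope = CourseEnvelope(course_key, {'_id': None, 'root': None, 'blocks': {}})
    return envelope.structure['_id']  # replaces _lookup_course(key)['structure']['_id']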
...@@ -242,7 +242,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -242,7 +242,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
:return: True if the draft and published versions differ :return: True if the draft and published versions differ
""" """
def get_course(branch_name): def get_course(branch_name):
return self._lookup_course(xblock.location.course_key.for_branch(branch_name))['structure'] return self._lookup_course(xblock.location.course_key.for_branch(branch_name)).structure
def get_block(course_structure, block_key): def get_block(course_structure, block_key):
return self._get_block_from_structure(course_structure, block_key) return self._get_block_from_structure(course_structure, block_key)
...@@ -318,7 +318,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -318,7 +318,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
# get head version of Published branch # get head version of Published branch
published_course_structure = self._lookup_course( published_course_structure = self._lookup_course(
location.course_key.for_branch(ModuleStoreEnum.BranchName.published) location.course_key.for_branch(ModuleStoreEnum.BranchName.published)
)['structure'] ).structure
published_block = self._get_block_from_structure( published_block = self._get_block_from_structure(
published_course_structure, published_course_structure,
BlockKey.from_usage_key(location) BlockKey.from_usage_key(location)
...@@ -327,7 +327,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -327,7 +327,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
raise InvalidVersionError(location) raise InvalidVersionError(location)
# create a new versioned draft structure # create a new versioned draft structure
draft_course_structure = self._lookup_course(draft_course_key)['structure'] draft_course_structure = self._lookup_course(draft_course_key).structure
new_structure = self.version_structure(draft_course_key, draft_course_structure, user_id) new_structure = self.version_structure(draft_course_key, draft_course_structure, user_id)
# remove the block and its descendants from the new structure # remove the block and its descendants from the new structure
...@@ -394,7 +394,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS ...@@ -394,7 +394,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleS
pass pass
def _get_head(self, xblock, branch): def _get_head(self, xblock, branch):
course_structure = self._lookup_course(xblock.location.course_key.for_branch(branch))['structure'] course_structure = self._lookup_course(xblock.location.course_key.for_branch(branch)).structure
return self._get_block_from_structure(course_structure, BlockKey.from_usage_key(xblock.location)) return self._get_block_from_structure(course_structure, BlockKey.from_usage_key(xblock.location))
def _get_version(self, block): def _get_version(self, block):
......
import copy import copy
from contracts import contract, new_contract
from xblock.fields import Scope from xblock.fields import Scope
from collections import namedtuple from collections import namedtuple
from xblock.exceptions import InvalidScopeError from xblock.exceptions import InvalidScopeError
from .definition_lazy_loader import DefinitionLazyLoader from .definition_lazy_loader import DefinitionLazyLoader
from xmodule.modulestore.inheritance import InheritanceKeyValueStore from xmodule.modulestore.inheritance import InheritanceKeyValueStore
from opaque_keys.edx.locator import BlockUsageLocator
# id is a BlockUsageLocator, def_id is the definition's guid # id is a BlockUsageLocator, def_id is the definition's guid
SplitMongoKVSid = namedtuple('SplitMongoKVSid', 'id, def_id') SplitMongoKVSid = namedtuple('SplitMongoKVSid', 'id, def_id')
new_contract('BlockUsageLocator', BlockUsageLocator)
class SplitMongoKVS(InheritanceKeyValueStore): class SplitMongoKVS(InheritanceKeyValueStore):
...@@ -15,22 +18,25 @@ class SplitMongoKVS(InheritanceKeyValueStore): ...@@ -15,22 +18,25 @@ class SplitMongoKVS(InheritanceKeyValueStore):
known to the MongoModuleStore (data, children, and metadata) known to the MongoModuleStore (data, children, and metadata)
""" """
def __init__(self, definition, initial_values, inherited_settings, **kwargs): @contract(parent="BlockUsageLocator | None")
def __init__(self, definition, initial_values, parent, field_decorator=None):
""" """
:param definition: either a lazyloader or definition id for the definition :param definition: either a lazyloader or definition id for the definition
:param initial_values: a dictionary of the locally set values :param initial_values: a dictionary of the locally set values
:param inherited_settings: the json value of each inheritable field from above this.
Note, local fields may override and disagree w/ this b/c this says what the value
should be if the field is undefined.
""" """
# deepcopy so that manipulations of fields do not pollute the source # deepcopy so that manipulations of fields do not pollute the source
super(SplitMongoKVS, self).__init__(copy.deepcopy(initial_values), inherited_settings) super(SplitMongoKVS, self).__init__(copy.deepcopy(initial_values))
self._definition = definition # either a DefinitionLazyLoader or the db id of the definition. self._definition = definition # either a DefinitionLazyLoader or the db id of the definition.
# if the db id, then the definition is presumed to be loaded into _fields # if the db id, then the definition is presumed to be loaded into _fields
# a decorator function for field values (to be called when a field is accessed) # a decorator function for field values (to be called when a field is accessed)
self.field_decorator = kwargs.get('field_decorator', lambda x: x) if field_decorator is None:
self.field_decorator = lambda x: x
else:
self.field_decorator = field_decorator
self.parent = parent
def get(self, key): def get(self, key):
...@@ -38,8 +44,7 @@ class SplitMongoKVS(InheritanceKeyValueStore): ...@@ -38,8 +44,7 @@ class SplitMongoKVS(InheritanceKeyValueStore):
if key.field_name not in self._fields: if key.field_name not in self._fields:
# parent undefined in editing runtime (I think) # parent undefined in editing runtime (I think)
if key.scope == Scope.parent: if key.scope == Scope.parent:
# see STUD-624. Right now copies MongoKeyValueStore.get's behavior of returning None return self.parent
return None
if key.scope == Scope.children: if key.scope == Scope.children:
# didn't find children in _fields; so, see if there's a default # didn't find children in _fields; so, see if there's a default
raise KeyError() raise KeyError()
......
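With the new signature, a caller constructs the KVS roughly as below. The locator values and definition id are illustrative only; `parent` is what `get` now returns for Scope.parent reads instead of the old hard-coded None:

from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from xmodule.modulestore.split_mongo.split_mongo_kvs import SplitMongoKVS  # import path assumed

course_key = CourseLocator('org', 'course', 'run')
parent = BlockUsageLocator(course_key, 'vertical', 'parent_block')  # illustrative

kvs = SplitMongoKVS(
    definition='0' * 24,                      # stand-in for a definition db id
    initial_values={'display_name': 'Unit 1'},
    parent=parent,                            # surfaced via get(Scope.parent)
    field_decorator=None,                     # defaults to the identity function
)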
...@@ -28,6 +28,8 @@ from xmodule.modulestore.xml_importer import import_from_xml ...@@ -28,6 +28,8 @@ from xmodule.modulestore.xml_importer import import_from_xml
from xmodule.modulestore.xml_exporter import export_to_xml from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.split_mongo.split_draft import DraftVersioningModuleStore from xmodule.modulestore.split_mongo.split_draft import DraftVersioningModuleStore
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.x_module import XModuleMixin
COMMON_DOCSTORE_CONFIG = { COMMON_DOCSTORE_CONFIG = {
...@@ -36,6 +38,9 @@ COMMON_DOCSTORE_CONFIG = { ...@@ -36,6 +38,9 @@ COMMON_DOCSTORE_CONFIG = {
} }
XBLOCK_MIXINS = (InheritanceMixin, XModuleMixin)
class MemoryCache(object): class MemoryCache(object):
""" """
This fits the metadata_inheritance_cache_subsystem interface used by This fits the metadata_inheritance_cache_subsystem interface used by
...@@ -95,6 +100,7 @@ class MongoModulestoreBuilder(object): ...@@ -95,6 +100,7 @@ class MongoModulestoreBuilder(object):
render_template=repr, render_template=repr,
branch_setting_func=lambda: ModuleStoreEnum.Branch.draft_preferred, branch_setting_func=lambda: ModuleStoreEnum.Branch.draft_preferred,
metadata_inheritance_cache_subsystem=MemoryCache(), metadata_inheritance_cache_subsystem=MemoryCache(),
xblock_mixins=XBLOCK_MIXINS,
) )
modulestore.ensure_indexes() modulestore.ensure_indexes()
...@@ -139,6 +145,7 @@ class VersioningModulestoreBuilder(object): ...@@ -139,6 +145,7 @@ class VersioningModulestoreBuilder(object):
doc_store_config, doc_store_config,
fs_root, fs_root,
render_template=repr, render_template=repr,
xblock_mixins=XBLOCK_MIXINS,
) )
modulestore.ensure_indexes() modulestore.ensure_indexes()
...@@ -189,7 +196,13 @@ class MixedModulestoreBuilder(object): ...@@ -189,7 +196,13 @@ class MixedModulestoreBuilder(object):
# Generate a fake list of stores to give the already generated stores appropriate names # Generate a fake list of stores to give the already generated stores appropriate names
stores = [{'NAME': name, 'ENGINE': 'This space deliberately left blank'} for name in names] stores = [{'NAME': name, 'ENGINE': 'This space deliberately left blank'} for name in names]
modulestore = MixedModuleStore(contentstore, self.mappings, stores, create_modulestore_instance=create_modulestore_instance) modulestore = MixedModuleStore(
contentstore,
self.mappings,
stores,
create_modulestore_instance=create_modulestore_instance,
xblock_mixins=XBLOCK_MIXINS,
)
yield modulestore yield modulestore
...@@ -269,8 +282,8 @@ class CrossStoreXMLRoundtrip(CourseComparisonTest): ...@@ -269,8 +282,8 @@ class CrossStoreXMLRoundtrip(CourseComparisonTest):
with dest_content_builder.build() as dest_content: with dest_content_builder.build() as dest_content:
# Construct the modulestore for storing the second import (using the second contentstore) # Construct the modulestore for storing the second import (using the second contentstore)
with dest_builder.build(dest_content) as dest_store: with dest_builder.build(dest_content) as dest_store:
source_course_key = source_store.make_course_key('source', 'course', 'key') source_course_key = source_store.make_course_key('a', 'course', 'course')
dest_course_key = dest_store.make_course_key('dest', 'course', 'key') dest_course_key = dest_store.make_course_key('a', 'course', 'course')
import_from_xml( import_from_xml(
source_store, source_store,
...@@ -287,20 +300,30 @@ class CrossStoreXMLRoundtrip(CourseComparisonTest): ...@@ -287,20 +300,30 @@ class CrossStoreXMLRoundtrip(CourseComparisonTest):
source_content, source_content,
source_course_key, source_course_key,
self.export_dir, self.export_dir,
'exported_course', 'exported_source_course',
) )
import_from_xml( import_from_xml(
dest_store, dest_store,
'test_user', 'test_user',
self.export_dir, self.export_dir,
course_dirs=['exported_source_course'],
static_content_store=dest_content, static_content_store=dest_content,
target_course_id=dest_course_key, target_course_id=dest_course_key,
create_new_course_if_not_present=True, create_new_course_if_not_present=True,
) )
export_to_xml(
dest_store,
dest_content,
dest_course_key,
self.export_dir,
'exported_dest_course',
)
self.exclude_field(None, 'wiki_slug') self.exclude_field(None, 'wiki_slug')
self.exclude_field(None, 'xml_attributes') self.exclude_field(None, 'xml_attributes')
self.exclude_field(None, 'parent')
self.ignore_asset_key('_id') self.ignore_asset_key('_id')
self.ignore_asset_key('uploadDate') self.ignore_asset_key('uploadDate')
self.ignore_asset_key('content_son') self.ignore_asset_key('content_son')
......
...@@ -18,6 +18,7 @@ from uuid import uuid4 ...@@ -18,6 +18,7 @@ from uuid import uuid4
# TODO remove this import and the configuration -- xmodule should not depend on django! # TODO remove this import and the configuration -- xmodule should not depend on django!
from django.conf import settings from django.conf import settings
from xmodule.modulestore.edit_info import EditInfoMixin from xmodule.modulestore.edit_info import EditInfoMixin
from xmodule.modulestore.inheritance import InheritanceMixin
if not settings.configured: if not settings.configured:
settings.configure() settings.configure()
...@@ -26,17 +27,17 @@ from opaque_keys.edx.locations import SlashSeparatedCourseKey ...@@ -26,17 +27,17 @@ from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from xmodule.exceptions import InvalidVersionError from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.draft_and_published import UnsupportedRevisionError from xmodule.modulestore.draft_and_published import UnsupportedRevisionError, ModuleStoreDraftAndPublished
from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateCourseError, ReferentialIntegrityError, NoPathToItem from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateCourseError, ReferentialIntegrityError, NoPathToItem
from xmodule.modulestore.mixed import MixedModuleStore from xmodule.modulestore.mixed import MixedModuleStore
from xmodule.modulestore.search import path_to_location from xmodule.modulestore.search import path_to_location
from xmodule.modulestore.tests.factories import check_mongo_calls from xmodule.modulestore.tests.factories import check_mongo_calls
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
from xmodule.tests import DATA_DIR from xmodule.tests import DATA_DIR, CourseComparisonTest
@ddt.ddt @ddt.ddt
class TestMixedModuleStore(unittest.TestCase): class TestMixedModuleStore(CourseComparisonTest):
""" """
Quasi-superclass which tests Location based apps against both split and mongo dbs (Locator and Quasi-superclass which tests Location based apps against both split and mongo dbs (Locator and
Location-based dbs) Location-based dbs)
...@@ -58,7 +59,7 @@ class TestMixedModuleStore(unittest.TestCase): ...@@ -58,7 +59,7 @@ class TestMixedModuleStore(unittest.TestCase):
'default_class': DEFAULT_CLASS, 'default_class': DEFAULT_CLASS,
'fs_root': DATA_DIR, 'fs_root': DATA_DIR,
'render_template': RENDER_TEMPLATE, 'render_template': RENDER_TEMPLATE,
'xblock_mixins': (EditInfoMixin,) 'xblock_mixins': (EditInfoMixin, InheritanceMixin),
} }
DOC_STORE_CONFIG = { DOC_STORE_CONFIG = {
'host': HOST, 'host': HOST,
...@@ -244,7 +245,8 @@ class TestMixedModuleStore(unittest.TestCase): ...@@ -244,7 +245,8 @@ class TestMixedModuleStore(unittest.TestCase):
for course_id, course_key in self.course_locations.iteritems() # pylint: disable=maybe-no-member for course_id, course_key in self.course_locations.iteritems() # pylint: disable=maybe-no-member
} }
self.fake_location = self.course_locations[self.MONGO_COURSEID].course_key.make_usage_key('vertical', 'fake') mongo_course_key = self.course_locations[self.MONGO_COURSEID].course_key
self.fake_location = self.store.make_course_key(mongo_course_key.org, mongo_course_key.course, mongo_course_key.run).make_usage_key('vertical', 'fake')
self.xml_chapter_location = self.course_locations[self.XML_COURSEID1].replace( self.xml_chapter_location = self.course_locations[self.XML_COURSEID1].replace(
category='chapter', name='Overview' category='chapter', name='Overview'
...@@ -1046,7 +1048,7 @@ class TestMixedModuleStore(unittest.TestCase): ...@@ -1046,7 +1048,7 @@ class TestMixedModuleStore(unittest.TestCase):
self.store.revert_to_published(self.vertical_x1a, self.user_id) self.store.revert_to_published(self.vertical_x1a, self.user_id)
reverted_parent = self.store.get_item(self.vertical_x1a) reverted_parent = self.store.get_item(self.vertical_x1a)
self.assertEqual(vertical_children_num, len(published_parent.children)) self.assertEqual(vertical_children_num, len(published_parent.children))
self.assertEqual(reverted_parent, published_parent) self.assertBlocksEqualByFields(reverted_parent, published_parent)
self.assertFalse(self._has_changes(self.vertical_x1a)) self.assertFalse(self._has_changes(self.vertical_x1a))
@ddt.data('draft', 'split') @ddt.data('draft', 'split')
...@@ -1081,7 +1083,8 @@ class TestMixedModuleStore(unittest.TestCase): ...@@ -1081,7 +1083,8 @@ class TestMixedModuleStore(unittest.TestCase):
orig_vertical = self.store.get_item(self.vertical_x1a) orig_vertical = self.store.get_item(self.vertical_x1a)
self.store.revert_to_published(self.vertical_x1a, self.user_id) self.store.revert_to_published(self.vertical_x1a, self.user_id)
reverted_vertical = self.store.get_item(self.vertical_x1a) reverted_vertical = self.store.get_item(self.vertical_x1a)
self.assertEqual(orig_vertical, reverted_vertical)
self.assertBlocksEqualByFields(orig_vertical, reverted_vertical)
@ddt.data('draft', 'split') @ddt.data('draft', 'split')
def test_revert_to_published_no_published(self, default_ms): def test_revert_to_published_no_published(self, default_ms):
...@@ -1787,9 +1790,11 @@ def create_modulestore_instance(engine, contentstore, doc_store_config, options, ...@@ -1787,9 +1790,11 @@ def create_modulestore_instance(engine, contentstore, doc_store_config, options,
""" """
class_ = load_function(engine) class_ = load_function(engine)
if issubclass(class_, ModuleStoreDraftAndPublished):
options['branch_setting_func'] = lambda: ModuleStoreEnum.Branch.draft_preferred
return class_( return class_(
doc_store_config=doc_store_config, doc_store_config=doc_store_config,
contentstore=contentstore, contentstore=contentstore,
branch_setting_func=lambda: ModuleStoreEnum.Branch.draft_preferred,
**options **options
) )
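The guard around `branch_setting_func` matters because this factory is shared across store types: only stores mixing in ModuleStoreDraftAndPublished accept that argument, and passing it unconditionally to a read-only store would raise TypeError. A toy reproduction with hypothetical classes:

class ReadOnlyStore(object):
    """Stands in for a store that is not ModuleStoreDraftAndPublished."""
    def __init__(self, doc_store_config=None, contentstore=None):
        pass

class DraftStore(object):
    """Stands in for a ModuleStoreDraftAndPublished subclass."""
    def __init__(self, doc_store_config=None, contentstore=None, branch_setting_func=None):
        self.branch_setting_func = branch_setting_func

def construct(class_, **options):
    # inject the branch setting only where the constructor understands it
    if issubclass(class_, DraftStore):
        options['branch_setting_func'] = lambda: 'draft-preferred'
    return class_(doc_store_config=None, contentstore=None, **options)

construct(ReadOnlyStore)  # fine: no branch kwarg injected
construct(DraftStore)     # fine: branch kwarg injected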
...@@ -10,6 +10,7 @@ from contracts import contract ...@@ -10,6 +10,7 @@ from contracts import contract
from importlib import import_module from importlib import import_module
from path import path from path import path
from xblock.fields import Reference, ReferenceList, ReferenceValueDict
from xmodule.course_module import CourseDescriptor from xmodule.course_module import CourseDescriptor
from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.exceptions import ( from xmodule.modulestore.exceptions import (
...@@ -1592,7 +1593,7 @@ class TestInheritance(SplitModuleTest): ...@@ -1592,7 +1593,7 @@ class TestInheritance(SplitModuleTest):
# unset on parent, retrieve child, verify unset # unset on parent, retrieve child, verify unset
chapter = modulestore().get_item(chapter.location.version_agnostic()) chapter = modulestore().get_item(chapter.location.version_agnostic())
chapter.fields['visible_to_staff_only'].delete_from(chapter) del chapter.visible_to_staff_only
modulestore().update_item(chapter, self.user_id) modulestore().update_item(chapter, self.user_id)
problem = modulestore().get_item(problem.location.version_agnostic()) problem = modulestore().get_item(problem.location.version_agnostic())
...@@ -1756,12 +1757,26 @@ class TestPublish(SplitModuleTest): ...@@ -1756,12 +1757,26 @@ class TestPublish(SplitModuleTest):
for field in source.fields.values(): for field in source.fields.values():
if field.name == 'children': if field.name == 'children':
self._compare_children(field.read_from(source), field.read_from(pub_copy), unexpected_blocks) self._compare_children(field.read_from(source), field.read_from(pub_copy), unexpected_blocks)
elif isinstance(field, (Reference, ReferenceList, ReferenceValueDict)):
self.assertReferenceEqual(field.read_from(source), field.read_from(pub_copy))
else: else:
self.assertEqual(field.read_from(source), field.read_from(pub_copy)) self.assertEqual(field.read_from(source), field.read_from(pub_copy))
for unexp in unexpected_blocks: for unexp in unexpected_blocks:
with self.assertRaises(ItemNotFoundError): with self.assertRaises(ItemNotFoundError):
modulestore().get_item(dest_course_loc.make_usage_key(unexp.type, unexp.id)) modulestore().get_item(dest_course_loc.make_usage_key(unexp.type, unexp.id))
def assertReferenceEqual(self, expected, actual):
if isinstance(expected, BlockUsageLocator):
expected = BlockKey.from_usage_key(expected)
actual = BlockKey.from_usage_key(actual)
elif isinstance(expected, list):
expected = [BlockKey.from_usage_key(key) for key in expected]
actual = [BlockKey.from_usage_key(key) for key in actual]
elif isinstance(expected, dict):
expected = {key: BlockKey.from_usage_key(val) for (key, val) in expected.iteritems()}
actual = {key: BlockKey.from_usage_key(val) for (key, val) in actual.iteritems()}
self.assertEqual(expected, actual)
@contract( @contract(
source_children="list(BlockUsageLocator)", source_children="list(BlockUsageLocator)",
dest_children="list(BlockUsageLocator)", dest_children="list(BlockUsageLocator)",
......
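assertReferenceEqual exists because a published copy lives under a different branch or course run, so reference fields can never be equal as full usage keys; only the (block type, block id) pair is expected to survive publishing. A small illustration, assuming BlockKey is importable from the split_mongo package as it is in the test module above:

from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator
from xmodule.modulestore.split_mongo import BlockKey  # import path assumed

draft = BlockUsageLocator(CourseLocator('org', 'course', 'run'), 'html', 'intro')
published = BlockUsageLocator(
    CourseLocator('org', 'course', 'run', branch='published'), 'html', 'intro'
)

assert draft != published  # full usage keys differ across branches
assert BlockKey.from_usage_key(draft) == BlockKey.from_usage_key(published)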
...@@ -21,6 +21,7 @@ class TestBulkWriteMixin(unittest.TestCase): ...@@ -21,6 +21,7 @@ class TestBulkWriteMixin(unittest.TestCase):
self.course_key = CourseLocator('org', 'course', 'run-a', branch='test') self.course_key = CourseLocator('org', 'course', 'run-a', branch='test')
self.course_key_b = CourseLocator('org', 'course', 'run-b', branch='test') self.course_key_b = CourseLocator('org', 'course', 'run-b', branch='test')
self.structure = {'this': 'is', 'a': 'structure', '_id': ObjectId()} self.structure = {'this': 'is', 'a': 'structure', '_id': ObjectId()}
self.definition = {'this': 'is', 'a': 'definition', '_id': ObjectId()}
self.index_entry = {'this': 'is', 'an': 'index'} self.index_entry = {'this': 'is', 'an': 'index'}
def assertConnCalls(self, *calls): def assertConnCalls(self, *calls):
...@@ -63,9 +64,23 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin): ...@@ -63,9 +64,23 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin):
# call through to the db_connection. It should also clear the # call through to the db_connection. It should also clear the
# system cache # system cache
self.bulk.update_structure(self.course_key, self.structure) self.bulk.update_structure(self.course_key, self.structure)
self.assertConnCalls(call.upsert_structure(self.structure)) self.assertConnCalls(call.insert_structure(self.structure))
self.clear_cache.assert_called_once_with(self.structure['_id']) self.clear_cache.assert_called_once_with(self.structure['_id'])
@ddt.data('deadbeef1234' * 2, u'deadbeef1234' * 2, ObjectId())
def test_no_bulk_read_definition(self, version_guid):
# Reading a definition when no bulk operation is active should just call
# through to the db_connection
result = self.bulk.get_definition(self.course_key, version_guid)
self.assertConnCalls(call.get_definition(self.course_key.as_object_id(version_guid)))
self.assertEqual(result, self.conn.get_definition.return_value)
def test_no_bulk_write_definition(self):
# Writing a definition when no bulk operation is active should just
# call through to the db_connection.
self.bulk.update_definition(self.course_key, self.definition)
self.assertConnCalls(call.insert_definition(self.definition))
@ddt.data(True, False) @ddt.data(True, False)
def test_no_bulk_read_index(self, ignore_case): def test_no_bulk_read_index(self, ignore_case):
# Reading a course index when no bulk operation is active should just call # Reading a course index when no bulk operation is active should just call
...@@ -113,7 +128,7 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin): ...@@ -113,7 +128,7 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin):
self.bulk.update_structure(self.course_key, self.structure) self.bulk.update_structure(self.course_key, self.structure)
self.assertConnCalls() self.assertConnCalls()
self.bulk._end_bulk_operation(self.course_key) self.bulk._end_bulk_operation(self.course_key)
self.assertConnCalls(call.upsert_structure(self.structure)) self.assertConnCalls(call.insert_structure(self.structure))
def test_write_multiple_structures_on_close(self): def test_write_multiple_structures_on_close(self):
self.conn.get_course_index.return_value = None self.conn.get_course_index.return_value = None
...@@ -125,7 +140,69 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin): ...@@ -125,7 +140,69 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin):
self.assertConnCalls() self.assertConnCalls()
self.bulk._end_bulk_operation(self.course_key) self.bulk._end_bulk_operation(self.course_key)
self.assertItemsEqual( self.assertItemsEqual(
[call.upsert_structure(self.structure), call.upsert_structure(other_structure)], [call.insert_structure(self.structure), call.insert_structure(other_structure)],
self.conn.mock_calls
)
def test_write_index_and_definition_on_close(self):
original_index = {'versions': {}}
self.conn.get_course_index.return_value = copy.deepcopy(original_index)
self.bulk._begin_bulk_operation(self.course_key)
self.conn.reset_mock()
self.bulk.update_definition(self.course_key, self.definition)
self.bulk.insert_course_index(self.course_key, {'versions': {self.course_key.branch: self.definition['_id']}})
self.assertConnCalls()
self.bulk._end_bulk_operation(self.course_key)
self.assertConnCalls(
call.insert_definition(self.definition),
call.update_course_index(
{'versions': {self.course_key.branch: self.definition['_id']}},
from_index=original_index
)
)
def test_write_index_and_multiple_definitions_on_close(self):
original_index = {'versions': {'a': ObjectId(), 'b': ObjectId()}}
self.conn.get_course_index.return_value = copy.deepcopy(original_index)
self.bulk._begin_bulk_operation(self.course_key)
self.conn.reset_mock()
self.bulk.update_definition(self.course_key.replace(branch='a'), self.definition)
other_definition = {'another': 'definition', '_id': ObjectId()}
self.bulk.update_definition(self.course_key.replace(branch='b'), other_definition)
self.bulk.insert_course_index(self.course_key, {'versions': {'a': self.definition['_id'], 'b': other_definition['_id']}})
self.bulk._end_bulk_operation(self.course_key)
self.assertItemsEqual(
[
call.insert_definition(self.definition),
call.insert_definition(other_definition),
call.update_course_index(
{'versions': {'a': self.definition['_id'], 'b': other_definition['_id']}},
from_index=original_index
)
],
self.conn.mock_calls
)
def test_write_definition_on_close(self):
self.conn.get_course_index.return_value = None
self.bulk._begin_bulk_operation(self.course_key)
self.conn.reset_mock()
self.bulk.update_definition(self.course_key, self.definition)
self.assertConnCalls()
self.bulk._end_bulk_operation(self.course_key)
self.assertConnCalls(call.insert_definition(self.definition))
def test_write_multiple_definitions_on_close(self):
self.conn.get_course_index.return_value = None
self.bulk._begin_bulk_operation(self.course_key)
self.conn.reset_mock()
self.bulk.update_definition(self.course_key.replace(branch='a'), self.definition)
other_definition = {'another': 'definition', '_id': ObjectId()}
self.bulk.update_definition(self.course_key.replace(branch='b'), other_definition)
self.assertConnCalls()
self.bulk._end_bulk_operation(self.course_key)
self.assertItemsEqual(
[call.insert_definition(self.definition), call.insert_definition(other_definition)],
self.conn.mock_calls self.conn.mock_calls
) )
...@@ -139,7 +216,7 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin): ...@@ -139,7 +216,7 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin):
self.assertConnCalls() self.assertConnCalls()
self.bulk._end_bulk_operation(self.course_key) self.bulk._end_bulk_operation(self.course_key)
self.assertConnCalls( self.assertConnCalls(
call.upsert_structure(self.structure), call.insert_structure(self.structure),
call.update_course_index( call.update_course_index(
{'versions': {self.course_key.branch: self.structure['_id']}}, {'versions': {self.course_key.branch: self.structure['_id']}},
from_index=original_index from_index=original_index
...@@ -158,8 +235,8 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin): ...@@ -158,8 +235,8 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin):
self.bulk._end_bulk_operation(self.course_key) self.bulk._end_bulk_operation(self.course_key)
self.assertItemsEqual( self.assertItemsEqual(
[ [
call.upsert_structure(self.structure), call.insert_structure(self.structure),
call.upsert_structure(other_structure), call.insert_structure(other_structure),
call.update_course_index( call.update_course_index(
{'versions': {'a': self.structure['_id'], 'b': other_structure['_id']}}, {'versions': {'a': self.structure['_id'], 'b': other_structure['_id']}},
from_index=original_index from_index=original_index
...@@ -181,6 +258,7 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin): ...@@ -181,6 +258,7 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin):
get_result = self.bulk.get_structure(self.course_key, version_result['_id']) get_result = self.bulk.get_structure(self.course_key, version_result['_id'])
self.assertEquals(version_result, get_result) self.assertEquals(version_result, get_result)
class TestBulkWriteMixinClosedAfterPrevTransaction(TestBulkWriteMixinClosed, TestBulkWriteMixinPreviousTransaction): class TestBulkWriteMixinClosedAfterPrevTransaction(TestBulkWriteMixinClosed, TestBulkWriteMixinPreviousTransaction):
""" """
Test that operations with a closed transaction aren't affected by a previously executed transaction Test that operations with a closed transaction aren't affected by a previously executed transaction
...@@ -307,6 +385,36 @@ class TestBulkWriteMixinFindMethods(TestBulkWriteMixin): ...@@ -307,6 +385,36 @@ class TestBulkWriteMixinFindMethods(TestBulkWriteMixin):
else: else:
self.assertNotIn(db_structure(_id), results) self.assertNotIn(db_structure(_id), results)
@ddt.data(
([], [], []),
([1, 2, 3], [1, 2], [1, 2]),
([1, 2, 3], [1], [1, 2]),
([1, 2, 3], [], [1, 2]),
)
@ddt.unpack
def test_get_definitions(self, search_ids, active_ids, db_ids):
db_definition = lambda _id: {'db': 'definition', '_id': _id}
active_definition = lambda _id: {'active': 'definition', '_id': _id}
db_definitions = [db_definition(_id) for _id in db_ids if _id not in active_ids]
self.bulk._begin_bulk_operation(self.course_key)
for n, _id in enumerate(active_ids):
self.bulk.update_definition(self.course_key, active_definition(_id))
self.conn.get_definitions.return_value = db_definitions
results = self.bulk.get_definitions(self.course_key, search_ids)
self.conn.get_definitions.assert_called_once_with(list(set(search_ids) - set(active_ids)))
for _id in active_ids:
if _id in search_ids:
self.assertIn(active_definition(_id), results)
else:
self.assertNotIn(active_definition(_id), results)
for _id in db_ids:
if _id in search_ids and _id not in active_ids:
self.assertIn(db_definition(_id), results)
else:
self.assertNotIn(db_definition(_id), results)
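What the ddt cases pin down: definitions updated during the active bulk operation are served from memory and win over the database, and only the leftover ids are fetched. A simplified model of that merge, not the actual mixin code:

def get_definitions_model(active_by_id, db_get_definitions, ids):
    # serve anything already written in this bulk operation from memory
    found = [active_by_id[_id] for _id in ids if _id in active_by_id]
    # and fetch only the remainder from the database
    missing = list(set(ids) - set(active_by_id))
    found.extend(db_get_definitions(missing))
    return found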
def test_no_bulk_find_structures_derived_from(self): def test_no_bulk_find_structures_derived_from(self):
ids = [Mock(name='id')] ids = [Mock(name='id')]
self.conn.find_structures_derived_from.return_value = [MagicMock(name='result')] self.conn.find_structures_derived_from.return_value = [MagicMock(name='result')]
...@@ -456,6 +564,45 @@ class TestBulkWriteMixinOpen(TestBulkWriteMixin): ...@@ -456,6 +564,45 @@ class TestBulkWriteMixinOpen(TestBulkWriteMixin):
self.assertEquals(self.conn.get_structure.call_count, 1) self.assertEquals(self.conn.get_structure.call_count, 1)
self.assertEqual(result, self.structure) self.assertEqual(result, self.structure)
@ddt.data('deadbeef1234' * 2, u'deadbeef1234' * 2, ObjectId())
def test_read_definition_without_write_from_db(self, version_guid):
# Reading a definition before it's been written (while in bulk operation mode)
# returns the definition from the database
result = self.bulk.get_definition(self.course_key, version_guid)
self.assertEquals(self.conn.get_definition.call_count, 1)
self.assertEqual(result, self.conn.get_definition.return_value)
self.assertCacheNotCleared()
@ddt.data('deadbeef1234' * 2, u'deadbeef1234' * 2, ObjectId())
def test_read_definition_without_write_only_reads_once(self, version_guid):
# Reading the same definition multiple times shouldn't hit the database
# more than once
for _ in xrange(2):
result = self.bulk.get_definition(self.course_key, version_guid)
self.assertEquals(self.conn.get_definition.call_count, 1)
self.assertEqual(result, self.conn.get_definition.return_value)
self.assertCacheNotCleared()
@ddt.data('deadbeef1234' * 2, u'deadbeef1234' * 2, ObjectId())
def test_read_definition_after_write_no_db(self, version_guid):
# Reading a definition that's already been written shouldn't hit the db at all
self.definition['_id'] = version_guid
self.bulk.update_definition(self.course_key, self.definition)
result = self.bulk.get_definition(self.course_key, version_guid)
self.assertEquals(self.conn.get_definition.call_count, 0)
self.assertEqual(result, self.definition)
@ddt.data('deadbeef1234' * 2, u'deadbeef1234' * 2, ObjectId())
def test_read_definition_after_write_after_read(self, version_guid):
# Reading a definition that's been updated after being pulled from the db should
# still get the updated value
self.definition['_id'] = version_guid
self.bulk.get_definition(self.course_key, version_guid)
self.bulk.update_definition(self.course_key, self.definition)
result = self.bulk.get_definition(self.course_key, version_guid)
self.assertEquals(self.conn.get_definition.call_count, 1)
self.assertEqual(result, self.definition)
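Together these four tests describe a read-through cache: the first read hits the database, repeat reads and reads-after-writes are served from memory, and a write supersedes a previously fetched copy. A minimal sketch under the same hypothetical _active_records assumption as above:

def get_definition(self, course_key, version_guid):
    # A minimal sketch of the read-through caching the tests above expect.
    record = self._active_records[course_key]  # hypothetical cache handle
    if version_guid in record.definitions:
        # Written (or already read) during this bulk operation: no db hit.
        return record.definitions[version_guid]
    definition = self.conn.get_definition(version_guid)
    record.definitions[version_guid] = definition  # repeat reads stay in memory
    return definition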
@ddt.data(True, False) @ddt.data(True, False)
def test_read_index_without_write_from_db(self, ignore_case): def test_read_index_without_write_from_db(self, ignore_case):
# Reading the index without writing to it should pull from the database # Reading the index without writing to it should pull from the database
...@@ -521,7 +668,7 @@ class TestBulkWriteMixinOpen(TestBulkWriteMixin): ...@@ -521,7 +668,7 @@ class TestBulkWriteMixinOpen(TestBulkWriteMixin):
index_copy['versions']['draft'] = index['versions']['published'] index_copy['versions']['draft'] = index['versions']['published']
self.bulk.update_course_index(self.course_key, index_copy) self.bulk.update_course_index(self.course_key, index_copy)
self.bulk._end_bulk_operation(self.course_key) self.bulk._end_bulk_operation(self.course_key)
self.conn.upsert_structure.assert_called_once_with(published_structure) self.conn.insert_structure.assert_called_once_with(published_structure)
self.conn.update_course_index.assert_called_once_with(index_copy, from_index=self.conn.get_course_index.return_value) self.conn.update_course_index.assert_called_once_with(index_copy, from_index=self.conn.get_course_index.return_value)
self.conn.get_course_index.assert_called_once_with(self.course_key) self.conn.get_course_index.assert_called_once_with(self.course_key)
......
...@@ -370,7 +370,7 @@ class XMLModuleStore(ModuleStoreReadBase): ...@@ -370,7 +370,7 @@ class XMLModuleStore(ModuleStoreReadBase):
""" """
def __init__( def __init__(
self, data_dir, default_class=None, course_dirs=None, course_ids=None, self, data_dir, default_class=None, course_dirs=None, course_ids=None,
load_error_modules=True, i18n_service=None, pyfs_service=None, **kwargs load_error_modules=True, i18n_service=None, fs_service=None, **kwargs
): ):
""" """
Initialize an XMLModuleStore from data_dir Initialize an XMLModuleStore from data_dir
...@@ -409,7 +409,7 @@ class XMLModuleStore(ModuleStoreReadBase): ...@@ -409,7 +409,7 @@ class XMLModuleStore(ModuleStoreReadBase):
self.field_data = inheriting_field_data(kvs=DictKeyValueStore()) self.field_data = inheriting_field_data(kvs=DictKeyValueStore())
self.i18n_service = i18n_service self.i18n_service = i18n_service
self.pyfs_service = pyfs_service self.fs_service = fs_service
# If we are specifically asked for missing courses, that should # If we are specifically asked for missing courses, that should
# be an error. If we are asked for "all" courses, find the ones # be an error. If we are asked for "all" courses, find the ones
...@@ -555,8 +555,8 @@ class XMLModuleStore(ModuleStoreReadBase): ...@@ -555,8 +555,8 @@ class XMLModuleStore(ModuleStoreReadBase):
if self.i18n_service: if self.i18n_service:
services['i18n'] = self.i18n_service services['i18n'] = self.i18n_service
if self.pyfs_service: if self.fs_service:
services['fs'] = self.pyfs_service services['fs'] = self.fs_service
system = ImportSystem( system = ImportSystem(
xmlstore=self, xmlstore=self,
......
...@@ -37,106 +37,108 @@ def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir): ...@@ -37,106 +37,108 @@ def export_to_xml(modulestore, contentstore, course_key, root_dir, course_dir):
`course_dir`: The name of the directory inside `root_dir` to write the course content to `course_dir`: The name of the directory inside `root_dir` to write the course content to
""" """
    with modulestore.bulk_operations(course_key):

        course = modulestore.get_course(course_key, depth=None)  # None means infinite
        fsm = OSFS(root_dir)
        export_fs = course.runtime.export_fs = fsm.makeopendir(course_dir)

        root = lxml.etree.Element('unknown')

        # export only the published content
        with modulestore.branch_setting(ModuleStoreEnum.Branch.published_only, course_key):
            # change all of the references inside the course to use the xml expected key type w/o version & branch
            xml_centric_course_key = CourseLocator(course_key.org, course_key.course, course_key.run, deprecated=True)
            adapt_references(course, xml_centric_course_key, export_fs)

            course.add_xml_to_node(root)

        with export_fs.open('course.xml', 'w') as course_xml:
            lxml.etree.ElementTree(root).write(course_xml)

        # export the static assets
        policies_dir = export_fs.makeopendir('policies')
        if contentstore:
            contentstore.export_all_for_course(
                course_key,
                root_dir + '/' + course_dir + '/static/',
                root_dir + '/' + course_dir + '/policies/assets.json',
            )

            # If we are using the default course image, export it to the
            # legacy location to support backwards compatibility.
            if course.course_image == course.fields['course_image'].default:
                try:
                    course_image = contentstore.find(
                        StaticContent.compute_location(
                            course.id,
                            course.course_image
                        ),
                    )
                except NotFoundError:
                    pass
                else:
                    output_dir = root_dir + '/' + course_dir + '/static/images/'
                    if not os.path.isdir(output_dir):
                        os.makedirs(output_dir)

                    with OSFS(output_dir).open('course_image.jpg', 'wb') as course_image_file:
                        course_image_file.write(course_image.data)

        # export the static tabs
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'static_tab', 'tabs', '.html')

        # export the custom tags
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'custom_tag_template', 'custom_tags')

        # export the course updates
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'course_info', 'info', '.html')

        # export the 'about' data (e.g. overview, etc.)
        export_extra_content(export_fs, modulestore, course_key, xml_centric_course_key, 'about', 'about', '.html')

        # export the grading policy
        course_run_policy_dir = policies_dir.makeopendir(course.location.name)
        with course_run_policy_dir.open('grading_policy.json', 'w') as grading_policy:
            grading_policy.write(dumps(course.grading_policy, cls=EdxJSONEncoder, sort_keys=True, indent=4))

        # export all of the course metadata in policy.json
        with course_run_policy_dir.open('policy.json', 'w') as course_policy:
            policy = {'course/' + course.location.name: own_metadata(course)}
            course_policy.write(dumps(policy, cls=EdxJSONEncoder, sort_keys=True, indent=4))

        #### DRAFTS ####
        # xml backed courses don't support drafts!
        if course.runtime.modulestore.get_modulestore_type() != ModuleStoreEnum.Type.xml:
            # NOTE: this code assumes that verticals are the top most draftable container
            # should we change the application, then this assumption will no longer be valid
            # NOTE: we need to explicitly implement the logic for setting the vertical's parent
            # and index here since the XML modulestore cannot load draft modules
            with modulestore.branch_setting(ModuleStoreEnum.Branch.draft_preferred, course_key):
                draft_verticals = modulestore.get_items(
                    course_key,
                    qualifiers={'category': 'vertical'},
                    revision=ModuleStoreEnum.RevisionOption.draft_only
                )
                if len(draft_verticals) > 0:
                    draft_course_dir = export_fs.makeopendir(DRAFT_DIR)
                    for draft_vertical in draft_verticals:
                        parent_loc = modulestore.get_parent_location(
                            draft_vertical.location,
                            revision=ModuleStoreEnum.RevisionOption.draft_preferred
                        )
                        # Don't try to export orphaned items.
                        if parent_loc is not None:
                            logging.debug('parent_loc = {0}'.format(parent_loc))
                            if parent_loc.category in DIRECT_ONLY_CATEGORIES:
                                draft_vertical.xml_attributes['parent_sequential_url'] = parent_loc.to_deprecated_string()
                                sequential = modulestore.get_item(parent_loc)
                                index = sequential.children.index(draft_vertical.location)
                                draft_vertical.xml_attributes['index_in_children_list'] = str(index)
                            draft_vertical.runtime.export_fs = draft_course_dir
                            adapt_references(draft_vertical, xml_centric_course_key, draft_course_dir)
                            node = lxml.etree.Element('unknown')
                            draft_vertical.add_xml_to_node(node)
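The substance of this hunk is that the entire export now runs inside one bulk_operations context, so split-mongo can serve every read from a single cached structure instead of re-querying per item. Schematically, such a context manager might look like the sketch below; the _begin_bulk_operation/_end_bulk_operation hooks are the ones exercised by the tests earlier in this diff, and the real mixin also handles nesting:

from contextlib import contextmanager

@contextmanager
def bulk_operations(self, course_key):
    # A schematic sketch only: the real mixin also tracks nested calls and
    # keeps one record per course.
    self._begin_bulk_operation(course_key)    # start caching reads, buffering writes
    try:
        yield
    finally:
        self._end_bulk_operation(course_key)  # flush buffered writes in one pass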
def adapt_references(subtree, destination_course_key, export_fs): def adapt_references(subtree, destination_course_key, export_fs):
...@@ -178,16 +180,16 @@ def _export_field_content(xblock_item, item_dir): ...@@ -178,16 +180,16 @@ def _export_field_content(xblock_item, item_dir):
# filename format: {dirname}.{field_name}.json # filename format: {dirname}.{field_name}.json
with item_dir.open('{0}.{1}.{2}'.format(xblock_item.location.name, field_name, 'json'), with item_dir.open('{0}.{1}.{2}'.format(xblock_item.location.name, field_name, 'json'),
'w') as field_content_file: 'w') as field_content_file:
field_content_file.write(dumps(module_data.get(field_name, {}), cls=EdxJSONEncoder)) field_content_file.write(dumps(module_data.get(field_name, {}), cls=EdxJSONEncoder, sort_keys=True, indent=4))
def export_extra_content(export_fs, modulestore, course_key, category_type, dirname, file_suffix=''): def export_extra_content(export_fs, modulestore, source_course_key, dest_course_key, category_type, dirname, file_suffix=''):
items = modulestore.get_items(course_key, qualifiers={'category': category_type}) items = modulestore.get_items(source_course_key, qualifiers={'category': category_type})
if len(items) > 0: if len(items) > 0:
item_dir = export_fs.makeopendir(dirname) item_dir = export_fs.makeopendir(dirname)
for item in items: for item in items:
adapt_references(item, course_key, export_fs) adapt_references(item, dest_course_key, export_fs)
with item_dir.open(item.location.name + file_suffix, 'w') as item_file: with item_dir.open(item.location.name + file_suffix, 'w') as item_file:
item_file.write(item.data.encode('utf8')) item_file.write(item.data.encode('utf8'))
......
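Splitting the old single course_key parameter into source_course_key and dest_course_key lets callers query items from the course actually being exported while rewriting their references against the xml-centric key, as in this call, taken in spirit from the export code above:

# Hypothetical call, matching the new signature.
export_extra_content(
    export_fs,
    modulestore,
    course_key,              # source: where the items are looked up
    xml_centric_course_key,  # destination: how references are rewritten
    'static_tab',
    'tabs',
    '.html',
)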
...@@ -42,6 +42,7 @@ from xmodule.modulestore.django import ASSET_IGNORE_REGEX ...@@ -42,6 +42,7 @@ from xmodule.modulestore.django import ASSET_IGNORE_REGEX
from xmodule.modulestore.exceptions import DuplicateCourseError from xmodule.modulestore.exceptions import DuplicateCourseError
from xmodule.modulestore.mongo.base import MongoRevisionKey from xmodule.modulestore.mongo.base import MongoRevisionKey
from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.exceptions import ItemNotFoundError
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
...@@ -588,6 +589,7 @@ def _import_course_draft( ...@@ -588,6 +589,7 @@ def _import_course_draft(
# IMPORTANT: Be sure to update the sequential in the NEW namespace # IMPORTANT: Be sure to update the sequential in the NEW namespace
seq_location = seq_location.map_into_course(target_course_id) seq_location = seq_location.map_into_course(target_course_id)
sequential = store.get_item(seq_location, depth=0) sequential = store.get_item(seq_location, depth=0)
non_draft_location = module.location.map_into_course(target_course_id) non_draft_location = module.location.map_into_course(target_course_id)
......
...@@ -42,6 +42,9 @@ class Group(namedtuple("Group", "id name")): ...@@ -42,6 +42,9 @@ class Group(namedtuple("Group", "id name")):
Raises TypeError if the value doesn't have the right keys. Raises TypeError if the value doesn't have the right keys.
""" """
if isinstance(value, Group):
return value
for key in ('id', 'name', 'version'): for key in ('id', 'name', 'version'):
if key not in value: if key not in value:
raise TypeError("Group dict {0} missing value key '{1}'".format( raise TypeError("Group dict {0} missing value key '{1}'".format(
...@@ -96,6 +99,9 @@ class UserPartition(namedtuple("UserPartition", "id name description groups")): ...@@ -96,6 +99,9 @@ class UserPartition(namedtuple("UserPartition", "id name description groups")):
Raises TypeError if the value doesn't have the right keys. Raises TypeError if the value doesn't have the right keys.
""" """
if isinstance(value, UserPartition):
return value
for key in ('id', 'name', 'description', 'version', 'groups'): for key in ('id', 'name', 'description', 'version', 'groups'):
if key not in value: if key not in value:
raise TypeError("UserPartition dict {0} missing value key '{1}'" raise TypeError("UserPartition dict {0} missing value key '{1}'"
......
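These guards make deserialization idempotent: a value that is already a Group or UserPartition instance passes straight through instead of tripping the key checks. Assuming from_json is the classmethod these hunks belong to (only its body is shown here), the round-trip now behaves like:

group = Group.from_json({'id': 0, 'name': 'Alpha', 'version': 1})
# Re-deserializing an already-deserialized value is now a no-op rather than
# a TypeError about missing dict keys, which matters when cached field
# values are re-validated during import.
assert Group.from_json(group) is group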
...@@ -13,7 +13,9 @@ import pprint ...@@ -13,7 +13,9 @@ import pprint
import unittest import unittest
from contextlib import contextmanager from contextlib import contextmanager
from lazy import lazy
from mock import Mock from mock import Mock
from operator import attrgetter
from path import path from path import path
from xblock.field_data import DictFieldData from xblock.field_data import DictFieldData
...@@ -193,7 +195,7 @@ class BulkAssertionManager(object): ...@@ -193,7 +195,7 @@ class BulkAssertionManager(object):
self._equal_actual.append((description, actual)) self._equal_actual.append((description, actual))
def run_assertions(self): def run_assertions(self):
self._test_case.assertEqual(self._equal_expected, self._equal_actual) super(BulkAssertionTest, self._test_case).assertEqual(self._equal_expected, self._equal_actual)
class BulkAssertionTest(unittest.TestCase): class BulkAssertionTest(unittest.TestCase):
...@@ -224,8 +226,29 @@ class BulkAssertionTest(unittest.TestCase): ...@@ -224,8 +226,29 @@ class BulkAssertionTest(unittest.TestCase):
self._manager.assertEqual(expected, actual, message) self._manager.assertEqual(expected, actual, message)
else: else:
super(BulkAssertionTest, self).assertEqual(expected, actual, message) super(BulkAssertionTest, self).assertEqual(expected, actual, message)
assertEquals = assertEqual
class LazyFormat(object):
"""
A string-like object that delays formatting until it's put into a string context.
"""
__slots__ = ('template', 'args', 'kwargs', '_message')
def __init__(self, template, *args, **kwargs):
self.template = template
self.args = args
self.kwargs = kwargs
self._message = None
def __unicode__(self):
if self._message is None:
self._message = self.template.format(*self.args, **self.kwargs)
return self._message
def __repr__(self):
return unicode(self)
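The payoff: a LazyFormat message costs a couple of attribute assignments when the assertion passes, and only pays for the format call if the message is actually rendered. A small illustration (not part of the diff):

big_map = {i: 'block-{0}'.format(i) for i in xrange(10000)}
msg = LazyFormat(u'cannot find {} in {}', 42, big_map)  # cheap: nothing formatted yet
assert unicode(msg).startswith(u'cannot find 42')       # formats once, on first use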
class CourseComparisonTest(BulkAssertionTest): class CourseComparisonTest(BulkAssertionTest):
""" """
Mixin that has methods for comparing courses for equality. Mixin that has methods for comparing courses for equality.
...@@ -255,6 +278,65 @@ class CourseComparisonTest(BulkAssertionTest): ...@@ -255,6 +278,65 @@ class CourseComparisonTest(BulkAssertionTest):
""" """
self.ignored_asset_keys.add(key_name) self.ignored_asset_keys.add(key_name)
def assertReferenceRelativelyEqual(self, reference_field, expected_block, actual_block):
"""
Assert that the supplied reference field is identical on the expected_block and actual_block,
assuming that the references are only relative (that is, comparing only on block_type and block_id,
not course_key).
"""
def extract_key(usage_key):
if usage_key is None:
return None
else:
return (usage_key.block_type, usage_key.block_id)
expected = reference_field.read_from(expected_block)
actual = reference_field.read_from(actual_block)
if isinstance(reference_field, Reference):
expected = extract_key(expected)
actual = extract_key(actual)
elif isinstance(reference_field, ReferenceList):
expected = [extract_key(key) for key in expected]
actual = [extract_key(key) for key in actual]
elif isinstance(reference_field, ReferenceValueDict):
expected = {key: extract_key(val) for (key, val) in expected.iteritems()}
actual = {key: extract_key(val) for (key, val) in actual.iteritems()}
self.assertEqual(
expected,
actual,
LazyFormat(
"Field {} doesn't match between usages {} and {}: {!r} != {!r}",
reference_field.name,
expected_block.scope_ids.usage_id,
actual_block.scope_ids.usage_id,
expected,
actual
)
)
def assertBlocksEqualByFields(self, expected_block, actual_block):
self.assertEqual(expected_block.fields, actual_block.fields)
for field in expected_block.fields.values():
self.assertFieldEqual(field, expected_block, actual_block)
def assertFieldEqual(self, field, expected_block, actual_block):
if isinstance(field, (Reference, ReferenceList, ReferenceValueDict)):
self.assertReferenceRelativelyEqual(field, expected_block, actual_block)
else:
expected = field.read_from(expected_block)
actual = field.read_from(actual_block)
self.assertEqual(
expected,
actual,
LazyFormat(
"Field {} doesn't match between usages {} and {}: {!r} != {!r}",
field.name,
expected_block.scope_ids.usage_id,
actual_block.scope_ids.usage_id,
expected,
actual
)
)
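In other words, reference fields are compared course-relatively: two usage keys that differ only in their course part count as equal. Illustratively (make_usage_key is the standard opaque-keys helper, assumed here rather than shown in this diff):

expected_key = expected_course_key.make_usage_key('html', 'intro')
actual_key = actual_course_key.make_usage_key('html', 'intro')
# Different course keys, but relatively equal:
assert (expected_key.block_type, expected_key.block_id) == \
       (actual_key.block_type, actual_key.block_id)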
def assertCoursesEqual(self, expected_store, expected_course_key, actual_store, actual_course_key): def assertCoursesEqual(self, expected_store, expected_course_key, actual_store, actual_course_key):
""" """
Assert that the courses identified by ``expected_course_key`` in ``expected_store`` and Assert that the courses identified by ``expected_course_key`` in ``expected_store`` and
...@@ -312,11 +394,7 @@ class CourseComparisonTest(BulkAssertionTest): ...@@ -312,11 +394,7 @@ class CourseComparisonTest(BulkAssertionTest):
actual_item = actual_item_map.get(map_key(actual_item_location)) actual_item = actual_item_map.get(map_key(actual_item_location))
# Formatting the message slows down tests of large courses significantly, so only do it if it would be used
if actual_item is None:
    msg = u'cannot find {} in {}'.format(map_key(actual_item_location), actual_item_map)
else:
    msg = None
self.assertIsNotNone(actual_item, msg)
self.assertIsNotNone(actual_item, LazyFormat(u'cannot find {} in {}', map_key(actual_item_location), actual_item_map))
# compare fields # compare fields
self.assertEqual(expected_item.fields, actual_item.fields) self.assertEqual(expected_item.fields, actual_item.fields)
...@@ -332,20 +410,7 @@ class CourseComparisonTest(BulkAssertionTest): ...@@ -332,20 +410,7 @@ class CourseComparisonTest(BulkAssertionTest):
if field_name == 'children': if field_name == 'children':
continue continue
exp_value = map_references(field.read_from(expected_item), field, actual_course_key)
actual_value = field.read_from(actual_item)
# Formatting the message slows down tests of large courses significantly, so only do it if it would be used
if exp_value != actual_value:
    msg = "Field {!r} doesn't match between usages {} and {}: {!r} != {!r}".format(
        field_name,
        expected_item.scope_ids.usage_id,
        actual_item.scope_ids.usage_id,
        exp_value,
        actual_value,
    )
else:
    msg = None
self.assertEqual(exp_value, actual_value, msg)
self.assertFieldEqual(field, expected_item, actual_item)
# compare children # compare children
self.assertEqual(expected_item.has_children, actual_item.has_children) self.assertEqual(expected_item.has_children, actual_item.has_children)
......
import ddt
from xmodule.tests import BulkAssertionTest
@ddt.ddt
class TestBulkAssertionTestCase(BulkAssertionTest):
@ddt.data(
('assertTrue', True),
('assertFalse', False),
('assertIs', 1, 1),
('assertIsNot', 1, 2),
('assertIsNone', None),
('assertIsNotNone', 1),
('assertIn', 1, (1, 2, 3)),
('assertNotIn', 5, (1, 2, 3)),
('assertIsInstance', 1, int),
('assertNotIsInstance', '1', int),
('assertRaises', KeyError, {}.__getitem__, '1'),
)
@ddt.unpack
def test_passing_asserts_passthrough(self, assertion, *args):
getattr(self, assertion)(*args)
@ddt.data(
('assertTrue', False),
('assertFalse', True),
('assertIs', 1, 2),
('assertIsNot', 1, 1),
('assertIsNone', 1),
('assertIsNotNone', None),
('assertIn', 5, (1, 2, 3)),
('assertNotIn', 1, (1, 2, 3)),
('assertIsInstance', '1', int),
('assertNotIsInstance', 1, int),
('assertRaises', ValueError, lambda: None),
)
@ddt.unpack
def test_failing_asserts_passthrough(self, assertion, *args):
# Use super(BulkAssertionTest) to make sure we get unadulterated assertions
with super(BulkAssertionTest, self).assertRaises(AssertionError):
getattr(self, assertion)(*args)
def test_no_bulk_assert_equals(self):
# Use super(BulkAssertionTest) to make sure we get unadulterated assertions
with super(BulkAssertionTest, self).assertRaises(AssertionError):
self.assertEquals(1, 2)
@ddt.data(
'assertEqual', 'assertEquals'
)
def test_bulk_assert_equals(self, asserterFn):
asserter = getattr(self, asserterFn)
contextmanager = self.bulk_assertions()
contextmanager.__enter__()
super(BulkAssertionTest, self).assertIsNotNone(self._manager)
asserter(1, 2)
asserter(3, 4)
# Use super(BulkAssertionTest) to make sure we get unadulterated assertions
with super(BulkAssertionTest, self).assertRaises(AssertionError):
contextmanager.__exit__(None, None, None)
@ddt.data(
'assertEqual', 'assertEquals'
)
def test_bulk_assert_closed(self, asserterFn):
asserter = getattr(self, asserterFn)
with self.bulk_assertions():
asserter(1, 1)
asserter(2, 2)
# Use super(BulkAssertionTest) to make sure we get unadulterated assertions
with super(BulkAssertionTest, self).assertRaises(AssertionError):
asserter(1, 2)
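These tests pin down the contract of bulk_assertions: assertions other than assertEqual/assertEquals fail immediately, while equality checks are deferred and reported together when the context exits. A schematic sketch of such a context manager, using only the BulkAssertionManager API visible earlier in this diff:

from contextlib import contextmanager

@contextmanager
def bulk_assertions(self):
    # Schematic sketch: accumulate (expected, actual) pairs instead of
    # failing fast, then compare the collected lists in one shot on exit.
    self._manager = BulkAssertionManager(self)
    try:
        yield
    finally:
        manager, self._manager = self._manager, None
        manager.run_assertions()  # one AssertionError reporting every mismatch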
...@@ -2,6 +2,8 @@ ...@@ -2,6 +2,8 @@
Base class for pages specific to a course in Studio. Base class for pages specific to a course in Studio.
""" """
import os
from opaque_keys.edx.locator import CourseLocator
from bok_choy.page_object import PageObject from bok_choy.page_object import PageObject
from . import BASE_URL from . import BASE_URL
...@@ -34,5 +36,12 @@ class CoursePage(PageObject): ...@@ -34,5 +36,12 @@ class CoursePage(PageObject):
""" """
Construct a URL to the page within the course. Construct a URL to the page within the course.
""" """
course_key = "{course_org}/{course_num}/{course_run}".format(**self.course_info)
return "/".join([BASE_URL, self.url_path, course_key])
# TODO - is there a better way to make this agnostic to the underlying default module store?
default_store = os.environ.get('DEFAULT_STORE', 'draft')
course_key = CourseLocator(
    self.course_info['course_org'],
    self.course_info['course_num'],
    self.course_info['course_run'],
    deprecated=(default_store == 'draft')
)
return "/".join([BASE_URL, self.url_path, unicode(course_key)])
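For reference, the deprecated flag controls how the locator serializes, which is what keeps old-Mongo URLs stable. The exact non-deprecated rendering depends on the opaque-keys revision pinned below, so treat this as a hedged illustration:

from opaque_keys.edx.locator import CourseLocator

old_style = CourseLocator('edX', 'demo', '2014', deprecated=True)
assert unicode(old_style) == u'edX/demo/2014'  # slash-separated, as before

new_style = CourseLocator('edX', 'demo', '2014')
unicode(new_style)  # split-style form, e.g. u'course-v1:edX+demo+2014'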
...@@ -115,35 +115,36 @@ def toc_for_course(user, request, course, active_chapter, active_section, field_ ...@@ -115,35 +115,36 @@ def toc_for_course(user, request, course, active_chapter, active_section, field_
field_data_cache must include data from the course module and 2 levels of its descendents field_data_cache must include data from the course module and 2 levels of its descendents
''' '''
with modulestore().bulk_operations(course.id):
    course_module = get_module_for_descriptor(user, request, course, field_data_cache, course.id)
    if course_module is None:
        return None

    chapters = list()
    for chapter in course_module.get_display_items():
        if chapter.hide_from_toc:
            continue

        sections = list()
        for section in chapter.get_display_items():
            active = (chapter.url_name == active_chapter and
                      section.url_name == active_section)

            if not section.hide_from_toc:
                sections.append({'display_name': section.display_name_with_default,
                                 'url_name': section.url_name,
                                 'format': section.format if section.format is not None else '',
                                 'due': get_extended_due_date(section),
                                 'active': active,
                                 'graded': section.graded,
                                 })

        chapters.append({'display_name': chapter.display_name_with_default,
                         'url_name': chapter.url_name,
                         'sections': sections,
                         'active': chapter.url_name == active_chapter})
    return chapters
def get_module(user, request, usage_key, field_data_cache, def get_module(user, request, usage_key, field_data_cache,
......
...@@ -326,19 +326,29 @@ class TestTOC(ModuleStoreTestCase): ...@@ -326,19 +326,29 @@ class TestTOC(ModuleStoreTestCase):
self.request = factory.get(chapter_url) self.request = factory.get(chapter_url)
self.request.user = UserFactory() self.request.user = UserFactory()
self.modulestore = self.store._get_modulestore_for_courseid(self.course_key) self.modulestore = self.store._get_modulestore_for_courseid(self.course_key)
with self.modulestore.bulk_operations(self.course_key):
    with check_mongo_calls(num_finds, num_sends):
        self.toy_course = self.store.get_course(self.toy_loc, depth=2)
        self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            self.toy_loc, self.request.user, self.toy_course, depth=2
        )

# Mongo makes 3 queries to load the course to depth 2:
# - 1 for the course
# - 1 for its children
# - 1 for its grandchildren
# Split makes 6 queries to load the course to depth 2:
# - load the structure
# - load 5 definitions
# Split makes 2 queries to render the toc:
# - it loads the active version at the start of the bulk operation
# - it loads the course definition for inheritance, because it's outside
#   the bulk-operation marker that loaded the course descriptor
@ddt.data((ModuleStoreEnum.Type.mongo, 3, 0, 0), (ModuleStoreEnum.Type.split, 6, 0, 2))
@ddt.unpack
def test_toc_toy_from_chapter(self, default_ms, setup_finds, setup_sends, toc_finds):
    with self.store.default_store(default_ms):
        self.setup_modulestore(default_ms, setup_finds, setup_sends)
expected = ([{'active': True, 'sections': expected = ([{'active': True, 'sections':
[{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True, [{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True,
'format': u'Lecture Sequence', 'due': None, 'active': False}, 'format': u'Lecture Sequence', 'due': None, 'active': False},
...@@ -354,20 +364,29 @@ class TestTOC(ModuleStoreTestCase): ...@@ -354,20 +364,29 @@ class TestTOC(ModuleStoreTestCase):
'format': '', 'due': None, 'active': False}], 'format': '', 'due': None, 'active': False}],
'url_name': 'secret:magic', 'display_name': 'secret:magic'}]) 'url_name': 'secret:magic', 'display_name': 'secret:magic'}])
with check_mongo_calls(0, 0): with check_mongo_calls(toc_finds, 0):
actual = render.toc_for_course( actual = render.toc_for_course(
self.request.user, self.request, self.toy_course, self.chapter, None, self.field_data_cache self.request.user, self.request, self.toy_course, self.chapter, None, self.field_data_cache
) )
for toc_section in expected: for toc_section in expected:
self.assertIn(toc_section, actual) self.assertIn(toc_section, actual)
# Mongo makes 3 queries to load the course to depth 2:
# - 1 for the course
# - 1 for its children
# - 1 for its grandchildren
# Split makes 6 queries to load the course to depth 2:
# - load the structure
# - load 5 definitions
# Split makes 2 queries to render the toc:
# - it loads the active version at the start of the bulk operation
# - it loads the course definition for inheritance, because it's outside
#   the bulk-operation marker that loaded the course descriptor
@ddt.data((ModuleStoreEnum.Type.mongo, 3, 0, 0), (ModuleStoreEnum.Type.split, 6, 0, 2))
@ddt.unpack
def test_toc_toy_from_section(self, default_ms, setup_finds, setup_sends, toc_finds):
    with self.store.default_store(default_ms):
        self.setup_modulestore(default_ms, setup_finds, setup_sends)
section = 'Welcome' section = 'Welcome'
expected = ([{'active': True, 'sections': expected = ([{'active': True, 'sections':
[{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True, [{'url_name': 'Toy_Videos', 'display_name': u'Toy Videos', 'graded': True,
...@@ -384,7 +403,8 @@ class TestTOC(ModuleStoreTestCase): ...@@ -384,7 +403,8 @@ class TestTOC(ModuleStoreTestCase):
'format': '', 'due': None, 'active': False}], 'format': '', 'due': None, 'active': False}],
'url_name': 'secret:magic', 'display_name': 'secret:magic'}]) 'url_name': 'secret:magic', 'display_name': 'secret:magic'}])
with check_mongo_calls(toc_finds, 0):
    actual = render.toc_for_course(self.request.user, self.request, self.toy_course, self.chapter, section, self.field_data_cache)
for toc_section in expected: for toc_section in expected:
self.assertIn(toc_section, actual) self.assertIn(toc_section, actual)
......
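The query-count assertions above rely on check_mongo_calls, which instruments the Mongo driver and asserts exact find/send totals. The real helper patches pymongo internals not shown here; as a generic stand-in under that assumption, a counting context manager can be sketched as:

from contextlib import contextmanager

@contextmanager
def counted_calls(obj, method_name, expected):
    # Generic stand-in for check_mongo_calls: count one method's invocations
    # and assert the total on exit.
    original = getattr(obj, method_name)
    calls = []
    def wrapper(*args, **kwargs):
        calls.append(args)
        return original(*args, **kwargs)
    setattr(obj, method_name, wrapper)
    try:
        yield
    finally:
        setattr(obj, method_name, original)
        assert len(calls) == expected, 'expected {0} calls, saw {1}'.format(expected, len(calls))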
...@@ -131,7 +131,7 @@ rednose==0.3 ...@@ -131,7 +131,7 @@ rednose==0.3
selenium==2.42.1 selenium==2.42.1
splinter==0.5.4 splinter==0.5.4
testtools==0.9.34 testtools==0.9.34
PyContracts==1.6.4 PyContracts==1.6.5
# Used for Segment.io analytics # Used for Segment.io analytics
analytics-python==0.4.4 analytics-python==0.4.4
......
...@@ -30,7 +30,7 @@ ...@@ -30,7 +30,7 @@
-e git+https://github.com/edx-solutions/django-splash.git@7579d052afcf474ece1239153cffe1c89935bc4f#egg=django-splash -e git+https://github.com/edx-solutions/django-splash.git@7579d052afcf474ece1239153cffe1c89935bc4f#egg=django-splash
-e git+https://github.com/edx/acid-block.git@459aff7b63db8f2c5decd1755706c1a64fb4ebb1#egg=acid-xblock -e git+https://github.com/edx/acid-block.git@459aff7b63db8f2c5decd1755706c1a64fb4ebb1#egg=acid-xblock
-e git+https://github.com/edx/edx-ora2.git@release-2014-09-18T16.00#egg=edx-ora2 -e git+https://github.com/edx/edx-ora2.git@release-2014-09-18T16.00#egg=edx-ora2
-e git+https://github.com/edx/opaque-keys.git@d45d0bd8d64c69531be69178b9505b5d38806ce0#egg=opaque-keys -e git+https://github.com/edx/opaque-keys.git@295d93170b2f6e57e3a2b9ba0a52087a4e8712c5#egg=opaque-keys
-e git+https://github.com/edx/ease.git@97de68448e5495385ba043d3091f570a699d5b5f#egg=ease -e git+https://github.com/edx/ease.git@97de68448e5495385ba043d3091f570a699d5b5f#egg=ease
-e git+https://github.com/edx/i18n-tools.git@56f048af9b6868613c14aeae760548834c495011#egg=i18n-tools -e git+https://github.com/edx/i18n-tools.git@56f048af9b6868613c14aeae760548834c495011#egg=i18n-tools
-e git+https://github.com/edx/edx-oauth2-provider.git@0.2.2#egg=oauth2-provider -e git+https://github.com/edx/edx-oauth2-provider.git@0.2.2#egg=oauth2-provider
......