Commit d39da7fd by Calen Pennington

Merge pull request #4780 from cpennington/split-bulk-operations

Implement bulk operations on the split modulestore.
parents ae240ae7 0e7e266a
@@ -38,7 +38,7 @@ class Command(BaseCommand):
         print("Cloning course {0} to {1}".format(source_course_id, dest_course_id))
-        with mstore.bulk_write_operations(dest_course_id):
+        with mstore.bulk_operations(dest_course_id):
             if mstore.clone_course(source_course_id, dest_course_id, ModuleStoreEnum.UserID.mgmt_command):
                 print("copying User permissions...")
                 # purposely avoids auth.add_user b/c it doesn't have a caller to authorize
...
@@ -867,7 +867,7 @@ class ContentStoreToyCourseTest(ContentStoreTestCase):
         # so we don't need to make an extra query to compute it.
         # set the branch to 'publish' in order to prevent extra lookups of draft versions
         with mongo_store.branch_setting(ModuleStoreEnum.Branch.published_only):
-            with check_mongo_calls(mongo_store, 4, 0):
+            with check_mongo_calls(4, 0):
                 course = mongo_store.get_course(course_id, depth=2)
                 # make sure we pre-fetched a known sequential which should be at depth=2
@@ -879,7 +879,7 @@ class ContentStoreToyCourseTest(ContentStoreTestCase):
         # Now, test with the branch set to draft. No extra round trips b/c it doesn't go deep enough to get
         # beyond direct only categories
         with mongo_store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
-            with check_mongo_calls(mongo_store, 4, 0):
+            with check_mongo_calls(4, 0):
                 mongo_store.get_course(course_id, depth=2)

     def test_export_course_without_content_store(self):
...
@@ -201,10 +201,11 @@ class TestCourseListing(ModuleStoreTestCase):
         # Now count the db queries
         store = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.mongo)
-        with check_mongo_calls(store, USER_COURSES_COUNT):
+        with check_mongo_calls(USER_COURSES_COUNT):
             _accessible_courses_list_from_groups(self.request)
-        with check_mongo_calls(store, 1):
+        # TODO: LMS-11220: Document why this takes 6 calls
+        with check_mongo_calls(6):
             _accessible_courses_list(self.request)

     def test_get_course_list_with_same_course_id(self):
...
@@ -205,10 +205,9 @@ class TemplateTests(unittest.TestCase):
             data="<problem></problem>"
         )
-        # course root only updated 2x
+        # The draft course root has 2 revisions: the published revision, and then the subsequent
+        # changes to the draft revision
         version_history = self.split_store.get_block_generations(test_course.location)
-        # create course causes 2 versions for the time being; skip the first.
-        version_history = version_history.children[0]
         self.assertEqual(version_history.locator.version_guid, test_course.location.version_guid)
         self.assertEqual(len(version_history.children), 1)
         self.assertEqual(version_history.children[0].children, [])
...
@@ -33,7 +33,7 @@ class ContentStoreImportTest(ModuleStoreTestCase):
     """
     def setUp(self):
         password = super(ContentStoreImportTest, self).setUp()
         self.client = Client()
         self.client.login(username=self.user.username, password=password)
@@ -157,15 +157,15 @@ class ContentStoreImportTest(ModuleStoreTestCase):
         store = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.mongo)
         # we try to refresh the inheritance tree for each update_item in the import
-        with check_exact_number_of_calls(store, store.refresh_cached_metadata_inheritance_tree, 28):
+        with check_exact_number_of_calls(store, 'refresh_cached_metadata_inheritance_tree', 28):
             # _get_cached_metadata_inheritance_tree should be called only once
-            with check_exact_number_of_calls(store, store._get_cached_metadata_inheritance_tree, 1):
+            with check_exact_number_of_calls(store, '_get_cached_metadata_inheritance_tree', 1):
                 # with bulk-edit in progress, the inheritance tree should be recomputed only at the end of the import
                 # NOTE: On Jenkins, with memcache enabled, the number of calls here is only 1.
                 # Locally, without memcache, the number of calls is actually 2 (once more during the publish step)
-                with check_number_of_calls(store, store._compute_metadata_inheritance_tree, 2):
+                with check_number_of_calls(store, '_compute_metadata_inheritance_tree', 2):
                     self.load_test_import_course()

     def test_rewrite_reference_list(self):
...
@@ -72,7 +72,7 @@ def delete_course_and_groups(course_key, user_id):
     """
     module_store = modulestore()
-    with module_store.bulk_write_operations(course_key):
+    with module_store.bulk_operations(course_key):
         module_store.delete_course(course_key, user_id)
         print 'removing User permissions from course....'
...
@@ -423,7 +423,7 @@ def course_index(request, course_key):
     """
     # A depth of None implies the whole course. The course outline needs this in order to compute has_changes.
     # A unit may not have a draft version, but one of its components could, and hence the unit itself has changes.
-    with modulestore().bulk_write_operations(course_key):
+    with modulestore().bulk_operations(course_key):
         course_module = _get_course_module(course_key, request.user, depth=None)
         lms_link = get_lms_link_for_item(course_module.location)
         sections = course_module.get_children()
...
@@ -310,6 +310,13 @@ class ModuleStoreRead(object):
         """
         pass

+    @contextmanager
+    def bulk_operations(self, course_id):
+        """
+        A context manager for notifying the store of bulk operations. This affects only the current thread.
+        """
+        yield
+
 class ModuleStoreWrite(ModuleStoreRead):
     """
@@ -543,6 +550,33 @@ class ModuleStoreReadBase(ModuleStoreRead):
             raise ValueError(u"Cannot set default store to type {}".format(store_type))
         yield

+    @contextmanager
+    def bulk_operations(self, course_id):
+        """
+        A context manager for notifying the store of bulk operations. This affects only the current thread.
+
+        In the case of Mongo, it temporarily disables refreshing the metadata inheritance tree
+        until the bulk operation is completed.
+        """
+        # TODO: Make this multi-process-safe if future operations need it.
+        try:
+            self._begin_bulk_operation(course_id)
+            yield
+        finally:
+            self._end_bulk_operation(course_id)
+
+    def _begin_bulk_operation(self, course_id):
+        """
+        Begin a bulk write operation on course_id.
+        """
+        pass
+
+    def _end_bulk_operation(self, course_id):
+        """
+        End the active bulk write operation on course_id.
+        """
+        pass
+
 class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
     '''
@@ -643,29 +677,6 @@ class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
             parent.children.append(item.location)
         self.update_item(parent, user_id)

-    @contextmanager
-    def bulk_write_operations(self, course_id):
-        """
-        A context manager for notifying the store of bulk write events.
-        In the case of Mongo, it temporarily disables refreshing the metadata inheritance tree
-        until the bulk operation is completed.
-        """
-        # TODO
-        # Make this multi-process-safe if future operations need it.
-        # Right now, only Import Course, Clone Course, and Delete Course use this, so
-        # it's ok if the cached metadata in the memcache is invalid when another
-        # request comes in for the same course.
-        try:
-            if hasattr(self, '_begin_bulk_write_operation'):
-                self._begin_bulk_write_operation(course_id)
-            yield
-        finally:
-            # check for the begin method here,
-            # since it's an error if an end method is not defined when a begin method is
-            if hasattr(self, '_begin_bulk_write_operation'):
-                self._end_bulk_write_operation(course_id)
-
 def only_xmodules(identifier, entry_points):
     """Only use entry_points that are supplied by the xmodule package"""
...
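Note: the net effect of this refactor is that bulk_operations lives on the read base class and always pairs the begin/end hooks, instead of probing with hasattr on the write base. A minimal standalone sketch of the pattern (toy class, not edx-platform code; the course id string is illustrative):

    from contextlib import contextmanager

    class DemoStore(object):
        """Toy store mirroring the hook structure introduced above."""
        def __init__(self):
            self.active = set()

        @contextmanager
        def bulk_operations(self, course_id):
            # begin/end always pair up, even if the body raises
            try:
                self._begin_bulk_operation(course_id)
                yield
            finally:
                self._end_bulk_operation(course_id)

        def _begin_bulk_operation(self, course_id):
            self.active.add(course_id)

        def _end_bulk_operation(self, course_id):
            self.active.discard(course_id)
            # a real store would refresh its caches here

    store = DemoStore()
    with store.bulk_operations('org/course/run'):  # illustrative course id
        assert 'org/course/run' in store.active
    assert not store.active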
@@ -54,20 +54,16 @@ class DuplicateItemError(Exception):
             self, Exception.__str__(self, *args, **kwargs)
         )

 class VersionConflictError(Exception):
     """
     The caller asked for either draft or published head and gave a version which conflicted with it.
     """
     def __init__(self, requestedLocation, currentHeadVersionGuid):
-        super(VersionConflictError, self).__init__()
-        self.requestedLocation = requestedLocation
-        self.currentHeadVersionGuid = currentHeadVersionGuid
-
-    def __str__(self, *args, **kwargs):
-        """
-        Print requested and current head info
-        """
-        return u'Requested {} but {} is current head'.format(self.requestedLocation, self.currentHeadVersionGuid)
+        super(VersionConflictError, self).__init__(u'Requested {}, but current head is {}'.format(
+            requestedLocation,
+            currentHeadVersionGuid
+        ))

 class DuplicateCourseError(Exception):
...
@@ -645,11 +645,11 @@ class MixedModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
         yield

     @contextmanager
-    def bulk_write_operations(self, course_id):
+    def bulk_operations(self, course_id):
         """
-        A context manager for notifying the store of bulk write events.
+        A context manager for notifying the store of bulk operations.
         If course_id is None, the default store is used.
         """
         store = self._get_modulestore_for_courseid(course_id)
-        with store.bulk_write_operations(course_id):
+        with store.bulk_operations(course_id):
             yield
@@ -17,6 +17,7 @@ import sys
 import logging
 import copy
 import re
+import threading

 from uuid import uuid4
 from bson.son import SON
@@ -414,7 +415,7 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
         # performance optimization to prevent updating the meta-data inheritance tree during
         # bulk write operations
-        self.ignore_write_events_on_courses = set()
+        self.ignore_write_events_on_courses = threading.local()
         self._course_run_cache = {}

     def close_connections(self):
@@ -435,27 +436,36 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
         connection.drop_database(self.collection.database)
         connection.close()

-    def _begin_bulk_write_operation(self, course_id):
+    def _begin_bulk_operation(self, course_id):
         """
         Prevent updating the meta-data inheritance cache for the given course
         """
-        self.ignore_write_events_on_courses.add(course_id)
+        if not hasattr(self.ignore_write_events_on_courses, 'courses'):
+            self.ignore_write_events_on_courses.courses = set()
+
+        self.ignore_write_events_on_courses.courses.add(course_id)

-    def _end_bulk_write_operation(self, course_id):
+    def _end_bulk_operation(self, course_id):
         """
         Restart updating the meta-data inheritance cache for the given course.
         Refresh the meta-data inheritance cache now since it was temporarily disabled.
         """
-        if course_id in self.ignore_write_events_on_courses:
-            self.ignore_write_events_on_courses.remove(course_id)
+        if not hasattr(self.ignore_write_events_on_courses, 'courses'):
+            return
+
+        if course_id in self.ignore_write_events_on_courses.courses:
+            self.ignore_write_events_on_courses.courses.remove(course_id)
             self.refresh_cached_metadata_inheritance_tree(course_id)

     def _is_bulk_write_in_progress(self, course_id):
         """
         Returns whether a bulk write operation is in progress for the given course.
         """
+        if not hasattr(self.ignore_write_events_on_courses, 'courses'):
+            return False
+
         course_id = course_id.for_branch(None)
-        return course_id in self.ignore_write_events_on_courses
+        return course_id in self.ignore_write_events_on_courses.courses

     def fill_in_run(self, course_key):
         """
...
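Note: replacing the shared set with threading.local() is what makes the docstring's "affects only the current thread" claim true, and it is why each accessor now guards with hasattr: attributes set on a thread-local in one thread are invisible in every other thread, and a fresh thread starts with a bare local. A self-contained demonstration of that behavior (not edx-platform code):

    import threading

    local_state = threading.local()

    def note_course(course_id):
        # mirrors _begin_bulk_operation: create the per-thread set on first use
        if not hasattr(local_state, 'courses'):
            local_state.courses = set()
        local_state.courses.add(course_id)

    note_course('demo/course/run')  # illustrative course id

    seen_in_worker = []
    worker = threading.Thread(
        target=lambda: seen_in_worker.append(hasattr(local_state, 'courses'))
    )
    worker.start()
    worker.join()

    assert seen_in_worker == [False]                  # the worker thread sees no 'courses'
    assert 'demo/course/run' in local_state.courses   # the main thread still does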
@@ -68,39 +68,40 @@ def path_to_location(modulestore, usage_key):
                 newpath = (next_usage, path)
                 queue.append((parent, newpath))

-    if not modulestore.has_item(usage_key):
-        raise ItemNotFoundError(usage_key)
-
-    path = find_path_to_course()
-    if path is None:
-        raise NoPathToItem(usage_key)
-
-    n = len(path)
-    course_id = path[0].course_key
-    # pull out the location names
-    chapter = path[1].name if n > 1 else None
-    section = path[2].name if n > 2 else None
-    # Figure out the position
-    position = None
-
-    # This block of code will find the position of a module within a nested tree
-    # of modules. If a problem is on tab 2 of a sequence that's on tab 3 of a
-    # sequence, the resulting position is 3_2. However, no positional modules
-    # (e.g. sequential and videosequence) currently deal with this form of
-    # representing nested positions. This needs to happen before jumping to a
-    # module nested in more than one positional module will work.
-    if n > 3:
-        position_list = []
-        for path_index in range(2, n - 1):
-            category = path[path_index].block_type
-            if category == 'sequential' or category == 'videosequence':
-                section_desc = modulestore.get_item(path[path_index])
-                # this calls get_children rather than just children b/c old mongo includes private children
-                # in children but not in get_children
-                child_locs = [c.location for c in section_desc.get_children()]
-                # positions are 1-indexed, and should be strings to be consistent with
-                # url parsing.
-                position_list.append(str(child_locs.index(path[path_index + 1]) + 1))
-        position = "_".join(position_list)
+    with modulestore.bulk_operations(usage_key.course_key):
+        if not modulestore.has_item(usage_key):
+            raise ItemNotFoundError(usage_key)
+
+        path = find_path_to_course()
+        if path is None:
+            raise NoPathToItem(usage_key)
+
+        n = len(path)
+        course_id = path[0].course_key
+        # pull out the location names
+        chapter = path[1].name if n > 1 else None
+        section = path[2].name if n > 2 else None
+        # Figure out the position
+        position = None
+
+        # This block of code will find the position of a module within a nested tree
+        # of modules. If a problem is on tab 2 of a sequence that's on tab 3 of a
+        # sequence, the resulting position is 3_2. However, no positional modules
+        # (e.g. sequential and videosequence) currently deal with this form of
+        # representing nested positions. This needs to happen before jumping to a
+        # module nested in more than one positional module will work.
+        if n > 3:
+            position_list = []
+            for path_index in range(2, n - 1):
+                category = path[path_index].block_type
+                if category == 'sequential' or category == 'videosequence':
+                    section_desc = modulestore.get_item(path[path_index])
+                    # this calls get_children rather than just children b/c old mongo includes private children
+                    # in children but not in get_children
+                    child_locs = [c.location for c in section_desc.get_children()]
+                    # positions are 1-indexed, and should be strings to be consistent with
+                    # url parsing.
+                    position_list.append(str(child_locs.index(path[path_index + 1]) + 1))
            position = "_".join(position_list)

     return (course_id, chapter, section, position)
@@ -55,23 +55,27 @@ class SplitMigrator(object):
         new_run = source_course_key.run
         new_course_key = CourseLocator(new_org, new_course, new_run, branch=ModuleStoreEnum.BranchName.published)

-        new_fields = self._get_fields_translate_references(original_course, new_course_key, None)
-        if fields:
-            new_fields.update(fields)
-        new_course = self.split_modulestore.create_course(
-            new_org, new_course, new_run, user_id,
-            fields=new_fields,
-            master_branch=ModuleStoreEnum.BranchName.published,
-            skip_auto_publish=True,
-            **kwargs
-        )
-
-        with self.split_modulestore.bulk_write_operations(new_course.id):
+        with self.split_modulestore.bulk_operations(new_course_key):
+            new_fields = self._get_fields_translate_references(original_course, new_course_key, None)
+            if fields:
+                new_fields.update(fields)
+            new_course = self.split_modulestore.create_course(
+                new_org, new_course, new_run, user_id,
+                fields=new_fields,
+                master_branch=ModuleStoreEnum.BranchName.published,
+                skip_auto_publish=True,
+                **kwargs
+            )
+
             self._copy_published_modules_to_course(
                 new_course, original_course.location, source_course_key, user_id, **kwargs
             )

-        # create a new version for the drafts
-        with self.split_modulestore.bulk_write_operations(new_course.id):
+        # TODO: This should be merged back into the above transaction, but can't be until split.py
+        # is refactored to have more coherent access patterns
+        with self.split_modulestore.bulk_operations(new_course_key):
+            # create a new version for the drafts
             self._add_draft_modules_to_course(new_course.location, source_course_key, user_id, **kwargs)

         return new_course.id
@@ -80,7 +84,7 @@ class SplitMigrator(object):
         """
         Copy all of the modules from the 'direct' version of the course to the new split course.
         """
-        course_version_locator = new_course.id
+        course_version_locator = new_course.id.version_agnostic()

         # iterate over published course elements. Wildcarding rather than descending b/c some elements are orphaned (e.g.,
         # course about pages, conditionals)
@@ -101,7 +105,6 @@ class SplitMigrator(object):
                 fields=self._get_fields_translate_references(
                     module, course_version_locator, new_course.location.block_id
                 ),
-                continue_version=True,
                 skip_auto_publish=True,
                 **kwargs
             )
@@ -109,7 +112,7 @@ class SplitMigrator(object):
         index_info = self.split_modulestore.get_course_index_info(course_version_locator)
         versions = index_info['versions']
         versions[ModuleStoreEnum.BranchName.draft] = versions[ModuleStoreEnum.BranchName.published]
-        self.split_modulestore.update_course_index(index_info)
+        self.split_modulestore.update_course_index(course_version_locator, index_info)

         # clean up orphans in published version: in old mongo, parents pointed to the union of their published and draft
         # children which meant some pointers were to non-existent locations in 'direct'
...
@@ -2,7 +2,7 @@ import sys
 import logging
 from xblock.runtime import KvsFieldData
 from xblock.fields import ScopeIds
-from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator
+from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator, DefinitionLocator
 from xmodule.mako_module import MakoDescriptorSystem
 from xmodule.error_module import ErrorDescriptor
 from xmodule.errortracker import exc_info_to_str
@@ -10,6 +10,7 @@ from xmodule.modulestore.split_mongo import encode_key_for_mongo
 from ..exceptions import ItemNotFoundError
 from .split_mongo_kvs import SplitMongoKVS
 from fs.osfs import OSFS
+from .definition_lazy_loader import DefinitionLazyLoader

 log = logging.getLogger(__name__)
@@ -120,9 +121,24 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
             self.course_entry['org'] = course_entry_override['org']
             self.course_entry['course'] = course_entry_override['course']
             self.course_entry['run'] = course_entry_override['run']
-        definition = json_data.get('definition', {})
-        definition_id = self.modulestore.definition_locator(definition)
+        # most likely a lazy loader or the id directly
+        definition_id = json_data.get('definition')
+        block_type = json_data['category']
+
+        if definition_id is not None and not json_data.get('definition_loaded', False):
+            definition_loader = DefinitionLazyLoader(
+                self.modulestore, block_type, definition_id,
+                lambda fields: self.modulestore.convert_references_to_keys(
+                    course_key, self.load_block_type(block_type),
+                    fields, self.course_entry['structure']['blocks'],
+                )
+            )
+        else:
+            definition_loader = None
+
+        # If no definition id is provided, generate an in-memory id
+        if definition_id is None:
+            definition_id = LocalId()

         # If no usage id is provided, generate an in-memory id
         if block_id is None:
@@ -130,7 +146,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
         block_locator = BlockUsageLocator(
             course_key,
-            block_type=json_data.get('category'),
+            block_type=block_type,
             block_id=block_id,
         )
@@ -138,7 +154,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
             block_locator.course_key, class_, json_data.get('fields', {}), self.course_entry['structure']['blocks'],
         )
         kvs = SplitMongoKVS(
-            definition,
+            definition_loader,
             converted_fields,
             json_data.get('_inherited_settings'),
             **kwargs
@@ -148,7 +164,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
         try:
             module = self.construct_xblock_from_class(
                 class_,
-                ScopeIds(None, json_data.get('category'), definition_id, block_locator),
+                ScopeIds(None, block_type, definition_id, block_locator),
                 field_data,
             )
         except Exception:
@@ -174,7 +190,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
         module.previous_version = edit_info.get('previous_version')
         module.update_version = edit_info.get('update_version')
         module.source_version = edit_info.get('source_version', None)
-        module.definition_locator = definition_id
+        module.definition_locator = DefinitionLocator(block_type, definition_id)
         # decache any pending field settings
         module.save()
...
 from opaque_keys.edx.locator import DefinitionLocator
+from bson import SON

 class DefinitionLazyLoader(object):
@@ -24,3 +25,9 @@ class DefinitionLazyLoader(object):
         loader pointer with the result so as not to fetch more than once
         """
         return self.modulestore.db_connection.get_definition(self.definition_locator.definition_id)
+
+    def as_son(self):
+        return SON((
+            ('category', self.definition_locator.block_type),
+            ('definition', self.definition_locator.definition_id)
+        ))
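Note: SON is an ordered dict, so as_son yields a document whose keys always serialize as category then definition. A quick sketch of the shape (values illustrative):

    from bson import SON

    doc = SON((
        ('category', 'problem'),            # block type, illustrative
        ('definition', 'a-definition-id'),  # definition id, illustrative
    ))
    assert list(doc.keys()) == ['category', 'definition']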
@@ -57,24 +57,42 @@ class MongoConnection(object):
         """
         return self.structures.find_one({'_id': key})

-    def find_matching_structures(self, query):
+    def find_structures_by_id(self, ids):
         """
-        Find the structure matching the query. Right now the query must be a legal mongo query
-        :param query: a mongo-style query of {key: [value|{$in ..}|..], ..}
+        Return all structures that are specified in ``ids``.
+
+        Arguments:
+            ids (list): A list of structure ids
         """
-        return self.structures.find(query)
+        return self.structures.find({'_id': {'$in': ids}})
+
+    def find_structures_derived_from(self, ids):
+        """
+        Return all structures that were immediately derived from a structure listed in ``ids``.
+
+        Arguments:
+            ids (list): A list of structure ids
+        """
+        return self.structures.find({'previous_version': {'$in': ids}})

-    def insert_structure(self, structure):
+    def find_ancestor_structures(self, original_version, block_id):
         """
-        Create the structure in the db
+        Find all structures that originated from ``original_version`` that contain ``block_id``.
+
+        Arguments:
+            original_version (str or ObjectID): The id of a structure
+            block_id (str): The id of the block in question
         """
-        self.structures.insert(structure)
+        return self.structures.find({
+            'original_version': original_version,
+            'blocks.{}.edit_info.update_version'.format(block_id): {'$exists': True}
+        })

-    def update_structure(self, structure):
+    def upsert_structure(self, structure):
         """
-        Update the db record for structure
+        Update the db record for structure, creating that record if it doesn't already exist
         """
-        self.structures.update({'_id': structure['_id']}, structure)
+        self.structures.update({'_id': structure['_id']}, structure, upsert=True)

     def get_course_index(self, key, ignore_case=False):
         """
@@ -88,11 +106,23 @@ class MongoConnection(object):
             ])
         )

-    def find_matching_course_indexes(self, query):
+    def find_matching_course_indexes(self, branch=None, search_targets=None):
         """
-        Find the course_index matching the query. Right now the query must be a legal mongo query
-        :param query: a mongo-style query of {key: [value|{$in ..}|..], ..}
+        Find the course_index matching particular conditions.
+
+        Arguments:
+            branch: If specified, this branch must exist in the returned courses
+            search_targets: If specified, this must be a dictionary specifying field values
+                that must exist in the search_targets of the returned courses
         """
+        query = son.SON()
+        if branch is not None:
+            query['versions.{}'.format(branch)] = {'$exists': True}
+
+        if search_targets:
+            for key, value in search_targets.iteritems():
+                query['search_targets.{}'.format(key)] = value
+
         return self.course_index.find(query)

     def insert_course_index(self, course_index):
@@ -101,13 +131,21 @@ class MongoConnection(object):
         """
         self.course_index.insert(course_index)

-    def update_course_index(self, course_index):
+    def update_course_index(self, course_index, from_index=None):
         """
-        Update the db record for course_index
+        Update the db record for course_index.
+
+        Arguments:
+            from_index: If set, only update an index if it matches the one specified in `from_index`.
         """
         self.course_index.update(
-            son.SON([('org', course_index['org']), ('course', course_index['course']), ('run', course_index['run'])]),
-            course_index
+            from_index or son.SON([
+                ('org', course_index['org']),
+                ('course', course_index['course']),
+                ('run', course_index['run'])
+            ]),
+            course_index,
+            upsert=False,
         )

     def delete_course_index(self, course_index):
...
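Note: moving query construction inside find_matching_course_indexes means callers pass intent (branch, search_targets) rather than raw mongo queries. A sketch of the ordered query the new body builds (field values illustrative):

    from bson import son

    branch = 'published-branch'                   # illustrative
    search_targets = {'wiki_slug': 'demo.slug'}   # illustrative

    query = son.SON()
    if branch is not None:
        query['versions.{}'.format(branch)] = {'$exists': True}
    for key, value in search_targets.items():
        query['search_targets.{}'.format(key)] = value

    # query == SON([('versions.published-branch', {'$exists': True}),
    #               ('search_targets.wiki_slug', 'demo.slug')])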
@@ -287,7 +287,7 @@ class ModuleStoreTestCase(TestCase):
             course_loc: the CourseKey for the created course
         """
         with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, None):
-            # with self.store.bulk_write_operations(self.store.make_course_key(org, course, run)):
+            # with self.store.bulk_operations(self.store.make_course_key(org, course, run)):
             course = self.store.create_course(org, course, run, self.user.id, fields=course_fields)
             self.course_loc = course.location
@@ -314,7 +314,7 @@ class ModuleStoreTestCase(TestCase):
         """
         Create an equivalent to the toy xml course
         """
-        # with self.store.bulk_write_operations(self.store.make_course_key(org, course, run)):
+        # with self.store.bulk_operations(self.store.make_course_key(org, course, run)):
         self.toy_loc = self.create_sample_course(
             org, course, run, TOY_BLOCK_INFO_TREE,
             {
...
+import pprint
+import pymongo.message

 from factory import Factory, lazy_attribute_sequence, lazy_attribute
 from factory.containers import CyclicDefinitionError
 from uuid import uuid4
@@ -214,88 +217,83 @@ class ItemFactory(XModuleFactory):

 @contextmanager
-def check_exact_number_of_calls(object_with_method, method, num_calls, method_name=None):
+def check_exact_number_of_calls(object_with_method, method_name, num_calls):
     """
     Instruments the given method on the given object to verify the number of calls to the
     method is exactly equal to 'num_calls'.
     """
-    with check_number_of_calls(object_with_method, method, num_calls, num_calls, method_name):
+    with check_number_of_calls(object_with_method, method_name, num_calls, num_calls):
         yield

-@contextmanager
-def check_number_of_calls(object_with_method, method, maximum_calls, minimum_calls=1, method_name=None):
+def check_number_of_calls(object_with_method, method_name, maximum_calls, minimum_calls=1):
     """
     Instruments the given method on the given object to verify the number of calls to the method is
     less than or equal to the expected maximum_calls and greater than or equal to the expected minimum_calls.
     """
-    method_wrap = Mock(wraps=method)
-    wrap_patch = patch.object(object_with_method, method_name or method.__name__, method_wrap)
-    try:
-        wrap_patch.start()
-        yield
-    finally:
-        wrap_patch.stop()
+    return check_sum_of_calls(object_with_method, [method_name], maximum_calls, minimum_calls)

+@contextmanager
+def check_sum_of_calls(object_, methods, maximum_calls, minimum_calls=1):
+    """
+    Instruments the given methods on the given object to verify that the total sum of calls made to the
+    methods falls between minimum_calls and maximum_calls.
+    """
+    mocks = {
+        method: Mock(wraps=getattr(object_, method))
+        for method in methods
+    }
+
+    with patch.multiple(object_, **mocks):
+        yield
+
+    call_count = sum(mock.call_count for mock in mocks.values())
+    calls = pprint.pformat({
+        method_name: mock.call_args_list
+        for method_name, mock in mocks.items()
+    })
+
+    # Assertion errors don't handle multi-line values, so pretty-print to std-out instead
+    if not minimum_calls <= call_count <= maximum_calls:
+        print "Expected between {} and {} calls, {} were made. Calls: {}".format(
+            minimum_calls,
+            maximum_calls,
+            call_count,
+            calls,
+        )

     # verify the counter actually worked by ensuring we have counted greater than (or equal to) the minimum calls
-    assert_greater_equal(method_wrap.call_count, minimum_calls)
+    assert_greater_equal(call_count, minimum_calls)

     # now verify the number of actual calls is less than (or equal to) the expected maximum
-    assert_less_equal(method_wrap.call_count, maximum_calls)
+    assert_less_equal(call_count, maximum_calls)

 @contextmanager
-def check_mongo_calls(mongo_store, num_finds=0, num_sends=None):
+def check_mongo_calls(num_finds=0, num_sends=None):
     """
     Instruments the given store to count the number of calls to find (incl find_one) and the number
     of calls to send_message which is for insert, update, and remove (if you provide num_sends). At the
     end of the with statement, it compares the counts to the num_finds and num_sends.

-    :param mongo_store: the MongoModulestore or subclass to watch or a SplitMongoModuleStore
     :param num_finds: the exact number of find calls expected
     :param num_sends: If none, don't instrument the send calls. If non-none, count and compare to
         the given int value.
     """
-    if mongo_store.get_modulestore_type() == ModuleStoreEnum.Type.mongo:
-        with check_exact_number_of_calls(mongo_store.collection, mongo_store.collection.find, num_finds):
-            if num_sends is not None:
-                with check_exact_number_of_calls(
-                    mongo_store.database.connection,
-                    mongo_store.database.connection._send_message,  # pylint: disable=protected-access
-                    num_sends,
-                ):
-                    yield
-            else:
-                yield
-    elif mongo_store.get_modulestore_type() == ModuleStoreEnum.Type.split:
-        collections = [
-            mongo_store.db_connection.course_index,
-            mongo_store.db_connection.structures,
-            mongo_store.db_connection.definitions,
-        ]
-        # could add else clause which raises exception or just rely on the below to suss that out
-        try:
-            find_wraps = []
-            wrap_patches = []
-            for collection in collections:
-                find_wrap = Mock(wraps=collection.find)
-                find_wraps.append(find_wrap)
-                wrap_patch = patch.object(collection, 'find', find_wrap)
-                wrap_patches.append(wrap_patch)
-                wrap_patch.start()
-            if num_sends is not None:
-                connection = mongo_store.db_connection.database.connection
-                with check_exact_number_of_calls(
-                    connection,
-                    connection._send_message,  # pylint: disable=protected-access
-                    num_sends,
-                ):
-                    yield
-            else:
-                yield
-        finally:
-            map(lambda wrap_patch: wrap_patch.stop(), wrap_patches)
-            call_count = sum([find_wrap.call_count for find_wrap in find_wraps])
-            assert_equal(call_count, num_finds)
+    with check_sum_of_calls(
+        pymongo.message,
+        ['query', 'get_more'],
+        num_finds,
+        num_finds
+    ):
+        if num_sends is not None:
+            with check_sum_of_calls(
+                pymongo.message,
+                ['insert', 'update', 'delete'],
+                num_sends,
+                num_sends
+            ):
+                yield
+        else:
+            yield
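Note: because the counters now patch the pymongo.message serializers (query/get_more for finds, insert/update/delete for sends) rather than a specific store's collection objects, one helper covers both the old-mongo and split stores, and call sites drop the store argument. A hedged usage sketch (store and course_key are assumed to come from the calling test's fixtures):

    # expect exactly 1 find and no instrumented sends while fetching the course
    with check_mongo_calls(1, 0):
        course = store.get_course(course_key)  # store/course_key from the test fixture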
@@ -19,23 +19,35 @@ class TestPublish(SplitWMongoCourseBoostrapper):
         # There are 12 created items and 7 parent updates
         # create course: finds: 1 to verify uniqueness, 1 to find parents
         # sends: 1 to create course, 1 to create overview
-        with check_mongo_calls(self.draft_mongo, 5, 2):
+        with check_mongo_calls(5, 2):
             super(TestPublish, self)._create_course(split=False)  # 2 inserts (course and overview)

         # with bulk will delay all inheritance computations which won't be added into the mongo_calls
-        with self.draft_mongo.bulk_write_operations(self.old_course_key):
+        with self.draft_mongo.bulk_operations(self.old_course_key):
             # finds: 1 for parent to add child
             # sends: 1 for insert, 1 for parent (add child)
-            with check_mongo_calls(self.draft_mongo, 1, 2):
+            with check_mongo_calls(1, 2):
                 self._create_item('chapter', 'Chapter1', {}, {'display_name': 'Chapter 1'}, 'course', 'runid', split=False)
-            with check_mongo_calls(self.draft_mongo, 2, 2):
+            with check_mongo_calls(2, 2):
                 self._create_item('chapter', 'Chapter2', {}, {'display_name': 'Chapter 2'}, 'course', 'runid', split=False)
-            # update info propagation is 2 levels. create looks for draft and then published and then creates
-            with check_mongo_calls(self.draft_mongo, 8, 6):
+            # For each vertical (2) created:
+            #   - load draft
+            #   - load non-draft
+            #   - get last error
+            #   - load parent
+            #   - load inheritable data
+            with check_mongo_calls(10, 6):
                 self._create_item('vertical', 'Vert1', {}, {'display_name': 'Vertical 1'}, 'chapter', 'Chapter1', split=False)
                 self._create_item('vertical', 'Vert2', {}, {'display_name': 'Vertical 2'}, 'chapter', 'Chapter1', split=False)
-            with check_mongo_calls(self.draft_mongo, 20, 12):
+            # For each (4) item created
+            #   - load draft
+            #   - load non-draft
+            #   - get last error
+            #   - load parent
+            #   - load inheritable data
+            #   - load parent
+            with check_mongo_calls(24, 12):
                 self._create_item('html', 'Html1', "<p>Goodbye</p>", {'display_name': 'Parented Html'}, 'vertical', 'Vert1', split=False)
                 self._create_item(
                     'discussion', 'Discussion1',
@@ -63,7 +75,7 @@ class TestPublish(SplitWMongoCourseBoostrapper):
                     split=False
                 )

-            with check_mongo_calls(self.draft_mongo, 0, 2):
+            with check_mongo_calls(0, 2):
                 # 2 finds b/c looking for non-existent parents
                 self._create_item('static_tab', 'staticuno', "<p>tab</p>", {'display_name': 'Tab uno'}, None, None, split=False)
                 self._create_item('course_info', 'updates', "<ol><li><h2>Sep 22</h2><p>test</p></li></ol>", {}, None, None, split=False)
@@ -76,10 +88,11 @@ class TestPublish(SplitWMongoCourseBoostrapper):
         vert_location = self.old_course_key.make_usage_key('vertical', block_id='Vert1')
         item = self.draft_mongo.get_item(vert_location, 2)

         # Vert1 has 3 children; so, publishes 4 nodes which may mean 4 inserts & 1 bulk remove
+        # TODO: LMS-11220: Document why find count is 25
         # 25-June-2014 find calls are 19. Probably due to inheritance recomputation?
         # 02-July-2014 send calls are 7. 5 from above, plus 2 for updating subtree edit info for Chapter1 and course
         #              find calls are 22. 19 from above, plus 3 for finding the parent of Vert1, Chapter1, and course
-        with check_mongo_calls(self.draft_mongo, 22, 7):
+        with check_mongo_calls(25, 7):
             self.draft_mongo.publish(item.location, self.user_id)

         # verify status
...
@@ -206,7 +206,7 @@ def import_from_xml(
             )
             continue

-        with store.bulk_write_operations(dest_course_id):
+        with store.bulk_operations(dest_course_id):
             source_course = xml_module_store.get_course(course_key)
             # STEP 1: find and import course module
             course, course_data_path = _import_course_module(
@@ -607,7 +607,7 @@ def _import_course_draft(
                     _import_module(descriptor)
                 except Exception:
-                    logging.exception('There while importing draft descriptor %s', descriptor)
+                    logging.exception('while importing draft descriptor %s', descriptor)

 def allowed_metadata_by_category(category):
...
@@ -287,9 +287,9 @@ class CourseComparisonTest(unittest.TestCase):
         self.assertEqual(expected_item.has_children, actual_item.has_children)
         if expected_item.has_children:
             expected_children = [
-                (course1_item_child.location.block_type, course1_item_child.location.block_id)
+                (expected_item_child.location.block_type, expected_item_child.location.block_id)
                 # get_children() rather than children to strip privates from public parents
-                for course1_item_child in expected_item.get_children()
+                for expected_item_child in expected_item.get_children()
             ]
             actual_children = [
                 (item_child.location.block_type, item_child.location.block_id)
...
@@ -21,6 +21,7 @@ from django.contrib.auth.models import User
 from xblock.runtime import KeyValueStore
 from xblock.exceptions import KeyValueMultiSaveError, InvalidScopeError
 from xblock.fields import Scope, UserScope
+from xmodule.modulestore.django import modulestore

 log = logging.getLogger(__name__)
@@ -109,7 +110,8 @@ class FieldDataCache(object):
             return descriptors

-        descriptors = get_child_descriptors(descriptor, depth, descriptor_filter)
+        with modulestore().bulk_operations(descriptor.location.course_key):
+            descriptors = get_child_descriptors(descriptor, depth, descriptor_filter)

         return FieldDataCache(descriptors, course_id, user, select_for_update)
...
@@ -326,13 +326,15 @@ class TestTOC(ModuleStoreTestCase):
             self.request = factory.get(chapter_url)
             self.request.user = UserFactory()
             self.modulestore = self.store._get_modulestore_for_courseid(self.course_key)
-            with check_mongo_calls(self.modulestore, num_finds, num_sends):
-                self.toy_course = self.store.get_course(self.toy_loc, depth=2)
-                self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
-                    self.toy_loc, self.request.user, self.toy_course, depth=2)
+            self.toy_course = self.store.get_course(self.toy_loc, depth=2)
+            with check_mongo_calls(num_finds, num_sends):
+                self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
+                    self.toy_loc, self.request.user, self.toy_course, depth=2
+                )

-    @ddt.data((ModuleStoreEnum.Type.mongo, 3, 0), (ModuleStoreEnum.Type.split, 7, 0))
+    # TODO: LMS-11220: Document why split find count is 21
+    @ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 5, 0))
     @ddt.unpack
     def test_toc_toy_from_chapter(self, default_ms, num_finds, num_sends):
         with self.store.default_store(default_ms):
@@ -352,14 +354,15 @@ class TestTOC(ModuleStoreTestCase):
                 'format': '', 'due': None, 'active': False}],
                 'url_name': 'secret:magic', 'display_name': 'secret:magic'}])

-            with check_mongo_calls(self.modulestore, 0, 0):
+            with check_mongo_calls(0, 0):
                 actual = render.toc_for_course(
                     self.request.user, self.request, self.toy_course, self.chapter, None, self.field_data_cache
                 )
             for toc_section in expected:
                 self.assertIn(toc_section, actual)

-    @ddt.data((ModuleStoreEnum.Type.mongo, 3, 0), (ModuleStoreEnum.Type.split, 7, 0))
+    # TODO: LMS-11220: Document why split find count is 21
+    @ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 5, 0))
     @ddt.unpack
     def test_toc_toy_from_section(self, default_ms, num_finds, num_sends):
         with self.store.default_store(default_ms):
...
@@ -91,7 +91,10 @@ def test_lib(options):
     }

     if test_id:
-        lib = '/'.join(test_id.split('/')[0:3])
+        if '/' in test_id:
+            lib = '/'.join(test_id.split('/')[0:3])
+        else:
+            lib = 'common/lib/' + test_id.split('.')[0]
         opts['test_id'] = test_id
         lib_tests = [suites.LibTestSuite(lib, **opts)]
     else:
...
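Note: the new branch accepts a dotted test id as well as a path. A sketch of the mapping both forms produce (paths illustrative):

    def resolve_lib(test_id):
        # mirrors the conditional above
        if '/' in test_id:
            return '/'.join(test_id.split('/')[0:3])
        return 'common/lib/' + test_id.split('.')[0]

    assert resolve_lib('common/lib/xmodule/xmodule/tests/test_foo.py') == 'common/lib/xmodule'
    assert resolve_lib('xmodule.tests.test_foo') == 'common/lib/xmodule'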
@@ -4,3 +4,4 @@ psutil==1.2.1
 lazy==1.1
 path.py==3.0.1
 watchdog==0.7.1
+python-memcached