Commit d39da7fd by Calen Pennington

Merge pull request #4780 from cpennington/split-bulk-operations

Implement bulk operations on the split modulestore.
parents ae240ae7 0e7e266a
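Throughout this commit, the context manager formerly named bulk_write_operations is renamed to bulk_operations and pushed down into the base modulestore classes. A minimal sketch of the call pattern, using a simplified stand-in for the real store API (the course id string is illustrative):

    from contextlib import contextmanager

    @contextmanager
    def bulk_operations(course_id):
        # Stand-in for the renamed store API: defer expensive per-write work
        # (e.g. metadata inheritance recomputation) until the block exits.
        try:
            yield
        finally:
            print('bulk operation on %s complete' % course_id)

    with bulk_operations('edX/toy/2012_Fall'):
        pass  # batched create/update/delete calls would go here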
......@@ -38,7 +38,7 @@ class Command(BaseCommand):
print("Cloning course {0} to {1}".format(source_course_id, dest_course_id))
with mstore.bulk_write_operations(dest_course_id):
with mstore.bulk_operations(dest_course_id):
if mstore.clone_course(source_course_id, dest_course_id, ModuleStoreEnum.UserID.mgmt_command):
print("copying User permissions...")
# purposely avoids auth.add_user b/c it doesn't have a caller to authorize
......
......@@ -867,7 +867,7 @@ class ContentStoreToyCourseTest(ContentStoreTestCase):
# so we don't need to make an extra query to compute it.
# set the branch to 'publish' in order to prevent extra lookups of draft versions
with mongo_store.branch_setting(ModuleStoreEnum.Branch.published_only):
with check_mongo_calls(mongo_store, 4, 0):
with check_mongo_calls(4, 0):
course = mongo_store.get_course(course_id, depth=2)
# make sure we pre-fetched a known sequential which should be at depth=2
......@@ -879,7 +879,7 @@ class ContentStoreToyCourseTest(ContentStoreTestCase):
# Now, test with the branch set to draft. No extra round trips b/c it doesn't go deep enough to get
# beyond direct only categories
with mongo_store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
with check_mongo_calls(mongo_store, 4, 0):
with check_mongo_calls(4, 0):
mongo_store.get_course(course_id, depth=2)
def test_export_course_without_content_store(self):
......
......@@ -201,10 +201,11 @@ class TestCourseListing(ModuleStoreTestCase):
# Now count the db queries
store = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.mongo)
with check_mongo_calls(store, USER_COURSES_COUNT):
with check_mongo_calls(USER_COURSES_COUNT):
_accessible_courses_list_from_groups(self.request)
with check_mongo_calls(store, 1):
# TODO: LMS-11220: Document why this takes 6 calls
with check_mongo_calls(6):
_accessible_courses_list(self.request)
def test_get_course_list_with_same_course_id(self):
......
......@@ -205,10 +205,9 @@ class TemplateTests(unittest.TestCase):
data="<problem></problem>"
)
# course root only updated 2x
# The draft course root has 2 revisions: the published revision, and then the subsequent
# changes to the draft revision
version_history = self.split_store.get_block_generations(test_course.location)
# create course causes 2 versions for the time being; skip the first.
version_history = version_history.children[0]
self.assertEqual(version_history.locator.version_guid, test_course.location.version_guid)
self.assertEqual(len(version_history.children), 1)
self.assertEqual(version_history.children[0].children, [])
......
......@@ -33,7 +33,7 @@ class ContentStoreImportTest(ModuleStoreTestCase):
"""
def setUp(self):
password = super(ContentStoreImportTest, self).setUp()
self.client = Client()
self.client.login(username=self.user.username, password=password)
......@@ -157,15 +157,15 @@ class ContentStoreImportTest(ModuleStoreTestCase):
store = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.mongo)
# we try to refresh the inheritance tree for each update_item in the import
with check_exact_number_of_calls(store, store.refresh_cached_metadata_inheritance_tree, 28):
with check_exact_number_of_calls(store, 'refresh_cached_metadata_inheritance_tree', 28):
# _get_cached_metadata_inheritance_tree should be called only once
with check_exact_number_of_calls(store, store._get_cached_metadata_inheritance_tree, 1):
with check_exact_number_of_calls(store, '_get_cached_metadata_inheritance_tree', 1):
# with bulk-edit in progress, the inheritance tree should be recomputed only at the end of the import
# NOTE: On Jenkins, with memcache enabled, the number of calls here is only 1.
# Locally, without memcache, the number of calls is actually 2 (once more during the publish step)
with check_number_of_calls(store, store._compute_metadata_inheritance_tree, 2):
with check_number_of_calls(store, '_compute_metadata_inheritance_tree', 2):
self.load_test_import_course()
def test_rewrite_reference_list(self):
......
......@@ -72,7 +72,7 @@ def delete_course_and_groups(course_key, user_id):
"""
module_store = modulestore()
with module_store.bulk_write_operations(course_key):
with module_store.bulk_operations(course_key):
module_store.delete_course(course_key, user_id)
print 'removing User permissions from course....'
......
......@@ -423,7 +423,7 @@ def course_index(request, course_key):
"""
# A depth of None implies the whole course. The course outline needs this in order to compute has_changes.
# A unit may not have a draft version, but one of its components could, and hence the unit itself has changes.
with modulestore().bulk_write_operations(course_key):
with modulestore().bulk_operations(course_key):
course_module = _get_course_module(course_key, request.user, depth=None)
lms_link = get_lms_link_for_item(course_module.location)
sections = course_module.get_children()
......
......@@ -310,6 +310,13 @@ class ModuleStoreRead(object):
"""
pass
@contextmanager
def bulk_operations(self, course_id):
"""
A context manager for notifying the store of bulk operations. This affects only the current thread.
"""
yield
class ModuleStoreWrite(ModuleStoreRead):
"""
......@@ -543,6 +550,33 @@ class ModuleStoreReadBase(ModuleStoreRead):
raise ValueError(u"Cannot set default store to type {}".format(store_type))
yield
@contextmanager
def bulk_operations(self, course_id):
"""
A context manager for notifying the store of bulk operations. This affects only the current thread.
In the case of Mongo, it temporarily disables refreshing the metadata inheritance tree
until the bulk operation is completed.
"""
# TODO: Make this multi-process-safe if future operations need it.
try:
self._begin_bulk_operation(course_id)
yield
finally:
self._end_bulk_operation(course_id)
def _begin_bulk_operation(self, course_id):
"""
Begin a bulk write operation on course_id.
"""
pass
def _end_bulk_operation(self, course_id):
"""
End the active bulk write operation on course_id.
"""
pass
class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
'''
......@@ -643,29 +677,6 @@ class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
parent.children.append(item.location)
self.update_item(parent, user_id)
@contextmanager
def bulk_write_operations(self, course_id):
"""
A context manager for notifying the store of bulk write events.
In the case of Mongo, it temporarily disables refreshing the metadata inheritance tree
until the bulk operation is completed.
"""
# TODO
# Make this multi-process-safe if future operations need it.
# Right now, only Import Course, Clone Course, and Delete Course use this, so
# it's ok if the cached metadata in the memcache is invalid when another
# request comes in for the same course.
try:
if hasattr(self, '_begin_bulk_write_operation'):
self._begin_bulk_write_operation(course_id)
yield
finally:
# check for the begin method here,
# since it's an error if an end method is not defined when a begin method is
if hasattr(self, '_begin_bulk_write_operation'):
self._end_bulk_write_operation(course_id)
def only_xmodules(identifier, entry_points):
"""Only use entry_points that are supplied by the xmodule package"""
......
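The refactor above replaces the hasattr-guarded _begin_bulk_write_operation protocol with no-op _begin_bulk_operation/_end_bulk_operation hooks defined on ModuleStoreReadBase, so subclasses simply override them. A minimal sketch of the hook protocol (class and attribute names are illustrative, not the real MongoModuleStore):

    class SketchStore(object):
        """Illustrative store showing the new begin/end hook protocol."""
        def __init__(self):
            self._active_bulk_courses = set()

        def _begin_bulk_operation(self, course_id):
            # suppress per-write cache refreshes while the bulk operation runs
            self._active_bulk_courses.add(course_id)

        def _end_bulk_operation(self, course_id):
            self._active_bulk_courses.discard(course_id)
            # a real store would refresh the metadata inheritance cache here

Because both hooks exist as no-ops on the base class, the old "check for the begin method" hasattr dance is no longer needed.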
......@@ -54,20 +54,16 @@ class DuplicateItemError(Exception):
self, Exception.__str__(self, *args, **kwargs)
)
class VersionConflictError(Exception):
"""
The caller asked for either draft or published head and gave a version which conflicted with it.
"""
def __init__(self, requestedLocation, currentHeadVersionGuid):
super(VersionConflictError, self).__init__()
self.requestedLocation = requestedLocation
self.currentHeadVersionGuid = currentHeadVersionGuid
def __str__(self, *args, **kwargs):
"""
Print requested and current head info
"""
return u'Requested {} but {} is current head'.format(self.requestedLocation, self.currentHeadVersionGuid)
super(VersionConflictError, self).__init__(u'Requested {}, but current head is {}'.format(
requestedLocation,
currentHeadVersionGuid
))
class DuplicateCourseError(Exception):
......
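Building the message in __init__ rather than overriding __str__ lets Exception handle str() and unicode() through its args. A quick check of the new behavior (the import path is assumed from this commit's file layout; arguments are illustrative):

    from xmodule.modulestore.exceptions import VersionConflictError

    err = VersionConflictError('block@v1', 'abc123')
    assert str(err) == 'Requested block@v1, but current head is abc123'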
......@@ -645,11 +645,11 @@ class MixedModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
yield
@contextmanager
def bulk_write_operations(self, course_id):
def bulk_operations(self, course_id):
"""
A context manager for notifying the store of bulk write events.
A context manager for notifying the store of bulk operations.
If course_id is None, the default store is used.
"""
store = self._get_modulestore_for_courseid(course_id)
with store.bulk_write_operations(course_id):
with store.bulk_operations(course_id):
yield
......@@ -17,6 +17,7 @@ import sys
import logging
import copy
import re
import threading
from uuid import uuid4
from bson.son import SON
......@@ -414,7 +415,7 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
# performance optimization to prevent updating the meta-data inheritance tree during
# bulk write operations
self.ignore_write_events_on_courses = set()
self.ignore_write_events_on_courses = threading.local()
self._course_run_cache = {}
def close_connections(self):
......@@ -435,27 +436,36 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
connection.drop_database(self.collection.database)
connection.close()
def _begin_bulk_write_operation(self, course_id):
def _begin_bulk_operation(self, course_id):
"""
Prevent updating the meta-data inheritance cache for the given course
"""
self.ignore_write_events_on_courses.add(course_id)
if not hasattr(self.ignore_write_events_on_courses, 'courses'):
self.ignore_write_events_on_courses.courses = set()
def _end_bulk_write_operation(self, course_id):
self.ignore_write_events_on_courses.courses.add(course_id)
def _end_bulk_operation(self, course_id):
"""
Restart updating the meta-data inheritance cache for the given course.
Refresh the meta-data inheritance cache now since it was temporarily disabled.
"""
if course_id in self.ignore_write_events_on_courses:
self.ignore_write_events_on_courses.remove(course_id)
if not hasattr(self.ignore_write_events_on_courses, 'courses'):
return
if course_id in self.ignore_write_events_on_courses.courses:
self.ignore_write_events_on_courses.courses.remove(course_id)
self.refresh_cached_metadata_inheritance_tree(course_id)
def _is_bulk_write_in_progress(self, course_id):
"""
Returns whether a bulk write operation is in progress for the given course.
"""
if not hasattr(self.ignore_write_events_on_courses, 'courses'):
return False
course_id = course_id.for_branch(None)
return course_id in self.ignore_write_events_on_courses
return course_id in self.ignore_write_events_on_courses.courses
def fill_in_run(self, course_key):
"""
......
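Switching ignore_write_events_on_courses from a plain set to threading.local() scopes the bulk-edit bookkeeping to the current thread, matching the new docstring ("This affects only the current thread"). A runnable sketch of the pattern with illustrative names:

    import threading

    class SketchStore(object):
        def __init__(self):
            # each thread sees its own `.courses` attribute
            self._in_bulk = threading.local()

        def _begin_bulk_operation(self, course_id):
            if not hasattr(self._in_bulk, 'courses'):
                self._in_bulk.courses = set()
            self._in_bulk.courses.add(course_id)

        def _is_bulk_write_in_progress(self, course_id):
            return course_id in getattr(self._in_bulk, 'courses', set())

    store = SketchStore()
    store._begin_bulk_operation('course-a')
    assert store._is_bulk_write_in_progress('course-a')
    # a different thread would find no `.courses` attribute and get False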
......@@ -68,39 +68,40 @@ def path_to_location(modulestore, usage_key):
newpath = (next_usage, path)
queue.append((parent, newpath))
if not modulestore.has_item(usage_key):
raise ItemNotFoundError(usage_key)
path = find_path_to_course()
if path is None:
raise NoPathToItem(usage_key)
n = len(path)
course_id = path[0].course_key
# pull out the location names
chapter = path[1].name if n > 1 else None
section = path[2].name if n > 2 else None
# Figure out the position
position = None
# This block of code will find the position of a module within a nested tree
# of modules. If a problem is on tab 2 of a sequence that's on tab 3 of a
# sequence, the resulting position is 3_2. However, no positional modules
# (e.g. sequential and videosequence) currently deal with this form of
# representing nested positions. This needs to happen before jumping to a
# module nested in more than one positional module will work.
if n > 3:
position_list = []
for path_index in range(2, n - 1):
category = path[path_index].block_type
if category == 'sequential' or category == 'videosequence':
section_desc = modulestore.get_item(path[path_index])
# this calls get_children rather than just children b/c old mongo includes private children
# in children but not in get_children
child_locs = [c.location for c in section_desc.get_children()]
# positions are 1-indexed, and should be strings to be consistent with
# url parsing.
position_list.append(str(child_locs.index(path[path_index + 1]) + 1))
position = "_".join(position_list)
return (course_id, chapter, section, position)
with modulestore.bulk_operations(usage_key.course_key):
if not modulestore.has_item(usage_key):
raise ItemNotFoundError(usage_key)
path = find_path_to_course()
if path is None:
raise NoPathToItem(usage_key)
n = len(path)
course_id = path[0].course_key
# pull out the location names
chapter = path[1].name if n > 1 else None
section = path[2].name if n > 2 else None
# Figure out the position
position = None
# This block of code will find the position of a module within a nested tree
# of modules. If a problem is on tab 2 of a sequence that's on tab 3 of a
# sequence, the resulting position is 3_2. However, no positional modules
# (e.g. sequential and videosequence) currently deal with this form of
# representing nested positions. This needs to happen before jumping to a
# module nested in more than one positional module will work.
if n > 3:
position_list = []
for path_index in range(2, n - 1):
category = path[path_index].block_type
if category == 'sequential' or category == 'videosequence':
section_desc = modulestore.get_item(path[path_index])
# this calls get_children rather than just children b/c old mongo includes private children
# in children but not in get_children
child_locs = [c.location for c in section_desc.get_children()]
# positions are 1-indexed, and should be strings to be consistent with
# url parsing.
position_list.append(str(child_locs.index(path[path_index + 1]) + 1))
position = "_".join(position_list)
return (course_id, chapter, section, position)
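A worked illustration of the position encoding described in the comment above: positions are 1-indexed strings joined with underscores, so a problem on tab 2 of a sequence that itself sits on tab 3 of another sequence yields "3_2" (data purely illustrative):

    child_indexes = [3, 2]  # tab 3 of the outer sequence, tab 2 of the inner one
    position = '_'.join(str(index) for index in child_indexes)
    assert position == '3_2'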
......@@ -55,23 +55,27 @@ class SplitMigrator(object):
new_run = source_course_key.run
new_course_key = CourseLocator(new_org, new_course, new_run, branch=ModuleStoreEnum.BranchName.published)
new_fields = self._get_fields_translate_references(original_course, new_course_key, None)
if fields:
new_fields.update(fields)
new_course = self.split_modulestore.create_course(
new_org, new_course, new_run, user_id,
fields=new_fields,
master_branch=ModuleStoreEnum.BranchName.published,
skip_auto_publish=True,
**kwargs
)
with self.split_modulestore.bulk_write_operations(new_course.id):
with self.split_modulestore.bulk_operations(new_course_key):
new_fields = self._get_fields_translate_references(original_course, new_course_key, None)
if fields:
new_fields.update(fields)
new_course = self.split_modulestore.create_course(
new_org, new_course, new_run, user_id,
fields=new_fields,
master_branch=ModuleStoreEnum.BranchName.published,
skip_auto_publish=True,
**kwargs
)
self._copy_published_modules_to_course(
new_course, original_course.location, source_course_key, user_id, **kwargs
)
# create a new version for the drafts
with self.split_modulestore.bulk_write_operations(new_course.id):
# TODO: This should be merged back into the above transaction, but can't be until split.py
# is refactored to have more coherent access patterns
with self.split_modulestore.bulk_operations(new_course_key):
# create a new version for the drafts
self._add_draft_modules_to_course(new_course.location, source_course_key, user_id, **kwargs)
return new_course.id
......@@ -80,7 +84,7 @@ class SplitMigrator(object):
"""
Copy all of the modules from the 'direct' version of the course to the new split course.
"""
course_version_locator = new_course.id
course_version_locator = new_course.id.version_agnostic()
# iterate over published course elements. Wildcarding rather than descending b/c some elements are orphaned (e.g.,
# course about pages, conditionals)
......@@ -101,7 +105,6 @@ class SplitMigrator(object):
fields=self._get_fields_translate_references(
module, course_version_locator, new_course.location.block_id
),
continue_version=True,
skip_auto_publish=True,
**kwargs
)
......@@ -109,7 +112,7 @@ class SplitMigrator(object):
index_info = self.split_modulestore.get_course_index_info(course_version_locator)
versions = index_info['versions']
versions[ModuleStoreEnum.BranchName.draft] = versions[ModuleStoreEnum.BranchName.published]
self.split_modulestore.update_course_index(index_info)
self.split_modulestore.update_course_index(course_version_locator, index_info)
# clean up orphans in published version: in old mongo, parents pointed to the union of their published and draft
# children which meant some pointers were to non-existent locations in 'direct'
......
......@@ -2,7 +2,7 @@ import sys
import logging
from xblock.runtime import KvsFieldData
from xblock.fields import ScopeIds
from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator
from opaque_keys.edx.locator import BlockUsageLocator, LocalId, CourseLocator, DefinitionLocator
from xmodule.mako_module import MakoDescriptorSystem
from xmodule.error_module import ErrorDescriptor
from xmodule.errortracker import exc_info_to_str
......@@ -10,6 +10,7 @@ from xmodule.modulestore.split_mongo import encode_key_for_mongo
from ..exceptions import ItemNotFoundError
from .split_mongo_kvs import SplitMongoKVS
from fs.osfs import OSFS
from .definition_lazy_loader import DefinitionLazyLoader
log = logging.getLogger(__name__)
......@@ -120,9 +121,24 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
self.course_entry['org'] = course_entry_override['org']
self.course_entry['course'] = course_entry_override['course']
self.course_entry['run'] = course_entry_override['run']
# most likely a lazy loader or the id directly
definition = json_data.get('definition', {})
definition_id = self.modulestore.definition_locator(definition)
definition_id = json_data.get('definition')
block_type = json_data['category']
if definition_id is not None and not json_data.get('definition_loaded', False):
definition_loader = DefinitionLazyLoader(
self.modulestore, block_type, definition_id,
lambda fields: self.modulestore.convert_references_to_keys(
course_key, self.load_block_type(block_type),
fields, self.course_entry['structure']['blocks'],
)
)
else:
definition_loader = None
# If no definition id is provided, generate an in-memory id
if definition_id is None:
definition_id = LocalId()
# If no usage id is provided, generate an in-memory id
if block_id is None:
......@@ -130,7 +146,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
block_locator = BlockUsageLocator(
course_key,
block_type=json_data.get('category'),
block_type=block_type,
block_id=block_id,
)
......@@ -138,7 +154,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
block_locator.course_key, class_, json_data.get('fields', {}), self.course_entry['structure']['blocks'],
)
kvs = SplitMongoKVS(
definition,
definition_loader,
converted_fields,
json_data.get('_inherited_settings'),
**kwargs
......@@ -148,7 +164,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
try:
module = self.construct_xblock_from_class(
class_,
ScopeIds(None, json_data.get('category'), definition_id, block_locator),
ScopeIds(None, block_type, definition_id, block_locator),
field_data,
)
except Exception:
......@@ -174,7 +190,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
module.previous_version = edit_info.get('previous_version')
module.update_version = edit_info.get('update_version')
module.source_version = edit_info.get('source_version', None)
module.definition_locator = definition_id
module.definition_locator = DefinitionLocator(block_type, definition_id)
# decache any pending field settings
module.save()
......
from opaque_keys.edx.locator import DefinitionLocator
from bson import SON
class DefinitionLazyLoader(object):
......@@ -24,3 +25,9 @@ class DefinitionLazyLoader(object):
loader pointer with the result so as not to fetch more than once
"""
return self.modulestore.db_connection.get_definition(self.definition_locator.definition_id)
def as_son(self):
return SON((
('category', self.definition_locator.block_type),
('definition', self.definition_locator.definition_id)
))
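DefinitionLazyLoader defers the definition fetch until a field is actually read, and as_son() lets the unloaded pointer be written back to Mongo without forcing that fetch. A minimal stand-in showing the fetch-once behavior (names illustrative):

    class SketchLazyLoader(object):
        """Defer a fetch until first use, then cache the result."""
        def __init__(self, fetch_fn):
            self._fetch_fn = fetch_fn
            self._cached = None

        def fetch(self):
            if self._cached is None:
                self._cached = self._fetch_fn()  # hits the db at most once
            return self._cached

    loader = SketchLazyLoader(lambda: {'data': '<problem></problem>'})
    assert loader.fetch() is loader.fetch()  # second call returns the cached doc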
......@@ -57,24 +57,42 @@ class MongoConnection(object):
"""
return self.structures.find_one({'_id': key})
def find_matching_structures(self, query):
def find_structures_by_id(self, ids):
"""
Find the structure matching the query. Right now the query must be a legal mongo query
:param query: a mongo-style query of {key: [value|{$in ..}|..], ..}
Return all structures that are specified in ``ids``.
Arguments:
ids (list): A list of structure ids
"""
return self.structures.find({'_id': {'$in': ids}})
def find_structures_derived_from(self, ids):
"""
Return all structures that were immediately derived from a structure listed in ``ids``.
Arguments:
ids (list): A list of structure ids
"""
return self.structures.find(query)
return self.structures.find({'previous_version': {'$in': ids}})
def insert_structure(self, structure):
def find_ancestor_structures(self, original_version, block_id):
"""
Create the structure in the db
Find all structures that originated from ``original_version`` that contain ``block_id``.
Arguments:
original_version (str or ObjectID): The id of a structure
block_id (str): The id of the block in question
"""
self.structures.insert(structure)
return self.structures.find({
'original_version': original_version,
'blocks.{}.edit_info.update_version'.format(block_id): {'$exists': True}
})
def update_structure(self, structure):
def upsert_structure(self, structure):
"""
Update the db record for structure
Update the db record for structure, creating that record if it doesn't already exist
"""
self.structures.update({'_id': structure['_id']}, structure)
self.structures.update({'_id': structure['_id']}, structure, upsert=True)
def get_course_index(self, key, ignore_case=False):
"""
......@@ -88,11 +106,23 @@ class MongoConnection(object):
])
)
def find_matching_course_indexes(self, query):
def find_matching_course_indexes(self, branch=None, search_targets=None):
"""
Find the course_index matching the query. Right now the query must be a legal mongo query
:param query: a mongo-style query of {key: [value|{$in ..}|..], ..}
Find the course_index matching particular conditions.
Arguments:
branch: If specified, this branch must exist in the returned courses
search_targets: If specified, this must be a dictionary specifying field values
that must exist in the search_targets of the returned courses
"""
query = son.SON()
if branch is not None:
query['versions.{}'.format(branch)] = {'$exists': True}
if search_targets:
for key, value in search_targets.iteritems():
query['search_targets.{}'.format(key)] = value
return self.course_index.find(query)
def insert_course_index(self, course_index):
......@@ -101,13 +131,21 @@ class MongoConnection(object):
"""
self.course_index.insert(course_index)
def update_course_index(self, course_index):
def update_course_index(self, course_index, from_index=None):
"""
Update the db record for course_index
Update the db record for course_index.
Arguments:
from_index: If set, only update an index if it matches the one specified in `from_index`.
"""
self.course_index.update(
son.SON([('org', course_index['org']), ('course', course_index['course']), ('run', course_index['run'])]),
course_index
from_index or son.SON([
('org', course_index['org']),
('course', course_index['course']),
('run', course_index['run'])
]),
course_index,
upsert=False,
)
def delete_course_index(self, course_index):
......
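The new find_matching_course_indexes builds the Mongo query itself from branch and search_targets instead of accepting a raw query dict from callers. A runnable sketch of the query construction (OrderedDict stands in for bson's SON so the snippet runs without pymongo; the field names follow the code above):

    from collections import OrderedDict

    def build_index_query(branch=None, search_targets=None):
        query = OrderedDict()
        if branch is not None:
            query['versions.{}'.format(branch)] = {'$exists': True}
        for key, value in (search_targets or {}).items():
            query['search_targets.{}'.format(key)] = value
        return query

    print(build_index_query('published', {'wiki_slug': 'toy'}))
    # versions.published must exist and search_targets.wiki_slug must equal 'toy'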
......@@ -287,7 +287,7 @@ class ModuleStoreTestCase(TestCase):
course_loc: the CourseKey for the created course
"""
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, None):
# with self.store.bulk_write_operations(self.store.make_course_key(org, course, run)):
# with self.store.bulk_operations(self.store.make_course_key(org, course, run)):
course = self.store.create_course(org, course, run, self.user.id, fields=course_fields)
self.course_loc = course.location
......@@ -314,7 +314,7 @@ class ModuleStoreTestCase(TestCase):
"""
Create an equivalent to the toy xml course
"""
# with self.store.bulk_write_operations(self.store.make_course_key(org, course, run)):
# with self.store.bulk_operations(self.store.make_course_key(org, course, run)):
self.toy_loc = self.create_sample_course(
org, course, run, TOY_BLOCK_INFO_TREE,
{
......
import pprint
import pymongo.message
from factory import Factory, lazy_attribute_sequence, lazy_attribute
from factory.containers import CyclicDefinitionError
from uuid import uuid4
......@@ -214,88 +217,83 @@ class ItemFactory(XModuleFactory):
@contextmanager
def check_exact_number_of_calls(object_with_method, method, num_calls, method_name=None):
def check_exact_number_of_calls(object_with_method, method_name, num_calls):
"""
Instruments the given method on the given object to verify the number of calls to the
method is exactly equal to 'num_calls'.
"""
with check_number_of_calls(object_with_method, method, num_calls, num_calls, method_name):
with check_number_of_calls(object_with_method, method_name, num_calls, num_calls):
yield
@contextmanager
def check_number_of_calls(object_with_method, method, maximum_calls, minimum_calls=1, method_name=None):
def check_number_of_calls(object_with_method, method_name, maximum_calls, minimum_calls=1):
"""
Instruments the given method on the given object to verify the number of calls to the method is
less than or equal to the expected maximum_calls and greater than or equal to the expected minimum_calls.
"""
method_wrap = Mock(wraps=method)
wrap_patch = patch.object(object_with_method, method_name or method.__name__, method_wrap)
return check_sum_of_calls(object_with_method, [method_name], maximum_calls, minimum_calls)
try:
wrap_patch.start()
@contextmanager
def check_sum_of_calls(object_, methods, maximum_calls, minimum_calls=1):
"""
Instruments the given methods on the given object to verify that the total sum of calls made to the
methods falls between minimum_calls and maximum_calls.
"""
mocks = {
method: Mock(wraps=getattr(object_, method))
for method in methods
}
with patch.multiple(object_, **mocks):
yield
finally:
wrap_patch.stop()
call_count = sum(mock.call_count for mock in mocks.values())
calls = pprint.pformat({
method_name: mock.call_args_list
for method_name, mock in mocks.items()
})
# Assertion errors don't handle multi-line values, so pretty-print to std-out instead
if not minimum_calls <= call_count <= maximum_calls:
print "Expected between {} and {} calls, {} were made. Calls: {}".format(
minimum_calls,
maximum_calls,
call_count,
calls,
)
# verify the counter actually worked by ensuring we have counted greater than (or equal to) the minimum calls
assert_greater_equal(method_wrap.call_count, minimum_calls)
# verify the counter actually worked by ensuring we have counted greater than (or equal to) the minimum calls
assert_greater_equal(call_count, minimum_calls)
# now verify the number of actual calls is less than (or equal to) the expected maximum
assert_less_equal(method_wrap.call_count, maximum_calls)
# now verify the number of actual calls is less than (or equal to) the expected maximum
assert_less_equal(call_count, maximum_calls)
@contextmanager
def check_mongo_calls(mongo_store, num_finds=0, num_sends=None):
def check_mongo_calls(num_finds=0, num_sends=None):
"""
Instruments the Mongo layer to count the number of calls to find (incl find_one) and the number
of calls to send_message which is for insert, update, and remove (if you provide num_sends). At the
end of the with statement, it compares the counts to the num_finds and num_sends.
:param mongo_store: the MongoModulestore or subclass to watch or a SplitMongoModuleStore
:param num_finds: the exact number of find calls expected
:param num_sends: If None, don't instrument the send calls. If not None, count and compare to
the given int value.
"""
if mongo_store.get_modulestore_type() == ModuleStoreEnum.Type.mongo:
with check_exact_number_of_calls(mongo_store.collection, mongo_store.collection.find, num_finds):
if num_sends is not None:
with check_exact_number_of_calls(
mongo_store.database.connection,
mongo_store.database.connection._send_message, # pylint: disable=protected-access
num_sends,
):
yield
else:
with check_sum_of_calls(
pymongo.message,
['query', 'get_more'],
num_finds,
num_finds
):
if num_sends is not None:
with check_sum_of_calls(
pymongo.message,
['insert', 'update', 'delete'],
num_sends,
num_sends
):
yield
elif mongo_store.get_modulestore_type() == ModuleStoreEnum.Type.split:
collections = [
mongo_store.db_connection.course_index,
mongo_store.db_connection.structures,
mongo_store.db_connection.definitions,
]
# could add else clause which raises exception or just rely on the below to suss that out
try:
find_wraps = []
wrap_patches = []
for collection in collections:
find_wrap = Mock(wraps=collection.find)
find_wraps.append(find_wrap)
wrap_patch = patch.object(collection, 'find', find_wrap)
wrap_patches.append(wrap_patch)
wrap_patch.start()
if num_sends is not None:
connection = mongo_store.db_connection.database.connection
with check_exact_number_of_calls(
connection,
connection._send_message, # pylint: disable=protected-access
num_sends,
):
yield
else:
yield
finally:
map(lambda wrap_patch: wrap_patch.stop(), wrap_patches)
call_count = sum([find_wrap.call_count for find_wrap in find_wraps])
assert_equal(call_count, num_finds)
else:
yield
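Both check_sum_of_calls and the rewritten check_mongo_calls rely on mock wrapping the real callable, so behavior is unchanged while calls are counted. The counting technique in isolation, applied to an illustrative target rather than pymongo.message:

    import math
    from mock import Mock, patch  # the same mock library the helpers use

    wrapped = Mock(wraps=math.sqrt)
    with patch.object(math, 'sqrt', wrapped):
        math.sqrt(4)
        math.sqrt(9)
    assert wrapped.call_count == 2  # calls passed through and were counted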
......@@ -19,23 +19,35 @@ class TestPublish(SplitWMongoCourseBoostrapper):
# There are 12 created items and 7 parent updates
# create course: finds: 1 to verify uniqueness, 1 to find parents
# sends: 1 to create course, 1 to create overview
with check_mongo_calls(self.draft_mongo, 5, 2):
with check_mongo_calls(5, 2):
super(TestPublish, self)._create_course(split=False) # 2 inserts (course and overview)
# with bulk will delay all inheritance computations which won't be added into the mongo_calls
with self.draft_mongo.bulk_write_operations(self.old_course_key):
with self.draft_mongo.bulk_operations(self.old_course_key):
# finds: 1 for parent to add child
# sends: 1 for insert, 1 for parent (add child)
with check_mongo_calls(self.draft_mongo, 1, 2):
with check_mongo_calls(1, 2):
self._create_item('chapter', 'Chapter1', {}, {'display_name': 'Chapter 1'}, 'course', 'runid', split=False)
with check_mongo_calls(self.draft_mongo, 2, 2):
with check_mongo_calls(2, 2):
self._create_item('chapter', 'Chapter2', {}, {'display_name': 'Chapter 2'}, 'course', 'runid', split=False)
# update info propagation is 2 levels. create looks for draft and then published and then creates
with check_mongo_calls(self.draft_mongo, 8, 6):
# For each of the 2 verticals created:
# - load draft
# - load non-draft
# - get last error
# - load parent
# - load inheritable data
with check_mongo_calls(10, 6):
self._create_item('vertical', 'Vert1', {}, {'display_name': 'Vertical 1'}, 'chapter', 'Chapter1', split=False)
self._create_item('vertical', 'Vert2', {}, {'display_name': 'Vertical 2'}, 'chapter', 'Chapter1', split=False)
with check_mongo_calls(self.draft_mongo, 20, 12):
# For each of the 4 items created:
# - load draft
# - load non-draft
# - get last error
# - load parent
# - load inheritable data
# - load parent
with check_mongo_calls(24, 12):
self._create_item('html', 'Html1', "<p>Goodbye</p>", {'display_name': 'Parented Html'}, 'vertical', 'Vert1', split=False)
self._create_item(
'discussion', 'Discussion1',
......@@ -63,7 +75,7 @@ class TestPublish(SplitWMongoCourseBoostrapper):
split=False
)
with check_mongo_calls(self.draft_mongo, 0, 2):
with check_mongo_calls(0, 2):
# 2 finds b/c looking for non-existent parents
self._create_item('static_tab', 'staticuno', "<p>tab</p>", {'display_name': 'Tab uno'}, None, None, split=False)
self._create_item('course_info', 'updates', "<ol><li><h2>Sep 22</h2><p>test</p></li></ol>", {}, None, None, split=False)
......@@ -76,10 +88,11 @@ class TestPublish(SplitWMongoCourseBoostrapper):
vert_location = self.old_course_key.make_usage_key('vertical', block_id='Vert1')
item = self.draft_mongo.get_item(vert_location, 2)
# Vert1 has 3 children; publishing it therefore touches 4 nodes, which may mean 4 inserts & 1 bulk remove
# TODO: LMS-11220: Document why find count is 25
# 25-June-2014 find calls are 19. Probably due to inheritance recomputation?
# 02-July-2014 send calls are 7. 5 from above, plus 2 for updating subtree edit info for Chapter1 and course
# find calls are 22. 19 from above, plus 3 for finding the parent of Vert1, Chapter1, and course
with check_mongo_calls(self.draft_mongo, 22, 7):
with check_mongo_calls(25, 7):
self.draft_mongo.publish(item.location, self.user_id)
# verify status
......
......@@ -206,7 +206,7 @@ def import_from_xml(
)
continue
with store.bulk_write_operations(dest_course_id):
with store.bulk_operations(dest_course_id):
source_course = xml_module_store.get_course(course_key)
# STEP 1: find and import course module
course, course_data_path = _import_course_module(
......@@ -607,7 +607,7 @@ def _import_course_draft(
_import_module(descriptor)
except Exception:
logging.exception('There while importing draft descriptor %s', descriptor)
logging.exception('while importing draft descriptor %s', descriptor)
def allowed_metadata_by_category(category):
......
......@@ -287,9 +287,9 @@ class CourseComparisonTest(unittest.TestCase):
self.assertEqual(expected_item.has_children, actual_item.has_children)
if expected_item.has_children:
expected_children = [
(course1_item_child.location.block_type, course1_item_child.location.block_id)
(expected_item_child.location.block_type, expected_item_child.location.block_id)
# get_children() rather than children to strip privates from public parents
for course1_item_child in expected_item.get_children()
for expected_item_child in expected_item.get_children()
]
actual_children = [
(item_child.location.block_type, item_child.location.block_id)
......
......@@ -21,6 +21,7 @@ from django.contrib.auth.models import User
from xblock.runtime import KeyValueStore
from xblock.exceptions import KeyValueMultiSaveError, InvalidScopeError
from xblock.fields import Scope, UserScope
from xmodule.modulestore.django import modulestore
log = logging.getLogger(__name__)
......@@ -109,7 +110,8 @@ class FieldDataCache(object):
return descriptors
descriptors = get_child_descriptors(descriptor, depth, descriptor_filter)
with modulestore().bulk_operations(descriptor.location.course_key):
descriptors = get_child_descriptors(descriptor, depth, descriptor_filter)
return FieldDataCache(descriptors, course_id, user, select_for_update)
......
......@@ -326,13 +326,15 @@ class TestTOC(ModuleStoreTestCase):
self.request = factory.get(chapter_url)
self.request.user = UserFactory()
self.modulestore = self.store._get_modulestore_for_courseid(self.course_key)
with check_mongo_calls(self.modulestore, num_finds, num_sends):
self.toy_course = self.store.get_course(self.toy_loc, depth=2)
self.toy_course = self.store.get_course(self.toy_loc, depth=2)
with check_mongo_calls(num_finds, num_sends):
self.field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
self.toy_loc, self.request.user, self.toy_course, depth=2)
self.toy_loc, self.request.user, self.toy_course, depth=2
)
@ddt.data((ModuleStoreEnum.Type.mongo, 3, 0), (ModuleStoreEnum.Type.split, 7, 0))
# TODO: LMS-11220: Document why split find count is 21
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 5, 0))
@ddt.unpack
def test_toc_toy_from_chapter(self, default_ms, num_finds, num_sends):
with self.store.default_store(default_ms):
......@@ -352,14 +354,15 @@ class TestTOC(ModuleStoreTestCase):
'format': '', 'due': None, 'active': False}],
'url_name': 'secret:magic', 'display_name': 'secret:magic'}])
with check_mongo_calls(self.modulestore, 0, 0):
with check_mongo_calls(0, 0):
actual = render.toc_for_course(
self.request.user, self.request, self.toy_course, self.chapter, None, self.field_data_cache
)
for toc_section in expected:
self.assertIn(toc_section, actual)
@ddt.data((ModuleStoreEnum.Type.mongo, 3, 0), (ModuleStoreEnum.Type.split, 7, 0))
# TODO: LMS-11220: Document why split find count is 21
@ddt.data((ModuleStoreEnum.Type.mongo, 1, 0), (ModuleStoreEnum.Type.split, 5, 0))
@ddt.unpack
def test_toc_toy_from_section(self, default_ms, num_finds, num_sends):
with self.store.default_store(default_ms):
......
......@@ -91,7 +91,10 @@ def test_lib(options):
}
if test_id:
lib = '/'.join(test_id.split('/')[0:3])
if '/' in test_id:
lib = '/'.join(test_id.split('/')[0:3])
else:
lib = 'common/lib/' + test_id.split('.')[0]
opts['test_id'] = test_id
lib_tests = [suites.LibTestSuite(lib, **opts)]
else:
......
......@@ -4,3 +4,4 @@ psutil==1.2.1
lazy==1.1
path.py==3.0.1
watchdog==0.7.1
python-memcached