Commit 0e7e266a by Calen Pennington

Push bulk_write_operations up into ModuleStoreRead, and rename to remove reference to writes

parent a205788c
@@ -38,7 +38,7 @@ class Command(BaseCommand):
         print("Cloning course {0} to {1}".format(source_course_id, dest_course_id))

-        with mstore.bulk_write_operations(dest_course_id):
+        with mstore.bulk_operations(dest_course_id):
             if mstore.clone_course(source_course_id, dest_course_id, ModuleStoreEnum.UserID.mgmt_command):
                 print("copying User permissions...")
                 # purposely avoids auth.add_user b/c it doesn't have a caller to authorize
......
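For orientation, this is how callers read after the rename: a single context manager wraps all of the reads and writes for a course so the store can batch them. A minimal sketch (the import path is edx-platform's usual modulestore factory; the wrapper function and its arguments are illustrative, not part of the commit):

    from xmodule.modulestore.django import modulestore

    def clone_with_batching(source_course_id, dest_course_id, user_id):
        store = modulestore()
        # Everything inside the block is one bulk operation for dest_course_id,
        # so per-item cache refreshes are deferred until the block exits.
        with store.bulk_operations(dest_course_id):
            store.clone_course(source_course_id, dest_course_id, user_id)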
@@ -72,7 +72,7 @@ def delete_course_and_groups(course_key, user_id):
     """
     module_store = modulestore()

-    with module_store.bulk_write_operations(course_key):
+    with module_store.bulk_operations(course_key):
         module_store.delete_course(course_key, user_id)

         print 'removing User permissions from course....'
......
@@ -423,7 +423,7 @@ def course_index(request, course_key):
     """
     # A depth of None implies the whole course. The course outline needs this in order to compute has_changes.
     # A unit may not have a draft version, but one of its components could, and hence the unit itself has changes.
-    with modulestore().bulk_write_operations(course_key):
+    with modulestore().bulk_operations(course_key):
         course_module = _get_course_module(course_key, request.user, depth=None)
         lms_link = get_lms_link_for_item(course_module.location)
         sections = course_module.get_children()
......
@@ -310,6 +310,13 @@ class ModuleStoreRead(object):
         """
         pass

+    @contextmanager
+    def bulk_operations(self, course_id):
+        """
+        A context manager for notifying the store of bulk operations. This affects only the current thread.
+        """
+        yield
+

 class ModuleStoreWrite(ModuleStoreRead):
     """
@@ -543,6 +550,33 @@ class ModuleStoreReadBase(ModuleStoreRead):
             raise ValueError(u"Cannot set default store to type {}".format(store_type))
         yield

+    @contextmanager
+    def bulk_operations(self, course_id):
+        """
+        A context manager for notifying the store of bulk operations. This affects only the current thread.
+
+        In the case of Mongo, it temporarily disables refreshing the metadata inheritance tree
+        until the bulk operation is completed.
+        """
+        # TODO: Make this multi-process-safe if future operations need it.
+        try:
+            self._begin_bulk_operation(course_id)
+            yield
+        finally:
+            self._end_bulk_operation(course_id)
+
+    def _begin_bulk_operation(self, course_id):
+        """
+        Begin a bulk write operation on course_id.
+        """
+        pass
+
+    def _end_bulk_operation(self, course_id):
+        """
+        End the active bulk write operation on course_id.
+        """
+        pass
+

 class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
     '''
@@ -643,37 +677,6 @@ class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
             parent.children.append(item.location)
             self.update_item(parent, user_id)

-    @contextmanager
-    def bulk_write_operations(self, course_id):
-        """
-        A context manager for notifying the store of bulk write events. This affects only the current thread.
-
-        In the case of Mongo, it temporarily disables refreshing the metadata inheritance tree
-        until the bulk operation is completed.
-        """
-        # TODO
-        # Make this multi-process-safe if future operations need it.
-        # Right now, only Import Course, Clone Course, and Delete Course use this, so
-        # it's ok if the cached metadata in the memcache is invalid when another
-        # request comes in for the same course.
-        try:
-            self._begin_bulk_write_operation(course_id)
-            yield
-        finally:
-            self._end_bulk_write_operation(course_id)
-
-    def _begin_bulk_write_operation(self, course_id):
-        """
-        Begin a bulk write operation on course_id.
-        """
-        pass
-
-    def _end_bulk_write_operation(self, course_id):
-        """
-        End the active bulk write operation on course_id.
-        """
-        pass
-

 def only_xmodules(identifier, entry_points):
     """Only use entry_points that are supplied by the xmodule package"""
......
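The three hunks above are the heart of the commit: `bulk_operations` moves from `ModuleStoreWriteBase` up to the read interface, with `ModuleStoreRead.bulk_operations` as a bare `yield` and `ModuleStoreReadBase` delegating to `_begin_bulk_operation`/`_end_bulk_operation` hooks that default to no-ops. A condensed sketch of the resulting hook pattern (class names here are hypothetical, for illustration only):

    from contextlib import contextmanager

    class ReadBaseSketch(object):
        @contextmanager
        def bulk_operations(self, course_id):
            # try/finally guarantees the end hook runs even if the body raises
            try:
                self._begin_bulk_operation(course_id)
                yield
            finally:
                self._end_bulk_operation(course_id)

        def _begin_bulk_operation(self, course_id):
            pass  # no-op by default; stores that batch override this

        def _end_bulk_operation(self, course_id):
            pass  # no-op by default; overrides flush buffers / refresh caches

    class TrackingStoreSketch(ReadBaseSketch):
        # hypothetical subclass showing the override points
        def __init__(self):
            self.active_courses = set()

        def _begin_bulk_operation(self, course_id):
            self.active_courses.add(course_id)

        def _end_bulk_operation(self, course_id):
            self.active_courses.discard(course_id)

Because the default implementation simply yields, any `ModuleStoreRead` implementation can be wrapped in `bulk_operations` whether or not it batches, which is what lets `MixedModuleStore` below drop its `hasattr` check.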
@@ -645,14 +645,11 @@ class MixedModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
         yield

     @contextmanager
-    def bulk_write_operations(self, course_id):
+    def bulk_operations(self, course_id):
         """
-        A context manager for notifying the store of bulk write events.
+        A context manager for notifying the store of bulk operations.
         If course_id is None, the default store is used.
         """
         store = self._get_modulestore_for_courseid(course_id)
-        if hasattr(store, 'bulk_write_operations'):
-            with store.bulk_write_operations(course_id):
-                yield
-        else:
-            yield
+        with store.bulk_operations(course_id):
+            yield
@@ -436,7 +436,7 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
             connection.drop_database(self.collection.database)
             connection.close()

-    def _begin_bulk_write_operation(self, course_id):
+    def _begin_bulk_operation(self, course_id):
         """
         Prevent updating the meta-data inheritance cache for the given course
         """
@@ -445,7 +445,7 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
         self.ignore_write_events_on_courses.courses.add(course_id)

-    def _end_bulk_write_operation(self, course_id):
+    def _end_bulk_operation(self, course_id):
         """
         Restart updating the meta-data inheritance cache for the given course.
         Refresh the meta-data inheritance cache now since it was temporarily disabled.
......
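For the old Mongo store, "beginning" a bulk operation just records the course in a thread-local set that the metadata-inheritance cache refresh consults, and "ending" it removes the entry and triggers the deferred refresh. A sketch of that thread-local flag pattern (`ignore_write_events_on_courses` is taken from the diff; the class and the refresh call placement are assumptions):

    import threading

    class MongoFlagSketch(object):
        def __init__(self):
            # thread-local so concurrent requests don't see each other's flags
            self.ignore_write_events_on_courses = threading.local()

        def _courses(self):
            # lazily create the per-thread set on first use
            if not hasattr(self.ignore_write_events_on_courses, 'courses'):
                self.ignore_write_events_on_courses.courses = set()
            return self.ignore_write_events_on_courses.courses

        def _begin_bulk_operation(self, course_id):
            self._courses().add(course_id)

        def _end_bulk_operation(self, course_id):
            self._courses().discard(course_id)
            # a real store would refresh the inheritance cache here,
            # now that updates are no longer suppressed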
@@ -68,8 +68,7 @@ def path_to_location(modulestore, usage_key):
                 newpath = (next_usage, path)
                 queue.append((parent, newpath))

-    # doesn't write but does multiple reads. bulk_write minimizes reads too
-    with modulestore.bulk_write_operations(usage_key.course_key):
+    with modulestore.bulk_operations(usage_key.course_key):
         if not modulestore.has_item(usage_key):
             raise ItemNotFoundError(usage_key)
......
@@ -55,7 +55,7 @@ class SplitMigrator(object):
         new_run = source_course_key.run
         new_course_key = CourseLocator(new_org, new_course, new_run, branch=ModuleStoreEnum.BranchName.published)

-        with self.split_modulestore.bulk_write_operations(new_course_key):
+        with self.split_modulestore.bulk_operations(new_course_key):
             new_fields = self._get_fields_translate_references(original_course, new_course_key, None)
             if fields:
                 new_fields.update(fields)
@@ -73,7 +73,7 @@ class SplitMigrator(object):
         # TODO: This should be merged back into the above transaction, but can't be until split.py
         # is refactored to have more coherent access patterns

-        with self.split_modulestore.bulk_write_operations(new_course_key):
+        with self.split_modulestore.bulk_operations(new_course_key):
             # create a new version for the drafts
             self._add_draft_modules_to_course(new_course.location, source_course_key, user_id, **kwargs)
@@ -84,7 +84,7 @@ class SplitMigrator(object):
         """
         Copy all of the modules from the 'direct' version of the course to the new split course.
         """
-        course_version_locator = new_course.id.replace(version_guid=None)
+        course_version_locator = new_course.id.version_agnostic()

         # iterate over published course elements. Wildcarding rather than descending b/c some elements are orphaned (e.g.,
         # course about pages, conditionals)
......
@@ -183,14 +183,14 @@ class BulkWriteRecord(object):
 class BulkWriteMixin(object):
     """
-    This implements the :meth:`bulk_write_operations` modulestore semantics for the :class:`SplitMongoModuleStore`.
+    This implements the :meth:`bulk_operations` modulestore semantics for the :class:`SplitMongoModuleStore`.

-    In particular, it implements :meth:`_begin_bulk_write_operation` and
-    :meth:`_end_bulk_write_operation` to provide the external interface, and then exposes a set of methods
+    In particular, it implements :meth:`_begin_bulk_operation` and
+    :meth:`_end_bulk_operation` to provide the external interface, and then exposes a set of methods
     for interacting with course_indexes and structures that can be used by :class:`SplitMongoModuleStore`.

     Internally, this mixin records the set of all active bulk operations (keyed on the active course),
-    and only writes those values to ``self.mongo_connection`` when :meth:`_end_bulk_write_operation` is called.
+    and only writes those values to ``self.mongo_connection`` when :meth:`_end_bulk_operation` is called.

     If a bulk write operation isn't active, then the changes are immediately written to the underlying
     mongo_connection.
     """
@@ -247,7 +247,7 @@ class BulkWriteMixin(object):
         else:
             del self._active_bulk_writes.records[course_key.replace(org=None, course=None, run=None, branch=None)]

-    def _begin_bulk_write_operation(self, course_key):
+    def _begin_bulk_operation(self, course_key):
         """
         Begin a bulk write operation on course_key.
         """
@@ -263,7 +263,7 @@ class BulkWriteMixin(object):
             # Ensure that any edits to the index don't pollute the initial_index
             bulk_write_record.index = copy.deepcopy(bulk_write_record.initial_index)

-    def _end_bulk_write_operation(self, course_key):
+    def _end_bulk_operation(self, course_key):
         """
         End the active bulk write operation on course_key.
         """
@@ -581,7 +581,7 @@ class SplitMongoModuleStore(BulkWriteMixin, ModuleStoreWriteBase):
             depth: how deep below these to prefetch
             lazy: whether to fetch definitions or use placeholders
         '''
-        with self.bulk_write_operations(course_key):
+        with self.bulk_operations(course_key):
             new_module_data = {}
             for block_id in base_block_ids:
                 new_module_data = self.descendants(
@@ -1209,7 +1209,7 @@ class SplitMongoModuleStore(BulkWriteMixin, ModuleStoreWriteBase):
         the course id'd by version_guid but instead in one w/ a new version_guid. Ensure in this case that you get
         the new version_guid from the locator in the returned object!
         """
-        with self.bulk_write_operations(course_key):
+        with self.bulk_operations(course_key):
             # split handles all the fields in one dict not separated by scope
             fields = fields or {}
             fields.update(kwargs.pop('metadata', {}) or {})
@@ -1295,7 +1295,7 @@ class SplitMongoModuleStore(BulkWriteMixin, ModuleStoreWriteBase):
             fields (dict): A dictionary specifying initial values for some or all fields
                 in the newly created block
         """
-        with self.bulk_write_operations(parent_usage_key.course_key):
+        with self.bulk_operations(parent_usage_key.course_key):
             xblock = self.create_item(
                 user_id, parent_usage_key.course_key, block_type, block_id=block_id, fields=fields,
                 **kwargs)
@@ -1471,7 +1471,7 @@ class SplitMongoModuleStore(BulkWriteMixin, ModuleStoreWriteBase):
             draft_structure = self._lookup_course(draft_version)['structure']
             locator = locator.replace(version_guid=new_id)

-        with self.bulk_write_operations(locator):
+        with self.bulk_operations(locator):
             self.update_structure(locator, draft_structure)
             index_entry = {
                 '_id': ObjectId(),
@@ -1520,7 +1520,7 @@ class SplitMongoModuleStore(BulkWriteMixin, ModuleStoreWriteBase):
         """
         Broke out guts of update_item for short-circuited internal use only
         """
-        with self.bulk_write_operations(course_key):
+        with self.bulk_operations(course_key):
             if allow_not_found and isinstance(block_id, (LocalId, NoneType)):
                 fields = {}
                 for subfields in partitioned_fields.itervalues():
@@ -1660,7 +1660,7 @@ class SplitMongoModuleStore(BulkWriteMixin, ModuleStoreWriteBase):
         """
         # find course_index entry if applicable and structures entry
         course_key = xblock.location.course_key
-        with self.bulk_write_operations(course_key):
+        with self.bulk_operations(course_key):
             index_entry = self._get_index_if_valid(course_key, force)
             structure = self._lookup_course(course_key)['structure']
             new_structure = self.version_structure(course_key, structure, user_id)
@@ -1794,8 +1794,8 @@ class SplitMongoModuleStore(BulkWriteMixin, ModuleStoreWriteBase):
             subtree but the ancestors up to and including the course root are not published.
         """
         # get the destination's index, and source and destination structures.
-        with self.bulk_write_operations(source_course):
-            with self.bulk_write_operations(destination_course):
+        with self.bulk_operations(source_course):
+            with self.bulk_operations(destination_course):
                 source_structure = self._lookup_course(source_course)['structure']
                 index_entry = self.get_course_index(destination_course)
                 if index_entry is None:
@@ -1871,7 +1871,7 @@ class SplitMongoModuleStore(BulkWriteMixin, ModuleStoreWriteBase):
             # The supplied UsageKey is of the wrong type, so it can't possibly be stored in this modulestore.
             raise ItemNotFoundError(usage_locator)

-        with self.bulk_write_operations(usage_locator.course_key):
+        with self.bulk_operations(usage_locator.course_key):
             original_structure = self._lookup_course(usage_locator.course_key)['structure']
             if original_structure['root'] == usage_locator.block_id:
                 raise ValueError("Cannot delete the root of a course")
......
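Note how the cross-course copy hunk above nests one `bulk_operations` per course key: bulk state is tracked per course, so work spanning two courses opens a context for each. The usage shape (illustrative; the body is elided):

    with store.bulk_operations(source_course):
        with store.bulk_operations(destination_course):
            # reads from the source and writes to the destination
            # are both batched until their contexts exit
            ...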
@@ -31,7 +31,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleStore):
         Returns: a CourseDescriptor
         """
         master_branch = kwargs.pop('master_branch', ModuleStoreEnum.BranchName.draft)
-        with self.bulk_write_operations(CourseLocator(org, course, run)):
+        with self.bulk_operations(CourseLocator(org, course, run)):
             item = super(DraftVersioningModuleStore, self).create_course(
                 org, course, run, user_id, master_branch=master_branch, **kwargs
             )
@@ -89,7 +89,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleStore):
     def update_item(self, descriptor, user_id, allow_not_found=False, force=False, **kwargs):
         descriptor.location = self._map_revision_to_branch(descriptor.location)
-        with self.bulk_write_operations(descriptor.location.course_key):
+        with self.bulk_operations(descriptor.location.course_key):
             item = super(DraftVersioningModuleStore, self).update_item(
                 descriptor,
                 user_id,
@@ -109,7 +109,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleStore):
         See :py:meth `ModuleStoreDraftAndPublished.create_item`
         """
         course_key = self._map_revision_to_branch(course_key)
-        with self.bulk_write_operations(course_key):
+        with self.bulk_operations(course_key):
             item = super(DraftVersioningModuleStore, self).create_item(
                 user_id, course_key, block_type, block_id=block_id,
                 definition_locator=definition_locator, fields=fields,
@@ -124,7 +124,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleStore):
         fields=None, **kwargs
     ):
         parent_usage_key = self._map_revision_to_branch(parent_usage_key)
-        with self.bulk_write_operations(parent_usage_key.course_key):
+        with self.bulk_operations(parent_usage_key.course_key):
             item = super(DraftVersioningModuleStore, self).create_child(
                 user_id, parent_usage_key, block_type, block_id=block_id,
                 fields=fields, **kwargs
@@ -146,7 +146,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleStore):
             currently only provided by contentstore.views.item.orphan_handler
         Otherwise, raises a ValueError.
         """
-        with self.bulk_write_operations(location.course_key):
+        with self.bulk_operations(location.course_key):
             if revision == ModuleStoreEnum.RevisionOption.published_only:
                 branches_to_delete = [ModuleStoreEnum.BranchName.published]
             elif revision == ModuleStoreEnum.RevisionOption.all:
@@ -274,7 +274,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleStore):
         Deletes the published version of the item.
         Returns the newly unpublished item.
         """
-        with self.bulk_write_operations(location.course_key):
+        with self.bulk_operations(location.course_key):
             self.delete_item(location, user_id, revision=ModuleStoreEnum.RevisionOption.published_only)
             return self.get_item(location.for_branch(ModuleStoreEnum.BranchName.draft), **kwargs)
@@ -357,7 +357,7 @@ class DraftVersioningModuleStore(ModuleStoreDraftAndPublished, SplitMongoModuleStore):
         """
         Split-based modulestores need to import published blocks to both branches
         """
-        with self.bulk_write_operations(course_key):
+        with self.bulk_operations(course_key):
             # hardcode course root block id
             if block_type == 'course':
                 block_id = self.DEFAULT_ROOT_BLOCK_ID
......
@@ -287,7 +287,7 @@ class ModuleStoreTestCase(TestCase):
             course_loc: the CourseKey for the created course
         """
         with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, None):
-            # with self.store.bulk_write_operations(self.store.make_course_key(org, course, run)):
+            # with self.store.bulk_operations(self.store.make_course_key(org, course, run)):
             course = self.store.create_course(org, course, run, self.user.id, fields=course_fields)
             self.course_loc = course.location
@@ -314,7 +314,7 @@ class ModuleStoreTestCase(TestCase):
         """
         Create an equivalent to the toy xml course
         """
-        # with self.store.bulk_write_operations(self.store.make_course_key(org, course, run)):
+        # with self.store.bulk_operations(self.store.make_course_key(org, course, run)):
         self.toy_loc = self.create_sample_course(
             org, course, run, TOY_BLOCK_INFO_TREE,
             {
......
@@ -127,7 +127,7 @@ class TestMixedModuleStore(unittest.TestCase):
         Create a course w/ one item in the persistence store using the given course & item location.
         """
         # create course
-        with self.store.bulk_write_operations(course_key):
+        with self.store.bulk_operations(course_key):
             self.course = self.store.create_course(course_key.org, course_key.course, course_key.run, self.user_id)
         if isinstance(self.course.id, CourseLocator):
             self.course_locations[self.MONGO_COURSEID] = self.course.location
@@ -189,7 +189,7 @@ class TestMixedModuleStore(unittest.TestCase):
                 create_sub_tree(block, tree)
             setattr(self, block_info.field_name, block.location)

-        with self.store.bulk_write_operations(self.course.id):
+        with self.store.bulk_operations(self.course.id):
             for tree in trees:
                 create_sub_tree(self.course, tree)
......
@@ -23,7 +23,7 @@ class TestPublish(SplitWMongoCourseBoostrapper):
         super(TestPublish, self)._create_course(split=False)  # 2 inserts (course and overview)

         # with bulk will delay all inheritance computations which won't be added into the mongo_calls
-        with self.draft_mongo.bulk_write_operations(self.old_course_key):
+        with self.draft_mongo.bulk_operations(self.old_course_key):
             # finds: 1 for parent to add child
             # sends: 1 for insert, 1 for parent (add child)
             with check_mongo_calls(1, 2):
......
@@ -1106,14 +1106,14 @@ class TestItemCrud(SplitModuleTest):
         chapter = modulestore().get_item(chapter_locator)
         self.assertIn(problem_locator, version_agnostic(chapter.children))

-    def test_create_bulk_write_operations(self):
+    def test_create_bulk_operations(self):
         """
-        Test create_item using bulk_write_operations
+        Test create_item using bulk_operations
         """
         # start transaction w/ simple creation
         user = random.getrandbits(32)
         course_key = CourseLocator('test_org', 'test_transaction', 'test_run')
-        with modulestore().bulk_write_operations(course_key):
+        with modulestore().bulk_operations(course_key):
             new_course = modulestore().create_course('test_org', 'test_transaction', 'test_run', user, BRANCH_NAME_DRAFT)
             new_course_locator = new_course.id
             index_history_info = modulestore().get_course_history_info(new_course.location.course_key)
@@ -1147,7 +1147,7 @@ class TestItemCrud(SplitModuleTest):
         )

         # start a new transaction
-        with modulestore().bulk_write_operations(course_key):
+        with modulestore().bulk_operations(course_key):
             new_ele = modulestore().create_child(
                 user, new_course.location, 'chapter',
                 fields={'display_name': 'chapter 2'},
......
@@ -36,10 +36,10 @@ class TestBulkWriteMixinPreviousTransaction(TestBulkWriteMixin):
     """
     def setUp(self):
         super(TestBulkWriteMixinPreviousTransaction, self).setUp()
-        self.bulk._begin_bulk_write_operation(self.course_key)
+        self.bulk._begin_bulk_operation(self.course_key)
         self.bulk.insert_course_index(self.course_key, MagicMock('prev-index-entry'))
         self.bulk.update_structure(self.course_key, {'this': 'is', 'the': 'previous structure', '_id': ObjectId()})
-        self.bulk._end_bulk_write_operation(self.course_key)
+        self.bulk._end_bulk_operation(self.course_key)
         self.conn.reset_mock()
         self.clear_cache.reset_mock()
......@@ -83,47 +83,47 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin):
self.assertCacheNotCleared()
def test_out_of_order_end(self):
# Calling _end_bulk_write_operation without a corresponding _begin...
# Calling _end_bulk_operation without a corresponding _begin...
# is a noop
self.bulk._end_bulk_write_operation(self.course_key)
self.bulk._end_bulk_operation(self.course_key)
def test_write_new_index_on_close(self):
self.conn.get_course_index.return_value = None
self.bulk._begin_bulk_write_operation(self.course_key)
self.bulk._begin_bulk_operation(self.course_key)
self.conn.reset_mock()
self.bulk.insert_course_index(self.course_key, self.index_entry)
self.assertConnCalls()
self.bulk._end_bulk_write_operation(self.course_key)
self.bulk._end_bulk_operation(self.course_key)
self.conn.insert_course_index.assert_called_once_with(self.index_entry)
def test_write_updated_index_on_close(self):
old_index = {'this': 'is', 'an': 'old index'}
self.conn.get_course_index.return_value = old_index
self.bulk._begin_bulk_write_operation(self.course_key)
self.bulk._begin_bulk_operation(self.course_key)
self.conn.reset_mock()
self.bulk.insert_course_index(self.course_key, self.index_entry)
self.assertConnCalls()
self.bulk._end_bulk_write_operation(self.course_key)
self.bulk._end_bulk_operation(self.course_key)
self.conn.update_course_index.assert_called_once_with(self.index_entry, from_index=old_index)
def test_write_structure_on_close(self):
self.conn.get_course_index.return_value = None
self.bulk._begin_bulk_write_operation(self.course_key)
self.bulk._begin_bulk_operation(self.course_key)
self.conn.reset_mock()
self.bulk.update_structure(self.course_key, self.structure)
self.assertConnCalls()
self.bulk._end_bulk_write_operation(self.course_key)
self.bulk._end_bulk_operation(self.course_key)
self.assertConnCalls(call.upsert_structure(self.structure))
def test_write_multiple_structures_on_close(self):
self.conn.get_course_index.return_value = None
self.bulk._begin_bulk_write_operation(self.course_key)
self.bulk._begin_bulk_operation(self.course_key)
self.conn.reset_mock()
self.bulk.update_structure(self.course_key.replace(branch='a'), self.structure)
other_structure = {'another': 'structure', '_id': ObjectId()}
self.bulk.update_structure(self.course_key.replace(branch='b'), other_structure)
self.assertConnCalls()
self.bulk._end_bulk_write_operation(self.course_key)
self.bulk._end_bulk_operation(self.course_key)
self.assertItemsEqual(
[call.upsert_structure(self.structure), call.upsert_structure(other_structure)],
self.conn.mock_calls
@@ -132,12 +132,12 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin):
     def test_write_index_and_structure_on_close(self):
         original_index = {'versions': {}}
         self.conn.get_course_index.return_value = copy.deepcopy(original_index)
-        self.bulk._begin_bulk_write_operation(self.course_key)
+        self.bulk._begin_bulk_operation(self.course_key)
         self.conn.reset_mock()
         self.bulk.update_structure(self.course_key, self.structure)
         self.bulk.insert_course_index(self.course_key, {'versions': {self.course_key.branch: self.structure['_id']}})
         self.assertConnCalls()
-        self.bulk._end_bulk_write_operation(self.course_key)
+        self.bulk._end_bulk_operation(self.course_key)
         self.assertConnCalls(
             call.upsert_structure(self.structure),
             call.update_course_index(
@@ -149,13 +149,13 @@ class TestBulkWriteMixinClosed(TestBulkWriteMixin):
     def test_write_index_and_multiple_structures_on_close(self):
         original_index = {'versions': {'a': ObjectId(), 'b': ObjectId()}}
         self.conn.get_course_index.return_value = copy.deepcopy(original_index)
-        self.bulk._begin_bulk_write_operation(self.course_key)
+        self.bulk._begin_bulk_operation(self.course_key)
         self.conn.reset_mock()
         self.bulk.update_structure(self.course_key.replace(branch='a'), self.structure)
         other_structure = {'another': 'structure', '_id': ObjectId()}
         self.bulk.update_structure(self.course_key.replace(branch='b'), other_structure)
         self.bulk.insert_course_index(self.course_key, {'versions': {'a': self.structure['_id'], 'b': other_structure['_id']}})
-        self.bulk._end_bulk_write_operation(self.course_key)
+        self.bulk._end_bulk_operation(self.course_key)
         self.assertItemsEqual(
             [
                 call.upsert_structure(self.structure),
@@ -251,7 +251,7 @@ class TestBulkWriteMixinFindMethods(TestBulkWriteMixin):
         db_indexes = [Mock(name='from_db')]
         for n, index in enumerate(matching + unmatching):
             course_key = CourseLocator('org', 'course', 'run{}'.format(n))
-            self.bulk._begin_bulk_write_operation(course_key)
+            self.bulk._begin_bulk_operation(course_key)
             self.bulk.insert_course_index(course_key, index)

         expected = matching + db_indexes
@@ -283,7 +283,7 @@ class TestBulkWriteMixinFindMethods(TestBulkWriteMixin):
         db_structures = [db_structure(_id) for _id in db_ids if _id not in active_ids]
         for n, _id in enumerate(active_ids):
             course_key = CourseLocator('org', 'course', 'run{}'.format(n))
-            self.bulk._begin_bulk_write_operation(course_key)
+            self.bulk._begin_bulk_operation(course_key)
             self.bulk.update_structure(course_key, active_structure(_id))

         self.conn.find_structures_by_id.return_value = db_structures
@@ -332,7 +332,7 @@ class TestBulkWriteMixinFindMethods(TestBulkWriteMixin):
         active_structures = []
         for n, _id in enumerate(active_ids):
             course_key = CourseLocator('org', 'course', 'run{}'.format(n))
-            self.bulk._begin_bulk_write_operation(course_key)
+            self.bulk._begin_bulk_operation(course_key)
             structure = active_structure(_id)
             self.bulk.update_structure(course_key, structure)
             active_structures.append(structure)
@@ -392,7 +392,7 @@ class TestBulkWriteMixinFindMethods(TestBulkWriteMixin):
         for n, structure in enumerate(active_match + active_unmatch):
             course_key = CourseLocator('org', 'course', 'run{}'.format(n))
-            self.bulk._begin_bulk_write_operation(course_key)
+            self.bulk._begin_bulk_operation(course_key)
             self.bulk.update_structure(course_key, structure)

         self.conn.find_ancestor_structures.return_value = db_match + db_unmatch
@@ -407,7 +407,7 @@ class TestBulkWriteMixinOpen(TestBulkWriteMixin):
     """
     def setUp(self):
         super(TestBulkWriteMixinOpen, self).setUp()
-        self.bulk._begin_bulk_write_operation(self.course_key)
+        self.bulk._begin_bulk_operation(self.course_key)

     @ddt.data('deadbeef1234' * 2, u'deadbeef1234' * 2, ObjectId())
     def test_read_structure_without_write_from_db(self, version_guid):
@@ -512,7 +512,7 @@ class TestBulkWriteMixinOpen(TestBulkWriteMixin):
         index_copy = copy.deepcopy(index)
         index_copy['versions']['draft'] = index['versions']['published']
         self.bulk.update_course_index(self.course_key, index_copy)
-        self.bulk._end_bulk_write_operation(self.course_key)
+        self.bulk._end_bulk_operation(self.course_key)
         self.conn.upsert_structure.assert_called_once_with(published_structure)
         self.conn.update_course_index.assert_called_once_with(index_copy, from_index=self.conn.get_course_index.return_value)
         self.conn.get_course_index.assert_called_once_with(self.course_key)
......
@@ -206,7 +206,7 @@ def import_from_xml(
             )
             continue

-        with store.bulk_write_operations(dest_course_id):
+        with store.bulk_operations(dest_course_id):
             source_course = xml_module_store.get_course(course_key)
             # STEP 1: find and import course module
             course, course_data_path = _import_course_module(
......
@@ -110,7 +110,7 @@ class FieldDataCache(object):
             return descriptors

-        with modulestore().bulk_write_operations(descriptor.location.course_key):
+        with modulestore().bulk_operations(descriptor.location.course_key):
             descriptors = get_child_descriptors(descriptor, depth, descriptor_filter)

         return FieldDataCache(descriptors, course_id, user, select_for_update)
......
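The last two hunks show why the name dropped "write": both XML import (write-heavy) and the `FieldDataCache` descriptor prefetch (read-heavy) wrap their work in the same context manager, and as the removed `path_to_location` comment noted, the batching minimizes reads too. A sketch of the read-only use (the helper function is an assumption; `get_item` and `get_children` are the standard modulestore/XBlock calls):

    def prefetch_children(store, course_key, root_usage_key):
        # Even with no writes, the store can reuse one cached course
        # structure for every lookup inside the block.
        with store.bulk_operations(course_key):
            root = store.get_item(root_usage_key, depth=None)
            return list(root.get_children())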