Commit 3cf75198 by Don Mitchell

Merge pull request #1954 from edx/dhm/loc_mapper_memoize

loc mapper memoize
parents 7bf68907 9ad7a1c1
@@ -17,6 +17,11 @@ Common: Switch from mitx.db to edx.db for sqlite databases. This will effectivel
 reset state for local instances of the code, unless you manually rename your
 mitx.db file to edx.db.

+Common: significant performance improvement for authorization checks and location translations.
+Ensure all auth checks check all possible permutations of the auth key (Instructor dashboard
+now shows when it should for all courses in lms).
+Made queries for Studio dashboard 2 orders of magnitude faster (and fewer).
+
 Blades: Video Transcripts: Fix clear and download buttons. BLD-438.

 Common: Switch over from MITX_FEATURES to just FEATURES. To override items in
...
@@ -63,7 +63,7 @@ def get_all_course_role_groupnames(location, role, use_filter=True):
     # filter to the ones which exist
     default = groupnames[0]
     if use_filter:
-        groupnames = [group for group in groupnames if Group.objects.filter(name=group).exists()]
+        groupnames = [group.name for group in Group.objects.filter(name__in=groupnames)]
     return groupnames, default
@@ -203,12 +203,8 @@ def remove_user_from_course_group(caller, user, location, role):
     # see if the user is actually in that role, if not then we don't have to do anything
     groupnames, _ = get_all_course_role_groupnames(location, role)
-    for groupname in groupnames:
-        groups = user.groups.filter(name=groupname)
-        if groups:
-            # will only be one with that name
-            user.groups.remove(groups[0])
-            user.save()
+    user.groups.remove(*user.groups.filter(name__in=groupnames))
+    user.save()

 def remove_user_from_creator_group(caller, user):
@@ -243,7 +239,7 @@ def is_user_in_course_group_role(user, location, role, check_staff=True):
         if check_staff and user.is_staff:
             return True
         groupnames, _ = get_all_course_role_groupnames(location, role)
-        return any(user.groups.filter(name=groupname).exists() for groupname in groupnames)
+        return user.groups.filter(name__in=groupnames).exists()
     return False
@@ -266,7 +262,7 @@ def is_user_in_creator_group(user):
     # Feature flag for using the creator group setting. Will be removed once the feature is complete.
     if settings.FEATURES.get('ENABLE_CREATOR_GROUP', False):
-        return user.groups.filter(name=COURSE_CREATOR_GROUP_NAME).count() > 0
+        return user.groups.filter(name=COURSE_CREATOR_GROUP_NAME).exists()
     return True
...
@@ -7,7 +7,6 @@ from textwrap import dedent
 from django.test.utils import override_settings
 from django.conf import settings
-from django.core.urlresolvers import reverse
 from path import path
 from tempdir import mkdtemp_clean
 from fs.osfs import OSFS
@@ -427,7 +426,7 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
         course = module_store.get_item(course_location)
         num_tabs = len(course.tabs)
-        last_tab = course.tabs[num_tabs - 1]
+        last_tab = course.tabs[-1]
         url_slug = last_tab['url_slug']
         delete_url = self._get_tab_locator(course, last_tab).url_reverse('xblock')
@@ -446,7 +445,7 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
     def _get_tab_locator(self, course, tab):
         """ Returns the locator for a given tab. """
-        tab_location = 'i4x://MITx/999/static_tab/{0}'.format(tab['url_slug'])
+        tab_location = 'i4x://edX/999/static_tab/{0}'.format(tab['url_slug'])
         return loc_mapper().translate_location(
             course.location.course_id, Location(tab_location), False, True
         )
...
@@ -128,7 +128,8 @@ def course_listing(request):
     """
     List all courses available to the logged in user
     """
-    courses = modulestore('direct').get_items(Location('i4x', None, None, 'course', None))
+    # there's an index on category which will be used if none of its antecedents are set
+    courses = modulestore('direct').get_items(Location(None, None, None, 'course', None))

     # filter out courses that we don't have access too
     def course_filter(course):
...
@@ -129,7 +129,12 @@ CACHES = {
         'LOCATION': '/var/tmp/mongo_metadata_inheritance',
         'TIMEOUT': 300,
         'KEY_FUNCTION': 'util.memcache.safe_key',
-    }
+    },
+    'loc_cache': {
+        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+        'LOCATION': 'edx_location_mem_cache',
+    },
 }

 # Make the keyedcache startup warnings go away
...
@@ -138,7 +138,12 @@ CACHES = {
         'LOCATION': '/var/tmp/mongo_metadata_inheritance',
         'TIMEOUT': 300,
         'KEY_FUNCTION': 'util.memcache.safe_key',
-    }
+    },
+    'loc_cache': {
+        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+        'LOCATION': 'edx_location_mem_cache',
+    },
 }

 # hide ratelimit warnings while running tests
...
@@ -129,8 +129,12 @@ def loc_mapper():
     global _loc_singleton
     # pylint: disable=W0212
     if _loc_singleton is None:
+        try:
+            loc_cache = get_cache('loc_cache')
+        except InvalidCacheBackendError:
+            loc_cache = get_cache('default')
         # instantiate
-        _loc_singleton = LocMapperStore(**settings.DOC_STORE_CONFIG)
+        _loc_singleton = LocMapperStore(loc_cache, **settings.DOC_STORE_CONFIG)
     # inject into split mongo modulestore
     if 'split' in _MODULESTORES:
         _MODULESTORES['split'].loc_mapper = _loc_singleton
...
@@ -4,10 +4,10 @@ Method for converting among our differing Location/Locator whatever reprs
 from random import randint
 import re
 import pymongo
+import bson.son

 from xmodule.modulestore.exceptions import InvalidLocationError, ItemNotFoundError, DuplicateItemError
 from xmodule.modulestore.locator import BlockUsageLocator
-from xmodule.modulestore.mongo import draft
 from xmodule.modulestore import Location
 import urllib
@@ -28,7 +28,7 @@ class LocMapperStore(object):
     '''
     def __init__(
-        self, host, db, collection, port=27017, user=None, password=None,
+        self, cache, host, db, collection, port=27017, user=None, password=None,
         **kwargs
     ):
         '''
@@ -48,6 +48,7 @@ class LocMapperStore(object):
         self.location_map = self.db[collection + '.location_map']
         self.location_map.write_concern = {'w': 1}
+        self.cache = cache

     # location_map functions
     def create_map_entry(self, course_location, course_id=None, draft_branch='draft', prod_branch='published',
@@ -91,9 +92,10 @@ class LocMapperStore(object):
         else:
             course_id = "{0.org}.{0.course}".format(course_location)
         # very like _interpret_location_id but w/o the _id
-        location_id = {'org': course_location.org, 'course': course_location.course}
-        if course_location.category == 'course':
-            location_id['name'] = course_location.name
+        location_id = self._construct_location_son(
+            course_location.org, course_location.course,
+            course_location.name if course_location.category == 'course' else None
+        )

         self.location_map.insert({
             '_id': location_id,
@@ -127,26 +129,33 @@ class LocMapperStore(object):
         of locations including course.
         """
         location_id = self._interpret_location_course_id(old_style_course_id, location)
+        if old_style_course_id is None:
+            old_style_course_id = self._generate_location_course_id(location_id)

-        maps = self.location_map.find(location_id).sort('_id.name', pymongo.ASCENDING)
-        if maps.count() == 0:
+        cached_value = self._get_locator_from_cache(old_style_course_id, location, published)
+        if cached_value:
+            return cached_value
+
+        maps = self.location_map.find(location_id)
+        maps = list(maps)
+        if len(maps) == 0:
             if add_entry_if_missing:
                 # create a new map
-                course_location = location.replace(category='course', name=location_id['_id.name'])
+                course_location = location.replace(category='course', name=location_id['_id']['name'])
                 self.create_map_entry(course_location)
                 entry = self.location_map.find_one(location_id)
             else:
                 raise ItemNotFoundError()
-        elif maps.count() > 1:
-            # if more than one, prefer the one w/o a name if that exists. Otherwise, choose the first (alphabetically)
+        elif len(maps) == 1:
             entry = maps[0]
         else:
+            # find entry w/o name, if any; otherwise, pick arbitrary
             entry = maps[0]
-
-        if published:
-            branch = entry['prod_branch']
-        else:
-            branch = entry['draft_branch']
+            for item in maps:
+                if 'name' not in item['_id']:
+                    entry = item
+                    break

         usage_id = entry['block_map'].get(self._encode_for_mongo(location.name))
         if usage_id is None:
@@ -165,7 +174,18 @@ class LocMapperStore(object):
         else:
             raise InvalidLocationError()
-        return BlockUsageLocator(course_id=entry['course_id'], branch=branch, usage_id=usage_id)
+
+        published_usage = BlockUsageLocator(
+            course_id=entry['course_id'], branch=entry['prod_branch'], usage_id=usage_id)
+        draft_usage = BlockUsageLocator(
+            course_id=entry['course_id'], branch=entry['draft_branch'], usage_id=usage_id)
+        if published:
+            result = published_usage
+        else:
+            result = draft_usage
+        self._cache_location_map_entry(old_style_course_id, location, published_usage, draft_usage)
+        return result

     def translate_locator_to_location(self, locator, get_course=False):
         """
@@ -185,161 +205,51 @@ class LocMapperStore(object):
         :param locator: a BlockUsageLocator
         """
+        if get_course:
+            cached_value = self._get_course_location_from_cache(locator.course_id)
+        else:
+            cached_value = self._get_location_from_cache(locator)
+        if cached_value:
+            return cached_value
+
         # This does not require that the course exist in any modulestore
         # only that it has a mapping entry.
         maps = self.location_map.find({'course_id': locator.course_id})
         # look for one which maps to this block usage_id
         if maps.count() == 0:
             return None
+
+        result = None
         for candidate in maps:
+            old_course_id = self._generate_location_course_id(candidate['_id'])
             for old_name, cat_to_usage in candidate['block_map'].iteritems():
                 for category, usage_id in cat_to_usage.iteritems():
-                    if get_course:
-                        if category == 'course':
-                            return Location(
-                                'i4x',
-                                candidate['_id']['org'],
-                                candidate['_id']['course'],
-                                'course',
-                                self._decode_from_mongo(old_name),
-                                None)
-                    elif usage_id == locator.usage_id:
-                        # Always return revision=None because the
-                        # old draft module store wraps locations as draft before
-                        # trying to access things.
-                        return Location(
-                            'i4x',
-                            candidate['_id']['org'],
-                            candidate['_id']['course'],
-                            category,
-                            self._decode_from_mongo(old_name),
-                            None)
+                    # cache all entries and then figure out if we have the one we want
+                    # Always return revision=None because the
+                    # old draft module store wraps locations as draft before
+                    # trying to access things.
+                    location = Location(
+                        'i4x',
+                        candidate['_id']['org'],
+                        candidate['_id']['course'],
+                        category,
+                        self._decode_from_mongo(old_name),
+                        None)
+                    published_locator = BlockUsageLocator(
+                        candidate['course_id'], branch=candidate['prod_branch'], usage_id=usage_id
+                    )
+                    draft_locator = BlockUsageLocator(
+                        candidate['course_id'], branch=candidate['draft_branch'], usage_id=usage_id
+                    )
+                    self._cache_location_map_entry(old_course_id, location, published_locator, draft_locator)
+
+                    if get_course and category == 'course':
+                        result = location
+                    elif not get_course and usage_id == locator.usage_id:
+                        result = location
+
+        if result is not None:
+            return result
         return None
def add_block_location_translator(self, location, old_course_id=None, usage_id=None):
"""
Similar to translate_location which adds an entry if none is found, but this cannot create a new
course mapping entry, only a block within such a mapping entry. If it finds no existing
course maps, it raises ItemNotFoundError.
In the case that there are more than one mapping record for the course identified by location, this
method adds the mapping to all matching records! (translate_location only adds to one)
It allows the caller to specify
the new-style usage_id for the target rather than having the translate concoct its own.
If the provided usage_id already exists in one of the found maps for the org/course, this function
raises DuplicateItemError unless the old item id == the new one.
If the caller does not provide a usage_id and there exists an entry in one of the course variants,
it will use that entry. If more than one variant uses conflicting entries, it will raise DuplicateItemError.
Returns the usage_id used in the mapping
:param location: a fully specified Location
:param old_course_id: the old-style org/course or org/course/run string (optional)
:param usage_id: the desired new block_id. If left as None, this will generate one as per translate_location
"""
location_id = self._interpret_location_course_id(old_course_id, location)
maps = self.location_map.find(location_id)
if maps.count() == 0:
raise ItemNotFoundError()
# turn maps from cursor to list
map_list = list(maps)
encoded_location_name = self._encode_for_mongo(location.name)
# check whether there's already a usage_id for this location (and it agrees w/ any passed in or found)
for map_entry in map_list:
if (encoded_location_name in map_entry['block_map'] and
location.category in map_entry['block_map'][encoded_location_name]):
if usage_id is None:
usage_id = map_entry['block_map'][encoded_location_name][location.category]
elif usage_id != map_entry['block_map'][encoded_location_name][location.category]:
raise DuplicateItemError(usage_id, self, 'location_map')
computed_usage_id = usage_id
# update the maps (and generate a usage_id if it's not been set yet)
for map_entry in map_list:
if computed_usage_id is None:
computed_usage_id = self._add_to_block_map(location, location_id, map_entry['block_map'])
elif (encoded_location_name not in map_entry['block_map'] or
location.category not in map_entry['block_map'][encoded_location_name]):
alt_usage_id = self._verify_uniqueness(computed_usage_id, map_entry['block_map'])
if alt_usage_id != computed_usage_id:
if usage_id is not None:
raise DuplicateItemError(usage_id, self, 'location_map')
else:
# revise already set ones and add to remaining ones
computed_usage_id = self.update_block_location_translator(
location,
alt_usage_id,
old_course_id,
True
)
map_entry['block_map'].setdefault(encoded_location_name, {})[location.category] = computed_usage_id
self.location_map.update({'_id': map_entry['_id']}, {'$set': {'block_map': map_entry['block_map']}})
return computed_usage_id
def update_block_location_translator(self, location, usage_id, old_course_id=None, autogenerated_usage_id=False):
"""
Update all existing maps from location's block to the new usage_id. Used for changing the usage_id,
thus the usage_id is required.
Returns the usage_id. (which is primarily useful in the case of autogenerated_usage_id)
:param location: a fully specified Location
:param usage_id: the desired new block_id.
:param old_course_id: the old-style org/course or org/course/run string (optional)
:param autogenerated_usage_id: a flag used mostly for internal calls to indicate that this usage_id
was autogenerated and thus can be overridden if it's not unique. If you set this flag, the stored
usage_id may not be the one you submitted.
"""
location_id = self._interpret_location_course_id(old_course_id, location)
maps = self.location_map.find(location_id)
encoded_location_name = self._encode_for_mongo(location.name)
for map_entry in maps:
# handle noop of renaming to same name
if (encoded_location_name in map_entry['block_map'] and
map_entry['block_map'][encoded_location_name].get(location.category) == usage_id):
continue
alt_usage_id = self._verify_uniqueness(usage_id, map_entry['block_map'])
if alt_usage_id != usage_id:
if autogenerated_usage_id:
# revise already set ones and add to remaining ones
usage_id = self.update_block_location_translator(location, alt_usage_id, old_course_id, True)
return usage_id
else:
raise DuplicateItemError(usage_id, self, 'location_map')
if location.category in map_entry['block_map'].setdefault(encoded_location_name, {}):
map_entry['block_map'][encoded_location_name][location.category] = usage_id
self.location_map.update({'_id': map_entry['_id']}, {'$set': {'block_map': map_entry['block_map']}})
return usage_id
def delete_block_location_translator(self, location, old_course_id=None):
"""
Remove all existing maps from location's block.
:param location: a fully specified Location
:param old_course_id: the old-style org/course or org/course/run string (optional)
"""
location_id = self._interpret_location_course_id(old_course_id, location)
maps = self.location_map.find(location_id)
encoded_location_name = self._encode_for_mongo(location.name)
for map_entry in maps:
if location.category in map_entry['block_map'].setdefault(encoded_location_name, {}):
if len(map_entry['block_map'][encoded_location_name]) == 1:
del map_entry['block_map'][encoded_location_name]
else:
del map_entry['block_map'][encoded_location_name][location.category]
self.location_map.update({'_id': map_entry['_id']}, {'$set': {'block_map': map_entry['block_map']}})
     def _add_to_block_map(self, location, location_id, block_map):
         '''add the given location to the block_map and persist it'''
         if self._block_id_is_guid(location.name):
@@ -357,7 +267,7 @@ class LocMapperStore(object):
     def _interpret_location_course_id(self, course_id, location):
         """
-        Take the old style course id (org/course/run) and return a dict for querying the mapping table.
+        Take the old style course id (org/course/run) and return a dict w/ a SON for querying the mapping table.
         If the course_id is empty, it uses location, but this may result in an inadequate id.

         :param course_id: old style 'org/course/run' id from Location.course_id where Location.category = 'course'
@@ -367,12 +277,35 @@ class LocMapperStore(object):
         if course_id:
             # re doesn't allow ?P<_id.org> and ilk
             matched = re.match(r'([^/]+)/([^/]+)/([^/]+)', course_id)
-            return dict(zip(['_id.org', '_id.course', '_id.name'], matched.groups()))
-        location_id = {'_id.org': location.org, '_id.course': location.course}
+            return {'_id': self._construct_location_son(*matched.groups())}
         if location.category == 'course':
-            location_id['_id.name'] = location.name
-        return location_id
+            return {'_id': self._construct_location_son(location.org, location.course, location.name)}
+        else:
+            return bson.son.SON([('_id.org', location.org), ('_id.course', location.course)])
def _generate_location_course_id(self, entry_id):
"""
Generate a Location course_id for the given entry's id
"""
# strip id envelope if any
entry_id = entry_id.get('_id', entry_id)
if entry_id.get('name', False):
return '{0[org]}/{0[course]}/{0[name]}'.format(entry_id)
elif entry_id.get('_id.org', False):
# the odd format one
return '{0[_id.org]}/{0[_id.course]}'.format(entry_id)
else:
return '{0[org]}/{0[course]}'.format(entry_id)
def _construct_location_son(self, org, course, name=None):
"""
Construct the SON needed to repr the location for either a query or an insertion
"""
if name:
return bson.son.SON([('org', org), ('course', course), ('name', name)])
else:
return bson.son.SON([('org', org), ('course', course)])
     def _block_id_is_guid(self, name):
         """
@@ -413,3 +346,41 @@ class LocMapperStore(object):
         """
         return urllib.unquote(fieldname)
def _get_locator_from_cache(self, old_course_id, location, published):
"""
See if the location x published pair is in the cache. If so, return the mapped locator.
"""
entry = self.cache.get('{}+{}'.format(old_course_id, location.url()))
if entry is not None:
if published:
return entry[0]
else:
return entry[1]
return None
def _get_location_from_cache(self, locator):
"""
See if the locator is in the cache. If so, return the mapped location.
"""
return self.cache.get(unicode(locator))
def _get_course_location_from_cache(self, locator_course_id):
"""
See if the course_id is in the cache. If so, return the mapped location to the
course root.
"""
return self.cache.get('courseId+{}'.format(locator_course_id))
def _cache_location_map_entry(self, old_course_id, location, published_usage, draft_usage):
"""
Cache the mapping from location to the draft and published Locators in entry.
Also caches the inverse. If the location is category=='course', it caches it for
the get_course query
"""
setmany = {}
if location.category == 'course':
setmany['courseId+{}'.format(published_usage.course_id)] = location
setmany[unicode(published_usage)] = location
setmany[unicode(draft_usage)] = location
setmany['{}+{}'.format(old_course_id, location.url())] = (published_usage, draft_usage)
self.cache.set_many(setmany)
@@ -7,8 +7,9 @@ import unittest
 import uuid
 from xmodule.modulestore import Location
 from xmodule.modulestore.locator import BlockUsageLocator
-from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateItemError
+from xmodule.modulestore.exceptions import ItemNotFoundError
 from xmodule.modulestore.loc_mapper_store import LocMapperStore
+from mock import Mock

 class TestLocationMapper(unittest.TestCase):
@@ -23,8 +24,10 @@ class TestLocationMapper(unittest.TestCase):
             'collection': 'modulestore{0}'.format(uuid.uuid4().hex[:5]),
         }
+        cache_standin = TrivialCache()
+        self.instrumented_cache = Mock(spec=cache_standin, wraps=cache_standin)
         # pylint: disable=W0142
-        TestLocationMapper.loc_store = LocMapperStore(**modulestore_options)
+        TestLocationMapper.loc_store = LocMapperStore(self.instrumented_cache, **modulestore_options)

     def tearDown(self):
         dbref = TestLocationMapper.loc_store.db
@@ -36,8 +39,9 @@ class TestLocationMapper(unittest.TestCase):
         org = 'foo_org'
         course = 'bar_course'
         loc_mapper().create_map_entry(Location('i4x', org, course, 'course', 'baz_run'))
+        # pylint: disable=protected-access
         entry = loc_mapper().location_map.find_one({
-            '_id': {'org': org, 'course': course, 'name': 'baz_run'}
+            '_id': loc_mapper()._construct_location_son(org, course, 'baz_run')
         })
         self.assertIsNotNone(entry, "Didn't find entry")
         self.assertEqual(entry['course_id'], '{}.{}.baz_run'.format(org, course))
@@ -48,8 +52,9 @@ class TestLocationMapper(unittest.TestCase):
         # ensure create_entry does the right thing when not given a course (creates org/course
         # rather than org/course/run course_id)
         loc_mapper().create_map_entry(Location('i4x', org, course, 'vertical', 'baz_vert'))
+        # find the one which has no name
         entry = loc_mapper().location_map.find_one({
-            '_id': {'org': org, 'course': course}
+            '_id' : loc_mapper()._construct_location_son(org, course, None)
         })
         self.assertIsNotNone(entry, "Didn't find entry")
         self.assertEqual(entry['course_id'], '{}.{}'.format(org, course))
@@ -63,15 +68,28 @@ class TestLocationMapper(unittest.TestCase):
             'wip',
             'live',
             block_map)
-        entry = loc_mapper().location_map.find_one({
-            '_id': {'org': org, 'course': course}
-        })
+        entry = loc_mapper().location_map.find_one({'_id.org': org, '_id.course': course})
         self.assertIsNotNone(entry, "Didn't find entry")
         self.assertEqual(entry['course_id'], 'foo_org.geek_dept.quux_course.baz_run')
         self.assertEqual(entry['draft_branch'], 'wip')
         self.assertEqual(entry['prod_branch'], 'live')
         self.assertEqual(entry['block_map'], block_map)
def translate_n_check(self, location, old_style_course_id, new_style_course_id, usage_id, branch, add_entry=False):
"""
Request translation, check course_id, usage_id, and branch
"""
prob_locator = loc_mapper().translate_location(
old_style_course_id,
location,
published= (branch=='published'),
add_entry_if_missing=add_entry
)
self.assertEqual(prob_locator.course_id, new_style_course_id)
self.assertEqual(prob_locator.usage_id, usage_id)
self.assertEqual(prob_locator.branch, branch)
     def test_translate_location_read_only(self):
         """
         Test the variants of translate_location which don't create entries, just decode
@@ -88,83 +106,67 @@ class TestLocationMapper(unittest.TestCase):
         )
         new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course)
-        block_map = {'abc123': {'problem': 'problem2'}}
+        block_map = {
+            'abc123': {'problem': 'problem2'},
+            'def456': {'problem': 'problem4'},
+            'ghi789': {'problem': 'problem7'},
+        }
         loc_mapper().create_map_entry(
             Location('i4x', org, course, 'course', 'baz_run'),
             new_style_course_id,
             block_map=block_map
         )
+        test_problem_locn = Location('i4x', org, course, 'problem', 'abc123')
         # only one course matches
-        prob_locator = loc_mapper().translate_location(
-            old_style_course_id,
-            Location('i4x', org, course, 'problem', 'abc123'),
-            add_entry_if_missing=False
-        )
-        self.assertEqual(prob_locator.course_id, new_style_course_id)
-        self.assertEqual(prob_locator.branch, 'published')
-        self.assertEqual(prob_locator.usage_id, 'problem2')
-        # look for w/ only the Location (works b/c there's only one possible course match)
-        prob_locator = loc_mapper().translate_location(
-            None,
-            Location('i4x', org, course, 'problem', 'abc123'),
-            add_entry_if_missing=False
-        )
-        self.assertEqual(prob_locator.course_id, new_style_course_id)
+        self.translate_n_check(test_problem_locn, old_style_course_id, new_style_course_id, 'problem2', 'published')
+        # look for w/ only the Location (works b/c there's only one possible course match). Will force
+        # cache as default translation for this problemid
+        self.translate_n_check(test_problem_locn, None, new_style_course_id, 'problem2', 'published')
         # look for non-existent problem
         with self.assertRaises(ItemNotFoundError):
-            prob_locator = loc_mapper().translate_location(
+            loc_mapper().translate_location(
                 None,
                 Location('i4x', org, course, 'problem', '1def23'),
                 add_entry_if_missing=False
             )
-        # add a distractor course
-        block_map = {'abc123': {'problem': 'problem3'}}
+        # add a distractor course (note that abc123 has a different translation in this one)
+        distractor_block_map = {
+            'abc123': {'problem': 'problem3'},
+            'def456': {'problem': 'problem4'},
+            'ghi789': {'problem': 'problem7'},
+        }
+        test_delta_new_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')
+        test_delta_old_id = '{}/{}/{}'.format(org, course, 'delta_run')
         loc_mapper().create_map_entry(
             Location('i4x', org, course, 'course', 'delta_run'),
-            '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'),
-            block_map=block_map
+            test_delta_new_id,
+            block_map=distractor_block_map
         )
-        prob_locator = loc_mapper().translate_location(
-            old_style_course_id,
-            Location('i4x', org, course, 'problem', 'abc123'),
-            add_entry_if_missing=False
-        )
-        self.assertEqual(prob_locator.course_id, new_style_course_id)
-        self.assertEqual(prob_locator.usage_id, 'problem2')
-        # look for w/ only the Location (not unique; so, just verify it returns something)
+        # test that old translation still works
+        self.translate_n_check(test_problem_locn, old_style_course_id, new_style_course_id, 'problem2', 'published')
+        # and new returns new id
+        self.translate_n_check(test_problem_locn, test_delta_old_id, test_delta_new_id, 'problem3', 'published')
+        # look for default translation of uncached Location (not unique; so, just verify it returns something)
         prob_locator = loc_mapper().translate_location(
             None,
-            Location('i4x', org, course, 'problem', 'abc123'),
+            Location('i4x', org, course, 'problem', 'def456'),
             add_entry_if_missing=False
         )
         self.assertIsNotNone(prob_locator, "couldn't find ambiguous location")
-        # add a default course pointing to the delta_run
+        # make delta_run default course: anything not cached using None as old_course_id will use this
         loc_mapper().create_map_entry(
             Location('i4x', org, course, 'problem', '789abc123efg456'),
-            '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'),
+            test_delta_new_id,
             block_map=block_map
         )
-        # now the ambiguous query should return delta
-        prob_locator = loc_mapper().translate_location(
-            None,
-            Location('i4x', org, course, 'problem', 'abc123'),
-            add_entry_if_missing=False
-        )
-        self.assertEqual(prob_locator.course_id, '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'))
-        self.assertEqual(prob_locator.usage_id, 'problem3')
+        # now an uncached ambiguous query should return delta
+        test_unused_locn = Location('i4x', org, course, 'problem', 'ghi789')
+        self.translate_n_check(test_unused_locn, None, test_delta_new_id, 'problem7', 'published')
         # get the draft one (I'm sorry this is getting long)
-        prob_locator = loc_mapper().translate_location(
-            None,
-            Location('i4x', org, course, 'problem', 'abc123'),
-            published=False,
-            add_entry_if_missing=False
-        )
-        self.assertEqual(prob_locator.course_id, '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'))
-        self.assertEqual(prob_locator.usage_id, 'problem3')
-        self.assertEqual(prob_locator.branch, 'draft')
+        self.translate_n_check(test_unused_locn, None, test_delta_new_id, 'problem7', 'draft')

     def test_translate_location_dwim(self):
         """
@@ -176,48 +178,39 @@ class TestLocationMapper(unittest.TestCase):
         old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run')
         problem_name = 'abc123abc123abc123abc123abc123f9'
         location = Location('i4x', org, course, 'problem', problem_name)
-        prob_locator = loc_mapper().translate_location(
-            old_style_course_id,
-            location,
-            add_entry_if_missing=True
-        )
         new_style_course_id = '{}.{}.{}'.format(org, course, 'baz_run')
-        self.assertEqual(prob_locator.course_id, new_style_course_id)
-        self.assertEqual(prob_locator.branch, 'published')
-        self.assertEqual(prob_locator.usage_id, 'problemabc')
-        # look for w/ only the Location (works b/c there's only one possible course match)
-        prob_locator = loc_mapper().translate_location(
-            None,
-            location,
-            add_entry_if_missing=True
-        )
-        self.assertEqual(prob_locator.course_id, new_style_course_id)
+        self.translate_n_check(location, old_style_course_id, new_style_course_id, 'problemabc', 'published', True)
+        # look for w/ only the Location (works b/c there's only one possible course match): causes cache
+        self.translate_n_check(location, None, new_style_course_id, 'problemabc', 'published', True)
         # create an entry w/o a guid name
         other_location = Location('i4x', org, course, 'chapter', 'intro')
-        locator = loc_mapper().translate_location(
-            old_style_course_id,
-            other_location,
-            add_entry_if_missing=True
-        )
-        self.assertEqual(locator.usage_id, 'intro')
+        self.translate_n_check(other_location, old_style_course_id, new_style_course_id, 'intro', 'published', True)
         # add a distractor course
+        delta_new_course_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')
+        delta_course_locn = Location('i4x', org, course, 'course', 'delta_run')
         loc_mapper().create_map_entry(
-            Location('i4x', org, course, 'course', 'delta_run'),
-            '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'),
+            delta_course_locn,
+            delta_new_course_id,
             block_map={problem_name: {'problem': 'problem3'}}
         )
-        prob_locator = loc_mapper().translate_location(
-            old_style_course_id,
-            location,
-            add_entry_if_missing=True
-        )
-        self.assertEqual(prob_locator.course_id, new_style_course_id)
-        self.assertEqual(prob_locator.usage_id, 'problemabc')
-        # look for w/ only the Location (not unique; so, just verify it returns something)
+        self.translate_n_check(location, old_style_course_id, new_style_course_id, 'problemabc', 'published', True)
+
+        # add a new one to both courses (ensure name doesn't have same beginning)
+        new_prob_name = uuid.uuid4().hex
+        while new_prob_name.startswith('abc'):
+            new_prob_name = uuid.uuid4().hex
+        new_prob_locn = location.replace(name=new_prob_name)
+        new_usage_id = 'problem{}'.format(new_prob_name[:3])
+        self.translate_n_check(new_prob_locn, old_style_course_id, new_style_course_id, new_usage_id, 'published', True)
+        self.translate_n_check(
+            new_prob_locn, delta_course_locn.course_id, delta_new_course_id, new_usage_id, 'published', True
+        )
+        # look for w/ only the Location: causes caching and not unique; so, can't check which course
         prob_locator = loc_mapper().translate_location(
             None,
-            location,
+            new_prob_locn,
             add_entry_if_missing=True
         )
         self.assertIsNotNone(prob_locator, "couldn't find ambiguous location")
@@ -225,17 +218,20 @@ class TestLocationMapper(unittest.TestCase):
         # add a default course pointing to the delta_run
         loc_mapper().create_map_entry(
             Location('i4x', org, course, 'problem', '789abc123efg456'),
-            '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'),
+            delta_new_course_id,
             block_map={problem_name: {'problem': 'problem3'}}
         )
         # now the ambiguous query should return delta
-        prob_locator = loc_mapper().translate_location(
-            None,
-            location,
-            add_entry_if_missing=False
-        )
-        self.assertEqual(prob_locator.course_id, '{}.geek_dept.{}.{}'.format(org, course, 'delta_run'))
-        self.assertEqual(prob_locator.usage_id, 'problem3')
+        again_prob_name = uuid.uuid4().hex
+        while again_prob_name.startswith('abc') or again_prob_name.startswith(new_prob_name[:3]):
+            again_prob_name = uuid.uuid4().hex
+        again_prob_locn = location.replace(name=again_prob_name)
+        again_usage_id = 'problem{}'.format(again_prob_name[:3])
+        self.translate_n_check(again_prob_locn, old_style_course_id, new_style_course_id, again_usage_id, 'published', True)
+        self.translate_n_check(
+            again_prob_locn, delta_course_locn.course_id, delta_new_course_id, again_usage_id, 'published', True
+        )
+        self.translate_n_check(again_prob_locn, None, delta_new_course_id, again_usage_id, 'published', True)
     def test_translate_locator(self):
         """
@@ -352,185 +348,6 @@ class TestLocationMapper(unittest.TestCase):
         reverted_location = loc_mapper().translate_locator_to_location(prob_locator)
         self.assertEqual(location, reverted_location)
def test_add_block(self):
"""
Test add_block_location_translator(location, old_course_id=None, usage_id=None)
"""
# call w/ no matching courses
org = 'foo_org'
course = 'bar_course'
old_style_course_id = '{}/{}/{}'.format(org, course, 'baz_run')
problem_name = 'abc123abc123abc123abc123abc123f9'
location = Location('i4x', org, course, 'problem', problem_name)
with self.assertRaises(ItemNotFoundError):
loc_mapper().add_block_location_translator(location)
with self.assertRaises(ItemNotFoundError):
loc_mapper().add_block_location_translator(location, old_style_course_id)
# w/ one matching course
new_style_course_id = '{}.{}.{}'.format(org, course, 'baz_run')
loc_mapper().create_map_entry(
Location('i4x', org, course, 'course', 'baz_run'),
new_style_course_id,
)
new_usage_id = loc_mapper().add_block_location_translator(location)
self.assertEqual(new_usage_id, 'problemabc')
# look it up
translated_loc = loc_mapper().translate_location(old_style_course_id, location, add_entry_if_missing=False)
self.assertEqual(translated_loc.course_id, new_style_course_id)
self.assertEqual(translated_loc.usage_id, new_usage_id)
# w/ one distractor which has one entry already
new_style_course_id = '{}.geek_dept.{}.{}'.format(org, course, 'delta_run')
loc_mapper().create_map_entry(
Location('i4x', org, course, 'course', 'delta_run'),
new_style_course_id,
block_map={'48f23a10395384929234': {'chapter': 'chapter48f'}}
)
# try adding the one added before
new_usage_id2 = loc_mapper().add_block_location_translator(location)
self.assertEqual(new_usage_id, new_usage_id2)
# it should be in the distractor now
new_location = loc_mapper().translate_locator_to_location(
BlockUsageLocator(course_id=new_style_course_id, usage_id=new_usage_id2, branch='published')
)
self.assertEqual(new_location, location)
# add one close to the existing chapter (cause name collision)
location = Location('i4x', org, course, 'chapter', '48f23a103953849292341234567890ab')
new_usage_id = loc_mapper().add_block_location_translator(location)
self.assertRegexpMatches(new_usage_id, r'^chapter48f\d')
# retrievable from both courses
new_location = loc_mapper().translate_locator_to_location(
BlockUsageLocator(course_id=new_style_course_id, usage_id=new_usage_id, branch='published')
)
self.assertEqual(new_location, location)
new_location = loc_mapper().translate_locator_to_location(
BlockUsageLocator(
course_id='{}.{}.{}'.format(org, course, 'baz_run'),
usage_id=new_usage_id,
branch='published'
)
)
self.assertEqual(new_location, location)
# provoke duplicate item errors
location = location.replace(name='44f23a103953849292341234567890ab')
with self.assertRaises(DuplicateItemError):
loc_mapper().add_block_location_translator(location, usage_id=new_usage_id)
new_usage_id = loc_mapper().add_block_location_translator(location, old_course_id=old_style_course_id)
other_course_old_style = '{}/{}/{}'.format(org, course, 'delta_run')
new_usage_id2 = loc_mapper().add_block_location_translator(
location,
old_course_id=other_course_old_style,
usage_id='{}b'.format(new_usage_id)
)
with self.assertRaises(DuplicateItemError):
loc_mapper().add_block_location_translator(location)
def test_update_block(self):
"""
test update_block_location_translator(location, usage_id, old_course_id=None)
"""
org = 'foo_org'
course = 'bar_course'
new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course)
loc_mapper().create_map_entry(
Location('i4x', org, course, 'course', 'baz_run'),
new_style_course_id,
block_map={
'abc123': {'problem': 'problem2'},
'48f23a10395384929234': {'chapter': 'chapter48f'},
'1': {'chapter': 'chapter1', 'problem': 'problem1'},
}
)
new_style_course_id2 = '{}.geek_dept.{}.delta_run'.format(org, course)
loc_mapper().create_map_entry(
Location('i4x', org, course, 'course', 'delta_run'),
new_style_course_id2,
block_map={
'abc123': {'problem': 'problem3'},
'48f23a10395384929234': {'chapter': 'chapter48b'},
'1': {'chapter': 'chapter2', 'problem': 'problem2'},
}
)
location = Location('i4x', org, course, 'problem', '1')
# change in all courses to same value
loc_mapper().update_block_location_translator(location, 'problem1')
trans_loc = loc_mapper().translate_locator_to_location(
BlockUsageLocator(course_id=new_style_course_id, usage_id='problem1', branch='published')
)
self.assertEqual(trans_loc, location)
trans_loc = loc_mapper().translate_locator_to_location(
BlockUsageLocator(course_id=new_style_course_id2, usage_id='problem1', branch='published')
)
self.assertEqual(trans_loc, location)
# try to change to overwrite used usage_id
location = Location('i4x', org, course, 'chapter', '48f23a10395384929234')
with self.assertRaises(DuplicateItemError):
loc_mapper().update_block_location_translator(location, 'chapter2')
# just change the one course
loc_mapper().update_block_location_translator(location, 'chapter2', '{}/{}/{}'.format(org, course, 'baz_run'))
trans_loc = loc_mapper().translate_locator_to_location(
BlockUsageLocator(course_id=new_style_course_id, usage_id='chapter2', branch='published')
)
self.assertEqual(trans_loc.name, '48f23a10395384929234')
# but this still points to the old
trans_loc = loc_mapper().translate_locator_to_location(
BlockUsageLocator(course_id=new_style_course_id2, usage_id='chapter2', branch='published')
)
self.assertEqual(trans_loc.name, '1')
def test_delete_block(self):
"""
test delete_block_location_translator(location, old_course_id=None)
"""
org = 'foo_org'
course = 'bar_course'
new_style_course_id = '{}.geek_dept.{}.baz_run'.format(org, course)
loc_mapper().create_map_entry(
Location('i4x', org, course, 'course', 'baz_run'),
new_style_course_id,
block_map={
'abc123': {'problem': 'problem2'},
'48f23a10395384929234': {'chapter': 'chapter48f'},
'1': {'chapter': 'chapter1', 'problem': 'problem1'},
}
)
new_style_course_id2 = '{}.geek_dept.{}.delta_run'.format(org, course)
loc_mapper().create_map_entry(
Location('i4x', org, course, 'course', 'delta_run'),
new_style_course_id2,
block_map={
'abc123': {'problem': 'problem3'},
'48f23a10395384929234': {'chapter': 'chapter48b'},
'1': {'chapter': 'chapter2', 'problem': 'problem2'},
}
)
location = Location('i4x', org, course, 'problem', '1')
# delete from all courses
loc_mapper().delete_block_location_translator(location)
self.assertIsNone(loc_mapper().translate_locator_to_location(
BlockUsageLocator(course_id=new_style_course_id, usage_id='problem1', branch='published')
))
self.assertIsNone(loc_mapper().translate_locator_to_location(
BlockUsageLocator(course_id=new_style_course_id2, usage_id='problem2', branch='published')
))
# delete from one course
location = location.replace(name='abc123')
loc_mapper().delete_block_location_translator(location, '{}/{}/{}'.format(org, course, 'baz_run'))
with self.assertRaises(ItemNotFoundError):
loc_mapper().translate_location(
'{}/{}/{}'.format(org, course, 'baz_run'),
location,
add_entry_if_missing=False
)
locator = loc_mapper().translate_location(
'{}/{}/{}'.format(org, course, 'delta_run'),
location,
add_entry_if_missing=False
)
self.assertEqual(locator.usage_id, 'problem3')
#================================== #==================================
# functions to mock existing services # functions to mock existing services
@@ -545,3 +362,23 @@ def render_to_template_mock(*_args):
     """
     Mocks the mako render_to_template w/ a noop
     """
class TrivialCache(object):
"""
A trivial cache impl
"""
def __init__(self):
self.cache = {}
def get(self, key):
"""
Mock the .get
"""
return self.cache.get(key)
def set_many(self, entries):
"""
mock set_many
"""
self.cache.update(entries)
@@ -43,7 +43,10 @@ class TestMigration(unittest.TestCase):
     def setUp(self):
         super(TestMigration, self).setUp()
-        self.loc_mapper = LocMapperStore(**self.db_config)
+        noop_cache = mock.Mock(spec=['get', 'set_many'])
+        noop_cache.configure_mock(**{'get.return_value': None})
+        # pylint: disable=W0142
+        self.loc_mapper = LocMapperStore(noop_cache, **self.db_config)
         self.old_mongo = MongoModuleStore(self.db_config, **self.modulestore_options)
         self.draft_mongo = DraftModuleStore(self.db_config, **self.modulestore_options)
         self.split_mongo = SplitMongoModuleStore(
...
@@ -4,7 +4,6 @@ adding users, removing users, and listing members
 """
 from abc import ABCMeta, abstractmethod
-from functools import partial

 from django.contrib.auth.models import User, Group
...
@@ -85,7 +85,11 @@ CACHES = {
         'LOCATION': '/var/tmp/mongo_metadata_inheritance',
         'TIMEOUT': 300,
         'KEY_FUNCTION': 'util.memcache.safe_key',
-    }
+    },
+    'loc_cache': {
+        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+        'LOCATION': 'edx_location_mem_cache',
+    },
 }
...
@@ -165,7 +165,12 @@ CACHES = {
         'LOCATION': '/var/tmp/mongo_metadata_inheritance',
         'TIMEOUT': 300,
         'KEY_FUNCTION': 'util.memcache.safe_key',
-    }
+    },
+    'loc_cache': {
+        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+        'LOCATION': 'edx_location_mem_cache',
+    },
 }

 # Dummy secret key for dev
...
These are the indexes each mongo db should have in order to perform well.
Each section states the collection name and then the indexes. To create an index,
you'll typically use either a web interface such as MongoHQ or a standard terminal console.
If you're using a terminal, this assumes you've logged in and gotten to the mongo prompt:
```
mongo mydatabasename
```
If using the terminal, to add an index to a collection, you'll need to prefix ```ensureIndex``` with
```
db.collection_name
```
as in ```db.location_map.ensureIndex({'course_id': 1}, {background: true})```
location_map:
=============
```
ensureIndex({'_id.org': 1, '_id.course': 1})
ensureIndex({'course_id': 1})
```
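
For scripted setups, the same indexes can also be created programmatically. The snippet below is an illustrative sketch only, not part of this commit: it assumes a recent pymongo client, and the host, port, database name, and collection name are placeholders you would replace with your own deployment values (LocMapperStore stores its data in a collection whose name ends in ```.location_map```).

```
# Illustrative sketch: create the location_map indexes with pymongo
# instead of the mongo shell. Connection details below are placeholders.
from pymongo import ASCENDING, MongoClient

client = MongoClient('localhost', 27017)  # placeholder host/port
location_map = client['mydatabasename']['modulestore.location_map']  # placeholder names

# compound index on the SON _id fields used when querying by org/course
location_map.create_index(
    [('_id.org', ASCENDING), ('_id.course', ASCENDING)],
    background=True,
)
# index used when looking up entries by new-style course_id
location_map.create_index([('course_id', ASCENDING)], background=True)
```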