Commit 2379e7c3 by Don Mitchell

Merge pull request #1029 from edx/dhm/migrate_mongo

Migrate courses from old mongo to split
parents 7d4ccf62 f63aa0f5
......@@ -2,7 +2,7 @@ import unittest
from xmodule import templates
from xmodule.modulestore.tests import persistent_factories
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.django import modulestore, loc_mapper
from xmodule.seq_module import SequenceDescriptor
from xmodule.capa_module import CapaDescriptor
from xmodule.modulestore.locator import CourseLocator, BlockUsageLocator
......@@ -191,6 +191,26 @@ class TemplateTests(unittest.TestCase):
version_history = modulestore('split').get_block_generations(second_problem.location)
self.assertNotEqual(version_history.locator.version_guid, first_problem.location.version_guid)
def test_split_inject_loc_mapper(self):
"""
Test that instantiating the split store after a loc_mapper already exists causes split to pick up that mapper
"""
# instantiate location mapper before split
mapper = loc_mapper()
# split must inject the location mapper itself since the mapper existed before it did
self.assertEqual(modulestore('split').loc_mapper, mapper)
def test_loc_inject_into_split(self):
"""
Test that creating a loc_mapper causes it to automatically attach to the split mongo store
"""
# force instantiation of split modulestore before there's a location mapper and verify
# it has no pointer to loc mapper
self.assertIsNone(modulestore('split').loc_mapper)
# force instantiation of location mapper which must inject itself into the split
mapper = loc_mapper()
self.assertEqual(modulestore('split').loc_mapper, mapper)
# ================================= JSON PARSING ===========================
# These are example methods for creating xmodules in memory w/o persisting them.
# They were in x_module but since xblock is not planning to support them but will
......
......@@ -24,14 +24,17 @@ from random import choice, randint
def seed():
return os.getppid()
MODULESTORE_OPTIONS = {
'default_class': 'xmodule.raw_module.RawDescriptor',
DOC_STORE_CONFIG = {
'host': 'localhost',
'db': 'acceptance_xmodule',
'collection': 'acceptance_modulestore_%s' % seed(),
}
MODULESTORE_OPTIONS = dict({
'default_class': 'xmodule.raw_module.RawDescriptor',
'fs_root': TEST_ROOT / "data",
'render_template': 'mitxmako.shortcuts.render_to_string',
}
}, **DOC_STORE_CONFIG)
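The same DOC_STORE_CONFIG block is factored out across the cms and lms settings files in this commit; the `dict({...}, **DOC_STORE_CONFIG)` call copies the store-specific options and then folds the shared document-store connection keys into them. A minimal, self-contained sketch of the pattern (the values here are illustrative, not the real edx-platform settings):

```python
# Illustrative sketch of the dict-merge pattern used in the settings files;
# the values are examples only, not the actual edx-platform configuration.
DOC_STORE_CONFIG = {
    'host': 'localhost',
    'db': 'example_xmodule',
    'collection': 'example_modulestore',
}

# dict(base, **overrides) builds a new dict from `base` and then applies the
# keyword arguments, so the connection keys sit alongside the store options.
MODULESTORE_OPTIONS = dict({
    'default_class': 'xmodule.raw_module.RawDescriptor',
    'fs_root': '/tmp/data',
    'render_template': 'mitxmako.shortcuts.render_to_string',
}, **DOC_STORE_CONFIG)

assert MODULESTORE_OPTIONS['db'] == 'example_xmodule'
assert MODULESTORE_OPTIONS['default_class'].endswith('RawDescriptor')
```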
MODULESTORE = {
'default': {
......@@ -97,7 +100,7 @@ SELENIUM_GRID = {
#####################################################################
# Lastly, see if the developer has any local overrides.
try:
from .private import * # pylint: disable=F0401
from .private import * # pylint: disable=F0401
except ImportError:
pass
......
......@@ -16,14 +16,17 @@ LOGGING = get_logger_config(ENV_ROOT / "log",
dev_env=True,
debug=True)
modulestore_options = {
'default_class': 'xmodule.raw_module.RawDescriptor',
DOC_STORE_CONFIG = {
'host': 'localhost',
'db': 'xmodule',
'collection': 'modulestore',
}
modulestore_options = dict({
'default_class': 'xmodule.raw_module.RawDescriptor',
'fs_root': GITHUB_REPO_ROOT,
'render_template': 'mitxmako.shortcuts.render_to_string',
}
}, **DOC_STORE_CONFIG)
MODULESTORE = {
'default': {
......@@ -185,6 +188,6 @@ if SEGMENT_IO_KEY:
#####################################################################
# Lastly, see if the developer has any local overrides.
try:
from .private import * # pylint: disable=F0401
from .private import * # pylint: disable=F0401
except ImportError:
pass
......@@ -42,14 +42,17 @@ STATICFILES_DIRS += [
if os.path.isdir(COMMON_TEST_DATA_ROOT / course_dir)
]
MODULESTORE_OPTIONS = {
'default_class': 'xmodule.raw_module.RawDescriptor',
DOC_STORE_CONFIG = {
'host': 'localhost',
'db': 'test_xmodule',
'collection': 'test_modulestore',
}
MODULESTORE_OPTIONS = dict({
'default_class': 'xmodule.raw_module.RawDescriptor',
'fs_root': TEST_ROOT / "data",
'render_template': 'mitxmako.shortcuts.render_to_string',
}
}, **DOC_STORE_CONFIG)
MODULESTORE = {
'default': {
......
......@@ -75,6 +75,9 @@ def modulestore(name='default'):
if name not in _MODULESTORES:
_MODULESTORES[name] = create_modulestore_instance(settings.MODULESTORE[name]['ENGINE'],
settings.MODULESTORE[name]['OPTIONS'])
# inject loc_mapper into newly created modulestore if it needs it
if name == 'split' and _loc_singleton is not None:
_MODULESTORES['split'].loc_mapper = _loc_singleton
return _MODULESTORES[name]
......@@ -90,7 +93,10 @@ def loc_mapper():
# pylint: disable=W0212
if _loc_singleton is None:
# instantiate
_loc_singleton = LocMapperStore(settings.modulestore_options)
_loc_singleton = LocMapperStore(**settings.DOC_STORE_CONFIG)
# inject into split mongo modulestore
if 'split' in _MODULESTORES:
_MODULESTORES['split'].loc_mapper = _loc_singleton
return _loc_singleton
......
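The two accessors in django.py cooperate so that whichever singleton is created second gets wired to the other: modulestore('split') picks up an existing loc_mapper at creation time, and loc_mapper() injects itself into a split store that was built before it. A self-contained sketch of that lazy-singleton injection, using stand-in classes rather than the real xmodule implementations:

```python
# Stand-in sketch of the lazy-singleton injection in django.py; SplitStore and
# LocMapper are illustrative classes, not the real modulestore/LocMapperStore.
_STORES = {}
_loc_singleton = None


class LocMapper(object):
    def __init__(self, **doc_store_config):
        self.config = doc_store_config


class SplitStore(object):
    def __init__(self):
        self.loc_mapper = None


def modulestore(name='default'):
    if name not in _STORES:
        _STORES[name] = SplitStore()
        # inject an already-created loc mapper into the newly built split store
        if name == 'split' and _loc_singleton is not None:
            _STORES['split'].loc_mapper = _loc_singleton
    return _STORES[name]


def loc_mapper(doc_store_config=None):
    global _loc_singleton
    if _loc_singleton is None:
        # the real code does LocMapperStore(**settings.DOC_STORE_CONFIG)
        _loc_singleton = LocMapper(**(doc_store_config or {}))
        # inject into a split store that existed before the mapper did
        if 'split' in _STORES:
            _STORES['split'].loc_mapper = _loc_singleton
    return _loc_singleton


# either creation order leaves the split store holding the mapper
split = modulestore('split')
assert split.loc_mapper is None          # split built before any mapper exists
mapper = loc_mapper({'host': 'localhost'})
assert split.loc_mapper is mapper        # creating the mapper injected it
```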
......@@ -28,7 +28,14 @@ class NoPathToItem(Exception):
class DuplicateItemError(Exception):
pass
"""
Attempted to create an item which already exists.
"""
def __init__(self, element_id, store=None, collection=None):
super(DuplicateItemError, self).__init__()
self.element_id = element_id
self.store = store
self.collection = collection
class VersionConflictError(Exception):
......
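DuplicateItemError now records which element collided and where, so callers can produce an actionable message instead of a bare exception. A small sketch of raising and reporting it; the block_map contents and the collection name are illustrative:

```python
class DuplicateItemError(Exception):
    """
    Attempted to create an item which already exists.
    """
    def __init__(self, element_id, store=None, collection=None):
        super(DuplicateItemError, self).__init__()
        self.element_id = element_id
        self.store = store
        self.collection = collection


block_map = {'intro': {'chapter': 'intro'}}   # illustrative data only
try:
    # illustrative duplicate check, not the real LocMapperStore logic
    if 'intro' in block_map:
        raise DuplicateItemError('intro', store=None, collection='location_map')
except DuplicateItemError as exc:
    print("duplicate %r in collection %r" % (exc.element_id, exc.collection))
```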
......@@ -56,8 +56,7 @@ def compute_inherited_metadata(descriptor):
parent_metadata = descriptor.xblock_kvs.inherited_settings.copy()
# add any of descriptor's explicitly set fields to the inheriting list
for field in InheritanceMixin.fields.values():
# pylint: disable = W0212
if descriptor._field_data.has(descriptor, field.name):
if field.is_set_on(descriptor):
# inherited_settings values are json repr
parent_metadata[field.name] = field.read_json(descriptor)
......
......@@ -28,7 +28,10 @@ class LocMapperStore(object):
# C0103: varnames and attrs must be >= 3 chars, but 'db' is kept for long-standing usage
# pylint: disable = C0103
def __init__(self, host, db, collection, port=27017, user=None, password=None, **kwargs):
def __init__(
self, host, db, collection, port=27017, user=None, password=None,
**kwargs
):
'''
Constructor
'''
......@@ -100,6 +103,7 @@ class LocMapperStore(object):
'prod_branch': prod_branch,
'block_map': block_map or {},
})
return course_id
def translate_location(self, old_style_course_id, location, published=True, add_entry_if_missing=True):
"""
......@@ -150,7 +154,7 @@ class LocMapperStore(object):
if add_entry_if_missing:
usage_id = self._add_to_block_map(location, location_id, entry['block_map'])
else:
raise ItemNotFoundError()
raise ItemNotFoundError(location)
elif isinstance(usage_id, dict):
# name is not unique, look through for the right category
if location.category in usage_id:
......@@ -244,7 +248,7 @@ class LocMapperStore(object):
if usage_id is None:
usage_id = map_entry['block_map'][location.name][location.category]
elif usage_id != map_entry['block_map'][location.name][location.category]:
raise DuplicateItemError()
raise DuplicateItemError(usage_id, self, 'location_map')
computed_usage_id = usage_id
......@@ -257,7 +261,7 @@ class LocMapperStore(object):
alt_usage_id = self._verify_uniqueness(computed_usage_id, map_entry['block_map'])
if alt_usage_id != computed_usage_id:
if usage_id is not None:
raise DuplicateItemError()
raise DuplicateItemError(usage_id, self, 'location_map')
else:
# revise already set ones and add to remaining ones
computed_usage_id = self.update_block_location_translator(
......@@ -301,7 +305,7 @@ class LocMapperStore(object):
usage_id = self.update_block_location_translator(location, alt_usage_id, old_course_id, True)
return usage_id
else:
raise DuplicateItemError()
raise DuplicateItemError(usage_id, self, 'location_map')
if location.category in map_entry['block_map'].setdefault(location.name, {}):
map_entry['block_map'][location.name][location.category] = usage_id
......@@ -335,6 +339,8 @@ class LocMapperStore(object):
# the block ids will likely be out of sync and collide from an id perspective. HOWEVER,
# if there are few identical org/course roots or their content is unrelated, this will work well.
usage_id = self._verify_uniqueness(location.category + location.name[:3], block_map)
else:
usage_id = location.name
block_map.setdefault(location.name, {})[location.category] = usage_id
self.location_map.update(location_id, {'$set': {'block_map': block_map}})
return usage_id
......
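The new `else` branch in _add_to_block_map means human-readable block names (such as 'intro', exercised by the location-mapper test below) map straight through as the usage_id, while GUID-style names get a shorter category-prefixed id that is checked for uniqueness. A rough, self-contained sketch of that allocation idea; the guid test and the `verify_uniqueness` helper are simplified assumptions, not the real LocMapperStore code:

```python
import re

# assumption: old-mongo guid names are 32 hex characters
GUID_RE = re.compile(r'^[0-9a-f]{32}$')


def verify_uniqueness(candidate, block_map):
    # stand-in for LocMapperStore._verify_uniqueness: append a counter until
    # the candidate collides with nothing already recorded in the map
    suffix = 1
    unique = candidate
    while any(unique in categories.values() for categories in block_map.values()):
        unique = '%s%d' % (candidate, suffix)
        suffix += 1
    return unique


def add_to_block_map(name, category, block_map):
    if GUID_RE.match(name):
        # guid-style names would collide across courses, so derive a short id
        usage_id = verify_uniqueness(category + name[:3], block_map)
    else:
        # human-readable names map to themselves
        usage_id = name
    block_map.setdefault(name, {})[category] = usage_id
    return usage_id


block_map = {}
assert add_to_block_map('intro', 'chapter', block_map) == 'intro'
guid = '0123456789abcdef0123456789abcdef'
assert add_to_block_map(guid, 'problem', block_map) == 'problem012'
```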
......@@ -377,7 +377,7 @@ class BlockUsageLocator(CourseLocator):
:param block_locator:
"""
if self.course_id and self.version_guid:
if self.version_guid:
return BlockUsageLocator(version_guid=self.version_guid,
branch=self.branch,
usage_id=self.usage_id)
......
......@@ -324,16 +324,14 @@ class MongoModuleStore(ModuleStoreBase):
for result in resultset:
location = Location(result['_id'])
# We need to collate between draft and non-draft
# i.e. draft verticals can have children which are not in non-draft versions
# i.e. draft verticals will have draft children but will have non-draft parents currently
location = location.replace(revision=None)
location_url = location.url()
if location_url in results_by_url:
existing_children = results_by_url[location_url].get('definition', {}).get('children', [])
additional_children = result.get('definition', {}).get('children', [])
total_children = existing_children + additional_children
if 'definition' not in results_by_url[location_url]:
results_by_url[location_url]['definition'] = {}
results_by_url[location_url]['definition']['children'] = total_children
results_by_url[location_url].setdefault('definition', {})['children'] = total_children
results_by_url[location.url()] = result
if location.category == 'course':
root = location.url()
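The setdefault call collapses the previous three-line "create the definition dict if missing, then assign children" sequence into one expression while collating draft and non-draft child lists. A tiny self-contained illustration with invented record data:

```python
# Illustrative data only: two records for the same location, one already
# collected and one incoming, whose child lists need to be merged.
results_by_url = {
    'i4x://org/course/vertical/v1': {'definition': {'children': ['child_a']}},
}
incoming = {'children': ['child_b']}

existing = results_by_url['i4x://org/course/vertical/v1']
total_children = existing.get('definition', {}).get('children', []) + incoming['children']

# setdefault returns the existing 'definition' dict (or inserts {}), so the
# merged child list can be assigned in a single expression
existing.setdefault('definition', {})['children'] = total_children

assert existing['definition']['children'] == ['child_a', 'child_b']
```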
......@@ -643,12 +641,11 @@ class MongoModuleStore(ModuleStoreBase):
"""
# Save any changes to the xmodule to the MongoKeyValueStore
xmodule.save()
# split mongo's persist_dag is more general and useful.
self.collection.save({
'_id': xmodule.location.dict(),
'metadata': own_metadata(xmodule),
'definition': {
'data': xmodule.xblock_kvs._data,
'data': xmodule.get_explicitly_set_fields_by_scope(Scope.content),
'children': xmodule.children if xmodule.has_children else []
}
})
......
......@@ -15,6 +15,7 @@ from xmodule.modulestore.inheritance import own_metadata
from xmodule.modulestore.mongo.base import location_to_query, namedtuple_to_son, get_course_id_no_run, MongoModuleStore
import pymongo
from pytz import UTC
from xblock.fields import Scope
DRAFT = 'draft'
# Things w/ these categories should never be marked as version='draft'
......@@ -237,8 +238,9 @@ class DraftModuleStore(MongoModuleStore):
draft.published_date = datetime.now(UTC)
draft.published_by = published_by_id
super(DraftModuleStore, self).update_item(location, draft._field_data._kvs._data)
super(DraftModuleStore, self).update_children(location, draft._field_data._kvs._children)
super(DraftModuleStore, self).update_item(location, draft.get_explicitly_set_fields_by_scope(Scope.content))
if draft.has_children:
super(DraftModuleStore, self).update_children(location, draft.children)
super(DraftModuleStore, self).update_metadata(location, own_metadata(draft))
self.delete_item(location)
......
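Publishing a draft now pushes three distinct pieces to the published store: the explicitly set content fields, the child list (only for block types that actually have children), and the block's own metadata, instead of reading the private kvs. A rough sketch of that sequence against a hypothetical store interface; none of these names are the DraftModuleStore API:

```python
# Hypothetical store/draft objects purely to illustrate the publish sequence.
class FakeStore(object):
    def __init__(self):
        self.published = {}

    def update_item(self, location, data):
        self.published.setdefault(location, {})['data'] = data

    def update_children(self, location, children):
        self.published.setdefault(location, {})['children'] = children

    def update_metadata(self, location, metadata):
        self.published.setdefault(location, {})['metadata'] = metadata


def publish(store, location, draft):
    # content scope: only what the author explicitly set on the draft
    store.update_item(location, draft['explicit_content'])
    # children: skip leaf blocks, which have no children field at all
    if draft['has_children']:
        store.update_children(location, draft['children'])
    # settings scope: the draft's own (non-inherited) metadata
    store.update_metadata(location, draft['own_metadata'])


store = FakeStore()
publish(store, 'i4x://org/course/problem/p1',
        {'explicit_content': {'data': '<problem/>'}, 'has_children': False,
         'children': [], 'own_metadata': {'display_name': 'P1'}})
assert 'children' not in store.published['i4x://org/course/problem/p1']
```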
......@@ -27,27 +27,27 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
modulestore: the module store that can be used to retrieve additional
modules
course_entry: the originally fetched enveloped course_structure w/ branch and course_id info.
Callers to _load_item provide an override but that function ignores the provided structure and
only looks at the branch and course_id
module_data: a dict mapping Location -> json that was cached from the
underlying modulestore
"""
# TODO find all references to resources_fs and make them handle None
super(CachingDescriptorSystem, self).__init__(load_item=self._load_item, **kwargs)
self.modulestore = modulestore
self.course_entry = course_entry
self.lazy = lazy
self.module_data = module_data
# TODO see if self.course_id is needed: is already in course_entry but could be > 1 value
# Compute inheritance
modulestore.inherit_settings(
course_entry.get('blocks', {}),
course_entry.get('blocks', {}).get(course_entry.get('root'))
course_entry['structure'].get('blocks', {}),
course_entry['structure'].get('blocks', {}).get(course_entry['structure'].get('root'))
)
self.default_class = default_class
self.local_modules = {}
def _load_item(self, usage_id, course_entry_override=None):
# TODO ensure all callers of system.load_item pass just the id
if isinstance(usage_id, BlockUsageLocator) and isinstance(usage_id.usage_id, LocalId):
try:
return self.local_modules[usage_id]
......@@ -60,7 +60,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
self.modulestore.cache_items(self, [usage_id], lazy=self.lazy)
json_data = self.module_data.get(usage_id)
if json_data is None:
raise ItemNotFoundError
raise ItemNotFoundError(usage_id)
class_ = XModuleDescriptor.load_class(
json_data.get('category'),
......@@ -68,9 +68,24 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
)
return self.xblock_from_json(class_, usage_id, json_data, course_entry_override)
# xblock's runtime does not always pass enough contextual information to figure out
# which named container (course x branch) or which parent is requesting an item. Because split allows
# a many:1 mapping from named containers to structures and because item's identities encode
# context as well as unique identity, this function must sometimes infer whether the access is
# within an unspecified named container. In most cases, course_entry_override will give the
# explicit context; however, runtime.get_block(), e.g., does not. HOWEVER, there are simple heuristics
# which will work 99.999% of the time: a runtime is thread & even context specific. The likelihood that
# the thread is working with more than one named container pointing to the same specific structure is
# low; thus, the course_entry is most likely correct. If the thread is looking at > 1 named container
# pointing to the same structure, the access is likely to be chunky enough that the last known container
# is the intended one when not given a course_entry_override; thus, the caching of the last branch/course_id.
def xblock_from_json(self, class_, usage_id, json_data, course_entry_override=None):
if course_entry_override is None:
course_entry_override = self.course_entry
else:
# most recent retrieval is most likely the right one for next caller (see comment above fn)
self.course_entry['branch'] = course_entry_override['branch']
self.course_entry['course_id'] = course_entry_override['course_id']
# most likely a lazy loader or the id directly
definition = json_data.get('definition', {})
definition_id = self.modulestore.definition_locator(definition)
......@@ -80,7 +95,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
usage_id = LocalId()
block_locator = BlockUsageLocator(
version_guid=course_entry_override['_id'],
version_guid=course_entry_override['structure']['_id'],
usage_id=usage_id,
course_id=course_entry_override.get('course_id'),
branch=course_entry_override.get('branch')
......@@ -105,7 +120,7 @@ class CachingDescriptorSystem(MakoDescriptorSystem):
json_data,
self,
BlockUsageLocator(
version_guid=course_entry_override['_id'],
version_guid=course_entry_override['structure']['_id'],
usage_id=usage_id
),
error_msg=exc_info_to_str(sys.exc_info())
......
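The comment block added above xblock_from_json explains the heuristic: remember the branch/course_id from the most recent call that supplied an explicit context, and fall back to it when a later call (e.g. runtime.get_block) supplies none. A stripped-down sketch of that caching behaviour with stand-in names rather than the real runtime classes:

```python
class ContextCachingLoader(object):
    """Stand-in illustrating the 'last explicit context wins' heuristic."""

    def __init__(self, course_entry):
        # course_entry mirrors the enveloped dict: branch, course_id, structure
        self.course_entry = course_entry

    def load(self, usage_id, course_entry_override=None):
        if course_entry_override is None:
            # no explicit context: assume the most recently seen one
            course_entry_override = self.course_entry
        else:
            # remember this context for later calls that omit it
            self.course_entry['branch'] = course_entry_override['branch']
            self.course_entry['course_id'] = course_entry_override['course_id']
        return (course_entry_override['course_id'],
                course_entry_override['branch'],
                usage_id)


loader = ContextCachingLoader({'branch': 'draft', 'course_id': 'course.A', 'structure': {}})
# an explicit context updates the cache ...
loader.load('block1', {'branch': 'published', 'course_id': 'course.B'})
# ... so a context-free call reuses it
assert loader.load('block2') == ('course.B', 'published', 'block2')
```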
import copy
from xblock.fields import Scope
from collections import namedtuple
from xblock.runtime import KeyValueStore
from xblock.exceptions import InvalidScopeError
from .definition_lazy_loader import DefinitionLazyLoader
from xmodule.modulestore.inheritance import InheritanceKeyValueStore
......@@ -25,7 +24,8 @@ class SplitMongoKVS(InheritanceKeyValueStore):
Note, local fields may override and disagree w/ this b/c this says what the value
should be if the field is undefined.
"""
super(SplitMongoKVS, self).__init__(copy.copy(fields), inherited_settings)
# deepcopy so that manipulations of fields does not pollute the source
super(SplitMongoKVS, self).__init__(copy.deepcopy(fields), inherited_settings)
self._definition = definition # either a DefinitionLazyLoader or the db id of the definition.
# if the db id, then the definition is presumed to be loaded into _fields
......
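The switch from copy.copy to copy.deepcopy matters because the field dict holds mutable values (lists, dicts); a shallow copy shares those values, so editing the copy would silently edit the source structure. A quick self-contained demonstration of the difference:

```python
import copy

source_fields = {'children': ['chapter1'], 'display_name': 'Demo'}

shallow = copy.copy(source_fields)
shallow['children'].append('chapter2')      # mutates the shared list
assert source_fields['children'] == ['chapter1', 'chapter2']   # source polluted

source_fields['children'] = ['chapter1']    # reset for the deep copy case
deep = copy.deepcopy(source_fields)
deep['children'].append('chapter2')
assert source_fields['children'] == ['chapter1']               # source untouched
```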
......@@ -192,6 +192,14 @@ class TestLocationMapper(unittest.TestCase):
add_entry_if_missing=True
)
self.assertEqual(prob_locator.course_id, new_style_course_id)
# create an entry w/o a guid name
other_location = Location('i4x', org, course, 'chapter', 'intro')
locator = loc_mapper().translate_location(
old_style_course_id,
other_location,
add_entry_if_missing=True
)
self.assertEqual(locator.usage_id, 'intro')
# add a distractor course
loc_mapper().create_map_entry(
......
......@@ -208,6 +208,12 @@ class LocatorTest(TestCase):
block=expected_block_ref)
self.assertEqual(str(testobj), testurn)
self.assertEqual(testobj.url(), 'edx://' + testurn)
agnostic = testobj.version_agnostic()
self.assertIsNone(agnostic.version_guid)
self.check_block_locn_fields(agnostic, 'test_block constructor',
course_id=expected_id,
branch=expected_branch,
block=expected_block_ref)
def test_block_constructor_url_version_prefix(self):
test_id_loc = '519665f6223ebd6980884f2b'
......@@ -220,6 +226,14 @@ class LocatorTest(TestCase):
block='lab2',
version_guid=ObjectId(test_id_loc)
)
agnostic = testobj.version_agnostic()
self.check_block_locn_fields(
agnostic, 'error parsing URL with version and block',
block='lab2',
course_id=None,
version_guid=ObjectId(test_id_loc)
)
self.assertIsNone(agnostic.course_id)
def test_block_constructor_url_kitchen_sink(self):
test_id_loc = '519665f6223ebd6980884f2b'
......
......@@ -11,12 +11,14 @@ from importlib import import_module
from xblock.fields import Scope
from xmodule.course_module import CourseDescriptor
from xmodule.modulestore.exceptions import InsufficientSpecificationError, ItemNotFoundError, VersionConflictError
from xmodule.modulestore.exceptions import InsufficientSpecificationError, ItemNotFoundError, VersionConflictError, \
DuplicateItemError
from xmodule.modulestore.locator import CourseLocator, BlockUsageLocator, VersionTree, DescriptionLocator
from xmodule.modulestore.inheritance import InheritanceMixin
from pytz import UTC
from path import path
import re
import random
class SplitModuleTest(unittest.TestCase):
......@@ -250,7 +252,6 @@ class SplitModuleCourseTests(SplitModuleTest):
self.assertEqual(str(result.children[0].locator.version_guid), self.GUID_D1)
self.assertEqual(len(result.children[0].children), 1)
class SplitModuleItemTests(SplitModuleTest):
'''
Item read tests including inheritance
......@@ -492,7 +493,7 @@ class TestItemCrud(SplitModuleTest):
"""
Test create update and delete of items
"""
# TODO do I need to test this case which I believe won't work:
# DHM do I need to test this case which I believe won't work:
# 1) fetch a course and some of its blocks
# 2) do a series of CRUD operations on those previously fetched elements
# The problem here will be that the version_guid of the items will be the version at time of fetch.
......@@ -604,7 +605,91 @@ class TestItemCrud(SplitModuleTest):
self.assertGreaterEqual(new_history['edited_on'], premod_time)
another_history = modulestore().get_definition_history_info(another_module.definition_locator)
self.assertEqual(another_history['previous_version'], 'problem12345_3_1')
# TODO check that default fields are set
def test_create_continue_version(self):
"""
Test create_item using the continue_version flag
"""
# start transaction w/ simple creation
user = random.getrandbits(32)
new_course = modulestore().create_course('test_org', 'test_transaction', user)
new_course_locator = new_course.location.as_course_locator()
index_history_info = modulestore().get_course_history_info(new_course.location)
course_block_prev_version = new_course.previous_version
course_block_update_version = new_course.update_version
self.assertIsNotNone(new_course_locator.version_guid, "Want to test a definite version")
versionless_course_locator = CourseLocator(
course_id=new_course_locator.course_id, branch=new_course_locator.branch
)
# positive simple case: no force, add chapter
new_ele = modulestore().create_item(
new_course.location, 'chapter', user,
fields={'display_name': 'chapter 1'},
continue_version=True
)
# version info shouldn't change
self.assertEqual(new_ele.update_version, course_block_update_version)
self.assertEqual(new_ele.update_version, new_ele.location.version_guid)
refetch_course = modulestore().get_course(versionless_course_locator)
self.assertEqual(refetch_course.location.version_guid, new_course.location.version_guid)
self.assertEqual(refetch_course.previous_version, course_block_prev_version)
self.assertEqual(refetch_course.update_version, course_block_update_version)
refetch_index_history_info = modulestore().get_course_history_info(refetch_course.location)
self.assertEqual(refetch_index_history_info, index_history_info)
self.assertIn(new_ele.location.usage_id, refetch_course.children)
# try to create existing item
with self.assertRaises(DuplicateItemError):
_fail = modulestore().create_item(
new_course.location, 'chapter', user,
usage_id=new_ele.location.usage_id,
fields={'display_name': 'chapter 2'},
continue_version=True
)
# start a new transaction
new_ele = modulestore().create_item(
new_course.location, 'chapter', user,
fields={'display_name': 'chapter 2'},
continue_version=False
)
transaction_guid = new_ele.location.version_guid
# ensure force w/ continue gives exception
with self.assertRaises(VersionConflictError):
_fail = modulestore().create_item(
new_course.location, 'chapter', user,
fields={'display_name': 'chapter 2'},
force=True, continue_version=True
)
# ensure trying to continue the old one gives exception
with self.assertRaises(VersionConflictError):
_fail = modulestore().create_item(
new_course.location, 'chapter', user,
fields={'display_name': 'chapter 3'},
continue_version=True
)
# add new child to old parent in continued (leave off version_guid)
course_module_locator = BlockUsageLocator(
course_id=new_course.location.course_id,
usage_id=new_course.location.usage_id,
branch=new_course.location.branch
)
new_ele = modulestore().create_item(
course_module_locator, 'chapter', user,
fields={'display_name': 'chapter 4'},
continue_version=True
)
self.assertNotEqual(new_ele.update_version, course_block_update_version)
self.assertEqual(new_ele.location.version_guid, transaction_guid)
# check children, previous_version
refetch_course = modulestore().get_course(versionless_course_locator)
self.assertIn(new_ele.location.usage_id, refetch_course.children)
self.assertEqual(refetch_course.previous_version, course_block_update_version)
self.assertEqual(refetch_course.update_version, transaction_guid)
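The new test exercises continue_version: a create_item call either opens a fresh structure version (a new transaction) or keeps appending blocks into the version already in progress, and mixing it with force or with a stale version raises a conflict. A loose, dictionary-level sketch of the versioning idea, not the split modulestore implementation:

```python
# Loose illustration of the continue_version idea; the real split store
# versions whole course structures, this just tracks a version counter.
class TinyVersionedStore(object):
    def __init__(self):
        self.version = 0
        self.blocks_by_version = {0: []}

    def create_item(self, usage_id, continue_version=False):
        if continue_version:
            # keep writing into the structure version already in progress
            target = self.version
        else:
            # start a new transaction: bump the version, copying prior blocks
            target = self.version + 1
            self.blocks_by_version[target] = list(self.blocks_by_version[self.version])
            self.version = target
        if usage_id in self.blocks_by_version[target]:
            raise ValueError('duplicate item %s' % usage_id)
        self.blocks_by_version[target].append(usage_id)
        return target


store = TinyVersionedStore()
v1 = store.create_item('chapter1')                           # opens a version
assert store.create_item('chapter2', continue_version=True) == v1
assert store.create_item('chapter3') == v1 + 1               # next transaction
```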
def test_update_metadata(self):
"""
......@@ -967,6 +1052,27 @@ class TestCourseCreation(SplitModuleTest):
course = modulestore().get_course(CourseLocator(course_id=locator.course_id, branch="published"))
self.assertEqual(str(course.location.version_guid), self.GUID_D1)
def test_create_with_root(self):
"""
Test create_course with a specified root id and category
"""
user = random.getrandbits(32)
new_course = modulestore().create_course(
'test_org', 'test_transaction', user,
root_usage_id='top', root_category='chapter'
)
self.assertEqual(new_course.location.usage_id, 'top')
self.assertEqual(new_course.category, 'chapter')
# look at db to verify
db_structure = modulestore().structures.find_one({
'_id': new_course.location.as_object_id(new_course.location.version_guid)
})
self.assertIsNotNone(db_structure, "Didn't find course")
self.assertNotIn('course', db_structure['blocks'])
self.assertIn('top', db_structure['blocks'])
self.assertEqual(db_structure['blocks']['top']['category'], 'chapter')
class TestInheritance(SplitModuleTest):
"""
......
......@@ -662,8 +662,8 @@ class XModuleDescriptor(XModuleFields, HTMLSnippet, ResourceTemplates, XBlock):
"""
result = {}
for field in self.fields.values():
if (field.scope == scope and self._field_data.has(self, field.name)):
result[field.name] = self._field_data.get(self, field.name)
if (field.scope == scope and field.is_set_on(self)):
result[field.name] = field.read_json(self)
return result
@property
......
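get_explicitly_set_fields_by_scope now goes through the public Field API (is_set_on, read_json) instead of poking at _field_data, and it returns JSON-ready values. A minimal stand-in showing the filtering idea; the Scope/Field/Block classes here are toys for illustration, not the xblock library:

```python
# Toy stand-ins for xblock's Scope/Field, only to illustrate the filtering.
class Scope(object):
    content = 'content'
    settings = 'settings'


class Field(object):
    def __init__(self, name, scope, default=None):
        self.name, self.scope, self.default = name, scope, default

    def is_set_on(self, block):
        # "explicitly set" means the block carries its own value, not the default
        return self.name in block.explicit_values

    def read_json(self, block):
        return block.explicit_values.get(self.name, self.default)


class Block(object):
    fields = [Field('data', Scope.content),
              Field('display_name', Scope.settings, 'Untitled')]

    def __init__(self, **explicit_values):
        self.explicit_values = explicit_values

    def get_explicitly_set_fields_by_scope(self, scope):
        return {
            field.name: field.read_json(self)
            for field in self.fields
            if field.scope == scope and field.is_set_on(self)
        }


block = Block(data='<html/>')
assert block.get_explicitly_set_fields_by_scope(Scope.content) == {'data': '<html/>'}
assert block.get_explicitly_set_fields_by_scope(Scope.settings) == {}
```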
......@@ -26,14 +26,17 @@ def seed():
return os.getppid()
# Use the mongo store for acceptance tests
modulestore_options = {
'default_class': 'xmodule.raw_module.RawDescriptor',
DOC_STORE_CONFIG = {
'host': 'localhost',
'db': 'acceptance_xmodule',
'collection': 'acceptance_modulestore_%s' % seed(),
}
modulestore_options = dict({
'default_class': 'xmodule.raw_module.RawDescriptor',
'fs_root': TEST_ROOT / "data",
'render_template': 'mitxmako.shortcuts.render_to_string',
}
}, **DOC_STORE_CONFIG)
MODULESTORE = {
'default': {
......@@ -119,7 +122,7 @@ SELENIUM_GRID = {
#####################################################################
# See if the developer has any local overrides.
try:
from .private import * # pylint: disable=F0401
from .private import * # pylint: disable=F0401
except ImportError:
pass
......
......@@ -22,14 +22,17 @@ MITX_FEATURES['ENABLE_LMS_MIGRATION'] = False
META_UNIVERSITIES = {}
modulestore_options = {
'default_class': 'xmodule.raw_module.RawDescriptor',
DOC_STORE_CONFIG = {
'host': 'localhost',
'db': 'xmodule',
'collection': 'modulestore',
}
modulestore_options = dict({
'default_class': 'xmodule.raw_module.RawDescriptor',
'fs_root': DATA_DIR,
'render_template': 'mitxmako.shortcuts.render_to_string',
}
}, **DOC_STORE_CONFIG)
MODULESTORE = {
'default': {
......