Commit 1a7b8331 by Calen Pennington

Merge pull request #269 from edx/dhm/incidental-functionality-improvements

Incidental functionality improvements from Next Gen Modulestore work
parents 60e9a77a ef9c2994
...@@ -105,7 +105,6 @@ class CourseDetailsTestCase(CourseTestCase): ...@@ -105,7 +105,6 @@ class CourseDetailsTestCase(CourseTestCase):
self.assertEqual(jsondetails['string'], 'string') self.assertEqual(jsondetails['string'], 'string')
def test_update_and_fetch(self): def test_update_and_fetch(self):
# # NOTE: I couldn't figure out how to validly test time setting w/ all the conversions
jsondetails = CourseDetails.fetch(self.course_location) jsondetails = CourseDetails.fetch(self.course_location)
jsondetails.syllabus = "<a href='foo'>bar</a>" jsondetails.syllabus = "<a href='foo'>bar</a>"
# encode - decode to convert date fields and other data which changes form # encode - decode to convert date fields and other data which changes form
...@@ -128,6 +127,11 @@ class CourseDetailsTestCase(CourseTestCase): ...@@ -128,6 +127,11 @@ class CourseDetailsTestCase(CourseTestCase):
CourseDetails.update_from_json(jsondetails.__dict__).effort, CourseDetails.update_from_json(jsondetails.__dict__).effort,
jsondetails.effort, "After set effort" jsondetails.effort, "After set effort"
) )
jsondetails.start_date = datetime.datetime(2010, 10, 1, 0, tzinfo=UTC())
self.assertEqual(
CourseDetails.update_from_json(jsondetails.__dict__).start_date,
jsondetails.start_date
)
@override_settings(MKTG_URLS={'ROOT': 'dummy-root'}) @override_settings(MKTG_URLS={'ROOT': 'dummy-root'})
def test_marketing_site_fetch(self): def test_marketing_site_fetch(self):
...@@ -235,8 +239,7 @@ class CourseDetailsViewTest(CourseTestCase): ...@@ -235,8 +239,7 @@ class CourseDetailsViewTest(CourseTestCase):
dt1 = date.from_json(encoded[field]) dt1 = date.from_json(encoded[field])
dt2 = details[field] dt2 = details[field]
expected_delta = datetime.timedelta(0) self.assertEqual(dt1, dt2, msg="{} != {} at {}".format(dt1, dt2, context))
self.assertEqual(dt1 - dt2, expected_delta, str(dt1) + "!=" + str(dt2) + " at " + context)
else: else:
self.fail(field + " missing from encoded but in details at " + context) self.fail(field + " missing from encoded but in details at " + context)
elif field in encoded and encoded[field] is not None: elif field in encoded and encoded[field] is not None:
......
...@@ -2,12 +2,13 @@ from auth.authz import STAFF_ROLE_NAME, INSTRUCTOR_ROLE_NAME ...@@ -2,12 +2,13 @@ from auth.authz import STAFF_ROLE_NAME, INSTRUCTOR_ROLE_NAME
from auth.authz import is_user_in_course_group_role from auth.authz import is_user_in_course_group_role
from django.core.exceptions import PermissionDenied from django.core.exceptions import PermissionDenied
from ..utils import get_course_location_for_item from ..utils import get_course_location_for_item
from xmodule.modulestore import Location
def get_location_and_verify_access(request, org, course, name): def get_location_and_verify_access(request, org, course, name):
""" """
Create the location tuple verify that the user has permissions Create the location, verify that the user has permissions
to view the location. Returns the location. to view the location. Returns the location as a Location
""" """
location = ['i4x', org, course, 'course', name] location = ['i4x', org, course, 'course', name]
...@@ -15,7 +16,7 @@ def get_location_and_verify_access(request, org, course, name): ...@@ -15,7 +16,7 @@ def get_location_and_verify_access(request, org, course, name):
if not has_access(request.user, location): if not has_access(request.user, location):
raise PermissionDenied() raise PermissionDenied()
return location return Location(location)
def has_access(user, location, role=STAFF_ROLE_NAME): def has_access(user, location, role=STAFF_ROLE_NAME):
......
...@@ -258,7 +258,7 @@ def import_course(request, org, course, name): ...@@ -258,7 +258,7 @@ def import_course(request, org, course, name):
_module_store, course_items = import_from_xml(modulestore('direct'), settings.GITHUB_REPO_ROOT, _module_store, course_items = import_from_xml(modulestore('direct'), settings.GITHUB_REPO_ROOT,
[course_subdir], load_error_modules=False, [course_subdir], load_error_modules=False,
static_content_store=contentstore(), static_content_store=contentstore(),
target_location_namespace=Location(location), target_location_namespace=location,
draft_store=modulestore()) draft_store=modulestore())
# we can blow this away when we're done importing. # we can blow this away when we're done importing.
......
...@@ -67,7 +67,9 @@ def update_checklist(request, org, course, name, checklist_index=None): ...@@ -67,7 +67,9 @@ def update_checklist(request, org, course, name, checklist_index=None):
if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists): if checklist_index is not None and 0 <= int(checklist_index) < len(course_module.checklists):
index = int(checklist_index) index = int(checklist_index)
course_module.checklists[index] = json.loads(request.body) course_module.checklists[index] = json.loads(request.body)
checklists, modified = expand_checklist_action_urls(course_module) # seeming noop which triggers kvs to record that the metadata is not default
course_module.checklists = course_module.checklists
checklists, _ = expand_checklist_action_urls(course_module)
modulestore.update_metadata(location, own_metadata(course_module)) modulestore.update_metadata(location, own_metadata(course_module))
return HttpResponse(json.dumps(checklists[index]), mimetype="application/json") return HttpResponse(json.dumps(checklists[index]), mimetype="application/json")
else: else:
......
...@@ -38,7 +38,8 @@ __all__ = ['OPEN_ENDED_COMPONENT_TYPES', ...@@ -38,7 +38,8 @@ __all__ = ['OPEN_ENDED_COMPONENT_TYPES',
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
COMPONENT_TYPES = ['customtag', 'discussion', 'html', 'problem', 'video'] # NOTE: edit_unit assumes this list is disjoint from ADVANCED_COMPONENT_TYPES
COMPONENT_TYPES = ['discussion', 'html', 'problem', 'video']
OPEN_ENDED_COMPONENT_TYPES = ["combinedopenended", "peergrading"] OPEN_ENDED_COMPONENT_TYPES = ["combinedopenended", "peergrading"]
NOTE_COMPONENT_TYPES = ['notes'] NOTE_COMPONENT_TYPES = ['notes']
...@@ -220,7 +221,7 @@ def edit_unit(request, location): ...@@ -220,7 +221,7 @@ def edit_unit(request, location):
'section': containing_section, 'section': containing_section,
'create_new_unit_template': Location('i4x', 'edx', 'templates', 'vertical', 'Empty'), 'create_new_unit_template': Location('i4x', 'edx', 'templates', 'vertical', 'Empty'),
'unit_state': unit_state, 'unit_state': unit_state,
'published_date': item.cms.published_date.strftime('%B %d, %Y') if item.cms.published_date is not None else None, 'published_date': get_default_time_display(item.cms.published_date) if item.cms.published_date is not None else None
}) })
......
...@@ -153,7 +153,7 @@ def course_info(request, org, course, name, provided_id=None): ...@@ -153,7 +153,7 @@ def course_info(request, org, course, name, provided_id=None):
course_module = modulestore().get_item(location) course_module = modulestore().get_item(location)
# get current updates # get current updates
location = ['i4x', org, course, 'course_info', "updates"] location = Location(['i4x', org, course, 'course_info', "updates"])
return render_to_response('course_info.html', { return render_to_response('course_info.html', {
'active_tab': 'courseinfo-tab', 'active_tab': 'courseinfo-tab',
......
...@@ -25,4 +25,4 @@ class SessionKeyValueStore(KeyValueStore): ...@@ -25,4 +25,4 @@ class SessionKeyValueStore(KeyValueStore):
del self._session[tuple(key)] del self._session[tuple(key)]
def has(self, key): def has(self, key):
return key in self._descriptor_model_data or key in self._session return key.field_name in self._descriptor_model_data or tuple(key) in self._session
...@@ -74,7 +74,7 @@ class CourseDetails(object): ...@@ -74,7 +74,7 @@ class CourseDetails(object):
Decode the json into CourseDetails and save any changed attrs to the db Decode the json into CourseDetails and save any changed attrs to the db
""" """
# TODO make it an error for this to be undefined & for it to not be retrievable from modulestore # TODO make it an error for this to be undefined & for it to not be retrievable from modulestore
course_location = jsondict['course_location'] course_location = Location(jsondict['course_location'])
# Will probably want to cache the inflight courses because every blur generates an update # Will probably want to cache the inflight courses because every blur generates an update
descriptor = get_modulestore(course_location).get_item(course_location) descriptor = get_modulestore(course_location).get_item(course_location)
......
...@@ -28,7 +28,7 @@ MODULESTORE_OPTIONS = { ...@@ -28,7 +28,7 @@ MODULESTORE_OPTIONS = {
MODULESTORE = { MODULESTORE = {
'default': { 'default': {
'ENGINE': 'xmodule.modulestore.mongo.DraftMongoModuleStore', 'ENGINE': 'xmodule.modulestore.draft.DraftModuleStore',
'OPTIONS': MODULESTORE_OPTIONS 'OPTIONS': MODULESTORE_OPTIONS
}, },
'direct': { 'direct': {
...@@ -36,7 +36,7 @@ MODULESTORE = { ...@@ -36,7 +36,7 @@ MODULESTORE = {
'OPTIONS': MODULESTORE_OPTIONS 'OPTIONS': MODULESTORE_OPTIONS
}, },
'draft': { 'draft': {
'ENGINE': 'xmodule.modulestore.mongo.DraftMongoModuleStore', 'ENGINE': 'xmodule.modulestore.draft.DraftModuleStore',
'OPTIONS': MODULESTORE_OPTIONS 'OPTIONS': MODULESTORE_OPTIONS
} }
} }
......
...@@ -27,7 +27,7 @@ modulestore_options = { ...@@ -27,7 +27,7 @@ modulestore_options = {
MODULESTORE = { MODULESTORE = {
'default': { 'default': {
'ENGINE': 'xmodule.modulestore.mongo.DraftMongoModuleStore', 'ENGINE': 'xmodule.modulestore.draft.DraftModuleStore',
'OPTIONS': modulestore_options 'OPTIONS': modulestore_options
}, },
'direct': { 'direct': {
......
...@@ -53,7 +53,7 @@ MODULESTORE_OPTIONS = { ...@@ -53,7 +53,7 @@ MODULESTORE_OPTIONS = {
MODULESTORE = { MODULESTORE = {
'default': { 'default': {
'ENGINE': 'xmodule.modulestore.mongo.DraftMongoModuleStore', 'ENGINE': 'xmodule.modulestore.draft.DraftModuleStore',
'OPTIONS': MODULESTORE_OPTIONS 'OPTIONS': MODULESTORE_OPTIONS
}, },
'direct': { 'direct': {
...@@ -61,7 +61,7 @@ MODULESTORE = { ...@@ -61,7 +61,7 @@ MODULESTORE = {
'OPTIONS': MODULESTORE_OPTIONS 'OPTIONS': MODULESTORE_OPTIONS
}, },
'draft': { 'draft': {
'ENGINE': 'xmodule.modulestore.mongo.DraftMongoModuleStore', 'ENGINE': 'xmodule.modulestore.draft.DraftModuleStore',
'OPTIONS': MODULESTORE_OPTIONS 'OPTIONS': MODULESTORE_OPTIONS
} }
} }
......
<%inherit file="base.html" /> <%inherit file="base.html" />
<%! <%!
import logging import logging
from xmodule.util.date_utils import get_default_time_display from xmodule.util.date_utils import get_default_time_display, almost_same_datetime
%> %>
<%! from django.core.urlresolvers import reverse %> <%! from django.core.urlresolvers import reverse %>
...@@ -47,9 +47,10 @@ ...@@ -47,9 +47,10 @@
placeholder="HH:MM" class="time" size='10' autocomplete="off"/> placeholder="HH:MM" class="time" size='10' autocomplete="off"/>
</div> </div>
</div> </div>
% if subsection.lms.start != parent_item.lms.start and subsection.lms.start: % if subsection.lms.start and not almost_same_datetime(subsection.lms.start, parent_item.lms.start):
% if parent_item.lms.start is None: % if parent_item.lms.start is None:
<p class="notice">The date above differs from the release date of ${parent_item.display_name_with_default}, which is unset. <p class="notice">The date above differs from the release date of
${parent_item.display_name_with_default}, which is unset.
% else: % else:
<p class="notice">The date above differs from the release date of ${parent_item.display_name_with_default} – <p class="notice">The date above differs from the release date of ${parent_item.display_name_with_default} –
${get_default_time_display(parent_item.lms.start)}. ${get_default_time_display(parent_item.lms.start)}.
......
...@@ -15,8 +15,7 @@ def expect_json(view_function): ...@@ -15,8 +15,7 @@ def expect_json(view_function):
# e.g. 'charset', so we can't do a direct string compare # e.g. 'charset', so we can't do a direct string compare
if request.META.get('CONTENT_TYPE', '').lower().startswith("application/json"): if request.META.get('CONTENT_TYPE', '').lower().startswith("application/json"):
cloned_request = copy.copy(request) cloned_request = copy.copy(request)
cloned_request.POST = cloned_request.POST.copy() cloned_request.POST = json.loads(request.body)
cloned_request.POST.update(json.loads(request.body))
return view_function(cloned_request, *args, **kwargs) return view_function(cloned_request, *args, **kwargs)
else: else:
return view_function(request, *args, **kwargs) return view_function(request, *args, **kwargs)
......
...@@ -2,7 +2,8 @@ from pymongo import Connection ...@@ -2,7 +2,8 @@ from pymongo import Connection
import gridfs import gridfs
from gridfs.errors import NoFile from gridfs.errors import NoFile
from xmodule.modulestore.mongo import location_to_query, Location from xmodule.modulestore import Location
from xmodule.modulestore.mongo.base import location_to_query
from xmodule.contentstore.content import XASSET_LOCATION_TAG from xmodule.contentstore.content import XASSET_LOCATION_TAG
import logging import logging
......
...@@ -16,16 +16,7 @@ log = logging.getLogger('mitx.' + 'modulestore') ...@@ -16,16 +16,7 @@ log = logging.getLogger('mitx.' + 'modulestore')
URL_RE = re.compile(""" URL_RE = re.compile("""
(?P<tag>[^:]+):// (?P<tag>[^:]+)://?
(?P<org>[^/]+)/
(?P<course>[^/]+)/
(?P<category>[^/]+)/
(?P<name>[^@]+)
(@(?P<revision>[^/]+))?
""", re.VERBOSE)
MISSING_SLASH_URL_RE = re.compile("""
(?P<tag>[^:]+):/
(?P<org>[^/]+)/ (?P<org>[^/]+)/
(?P<course>[^/]+)/ (?P<course>[^/]+)/
(?P<category>[^/]+)/ (?P<category>[^/]+)/
...@@ -180,13 +171,8 @@ class Location(_LocationBase): ...@@ -180,13 +171,8 @@ class Location(_LocationBase):
if isinstance(location, basestring): if isinstance(location, basestring):
match = URL_RE.match(location) match = URL_RE.match(location)
if match is None: if match is None:
# cdodge: log.debug('location is instance of %s but no URL match' % basestring)
# check for a dropped slash near the i4x:// element of the location string. This can happen with some raise InvalidLocationError(location)
# redirects (e.g. edx.org -> www.edx.org which I think happens in Nginx)
match = MISSING_SLASH_URL_RE.match(location)
if match is None:
log.debug('location is instance of %s but no URL match' % basestring)
raise InvalidLocationError(location)
groups = match.groupdict() groups = match.groupdict()
check_dict(groups) check_dict(groups)
return _LocationBase.__new__(_cls, **groups) return _LocationBase.__new__(_cls, **groups)
......
from xmodule.modulestore.mongo.base import MongoModuleStore, MongoKeyValueStore, MongoUsage
# Backwards compatibility for prod systems that refererence
# xmodule.modulestore.mongo.DraftMongoModuleStore
from xmodule.modulestore.mongo.draft import DraftModuleStore as DraftMongoModuleStore
...@@ -18,11 +18,10 @@ from xmodule.error_module import ErrorDescriptor ...@@ -18,11 +18,10 @@ from xmodule.error_module import ErrorDescriptor
from xblock.runtime import DbModel, KeyValueStore, InvalidScopeError from xblock.runtime import DbModel, KeyValueStore, InvalidScopeError
from xblock.core import Scope from xblock.core import Scope
from . import ModuleStoreBase, Location, namedtuple_to_son from xmodule.modulestore import ModuleStoreBase, Location, namedtuple_to_son
from .draft import DraftModuleStore from xmodule.modulestore.exceptions import (ItemNotFoundError,
from .exceptions import (ItemNotFoundError,
DuplicateItemError) DuplicateItemError)
from .inheritance import own_metadata, INHERITABLE_METADATA, inherit_metadata from xmodule.modulestore.inheritance import own_metadata, INHERITABLE_METADATA, inherit_metadata
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
...@@ -761,12 +760,3 @@ class MongoModuleStore(ModuleStoreBase): ...@@ -761,12 +760,3 @@ class MongoModuleStore(ModuleStoreBase):
return {} return {}
# DraftModuleStore is first, because it needs to intercept calls to MongoModuleStore
class DraftMongoModuleStore(DraftModuleStore, MongoModuleStore):
"""
Version of MongoModuleStore with draft capability mixed in
"""
"""
Version of MongoModuleStore with draft capability mixed in
"""
pass
from datetime import datetime
from xmodule.modulestore import Location, namedtuple_to_son
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.inheritance import own_metadata
from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore.mongo.base import MongoModuleStore
from pytz import UTC
DRAFT = 'draft'
# Things w/ these categories should never be marked as version='draft'
DIRECT_ONLY_CATEGORIES = ['course', 'chapter', 'sequential', 'about', 'static_tab', 'course_info']
def as_draft(location):
    """
    Return the draft Location corresponding to `location`.

    Accepts anything that `Location` can be constructed from and stamps
    the DRAFT revision onto it.
    """
    draft_loc = Location(location)
    return draft_loc.replace(revision=DRAFT)
def as_published(location):
    """
    Return the published Location corresponding to `location`.

    Accepts anything that `Location` can be constructed from and clears
    the revision field (published items carry revision=None).
    """
    published_loc = Location(location)
    return published_loc.replace(revision=None)
def wrap_draft(item):
    """
    Tag `item` with an `is_draft` attribute — True when its location
    carries the DRAFT revision, False otherwise — and normalize the
    item's location to the published (revision=None) form either way.

    Returns the (mutated) item for convenient chaining.
    """
    item.is_draft = (item.location.revision == DRAFT)
    item.location = item.location.replace(revision=None)
    return item
class DraftModuleStore(MongoModuleStore):
    """
    This mixin modifies a modulestore to give it draft semantics.
    That is, edits made to units are stored to locations that have the revision DRAFT,
    and when reads are made, they first read with revision DRAFT, and then fall back
    to the baseline revision only if DRAFT doesn't exist.
    This module also includes functionality to promote DRAFT modules (and optionally
    their children) to published modules.
    """
    def get_item(self, location, depth=0):
        """
        Returns an XModuleDescriptor instance for the item at location.
        If location.revision is None, returns the item with the most
        recent revision
        If any segment of the location is None except revision, raises
        xmodule.modulestore.exceptions.InsufficientSpecificationError
        If no object is found at that location, raises
        xmodule.modulestore.exceptions.ItemNotFoundError
        location: Something that can be passed to Location
        depth (int): An argument that some module stores may use to prefetch
        descendents of the queried modules for more efficient results later
        in the request. The depth is counted in the number of calls to
        get_children() to cache. None indicates to cache all descendents
        """
        # Prefer the DRAFT revision; fall back to the published item
        # only when no draft exists at this location.
        try:
            return wrap_draft(super(DraftModuleStore, self).get_item(as_draft(location), depth=depth))
        except ItemNotFoundError:
            return wrap_draft(super(DraftModuleStore, self).get_item(location, depth=depth))
    def get_instance(self, course_id, location, depth=0):
        """
        Get an instance of this location, with policy for course_id applied.
        TODO (vshnayder): this may want to live outside the modulestore eventually
        """
        # Same draft-first lookup as get_item, but course-policy-aware.
        try:
            return wrap_draft(super(DraftModuleStore, self).get_instance(course_id, as_draft(location), depth=depth))
        except ItemNotFoundError:
            return wrap_draft(super(DraftModuleStore, self).get_instance(course_id, location, depth=depth))
    def get_items(self, location, course_id=None, depth=0):
        """
        Returns a list of XModuleDescriptor instances for the items
        that match location. Any element of location that is None is treated
        as a wildcard that matches any value
        location: Something that can be passed to Location
        depth: An argument that some module stores may use to prefetch
        descendents of the queried modules for more efficient results later
        in the request. The depth is counted in the number of calls to
        get_children() to cache. None indicates to cache all descendents
        """
        # Query both the draft and published revisions, then drop any
        # published item that is shadowed by a draft of the same location
        # so each logical item appears exactly once (draft wins).
        draft_loc = as_draft(location)
        draft_items = super(DraftModuleStore, self).get_items(draft_loc, course_id=course_id, depth=depth)
        items = super(DraftModuleStore, self).get_items(location, course_id=course_id, depth=depth)
        draft_locs_found = set(item.location.replace(revision=None) for item in draft_items)
        non_draft_items = [
            item
            for item in items
            if (item.location.revision != DRAFT
                and item.location.replace(revision=None) not in draft_locs_found)
        ]
        return [wrap_draft(item) for item in draft_items + non_draft_items]
    def clone_item(self, source, location):
        """
        Clone a new item that is a copy of the item at the location `source`
        and writes it to `location`
        """
        # Categories in DIRECT_ONLY_CATEGORIES must never exist as drafts,
        # so refuse to clone into them; everything else is written as DRAFT.
        if Location(location).category in DIRECT_ONLY_CATEGORIES:
            raise InvalidVersionError(location)
        return wrap_draft(super(DraftModuleStore, self).clone_item(source, as_draft(location)))
    def update_item(self, location, data, allow_not_found=False):
        """
        Set the data in the item specified by the location to
        data
        location: Something that can be passed to Location
        data: A nested dictionary of problem data
        """
        # Ensure a draft copy exists (cloning from published if needed)
        # before writing; all edits land on the DRAFT revision.
        draft_loc = as_draft(location)
        try:
            draft_item = self.get_item(location)
            if not getattr(draft_item, 'is_draft', False):
                self.clone_item(location, draft_loc)
        except ItemNotFoundError, e:
            # With allow_not_found, write the draft even though no
            # published item exists yet.
            if not allow_not_found:
                raise e
        return super(DraftModuleStore, self).update_item(draft_loc, data)
    def update_children(self, location, children):
        """
        Set the children for the item specified by the location to
        children
        location: Something that can be passed to Location
        children: A list of child item identifiers
        """
        # Copy-on-write: materialize a draft before mutating children.
        draft_loc = as_draft(location)
        draft_item = self.get_item(location)
        if not getattr(draft_item, 'is_draft', False):
            self.clone_item(location, draft_loc)
        return super(DraftModuleStore, self).update_children(draft_loc, children)
    def update_metadata(self, location, metadata):
        """
        Set the metadata for the item specified by the location to
        metadata
        location: Something that can be passed to Location
        metadata: A nested dictionary of module metadata
        """
        # Copy-on-write: materialize a draft before mutating metadata.
        draft_loc = as_draft(location)
        draft_item = self.get_item(location)
        if not getattr(draft_item, 'is_draft', False):
            self.clone_item(location, draft_loc)
        # 'is_draft' is a synthetic flag added by wrap_draft, not real
        # metadata — strip it so it is never persisted.
        if 'is_draft' in metadata:
            del metadata['is_draft']
        return super(DraftModuleStore, self).update_metadata(draft_loc, metadata)
    def delete_item(self, location, delete_all_versions=False):
        """
        Delete an item from this modulestore
        location: Something that can be passed to Location
        """
        # Always remove the draft; optionally remove the published
        # version too when delete_all_versions is set.
        super(DraftModuleStore, self).delete_item(as_draft(location))
        if delete_all_versions:
            super(DraftModuleStore, self).delete_item(as_published(location))
        return
    def get_parent_locations(self, location, course_id):
        '''Find all locations that are the parents of this location. Needed
        for path_to_location().
        returns an iterable of things that can be passed to Location.
        '''
        # Delegates directly to the published store; no draft lookup is
        # performed here.
        return super(DraftModuleStore, self).get_parent_locations(location, course_id)
    def publish(self, location, published_by_id):
        """
        Save a current draft to the underlying modulestore
        """
        # Read the draft, stamp publication info, write its data/children/
        # metadata over the published revision, then delete the draft.
        draft = self.get_item(location)
        draft.cms.published_date = datetime.now(UTC)
        draft.cms.published_by = published_by_id
        super(DraftModuleStore, self).update_item(location, draft._model_data._kvs._data)
        super(DraftModuleStore, self).update_children(location, draft._model_data._kvs._children)
        super(DraftModuleStore, self).update_metadata(location, own_metadata(draft))
        self.delete_item(location)
    def unpublish(self, location):
        """
        Turn the published version into a draft, removing the published version
        """
        # DIRECT_ONLY_CATEGORIES may never become drafts.
        if Location(location).category in DIRECT_ONLY_CATEGORIES:
            raise InvalidVersionError(location)
        super(DraftModuleStore, self).clone_item(location, as_draft(location))
        super(DraftModuleStore, self).delete_item(location)
    def _query_children_for_cache_children(self, items):
        # Override of the base-class cache-priming hook: fetch both
        # published and draft documents, preferring drafts where present.
        # first get non-draft in a round-trip
        queried_children = []
        to_process_non_drafts = super(DraftModuleStore, self)._query_children_for_cache_children(items)
        to_process_dict = {}
        for non_draft in to_process_non_drafts:
            to_process_dict[Location(non_draft["_id"])] = non_draft
        # now query all draft content in another round-trip
        query = {
            '_id': {'$in': [namedtuple_to_son(as_draft(Location(item))) for item in items]}
        }
        to_process_drafts = list(self.collection.find(query))
        # now we have to go through all drafts and replace the non-draft
        # with the draft. This is because the semantics of the DraftStore is to
        # always return the draft - if available
        for draft in to_process_drafts:
            draft_loc = Location(draft["_id"])
            draft_as_non_draft_loc = draft_loc.replace(revision=None)
            # does non-draft exist in the collection
            # if so, replace it
            if draft_as_non_draft_loc in to_process_dict:
                to_process_dict[draft_as_non_draft_loc] = draft
        # convert the dict - which is used for look ups - back into a list
        for key, value in to_process_dict.iteritems():
            queried_children.append(value)
        return queried_children
...@@ -3,7 +3,24 @@ from xmodule.modulestore import Location ...@@ -3,7 +3,24 @@ from xmodule.modulestore import Location
from xmodule.modulestore.inheritance import own_metadata from xmodule.modulestore.inheritance import own_metadata
from fs.osfs import OSFS from fs.osfs import OSFS
from json import dumps from json import dumps
import json
from json.encoder import JSONEncoder
import datetime
class EdxJSONEncoder(json.JSONEncoder):
    """
    JSON encoder that additionally serializes `Location` objects (as their
    URL string) and `datetime` objects (as ISO-8601 strings).
    """
    def default(self, obj):
        if isinstance(obj, Location):
            return obj.url()
        if isinstance(obj, datetime.datetime):
            # A tz-aware datetime whose utcoffset() is None gets an
            # explicit 'Z' suffix; every other datetime (naive, or aware
            # with a concrete offset) is plain isoformat().
            if obj.tzinfo is not None and obj.utcoffset() is None:
                return obj.isoformat() + 'Z'
            return obj.isoformat()
        return super(EdxJSONEncoder, self).default(obj)
def export_to_xml(modulestore, contentstore, course_location, root_dir, course_dir, draft_modulestore=None): def export_to_xml(modulestore, contentstore, course_location, root_dir, course_dir, draft_modulestore=None):
...@@ -35,12 +52,12 @@ def export_to_xml(modulestore, contentstore, course_location, root_dir, course_d ...@@ -35,12 +52,12 @@ def export_to_xml(modulestore, contentstore, course_location, root_dir, course_d
policies_dir = export_fs.makeopendir('policies') policies_dir = export_fs.makeopendir('policies')
course_run_policy_dir = policies_dir.makeopendir(course.location.name) course_run_policy_dir = policies_dir.makeopendir(course.location.name)
with course_run_policy_dir.open('grading_policy.json', 'w') as grading_policy: with course_run_policy_dir.open('grading_policy.json', 'w') as grading_policy:
grading_policy.write(dumps(course.grading_policy)) grading_policy.write(dumps(course.grading_policy, cls=EdxJSONEncoder))
# export all of the course metadata in policy.json # export all of the course metadata in policy.json
with course_run_policy_dir.open('policy.json', 'w') as course_policy: with course_run_policy_dir.open('policy.json', 'w') as course_policy:
policy = {'course/' + course.location.name: own_metadata(course)} policy = {'course/' + course.location.name: own_metadata(course)}
course_policy.write(dumps(policy)) course_policy.write(dumps(policy, cls=EdxJSONEncoder))
# export draft content # export draft content
# NOTE: this code assumes that verticals are the top most draftable container # NOTE: this code assumes that verticals are the top most draftable container
......
...@@ -59,7 +59,7 @@ class SequenceModule(SequenceFields, XModule): ...@@ -59,7 +59,7 @@ class SequenceModule(SequenceFields, XModule):
# TODO: Cache progress or children array? # TODO: Cache progress or children array?
children = self.get_children() children = self.get_children()
progresses = [child.get_progress() for child in children] progresses = [child.get_progress() for child in children]
progress = reduce(Progress.add_counts, progresses) progress = reduce(Progress.add_counts, progresses, None)
return progress return progress
def handle_ajax(self, dispatch, data): # TODO: bounds checking def handle_ajax(self, dispatch, data): # TODO: bounds checking
......
...@@ -49,7 +49,7 @@ class CustomTagDescriptor(RawDescriptor): ...@@ -49,7 +49,7 @@ class CustomTagDescriptor(RawDescriptor):
else: else:
# TODO (vshnayder): better exception type # TODO (vshnayder): better exception type
raise Exception("Could not find impl attribute in customtag {0}" raise Exception("Could not find impl attribute in customtag {0}"
.format(location)) .format(self.location))
params = dict(xmltree.items()) params = dict(xmltree.items())
......
# Tests for xmodule.util.date_utils # Tests for xmodule.util.date_utils
from nose.tools import assert_equals from nose.tools import assert_equals, assert_false
from xmodule.util import date_utils from xmodule.util.date_utils import get_default_time_display, almost_same_datetime
import datetime from datetime import datetime, timedelta, tzinfo
from pytz import UTC from pytz import UTC
def test_get_default_time_display(): def test_get_default_time_display():
assert_equals("", date_utils.get_default_time_display(None)) assert_equals("", get_default_time_display(None))
test_time = datetime.datetime(1992, 3, 12, 15, 3, 30, tzinfo=UTC) test_time = datetime(1992, 3, 12, 15, 3, 30, tzinfo=UTC)
assert_equals( assert_equals(
"Mar 12, 1992 at 15:03 UTC", "Mar 12, 1992 at 15:03 UTC",
date_utils.get_default_time_display(test_time)) get_default_time_display(test_time))
assert_equals( assert_equals(
"Mar 12, 1992 at 15:03 UTC", "Mar 12, 1992 at 15:03 UTC",
date_utils.get_default_time_display(test_time, True)) get_default_time_display(test_time, True))
assert_equals( assert_equals(
"Mar 12, 1992 at 15:03", "Mar 12, 1992 at 15:03",
date_utils.get_default_time_display(test_time, False)) get_default_time_display(test_time, False))
def test_get_default_time_display_notz(): def test_get_default_time_display_notz():
test_time = datetime.datetime(1992, 3, 12, 15, 3, 30) test_time = datetime(1992, 3, 12, 15, 3, 30)
assert_equals( assert_equals(
"Mar 12, 1992 at 15:03 UTC", "Mar 12, 1992 at 15:03 UTC",
date_utils.get_default_time_display(test_time)) get_default_time_display(test_time))
assert_equals( assert_equals(
"Mar 12, 1992 at 15:03 UTC", "Mar 12, 1992 at 15:03 UTC",
date_utils.get_default_time_display(test_time, True)) get_default_time_display(test_time, True))
assert_equals( assert_equals(
"Mar 12, 1992 at 15:03", "Mar 12, 1992 at 15:03",
date_utils.get_default_time_display(test_time, False)) get_default_time_display(test_time, False))
# pylint: disable=W0232 # pylint: disable=W0232
class NamelessTZ(datetime.tzinfo): class NamelessTZ(tzinfo):
def utcoffset(self, _dt): def utcoffset(self, _dt):
return datetime.timedelta(hours=-3) return timedelta(hours=-3)
def dst(self, _dt): def dst(self, _dt):
return datetime.timedelta(0) return timedelta(0)
def test_get_default_time_display_no_tzname(): def test_get_default_time_display_no_tzname():
assert_equals("", date_utils.get_default_time_display(None)) assert_equals("", get_default_time_display(None))
test_time = datetime.datetime(1992, 3, 12, 15, 3, 30, tzinfo=NamelessTZ()) test_time = datetime(1992, 3, 12, 15, 3, 30, tzinfo=NamelessTZ())
assert_equals( assert_equals(
"Mar 12, 1992 at 15:03-0300", "Mar 12, 1992 at 15:03-0300",
date_utils.get_default_time_display(test_time)) get_default_time_display(test_time))
assert_equals( assert_equals(
"Mar 12, 1992 at 15:03-0300", "Mar 12, 1992 at 15:03-0300",
date_utils.get_default_time_display(test_time, True)) get_default_time_display(test_time, True))
assert_equals( assert_equals(
"Mar 12, 1992 at 15:03", "Mar 12, 1992 at 15:03",
date_utils.get_default_time_display(test_time, False)) get_default_time_display(test_time, False))
def test_almost_same_datetime():
    """Times inside the tolerance window (default one minute) count as 'almost same'."""
    close_a = datetime(2013, 5, 3, 10, 20, 30)
    distant_a = datetime(2013, 5, 3, 11, 20, 30)
    base_b = datetime(2013, 5, 3, 10, 21, 29)
    # 59 seconds apart: inside the default one-minute window
    assert almost_same_datetime(close_a, base_b)
    # ~an hour apart, but accepted with an explicit one-hour tolerance
    assert almost_same_datetime(distant_a, base_b, timedelta(hours=1))
    # ~an hour apart: rejected by the default window
    assert_false(almost_same_datetime(distant_a, base_b))
    # ...and also rejected by a ten-minute window
    assert_false(almost_same_datetime(distant_a, base_b, timedelta(minutes=10)))
import unittest import unittest
import pytz
from datetime import datetime, timedelta, tzinfo
from fs.osfs import OSFS from fs.osfs import OSFS
from mock import Mock
from path import path from path import path
from tempfile import mkdtemp from tempfile import mkdtemp
import shutil import shutil
from xmodule.modulestore.xml import XMLModuleStore from xmodule.modulestore.xml import XMLModuleStore
from xmodule.modulestore.xml_exporter import EdxJSONEncoder
from xmodule.modulestore import Location
# from ~/mitx_all/mitx/common/lib/xmodule/xmodule/tests/ # from ~/mitx_all/mitx/common/lib/xmodule/xmodule/tests/
# to ~/mitx_all/mitx/common/test # to ~/mitx_all/mitx/common/test
...@@ -127,3 +133,61 @@ class RoundTripTestCase(unittest.TestCase): ...@@ -127,3 +133,61 @@ class RoundTripTestCase(unittest.TestCase):
def test_word_cloud_roundtrip(self): def test_word_cloud_roundtrip(self):
self.check_export_roundtrip(DATA_DIR, "word_cloud") self.check_export_roundtrip(DATA_DIR, "word_cloud")
class TestEdxJsonEncoder(unittest.TestCase):
    """
    Tests for EdxJSONEncoder: Locations serialize via url(), datetimes via
    ISO format (with a trailing 'Z' for UTC-equivalent zones), and anything
    else falls through to the base encoder's TypeError.
    """
    def setUp(self):
        self.encoder = EdxJSONEncoder()

        class OffsetTZ(tzinfo):
            """A timezone with non-None utcoffset"""
            def utcoffset(self, dt):
                return timedelta(hours=4)
        self.offset_tz = OffsetTZ()

        class NullTZ(tzinfo):
            """A timezone with None as its utcoffset"""
            def utcoffset(self, dt):
                return None
        self.null_utc_tz = NullTZ()

    def test_encode_location(self):
        """Locations (with and without a revision) encode as their url()."""
        locations = (
            Location('i4x', 'org', 'course', 'category', 'name'),
            Location('i4x', 'org', 'course', 'category', 'name', 'version'),
        )
        for loc in locations:
            self.assertEqual(loc.url(), self.encoder.default(loc))

    def test_encode_naive_datetime(self):
        """Naive datetimes encode as bare ISO strings; microseconds appear only when nonzero."""
        cases = (
            (datetime(2013, 5, 3, 10, 20, 30, 100), "2013-05-03T10:20:30.000100"),
            (datetime(2013, 5, 3, 10, 20, 30), "2013-05-03T10:20:30"),
        )
        for dt_value, expected in cases:
            self.assertEqual(expected, self.encoder.default(dt_value))

    def test_encode_utc_datetime(self):
        """Aware datetimes keep their offset; a None-utcoffset zone encodes with a 'Z' suffix."""
        cases = (
            (pytz.UTC, "2013-05-03T10:20:30+00:00"),
            (self.offset_tz, "2013-05-03T10:20:30+04:00"),
            (self.null_utc_tz, "2013-05-03T10:20:30Z"),
        )
        for zone, expected in cases:
            self.assertEqual(
                expected,
                self.encoder.default(datetime(2013, 5, 3, 10, 20, 30, 0, zone))
            )

    def test_fallthrough(self):
        """Unsupported types still raise TypeError from the base encoder."""
        for bad_value in (None, {}):
            with self.assertRaises(TypeError):
                self.encoder.default(bad_value)
import datetime
def get_default_time_display(dt, show_timezone=True): def get_default_time_display(dt, show_timezone=True):
""" """
Converts a datetime to a string representation. This is the default Converts a datetime to a string representation. This is the default
...@@ -11,7 +12,7 @@ def get_default_time_display(dt, show_timezone=True): ...@@ -11,7 +12,7 @@ def get_default_time_display(dt, show_timezone=True):
if dt is None: if dt is None:
return "" return ""
timezone = "" timezone = ""
if dt is not None and show_timezone: if show_timezone:
if dt.tzinfo is not None: if dt.tzinfo is not None:
try: try:
timezone = " " + dt.tzinfo.tzname(dt) timezone = " " + dt.tzinfo.tzname(dt)
...@@ -20,3 +21,14 @@ def get_default_time_display(dt, show_timezone=True): ...@@ -20,3 +21,14 @@ def get_default_time_display(dt, show_timezone=True):
else: else:
timezone = " UTC" timezone = " UTC"
return dt.strftime("%b %d, %Y at %H:%M") + timezone return dt.strftime("%b %d, %Y at %H:%M") + timezone
def almost_same_datetime(dt1, dt2, allowed_delta=datetime.timedelta(minutes=1)):
    """
    Return True if dt1 and dt2 differ by strictly less than allowed_delta.

    Useful when comparing datetimes that may have lost seconds on a
    round-trip through the db, or whose timezones differ slightly.

    :param dt1: first datetime
    :param dt2: second datetime
    :param allowed_delta: maximum tolerated difference (exclusive);
        defaults to one minute
    """
    difference = dt1 - dt2
    if difference < datetime.timedelta(0):
        difference = -difference
    return difference < allowed_delta
...@@ -10,6 +10,7 @@ from xblock.core import Dict, Scope ...@@ -10,6 +10,7 @@ from xblock.core import Dict, Scope
from xmodule.x_module import (XModuleDescriptor, policy_key) from xmodule.x_module import (XModuleDescriptor, policy_key)
from xmodule.modulestore import Location from xmodule.modulestore import Location
from xmodule.modulestore.inheritance import own_metadata from xmodule.modulestore.inheritance import own_metadata
from xmodule.modulestore.xml_exporter import EdxJSONEncoder
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
...@@ -84,7 +85,7 @@ def serialize_field(value): ...@@ -84,7 +85,7 @@ def serialize_field(value):
By default, this is the result of calling json.dumps on the input value. By default, this is the result of calling json.dumps on the input value.
""" """
return json.dumps(value) return json.dumps(value, cls=EdxJSONEncoder)
def deserialize_field(field, value): def deserialize_field(field, value):
......
...@@ -10,7 +10,7 @@ from .dev import * ...@@ -10,7 +10,7 @@ from .dev import *
MODULESTORE = { MODULESTORE = {
'default': { 'default': {
'ENGINE': 'xmodule.modulestore.mongo.DraftMongoModuleStore', 'ENGINE': 'xmodule.modulestore.draft.DraftModuleStore',
'OPTIONS': modulestore_options 'OPTIONS': modulestore_options
}, },
} }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment