Commit ae60661f by Diana Huang

Get previews to work in Studio and use the correct type of editors for these XModules within Studio.
parent f161bdd0
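The diff below makes two kinds of change. CombinedOpenEndedDescriptor and PeerGradingDescriptor switch from the visual HTML editor (widgets/html-edit.html plus the HTMLEditingDescriptor CoffeeScript) to the plain raw XML editor (widgets/raw-edit.html) in Studio, and the grading-service wiring in OpenEndedChild and PeerGradingModule is guarded so that a runtime without an open_ended_grading_interface, such as a Studio preview, falls back to MockPeerGradingService instead of failing.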
@@ -6,7 +6,6 @@ from pkg_resources import resource_string
from xmodule.raw_module import RawDescriptor
from .x_module import XModule
from .xml_module import XmlDescriptor
from xmodule.open_ended_grading_classes.combined_open_ended_modulev1 import CombinedOpenEndedV1Module, CombinedOpenEndedV1Descriptor
log = logging.getLogger("mitx.courseware")
@@ -169,7 +168,7 @@ class CombinedOpenEndedDescriptor(RawDescriptor):
     """
     Module for adding combined open ended questions
     """
-    mako_template = "widgets/html-edit.html"
+    mako_template = "widgets/raw-edit.html"
     module_class = CombinedOpenEndedModule
     filename_extension = "xml"
@@ -177,6 +176,3 @@ class CombinedOpenEndedDescriptor(RawDescriptor):
     has_score = True
     template_dir_name = "combinedopenended"
-    js = {'coffee': [resource_string(__name__, 'js/src/html/edit.coffee')]}
-    js_module_name = "HTMLEditingDescriptor"
@@ -22,7 +22,7 @@ from xmodule.stringify import stringify_children
 from xmodule.xml_module import XmlDescriptor
 from xmodule.modulestore import Location
 from capa.util import *
-from peer_grading_service import PeerGradingService
+from peer_grading_service import PeerGradingService, MockPeerGradingService
 import controller_query_service
 from datetime import datetime
@@ -106,8 +106,14 @@ class OpenEndedChild(object):
         # Used for progress / grading. Currently get credit just for
         # completion (doesn't matter if you self-assessed correct/incorrect).
         self._max_score = static_data['max_score']
-        self.peer_gs = PeerGradingService(system.open_ended_grading_interface, system)
-        self.controller_qs = controller_query_service.ControllerQueryService(system.open_ended_grading_interface,system)
+        if system.open_ended_grading_interface:
+            self.peer_gs = PeerGradingService(system.open_ended_grading_interface, system)
+            self.controller_qs = controller_query_service.ControllerQueryService(system.open_ended_grading_interface,system)
+        else:
+            self.peer_gs = MockPeerGradingService()
+            self.controller_qs = None
         self.system = system
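The new else branch is what makes Studio previews possible: with no open_ended_grading_interface configured, the child gets MockPeerGradingService (imported above from peer_grading_service) and no controller query service at all. The mock itself is not shown in this diff; purely as an illustration of the idea (hypothetical class name and payloads, not the real API), a minimal stand-in would just return canned success data:

# Hypothetical stand-in, for illustration only; not the real MockPeerGradingService.
# The point is that every call returns a well-formed 'success' payload, so code
# that renders grading state keeps working when there is no backend to talk to.
class FakePeerGradingService(object):
    def get_notifications(self, course_id, grader_id):
        return {'success': True, 'student_needs_to_peer_grade': False}

    def get_data_for_location(self, location, student_id):
        return {'success': True, 'count_required': 0, 'count_graded': 0,
                'count_available': 0, 'student_sub_count': 0}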
@@ -461,11 +467,14 @@
         return success, allowed_to_submit, error_message
     def get_eta(self):
-        response = self.controller_qs.check_for_eta(self.location_string)
-        try:
-            response = json.loads(response)
-        except:
-            pass
+        if self.controller_qs:
+            response = self.controller_qs.check_for_eta(self.location_string)
+            try:
+                response = json.loads(response)
+            except:
+                pass
+        else:
+            return ""
         success = response['success']
         if isinstance(success, basestring):
......
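Since controller_qs is now None whenever the grading interface is missing, get_eta returns an empty string before touching it; any other ControllerQueryService call made from this class would need the same check. A generic version of that guard could look like the following (hypothetical helper, not part of the diff):

# Hypothetical helper, not in the diff: the same None-check get_eta performs,
# factored out for any other ControllerQueryService call on this class.
def _query_controller(self, method_name, *args):
    if not self.controller_qs:    # Studio preview: no grading backend configured
        return ""
    return getattr(self.controller_qs, method_name)(*args)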
@@ -14,7 +14,7 @@ from xmodule.modulestore import Location
 from xmodule.modulestore.django import modulestore
 from timeinfo import TimeInfo
-from xmodule.open_ended_grading_classes.peer_grading_service import PeerGradingService, GradingServiceError
+from xmodule.open_ended_grading_classes.peer_grading_service import PeerGradingService, GradingServiceError, MockPeerGradingService
 log = logging.getLogger(__name__)
@@ -53,7 +53,10 @@ class PeerGradingModule(XModule):
         #We need to set the location here so the child modules can use it
         system.set('location', location)
         self.system = system
-        self.peer_gs = PeerGradingService(self.system.open_ended_grading_interface, self.system)
+        if(self.system.open_ended_grading_interface):
+            self.peer_gs = PeerGradingService(self.system.open_ended_grading_interface, self.system)
+        else:
+            self.peer_gs = MockPeerGradingService()
         self.use_for_single_location = self.metadata.get('use_for_single_location', USE_FOR_SINGLE_LOCATION)
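Note the asymmetry with the OpenEndedChild change above: PeerGradingModule always ends up with a usable peer_gs (real or mock), so its later peer grading calls should not need extra guards, while OpenEndedChild's controller_qs can be None and must be checked before use, as get_eta now does.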
@@ -563,7 +566,7 @@ class PeerGradingDescriptor(XmlDescriptor, EditingDescriptor):
     """
     Module for adding combined open ended questions
     """
-    mako_template = "widgets/html-edit.html"
+    mako_template = "widgets/raw-edit.html"
     module_class = PeerGradingModule
     filename_extension = "xml"
......