Commit 700a69ce by gradyward

Finished OA editing in three tabs

parents 721d1748 d92cce76
@@ -4,7 +4,7 @@
 <div class="openassessment-editor-content-and-tabs">
     <div class="openassessment-editor-header">
-        <h6 id="oa-editor-window-title" class="title modal-window-title" >Editing: Open Assessment</h6>
+        <h6 id="oa-editor-window-title" class="title modal-window-title" >{% trans "Editing: Open Assessment" %}</h6>
         <ul class="editor-modes action-list action-modes editor-tabs">
             <li class="view-button oa-editor-tab"><a href="#oa-settings-editor-wrapper">{% trans "Settings" %}</a></li>
             <li class="view-button oa-editor-tab"><a href="#oa-rubric-editor-wrapper">{% trans "Rubric" %}</a></li>
@@ -13,27 +13,29 @@
     </div>
     <div id = "oa-prompt-editor-wrapper" class="oa-editor-content-wrapper">
-        <h2>Prompt Editor</h2>
+        <h2>{% trans "Prompt Editor" %}</h2>
         <textarea class="openassessment-prompt-editor"></textarea>
     </div>
     <div id="oa-rubric-editor-wrapper" class="oa-editor-content-wrapper">
-        <h2>Rubric Editor</h2>
-        <textarea class="openassessment-rubric-editor"></textarea>
+        <h2>{% trans "Rubric Editor" %}</h2>
+        <div id="rubric-editor-wrapper">
+            <textarea class="openassessment-rubric-editor"></textarea>
+        </div>
     </div>
     <div id="oa-settings-editor-wrapper" class="oa-editor-content-wrapper">
         <div id="oa-settings-editor-text-fields">
-            <h2>Settings Editor</h2>
-            <h2>Title:</h2>
+            <h2>{% trans "Settings Editor" %}</h2>
+            <h2>{% trans "Title:" %}</h2>
             <input type="text" name="title" class="openassessment-title-editor">
-            <h2>Submission Start Date: </h2>
+            <h2>{% trans "Submission Start Date:" %}</h2>
             <input type="text" name="start_date" class="openassessment-submission-start-editor">
-            <h2>Submission Due Date:</h2>
+            <h2>{% trans "Submission Due Date:" %}</h2>
            <input type="text" name="due_date" class="openassessment-submission-due-editor">
         </div>
         <div id="oa-settings-assessments">
-            XML For Assessments:
+            <h2> {% trans "XML for Assessments" %} </h2>
             <textarea class="openassessment-assessments-editor"></textarea>
         </div>
     </div>
...
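The three wrapper divs above (prompt, rubric, settings) are the panels the Studio view turns into jQuery UI tabs. As a rough sketch of how that wiring looks on the client, mirroring the tabs() call added to oa_edit.js later in this commit (the selectors and CodeMirror options below are taken from that file; this snippet itself is only illustrative and assumes jQuery, jQuery UI, and CodeMirror are loaded):

// Illustrative sketch only: turn the editor wrappers into tabs and refresh the
// CodeMirror instances when a hidden tab becomes visible, since editors rendered
// inside a hidden tab have zero size until refresh() is called.
var rubricXmlBox = CodeMirror.fromTextArea(
    $('.openassessment-rubric-editor').get(0),
    {mode: "xml", lineNumbers: true, lineWrapping: true}
);
var assessmentsXmlBox = CodeMirror.fromTextArea(
    $('.openassessment-assessments-editor').get(0),
    {mode: "xml", lineNumbers: true, lineWrapping: true}
);
$('.openassessment-editor-content-and-tabs').tabs({
    activate: function (event, ui) {
        rubricXmlBox.refresh();
        assessmentsXmlBox.refresh();
    }
});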
@@ -334,7 +334,7 @@ class OpenAssessmentBlock(
     xblock_validator = validator(block, strict_post_release=False)
     xblock_validator(
         rubric,
-        {'due': config['submission_due']},
+        { 'due': config['submission_due'], 'start': config['submission_start']},
         config['rubric_assessments']
     )
...
@@ -12,18 +12,32 @@ describe("OpenAssessment.StudioView", function() {
     var StubServer = function() {
         this.loadError = false;
         this.updateError = false;
-        this.xml = '<openassessment></openassessment>';
+        this.promptBox = "";
+        this.rubricXmlBox = "";
+        this.titleField = "";
+        this.submissionStartField = "";
+        this.submissionDueField = "";
+        this.assessmentsXmlBox = "";
         this.isReleased = false;
         this.errorPromise = $.Deferred(function(defer) {
             defer.rejectWith(this, ['Test error']);
         }).promise();
-        this.loadXml = function() {
-            var xml = this.xml;
+        this.loadEditorContext = function() {
+            var prompt = this.promptBox;
+            var rubric = this.rubricXmlBox;
+            var settings = {
+                title: this.titleField,
+                submission_start: this.submissionStartField,
+                submission_due: this.submissionDueField,
+                assessments: this.assessmentsXmlBox
+            };
             if (!this.loadError) {
                 return $.Deferred(function(defer) {
-                    defer.resolveWith(this, [xml]);
+                    defer.resolveWith(this, [prompt, rubric, settings]);
                 }).promise();
             }
             else {
@@ -31,9 +45,14 @@ describe("OpenAssessment.StudioView", function() {
             }
         };
-        this.updateXml = function(xml) {
+        this.updateEditorContext = function(prompt, rubricXml, title, sub_start, sub_due, assessmentsXml) {
             if (!this.updateError) {
-                this.xml = xml;
+                this.promptBox = prompt;
+                this.rubricXmlBox = rubricXml;
+                this.titleField = title;
+                this.submissionStartField = sub_start;
+                this.submissionDueField = sub_due;
+                this.assessmentsXmlBox = assessmentsXml;
                 return $.Deferred(function(defer) {
                     defer.resolve();
                 }).promise();
@@ -70,8 +89,8 @@ describe("OpenAssessment.StudioView", function() {
         var el = $('#openassessment-editor').get(0);
         view = new OpenAssessment.StudioView(runtime, el, server);
     });
-    /*
-    it("loads the XML definition", function() {
+    it("loads the editor context definition", function() {
         // Initialize the view
         view.load();
@@ -81,23 +100,23 @@ describe("OpenAssessment.StudioView", function() {
         var assessments = view.assessmentsXmlBox.getValue()
         expect(prompt).toEqual('');
-        expect(rubric).toEqual('<rubric></rubric>');
-        expect(assessments).toEqual('<assessments></assessments>');
+        expect(rubric).toEqual('');
+        expect(assessments).toEqual('');
     });
-    it("saves the XML definition", function() {
-        // Update the XML
-        view.codeBox.setValue('<openassessment>test!</openassessment>');
-        // Save the updated XML
+    it("saves the Editor Context definition", function() {
+        // Update the Context
+        view.titleField.value = 'THIS IS THE NEW TITLE';
+        // Save the updated editor definition
         view.save();
         // Expect the saving notification to start/end
         expect(runtime.notify).toHaveBeenCalledWith('save', {state: 'start'});
         expect(runtime.notify).toHaveBeenCalledWith('save', {state: 'end'});
-        // Expect the server's XML to have been updated
-        expect(server.xml).toEqual('<openassessment>test!</openassessment>');
+        // Expect the server's context to have been updated
+        expect(server.titleField).toEqual('THIS IS THE NEW TITLE');
     });
     it("confirms changes for a released problem", function() {
@@ -109,7 +128,7 @@ describe("OpenAssessment.StudioView", function() {
             function(onConfirm) { onConfirm(); }
         );
-        // Save the updated XML
+        // Save the updated context
         view.save();
         // Verify that the user was asked to confirm the changes
@@ -129,8 +148,7 @@ describe("OpenAssessment.StudioView", function() {
     it("displays an error when server reports an update XML error", function() {
         server.updateError = true;
-        view.save('<openassessment>test!</openassessment>');
+        view.save();
         expect(runtime.notify).toHaveBeenCalledWith('error', {msg: 'Test error'});
     });
-    */
 });
@@ -30,6 +30,39 @@ describe("OpenAssessment.Server", function() {
         );
     };
+    var PROMPT = "Hello this is the prompt yes.";
+    var RUBRIC = '<rubric>'+
+        '<criterion>'+
+            '<name>𝓒𝓸𝓷𝓬𝓲𝓼𝓮</name>'+
+            '<prompt>How concise is it?</prompt>'+
+            '<option points="3">'+
+                '<name>ﻉซƈﻉɭɭﻉกՇ</name>'+
+                '<explanation>Extremely concise</explanation>'+
+            '</option>'+
+            '<option points="2">'+
+                '<name>Ġööḋ</name>'+
+                '<explanation>Concise</explanation>'+
+            '</option>'+
+            '<option points="1">'+
+                '<name>ק๏๏г</name>'+
+                '<explanation>Wordy</explanation>'+
+            '</option>'+
+        '</criterion>'+
+    '</rubric>';
+    var assessments = '<assessments>' +
+        '<assessment name="peer-assessment" must_grade="1" must_be_graded_by="1" due="2000-01-02"/>' +
+        '<assessment name="self-assessment" due="2000-01-8"/>' +
+    '</assessments>';
+    var SETTINGS = {
+        title: 'This is the title.',
+        submission_start: '2012-10-09T00:00:00',
+        submission_due: '2015-10-10T00:00:00',
+        assessments: assessments
+    };
     beforeEach(function() {
         // Create the server
         // Since the runtime is a stub implementation that ignores the element passed to it,
@@ -150,31 +183,38 @@ describe("OpenAssessment.Server", function() {
         });
     });
-    /*
-    it("loads the XBlock's XML definition", function() {
-        stubAjax(true, { success: true, xml: "<openassessment />" });
-        var loadedXml = "";
-        server.loadXml().done(function(xml) {
-            loadedXml = xml;
+    it("loads the XBlock's Context definition", function() {
+        stubAjax(true, { success: true, prompt: PROMPT, rubric: RUBRIC, settings: SETTINGS});
+        var loadedPrompt = "";
+        var loadedRubric = "";
+        var loadedSettings = "";
+        server.loadEditorContext().done(function(prompt, rubric, settings) {
+            loadedPrompt = prompt;
+            loadedRubric = rubric;
+            loadedSettings = settings;
         });
-        expect(loadedXml).toEqual('<openassessment />');
+        expect(loadedPrompt).toEqual(PROMPT);
+        expect(loadedRubric).toEqual(RUBRIC);
+        expect(loadedSettings).toEqual(SETTINGS);
         expect($.ajax).toHaveBeenCalledWith({
-            url: '/xml', type: "POST", data: '""'
+            url: '/editor_context', type: "POST", data: '""'
         });
     });
-    it("updates the XBlock's XML definition", function() {
+    it("updates the XBlock's Context definition", function() {
         stubAjax(true, { success: true });
-        server.updateXml('<openassessment />');
+        server.updateEditorContext(
+            PROMPT, RUBRIC, SETTINGS.title, SETTINGS.submission_start, SETTINGS.submission_due, SETTINGS.assessments
+        );
         expect($.ajax).toHaveBeenCalledWith({
-            url: '/update_xml', type: "POST",
-            data: JSON.stringify({xml: '<openassessment />'})
+            type: "POST", url: '/update_editor_context',
+            data: JSON.stringify({prompt: PROMPT, rubric: RUBRIC, settings: SETTINGS})
         });
     });
-    */
     it("Checks whether the XBlock has been released", function() {
         stubAjax(true, { success: true, is_released: true });
@@ -253,51 +293,51 @@ describe("OpenAssessment.Server", function() {
         expect(receivedMsg).toEqual('test error');
     });
-    /*
-    it("informs the caller of an Ajax error when loading XML", function() {
+    it("informs the caller of an Ajax error when loading the editor context", function() {
         stubAjax(false, null);
         var receivedMsg = null;
-        server.loadXml().fail(function(msg) {
+        server.loadEditorContext().fail(function(msg) {
            receivedMsg = msg;
         });
         expect(receivedMsg).toContain("This problem could not be loaded");
     });
-    it("informs the caller of an Ajax error when updating XML", function() {
+    it("informs the caller of an Ajax error when updating the editor context", function() {
         stubAjax(false, null);
         var receivedMsg = null;
-        server.updateXml('test').fail(function(msg) {
+        server.updateEditorContext('prompt', 'rubric', 'title', 'start', 'due', 'assessments').fail(function(msg) {
            receivedMsg = msg;
         });
         expect(receivedMsg).toContain("This problem could not be saved");
     });
-    it("informs the caller of a server error when loading XML", function() {
+    it("informs the caller of a server error when loading the editor context", function() {
         stubAjax(true, { success: false, msg: "Test error" });
         var receivedMsg = null;
-        server.updateXml('test').fail(function(msg) {
+        server.updateEditorContext('prompt', 'rubric', 'title', 'start', 'due', 'assessments').fail(function(msg) {
            receivedMsg = msg;
         });
         expect(receivedMsg).toEqual("Test error");
     });
-    it("informs the caller of a server error when updating XML", function() {
+    it("informs the caller of a server error when updating the editor context", function() {
         stubAjax(true, { success: false, msg: "Test error" });
         var receivedMsg = null;
-        server.loadXml().fail(function(msg) {
+        server.loadEditorContext().fail(function(msg) {
            receivedMsg = msg;
         });
         expect(receivedMsg).toEqual("Test error");
     });
-    */
     it("informs the caller of a server error when sending a peer assessment", function() {
         stubAjax(true, {success:false, msg:'Test error!'});
...
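These specs pin down the AJAX contract for the new editor calls, but the production implementation in oa_server.js is not part of this diff. The following is only a sketch of what the expectations above imply, reusing the existing this.url() helper that OpenAssessment.Server uses for its other handlers and the error strings the specs check with toContain():

// Sketch only (not the shipped implementation): load and save the editor
// context through the XBlock handlers exercised by the specs above.
OpenAssessment.Server.prototype.loadEditorContext = function() {
    var url = this.url('editor_context');
    return $.Deferred(function(defer) {
        $.ajax({ type: "POST", url: url, data: '""' }).done(function(data) {
            if (data.success) { defer.resolveWith(this, [data.prompt, data.rubric, data.settings]); }
            else { defer.rejectWith(this, [data.msg]); }
        }).fail(function() {
            defer.rejectWith(this, [gettext('This problem could not be loaded.')]);
        });
    }).promise();
};

OpenAssessment.Server.prototype.updateEditorContext = function(prompt, rubricXml, title, subStart, subDue, assessmentsXml) {
    var url = this.url('update_editor_context');
    // Payload shape matches the spec: {prompt, rubric, settings}
    var payload = JSON.stringify({
        prompt: prompt,
        rubric: rubricXml,
        settings: {
            title: title,
            submission_start: subStart,
            submission_due: subDue,
            assessments: assessmentsXml
        }
    });
    return $.Deferred(function(defer) {
        $.ajax({ type: "POST", url: url, data: payload }).done(function(data) {
            if (data.success) { defer.resolve(); }
            else { defer.rejectWith(this, [data.msg]); }
        }).fail(function() {
            defer.rejectWith(this, [gettext('This problem could not be saved.')]);
        });
    }).promise();
};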
@@ -3,12 +3,12 @@
 The constructor initializes the DOM for editing.
 Args:
     runtime (Runtime): an XBlock runtime instance.
     element (DOM element): The DOM element representing this XBlock.
     server (OpenAssessment.Server): The interface to the XBlock server.
 Returns:
     OpenAssessment.StudioView
 **/
 OpenAssessment.StudioView = function(runtime, element, server) {
@@ -17,37 +17,44 @@ OpenAssessment.StudioView = function(runtime, element, server) {
     // Initialize the code box
-    this.promptBox = $('.openassessment-prompt-editor').first().get(0);
+    live_element = $(element)
+    this.promptBox = live_element.find('.openassessment-prompt-editor').first().get(0);
     this.rubricXmlBox = CodeMirror.fromTextArea(
-        $(element).find('.openassessment-rubric-editor').first().get(0),
+        live_element.find('.openassessment-rubric-editor').first().get(0),
         {mode: "xml", lineNumbers: true, lineWrapping: true}
     );
-    this.titleField = $(element).find('.openassessment-title-editor');
-    this.submissionStartField = $(element).find('.openassessment-submission-start-editor').first().get(0);
-    this.submissionDueField = $(element).find('.openassessment-submission-due-editor').first().get(0);
+    this.titleField = live_element.find('.openassessment-title-editor').first().get(0);
+    this.submissionStartField = live_element.find('.openassessment-submission-start-editor').first().get(0);
+    this.submissionDueField = live_element.find('.openassessment-submission-due-editor').first().get(0);
     this.assessmentsXmlBox = CodeMirror.fromTextArea(
-        $(element).find('.openassessment-assessments-editor').first().get(0),
+        live_element.find('.openassessment-assessments-editor').first().get(0),
         {mode: "xml", lineNumbers: true, lineWrapping: true}
     );
     // Install click handlers
     var view = this;
-    $(element).find('.openassessment-save-button').click(
+    live_element.find('.openassessment-save-button').click(
         function (eventData) {
             view.save();
         });
-    $(element).find('.openassessment-cancel-button').click(
+    live_element.find('.openassessment-cancel-button').click(
         function (eventData) {
             view.cancel();
         });
-    $('.openassessment-editor-content-and-tabs').tabs();
+    live_element.find('.openassessment-editor-content-and-tabs').tabs({
+        activate: function (event, ui){
+            view.rubricXmlBox.refresh();
+            view.assessmentsXmlBox.refresh();
+        }
+    });
 };
 OpenAssessment.StudioView.prototype = {
@@ -61,8 +68,6 @@ OpenAssessment.StudioView.prototype = {
         function (prompt, rubricXml, settings) {
             view.rubricXmlBox.setValue(rubricXml);
             view.assessmentsXmlBox.setValue(settings.assessments);
-            view.rubricXmlBox.refresh();
-            view.assessmentsXmlBox.refresh();
             view.submissionStartField.value = settings.submission_start;
             view.submissionDueField.value = settings.submission_due;
             view.promptBox.value = prompt;
@@ -85,7 +90,7 @@ OpenAssessment.StudioView.prototype = {
         this.server.checkReleased().done(
             function (isReleased) {
                 if (isReleased) {
-                    view.confirmPostReleaseUpdate($.proxy(view.updateEditorContext(), view));
+                    view.confirmPostReleaseUpdate($.proxy(view.updateEditorContext, view));
                 }
                 else {
                     view.updateEditorContext();
@@ -101,8 +106,8 @@ OpenAssessment.StudioView.prototype = {
     that has already been released.
     Args:
         onConfirm (function): A function that accepts no arguments,
            executed if the user confirms the update.
     **/
     confirmPostReleaseUpdate: function (onConfirm) {
         var msg = gettext("This problem has already been released. Any changes will apply only to future assessments.");
@@ -153,7 +158,7 @@ OpenAssessment.StudioView.prototype = {
     Display an error message to the user.
     Args:
         errorMsg (string): The error message to display.
     **/
     showError: function (errorMsg) {
         this.runtime.notify('error', {msg: errorMsg});
...
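save() above hands updateEditorContext to confirmPostReleaseUpdate via $.proxy, but the body of updateEditorContext itself falls outside these hunks. Based on the field names wired up in the constructor and the stub server used in the specs, a plausible sketch of that method (names inferred, not confirmed by this diff) looks like:

// Sketch only: gather the three tabs' values and push them to the server,
// reporting the result through the runtime, mirroring the notify calls the
// Jasmine specs expect. The actual body in oa_edit.js may differ.
updateEditorContext: function () {
    var view = this;
    this.server.updateEditorContext(
        this.promptBox.value,
        this.rubricXmlBox.getValue(),
        this.titleField.value,
        this.submissionStartField.value,
        this.submissionDueField.value,
        this.assessmentsXmlBox.getValue()
    ).done(function () {
        // Notify the client-side runtime that the save completed
        view.runtime.notify('save', {state: 'end'});
    }).fail(function (msg) {
        view.showError(msg);
    });
},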
@@ -66,15 +66,15 @@ OpenAssessment.Server.prototype = {
     continue grading peers.
     Returns:
         A JQuery promise, which resolves with the HTML of the rendered peer
         assessment section or fails with an error message.
     Example:
         server.render_continued_peer().done(
             function(html) { console.log(html); }
         ).fail(
             function(err) { console.log(err); }
         )
     **/
     renderContinuedPeer: function() {
         var url = this.url('render_peer_assessment');
...
@@ -118,13 +118,9 @@ class StudioMixin(object):
         # panel. This will adjust according to the fields laid out in this
         # section.
-        submission_due = self.submission_due
-        if not self.submission_due:
-            submission_due = ''
+        submission_due = self.submission_due if self.submission_due else ''
-        submission_start = self.submission_start
-        if not submission_start:
-            submission_start = ''
+        submission_start = self.submission_start if self.submission_start else ''
         settings = {
             'submission_due': submission_due,
...
@@ -78,6 +78,6 @@
         "submission_due": "2012-02-27T09:46:28",
         "submission_start": "2015-02-10T09:46:28"
     },
-    "expected_error": "cannot be later"
+    "expected_error": "cannot be earlier"
     }
 }
\ No newline at end of file
@@ -208,6 +208,7 @@ class ValidationIntegrationTest(TestCase):
     }
     SUBMISSION = {
+        "start": None,
         "due": None
     }
...
@@ -124,16 +124,15 @@ class TestSerializeContent(TestCase):
         parsed_expected = etree.fromstring("".join(data['expected_xml']))
         # Pretty-print and reparse the expected XML
-        pretty_expected = etree.tostring(parsed_expected, pretty_print=True, encoding='utf-8')
+        pretty_expected = etree.tostring(parsed_expected, pretty_print=True, encoding='unicode')
         parsed_expected = etree.fromstring(pretty_expected)
         # Walk both trees, comparing elements and attributes
         actual_elements = [el for el in parsed_actual.getiterator()]
         expected_elements = [el for el in parsed_expected.getiterator()]
         self.assertEqual(
             len(actual_elements), len(expected_elements),
-            msg="Incorrect XML output:\nActual: {}\nExpected: {}".format(xml, pretty_expected)
+            msg=u"Incorrect XML output:\nActual: {}\nExpected: {}".format(xml, pretty_expected)
         )
         for actual, expected in zip(actual_elements, expected_elements):
@@ -155,18 +154,23 @@ class TestSerializeContent(TestCase):
     def test_serialize_rubric(self, data):
         self._configure_xblock(data)
         xml_str = serialize_rubric_to_xml_str(self.oa_block)
+        self.assertIn("<rubric>", xml_str)
+        if data['prompt']:
+            self.assertNotIn(data['prompt'], xml_str)
     @ddt.file_data('data/serialize.json')
     def test_serialize_examples(self, data):
         self._configure_xblock(data)
         for assessment in data['assessments']:
-            if 'student-training' == assessment['name']:
+            if 'student-training' == assessment['name'] and assessment['examples']:
                 xml_str = serialize_examples_to_xml_str(assessment)
+                self.assertIn(assessment['examples'][0]['answer'], xml_str)
     @ddt.file_data('data/serialize.json')
     def test_serialize_assessments(self, data):
         self._configure_xblock(data)
         xml_str = serialize_assessments_to_xml_str(self.oa_block)
+        self.assertIn(data['assessments'][0]['name'], xml_str)
     def test_mutated_criteria_dict(self):
         self.oa_block.title = "Test title"
...
@@ -124,7 +124,7 @@ def _serialize_criteria(criteria_root, criteria_list):
         _serialize_options(criterion_el, options_list)
-def serialize_rubric(rubric_root, oa_block):
+def serialize_rubric(rubric_root, oa_block, include_prompt=True):
     """
     Serialize a rubric dictionary as XML, adding children to the XML
     with root node `rubric_root`.
@@ -138,11 +138,14 @@ def serialize_rubric(rubric_root, oa_block):
         rubric_dict (dict): A dictionary representation of the rubric, of the form
             described in the serialized Rubric model (peer grading serializers).
+    Kwargs:
+        include_prompt (bool): Whether or not to include the prompt in the
+            serialized format for a rubric. Defaults to True.
     Returns:
         None
     """
     # Rubric prompt (default to empty text); None indicates no input element
-    if oa_block.prompt is not None:
+    if include_prompt and oa_block.prompt is not None:
         prompt = etree.SubElement(rubric_root, 'prompt')
         prompt.text = unicode(oa_block.prompt)
@@ -172,7 +175,7 @@ def parse_date(date_str):
     Raises:
         UpdateFromXmlError
     """
-    if date_str == "" or date_str == u"":
+    if date_str == "":
         return None
     try:
         # Get the date into ISO format
@@ -180,7 +183,10 @@ def parse_date(date_str):
         formatted_date = parsed_date.strftime("%Y-%m-%dT%H:%M:%S")
         return unicode(formatted_date)
     except (ValueError, TypeError):
-        raise UpdateFromXmlError(_('The format for the submission due date is invalid. Make sure the date is formatted as YYYY-MM-DDTHH:MM:SS.'))
+        msg = (
+            'The format for the given date ({}) is invalid. Make sure the date is formatted as YYYY-MM-DDTHH:MM:SS.'
+        ).format(date_str)
+        raise UpdateFromXmlError(_(msg))
 def _parse_options_xml(options_root):
@@ -560,12 +566,15 @@ def serialize_content(oa_block):
     serialize_content_to_xml(oa_block, root)
     # Return a UTF-8 representation of the XML
-    return etree.tostring(root, pretty_print=True, encoding='utf-8')
+    return etree.tostring(root, pretty_print=True, encoding='unicode')
 def serialize_rubric_to_xml_str(oa_block):
     """
-    Serialize the OpenAssessment XBlock's rubric into an XML string.
+    Serialize the OpenAssessment XBlock's rubric into an XML string. This is
+    designed to serialize the XBlock's rubric specifically for authoring. Since
+    the authoring view splits the prompt from the rubric, the serialized format
+    for the rubric does not contain the prompt.
     Args:
         oa_block (OpenAssessmentBlock): The open assessment block to serialize
@@ -576,8 +585,8 @@ def serialize_rubric_to_xml_str(oa_block):
     """
     rubric_root = etree.Element('rubric')
-    serialize_rubric(rubric_root, oa_block)
-    return etree.tostring(rubric_root, pretty_print=True, encoding='utf-8')
+    serialize_rubric(rubric_root, oa_block, include_prompt=False)
+    return etree.tostring(rubric_root, pretty_print=True, encoding='unicode')
 def serialize_examples_to_xml_str(assessment):
@@ -599,7 +608,7 @@ def serialize_examples_to_xml_str(assessment):
     examples = []
     examples_root = etree.Element('examples')
     serialize_training_examples(examples, examples_root)
-    return etree.tostring(examples_root, pretty_print=True, encoding='utf-8')
+    return etree.tostring(examples_root, pretty_print=True, encoding='unicode')
 def serialize_assessments_to_xml_str(oa_block):
@@ -612,7 +621,7 @@ def serialize_assessments_to_xml_str(oa_block):
     """
     assessments_root = etree.Element('assessments')
     serialize_assessments(assessments_root, oa_block)
-    return etree.tostring(assessments_root, pretty_print=True, encoding='utf-8')
+    return etree.tostring(assessments_root, pretty_print=True, encoding='unicode')
 def parse_from_xml(root):
...