Commit 86a5d3be by Stephen Sanchez

Merge pull request #480 from edx/sanchez/dynamic-html-modification

Updating the studio mixin to deliver context with initial rendering
parents 318341cb de75db12
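In effect, this merge moves the Studio editor from fetch-after-render to render-with-context: the studio view now builds an editor context server-side and passes it into the template, so the client-side loadEditorContext AJAX round-trip (removed further down in this diff) is no longer needed. A minimal sketch of the resulting pattern, using names from this diff with the surrounding XBlock plumbing elided:

    # Sketch only; mirrors the studio_view change in this diff.
    import pkg_resources
    from django.template.context import Context
    from django.template.loader import get_template
    from xblock.fragment import Fragment

    def studio_view(self, context=None):
        # Build the full editing context (prompt, title, dates, criteria,
        # assessments) up front instead of fetching it over AJAX afterwards.
        rendered = get_template(
            'openassessmentblock/edit/oa_edit.html'
        ).render(Context(self.editor_context()))
        frag = Fragment(rendered)
        frag.add_javascript(pkg_resources.resource_string(
            __name__, "static/js/openassessment.min.js"))
        frag.initialize_js('OpenAssessmentEditor')
        return frag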
{% load i18n %}
{% spaceless %}
<div id="openassessment-editor" class="editor-with-buttons editor-with-tabs">
<div class="openassessment_editor_content_and_tabs">
<div id="openassessment_editor_header">
<h6 id="oa_editor_window_title" class="title modal_window_title" >{% trans "Component: Open Response Problem" %}</h6>
<ul class="editor_modes action_list action_modes editor_tabs">
<li class="view-button oa_editor_tab"><a href="#oa_settings_editor_wrapper">{% trans "Settings" %}</a></li>
<li class="view-button oa_editor_tab"><a href="#oa_rubric_editor_wrapper">{% trans "Rubric" %}</a></li>
<li class="view-button oa_editor_tab"><a href="#oa_prompt_editor_wrapper">{% trans "Prompt" %}</a></li>
</ul>
</div>
<div id = "oa_prompt_editor_wrapper" class="oa_editor_content_wrapper">
<textarea id="openassessment_prompt_editor">{{ prompt }}</textarea>
</div>
{% include "openassessmentblock/edit/oa_edit_rubric.html" %}
<div id="oa_settings_editor_wrapper" class="oa_editor_content_wrapper wrapper-comp-settings">
<ul id="oa_basic_settings_editor" class="list-input settings-list">
<li id="openassessment_title_editor_wrapper" class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_title_editor" class="setting-label">{% trans "Display Name "%}</label>
<input type="text" id="openassessment_title_editor" class="input setting-input" value="{{ title }}">
</div>
<p class="setting-help">{% trans "This name appears when you hover over the unit in the course ribbon at the top of the page." %}</p>
</li>
<li class="openassessment_date_editor field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_submission_start_editor" class="setting-label">{% trans "Response Submission Start Date"%} </label>
<input type="datetime-local" class="input setting-input" id="openassessment_submission_start_editor" value="{{ submission_start }}">
</div>
<p class="setting-help">{% trans "The date at which submissions will first be accepted." %}</p>
</li>
<li class="openassessment_date_editor field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_submission_due_editor" class="setting-label">{% trans "Response Submission Due Date" %}</label>
<input type="datetime-local" class="input setting-input" id="openassessment_submission_due_editor" value="{{ submission_due }}">
</div>
<p class="setting-help">{% trans "The date at which submissions will stop being accepted." %}</p>
</li>
<li id="openassessment_sumbission_image_wrapper" class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_submission_image_editor" class="setting-label">{% trans "Allow Image Submissions"%}</label>
<select id="openassessment_submission_image_editor" class="input setting-input" name="image submission">
<option value="0">{% trans "Disabled"%}</option>
<option value="1">{% trans "Enabled"%}</option>
</select>
</div>
<p class="setting-help">{% trans "If enabled, students will be able to submit an image along with their open response." %}</p>
</li>
</ul>
<p class="openassessment_description" id="openassessment_step_select_description">
{% trans "Select the steps that students must complete. All steps are optional, but every assignment must include at least one step." %}
</p>
{% include "openassessmentblock/edit/oa_edit_training.html" %}
{% include "openassessmentblock/edit/oa_edit_peer.html" %}
{% include "openassessmentblock/edit/oa_edit_self.html" %}
{% include "openassessmentblock/edit/oa_edit_ai.html" %}
</div>
</div>
<div class="openassessment_editor_buttons xblock-actions">
<h3 class="sr">Actions</h3>
<ul>
<li class="action-item">
<a href="#" class="button action-primary openassessment_save_button">{% trans "Save" %}</a>
</li>
<li class="action-item">
<a href="#" class="button openassessment_cancel_button">{% trans "Cancel" %}</a>
</li>
</ul>
</div>
</div>
{% endspaceless %}
{% load i18n %}
{% spaceless %}
<div class="openassessment_assessment_module_settings_editor" id="oa_ai_assessment_editor">
<div class = "openassessment_inclusion_wrapper">
<input id="include_ai_assessment" type="checkbox"
{% if assessments.example_based_assessment %} checked="true" {% endif %}>
<label for="include_ai_assessment">{% trans "Step: Example-Based Assessment" %}</label>
</div>
<div class="openassessment_assessment_module_editor">
{% if not assessments.example_based_assessment %}
<p id="ai_assessment_description_closed" class="openassessment_description_closed">
{% trans "An algorithm assesses students' responses by comparing the responses to pre-assessed sample responses that the instructor provides."%}
</p>
{% endif %}
<div id="ai_assessment_settings_editor" class="assessment_settings_wrapper {% if not assessments.example_based_assessment %} is--hidden {% endif %}">
<p class="openassessment_description">
{% trans "Enter one or more sample responses that you've created, together with the scores you would give those responses. Be sure to format the responses and scores according to the placeholder text below. The algorithm assesses students' responses by comparing them to the sample responses and scores that you provide."%}
</p>
<textarea id="ai_training_examples">{{ assessments.example_based_assessment.examples }}</textarea>
</div>
</div>
</div>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% spaceless %}
<div class="openassessment_assessment_module_settings_editor" id="oa_peer_assessment_editor">
<div class="openassessment_inclusion_wrapper">
<input type="checkbox" id="include_peer_assessment"
{% if assessments.peer_assessment %} checked="checked" {% endif %}>
<label for="include_peer_assessment">{% trans "Step: Peer Assessment" %}</label>
</div>
<div class = "openassessment_assessment_module_editor">
{% if not assessments.peer_assessment %}
<p id="peer_assessment_description_closed" class="openassessment_description_closed">
{% trans "Students assess a specified number of other students' responses using the rubric for the assignment." %}
</p>
{% endif %}
<div id="peer_assessment_settings_editor" class="assessment_settings_wrapper {% if not assessments.peer_assessment %} is--hidden {% endif %}">
<p class="openassessment_description">
{% trans "Specify the following values for the peer assessment step. The numeric grading requirements must be given a value." %}
</p>
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_must_grade" class="setting-label">{% trans "Must Grade" %}</label>
<input id="peer_assessment_must_grade" class="input setting-input" type="number" value="{{ assessments.peer_assessment.must_grade }}">
</div>
<p class="setting-help">{% trans "Each student must assess this number of peer responses in order to recieve a grade."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_graded_by" class="setting-label"> {% trans "Graded By" %}</label>
<input id="peer_assessment_graded_by" class="input setting-input" type="number" value="{{ assessments.peer_assessment.must_be_graded_by }}">
</div>
<p class="setting-help">{% trans "Each response must be assessed by at least this many students in order to tabulate a score."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_start_date" class="setting-label">{% trans "Start Date" %}</label>
<input id="peer_assessment_start_date" type="datetime-local" class="input setting-input" value="{{ assessments.peer_assessment.start }}">
</div>
<p class="setting-help">{% trans "If desired, specify a start date for the peer assessment period. If no date is specified, peer assessment can begin when submissions begin."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_due_date" class="setting-label">{% trans "Due Date" %}</label>
<input id="peer_assessment_due_date" type="datetime-local" class="input setting-input" value="{{ assessments.peer_assessment.due }}">
</div>
<p class="setting-help">{% trans "If desired, specify a due date for the peer assessment period. If no date is specified, peer assessment can run as long as the problem is open."%}</p>
</li>
</ul>
</div>
</div>
</div>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% spaceless %}
<div class="openassessment_assessment_module_settings_editor" id="oa_self_assessment_editor">
<div class = "openassessment_inclusion_wrapper">
<input id="include_self_assessment" type="checkbox"
{% if assessments.self_assessment %} checked="true" {% endif %}>
<label for="include_self_assessment">{% trans "Step: Self Assessment" %}</label>
</div>
<div class="openassessment_assessment_module_editor">
{% if not assessments.self_assessment %}
<p id="self_assessment_description_closed" class="openassessment_description_closed">
{% trans "Students assess their own responses using the rubric for the assignment." %}
</p>
{% endif %}
<div id="self_assessment_settings_editor" class="assessment_settings_wrapper {% if not assessments.self_assessment %} is--hidden {% endif %}">
<p class="openassessment_description">
{% trans "Specify start and due dates for the self assessment step. To allow self assessment to run as long as the assignment is open, leave both fields blank." %}
</p>
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="self_assessment_start_date" class="setting-label">{% trans "Start Date" %}</label>
<input id="self_assessment_start_date" type="datetime-local" class="input setting-input" value="{{ assessments.self_assessment.start }}">
</div>
<p class="setting-help">{% trans "If desired, specify a start date for the self assessment period. If no date is specified, self assessment can begin when submissions begin."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="self_assessment_due_date" class="setting-label">{% trans "Due Date" %}</label>
<input id="self_assessment_due_date" type="datetime-local" class="input setting-input" value="{{ assessments.self_assessment.due }}">
</div>
<p class="setting-help">{% trans "If desired, specify a due date for the self assessment period. If no date is specified, self assessment can run as long as the problem is open."%}</p>
</li>
</ul>
</div>
</div>
</div>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% spaceless %}
<div class="openassessment_assessment_module_settings_editor" id="oa_student_training_editor">
<div class = "openassessment_inclusion_wrapper">
<input type="checkbox" id="include_student_training"
{% if assessments.student_training %} checked="true" {% endif %}>
<label for="include_student_training">{% trans "Step: Student Training" %}</label>
</div>
<div class = "openassessment_assessment_module_editor">
{% if not assessments.student_training %}
<p id="student_training_description_closed" class="openassessment_description_closed">
{% trans "Students learn to assess responses by scoring pre-assessed sample responses that the instructor provides. Students move to the next step when the scores they give match the instructor's scores. Note that Student Training Requires that the Peer Assessment module is also selected." %}
</p>
{% endif %}
<div id="student_training_settings_editor" class="assessment_settings_wrapper {% if not assessments.student_training %} is--hidden {% endif %}">
<p class="openassessment_description">
{% trans "Enter one or more sample responses that you've created, together with the scores you would give those responses. Be sure to format the responses and scores according to the placeholder text below." %}
</p>
<textarea id="student_training_examples">{{ assessments.student_training.examples }}</textarea>
</div>
</div>
</div>
{% endspaceless %}
\ No newline at end of file
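For reference, the examples value placed in these training textareas round-trips through xml.serialize_examples_to_xml_str, and the test removed further down asserts that the root tag is examples. A sketch of the expected shape, with placeholder response text and criterion/option names (the answer and select child elements are an assumption based on that serializer, not confirmed by this diff):

    <examples>
      <example>
        <answer>An instructor-written sample response.</answer>
        <select criterion="Criterion 1" option="Good" />
      </example>
    </examples>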
......@@ -379,8 +379,66 @@
"output": "oa_grade_complete.html"
},
{
"template": "openassessmentblock/oa_edit.html",
"context": {},
"template": "openassessmentblock/edit/oa_edit.html",
"context": {
"prompt": "How much do you like waffles?",
"title": "The most important of all questions.",
"submission_due": "2014-10-1T10:00:00",
"criteria": [
{
"name": "Criterion 1",
"prompt": "Prompt 1",
"order_num": 0,
"feedback": "optional",
"options": [
{
"order_num": 2,
"points": 2,
"name": "Good"
}
],
"points_possible": 2
},
{
"name": "Criterion 2",
"prompt": "Prompt 2",
"order_num": 1,
"options": [
{
"order_num": 1,
"points": 1,
"name": "Fair"
}
],
"points_possible": 2
},
{
"name": "Criterion 3",
"prompt": "Prompt 3",
"order_num": 2,
"feedback": "optional",
"options": [
{
"order_num": 2,
"points": 2,
"name": "Good"
}
],
"points_possible": 2
}
],
"assessments": {
"peer_assessment": {
"start": "2014-10-04T00:00:00",
"must_grade": 5,
"must_be_graded_by": 3
},
"self_assessment": {
"due": ""
}
}
},
"output": "oa_edit.html"
},
{
......
......@@ -199,39 +199,6 @@ describe("OpenAssessment.Server", function() {
});
});
it("loads the XBlock's Context definition", function() {
stubAjax(true, {
success: true, prompt: PROMPT, rubric: RUBRIC, title: TITLE,
submission_start: SUBMISSION_START, submission_due: SUBMISSION_DUE, assessments: ASSESSMENTS
});
var loadedPrompt = "";
var loadedRubric = "";
var loadedAssessments = [];
var loadedTitle = "";
var loadedStart = "";
var loadedDue = "";
server.loadEditorContext().done(function(prompt, rubric, title, sub_start, sub_due, assessments) {
loadedPrompt = prompt;
loadedRubric = rubric;
loadedTitle = title;
loadedStart = sub_start;
loadedDue = sub_due;
loadedAssessments = assessments;
});
expect(loadedPrompt).toEqual(PROMPT);
expect(loadedRubric).toEqual(RUBRIC);
expect(loadedTitle).toEqual(TITLE);
expect(loadedStart).toEqual(SUBMISSION_START);
expect(loadedDue).toEqual(SUBMISSION_DUE);
expect(loadedAssessments).toEqual(ASSESSMENTS);
expect($.ajax).toHaveBeenCalledWith({
url: '/editor_context', type: "POST", data: '""'
});
});
it("updates the XBlock's Context definition", function() {
stubAjax(true, { success: true });
......@@ -336,18 +303,6 @@ describe("OpenAssessment.Server", function() {
expect(receivedMsg).toEqual('test error');
});
it("informs the caller of an Ajax error when loading the editor context", function() {
stubAjax(false, null);
var receivedMsg = null;
server.loadEditorContext().fail(function(msg) {
receivedMsg = msg;
});
expect(receivedMsg).toContain("This problem could not be loaded");
});
it("informs the caller of an Ajax error when updating the editor context", function() {
stubAjax(false, null);
......@@ -370,17 +325,6 @@ describe("OpenAssessment.Server", function() {
expect(receivedMsg).toEqual("Test error");
});
it("informs the caller of a server error when updating the editor context", function() {
stubAjax(true, { success: false, msg: "Test error" });
var receivedMsg = null;
server.loadEditorContext().fail(function(msg) {
receivedMsg = msg;
});
expect(receivedMsg).toEqual("Test error");
});
it("informs the caller of a server error when sending a peer assessment", function() {
stubAjax(true, {success:false, msg:'Test error!'});
......
......@@ -37,16 +37,6 @@ OpenAssessment.StudioView = function(runtime, element, server) {
selfDue: $('#self_assessment_due_date', liveElement)
};
this.aiTrainingExamplesCodeBox = CodeMirror.fromTextArea(
$('#ai_training_examples', liveElement).first().get(0),
{mode: "xml", lineNumbers: true, lineWrapping: true}
);
this.studentTrainingExamplesCodeBox = CodeMirror.fromTextArea(
$('#student_training_examples', liveElement).first().get(0),
{mode: "xml", lineNumbers: true, lineWrapping: true}
);
// Captures the HTML definition of the original criterion element. This will be the template
// used for all other criterion creations
var criterionHtml = $("#openassessment_criterion_1", liveElement).parent().html();
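// (Per the comment above, this captured markup presumably seeds
// addNewCriterionToRubric when additional criteria are created.)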
......@@ -104,85 +94,6 @@ OpenAssessment.StudioView.prototype = {
**/
load: function () {
var view = this;
this.server.loadEditorContext().done(
function (prompt, rubric, title, subStart, subDue, assessments) {
view.settingsFieldSelectors.submissionStartField.prop('value', subStart);
view.settingsFieldSelectors.submissionDueField.prop('value', subDue);
view.settingsFieldSelectors.promptBox.prop('value', prompt);
view.settingsFieldSelectors.titleField.prop('value', title);
view.settingsFieldSelectors.hasTraining.prop('checked', false).change();
view.settingsFieldSelectors.hasPeer.prop('checked', false).change();
view.settingsFieldSelectors.hasSelf.prop('checked', false).change();
view.settingsFieldSelectors.hasAI.prop('checked', false).change();
for (var i = 0; i < assessments.length; i++) {
var assessment = assessments[i];
if (assessment.name == 'peer-assessment') {
view.settingsFieldSelectors.peerMustGrade.prop('value', assessment.must_grade);
view.settingsFieldSelectors.peerGradedBy.prop('value', assessment.must_be_graded_by);
view.settingsFieldSelectors.peerStart.prop('value', assessment.start);
view.settingsFieldSelectors.peerDue.prop('value', assessment.due);
view.settingsFieldSelectors.hasPeer.prop('checked', true).change();
} else if (assessment.name == 'self-assessment') {
view.settingsFieldSelectors.selfStart.prop('value', assessment.start);
view.settingsFieldSelectors.selfDue.prop('value', assessment.due);
view.settingsFieldSelectors.hasSelf.prop('checked', true).change();
} else if (assessment.name == 'example-based-assessment') {
view.settingsFieldSelectors.aiTrainingExamplesCodeBox.setValue(assessment.examples);
view.settingsFieldSelectors.hasAI.prop('checked', true).change();
} else if (assessment.name == 'student-training') {
view.studentTrainingExamplesCodeBox.setValue(assessment.examples);
view.settingsFieldSelectors.hasTraining.prop('checked', true).change();
}
}
// Corrects the length of the number of criteria
while(view.numberOfCriteria < rubric.criteria.length){
view.addNewCriterionToRubric();
}
while(view.numberOfCriteria > rubric.criteria.length){
view.removeCriterionFromRubric(1);
}
// Corrects the number of options in each criterion
for (i = 0; i < rubric.criteria.length; i++){
while(view.numberOfOptions[i+1] < rubric.criteria[i].options.length){
view.addNewOptionToCriterion(view.liveElement, i+1);
}
while(view.numberOfOptions[i+1] > rubric.criteria[i].options.length){
view.removeOptionFromCriterion(view.liveElement, i+1, 1);
}
}
// Inserts the data from the rubric into the GUI's fields
for (i = 0; i < rubric.criteria.length; i++){
var criterion = rubric.criteria[i];
var selectors = view.rubricCriteriaSelectors[i+1];
// Transfers the Criteria Fields
selectors.name.prop('value', criterion.name);
selectors.prompt.prop('value', criterion.prompt);
selectors.feedback = criterion.feedback;
for (var j = 0; j < criterion.options.length; j++){
var option = criterion.options[j];
var optionSelectors = selectors.options[j+1];
// Transfers all of the option data.
optionSelectors.name.prop('value', option.name);
optionSelectors.points.prop('value', option.points);
optionSelectors.explanation.prop('value', option.explanation);
}
}
if (rubric.feedbackprompt){
view.rubricFeedbackPrompt.prop('value', rubric.feedbackprompt);
view.hasRubricFeedbackPrompt = true;
} else {
view.rubricFeedbackPrompt.prop('value', "");
view.hasRubricFeedbackPrompt = false;
}
}).fail(function (msg) {
view.showError(msg);
}
);
},
/**
......@@ -217,13 +128,8 @@ OpenAssessment.StudioView.prototype = {
*/
addSettingsAssessmentCheckboxListener: function (name, liveElement) {
$("#include_" + name , liveElement) .change(function () {
if (this.checked){
$("#" + name + "_description_closed", liveElement).fadeOut('fast');
$("#" + name + "_settings_editor", liveElement).fadeIn();
} else {
$("#" + name + "_settings_editor", liveElement).fadeOut('fast');
$("#" + name + "_description_closed", liveElement).fadeIn();
}
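// Replace the fade animations above with a single 'is--hidden' class toggle,
// matching the class the edit templates now render for initially hidden panels.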
$("#" + name + "_description_closed", liveElement).toggleClass('is--hidden', this.checked);
$("#" + name + "_settings_editor", liveElement).toggleClass('is--hidden', !this.checked);
});
},
......
......@@ -395,36 +395,6 @@ OpenAssessment.Server.prototype = {
},
/**
Load the XBlock's editor context from the server.
Returns:
A JQuery promise, which resolves with the editor context
(prompt, rubric, title, submission dates, and assessments)
and fails with an error message.
Example:
server.loadEditorContext().done(
function(prompt, rubric, title, subStart, subDue, assessments) { ... }
).fail(
function(err) { console.log(err); }
);
**/
loadEditorContext: function() {
var url = this.url('editor_context');
return $.Deferred(function(defer) {
$.ajax({
type: "POST", url: url, data: "\"\""
}).done(function(data) {
if (data.success) { defer.resolveWith(this, [
data.prompt, data.rubric, data.title, data.submission_start, data.submission_due, data.assessments
]); }
else { defer.rejectWith(this, [data.msg]); }
}).fail(function(data) {
defer.rejectWith(this, [gettext('This problem could not be loaded.')]);
});
}).promise();
},
/**
Update the XBlock's XML definition on the server.
Kwargs:
......
"""
Studio editing view for OpenAssessment XBlock.
"""
import pkg_resources
import copy
import logging
from django.template.context import Context
from django.template.loader import get_template
from django.utils.translation import ugettext as _
from voluptuous import MultipleInvalid
......@@ -20,6 +20,16 @@ logger = logging.getLogger(__name__)
class StudioMixin(object):
"""
Studio editing view for OpenAssessment XBlock.
"""
DEFAULT_CRITERIA = [
{
'options': [
{
},
]
}
]
......@@ -34,12 +44,73 @@ class StudioMixin(object):
Returns:
(Fragment): An HTML fragment for editing the configuration of this XBlock.
"""
rendered_template = get_template(
'openassessmentblock/edit/oa_edit.html'
).render(Context(self.editor_context()))
frag = Fragment(rendered_template)
frag.add_javascript(pkg_resources.resource_string(__name__, "static/js/openassessment.min.js"))
frag.initialize_js('OpenAssessmentEditor')
return frag
def editor_context(self):
"""
Retrieve the XBlock's content definition.
Returns:
dict with keys
'prompt' (unicode), 'title' (unicode),
'submission_start' (unicode), 'submission_due' (unicode),
'assessments' (dict), 'criteria' (list), 'feedbackprompt' (unicode)
"""
# Copy the rubric assessments so that we can convert the student training
# examples from dict -> str without mutating the openassessmentblock
# definition.
# Django templates cannot handle dict keys with dashes, so convert the
# dashes to underscores.
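# Illustrative mapping (key names as used by this block's edit templates):
#   'peer-assessment'  -> assessments['peer_assessment']
#   'student-training' -> assessments['student_training']
# letting templates write e.g. {{ assessments.peer_assessment.must_grade }}.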
assessments = {}
for assessment in self.rubric_assessments:
name = assessment['name']
assessments[name.replace('-', '_')] = copy.deepcopy(
assessment
)
student_training_module = self.get_assessment_module(
'student-training'
)
if student_training_module:
student_training_module = copy.deepcopy(student_training_module)
try:
examples = xml.serialize_examples_to_xml_str(
student_training_module
)
student_training_module["examples"] = examples
assessments['student_training'] = student_training_module
# We do not expect serialization to raise an exception, but if it does,
# handle it gracefully.
except Exception:
logger.exception("An error occurred while serializing the XBlock")
submission_due = self.submission_due if self.submission_due else ''
submission_start = self.submission_start if self.submission_start else ''
# Every rubric requires at least one criterion. If no criteria are
# configured for the XBlock, return a single empty default criterion
# with an empty default option.
criteria = copy.deepcopy(self.rubric_criteria)
if not criteria:
criteria = self.DEFAULT_CRITERIA
return {
'prompt': self.prompt,
'title': self.title,
'submission_due': submission_due,
'submission_start': submission_start,
'assessments': assessments,
'criteria': criteria,
'feedbackprompt': unicode(self.rubric_feedback_prompt),
}
@XBlock.json_handler
def update_editor_context(self, data, suffix=''):
"""
......@@ -94,66 +165,6 @@ class StudioMixin(object):
return {'success': True, 'msg': _(u'Successfully updated OpenAssessment XBlock')}
@XBlock.json_handler
def editor_context(self, data, suffix=''):
"""
Retrieve the XBlock's content definition, serialized as a JSON object
containing all the configuration as it will be displayed for studio
editing.
Args:
data (dict): Not used
Kwargs:
suffix (str): Not used
Returns:
dict with keys
'success' (bool), 'msg' (unicode), 'rubric' (unicode), 'prompt' (unicode),
'title' (unicode), 'submission_start' (unicode), 'submission_due' (unicode), 'assessments' (dict)
"""
try:
# Copies the rubric assessments so that we can change student training examples from dict -> str without
# negatively modifying the openassessmentblock definition.
assessment_list = copy.deepcopy(self.rubric_assessments)
# Finds the student training dictionary, if it exists, and replaces the examples with their XML definition
student_training_dictionary = [d for d in assessment_list if d["name"] == "student-training"]
if student_training_dictionary:
# The comprehension above returns a list. Select its first element if it exists.
student_training_dictionary = student_training_dictionary[0]
examples = xml.serialize_examples_to_xml_str(student_training_dictionary)
student_training_dictionary["examples"] = examples
# We do not expect serialization to raise an exception, but if it does, handle it gracefully.
except:
logger.exception("An error occurred while serializing the XBlock")
msg = _('An unexpected error occurred while loading the problem')
return {'success': False, 'msg': msg, 'xml': u''}
# Populates the context for the assessments section of the editing
# panel. This will adjust according to the fields laid out in this
# section.
submission_due = self.submission_due if self.submission_due else ''
submission_start = self.submission_start if self.submission_start else ''
rubric_dict = {
'criteria' : self.rubric_criteria,
'feedbackprompt': unicode(self.rubric_feedback_prompt)
}
return {
'success': True,
'msg': '',
'rubric': rubric_dict,
'prompt': self.prompt,
'submission_due': submission_due,
'submission_start': submission_start,
'title': self.title,
'assessments': assessment_list
}
@XBlock.json_handler
def check_released(self, data, suffix=''):
"""
Check whether the problem has been released.
......
......@@ -21,23 +21,6 @@ class StudioViewTest(XBlockHandlerTestCase):
frag = self.runtime.render(xblock, 'studio_view')
self.assertNotEqual(frag.body_html().find('openassessment-edit'), -1)
@scenario('data/basic_scenario.xml')
def test_get_editor_context(self, xblock):
resp = self.request(xblock, 'editor_context', '""', response_format='json')
self.assertTrue(resp['success'])
self.assertEqual(resp['msg'], u'')
# Verify that the Rubric has criteria, and that they are a list of dictionaries
self.assertTrue(isinstance(resp['rubric']['criteria'], list))
self.assertTrue(isinstance(resp['rubric']['criteria'][0], dict))
# Verify that every assessment in the list of assessments has a name.
for assessment_dict in resp['assessments']:
self.assertTrue(assessment_dict.get('name', False))
if assessment_dict.get('name') == 'student-training':
examples = etree.fromstring(assessment_dict['examples'])
self.assertEqual(examples.tag, 'examples')
@file_data('data/update_xblock.json')
@scenario('data/basic_scenario.xml')
def test_update_context(self, xblock, data):
......