Commit 31b4b455 by Will Daly

Add schema validation for input from Studio JavaScript

Use text inputs for dates
Make feedback prompt mandatory
parent 9db810a4
......@@ -131,18 +131,10 @@
<div id="openassessment_rubric_feedback_wrapper" class="wrapper-comp-settings">
<div id="openassessment_rubric_feedback_header_open">
<div id="openassessment_rubric_feedback_header">
<span>
{% trans "Rubric Feedback" %}
</span>
</div>
</div>
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
......@@ -152,7 +144,7 @@
</div>
</li>
<p class="setting-help">
{% trans "If you would like your students to be able to provide feedback on the rubric, add a prompt to ask them for it." %}
{% trans "Directions shown to students when they give feedback." %}
</p>
</ul>
</div>
......@@ -171,14 +163,14 @@
<li class="openassessment_date_editor field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_submission_start_editor" class="setting-label">{% trans "Response Submission Start Date"%} </label>
<input type="datetime-local" class="input setting-input" id="openassessment_submission_start_editor">
<input type="text" class="input setting-input" id="openassessment_submission_start_editor">
</div>
<p class="setting-help">{% trans "The date at which submissions will first be accepted." %}</p>
</li>
<li class="openassessment_date_editor field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_submission_due_editor" class="setting-label">{% trans "Response Submission Due Date" %}</label>
<input type="datetime-local" class="input setting-input" id="openassessment_submission_due_editor">
<input type="text" class="input setting-input" id="openassessment_submission_due_editor">
</div>
<p class="setting-help">{% trans "The date at which submissions will stop being accepted." %}</p>
</li>
......@@ -247,14 +239,14 @@
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_start_date" class="setting-label">{% trans "Start Date" %}</label>
<input id="peer_assessment_start_date" type="datetime-local" class="input setting-input">
<input id="peer_assessment_start_date" type="text" class="input setting-input">
</div>
<p class="setting-help">{% trans "If desired, specify a start date for the peer assessment period. If no date is specified, peer assessment can begin when submissions begin."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_due_date" class="setting-label">{% trans "Due Date" %}</label>
<input id="peer_assessment_due_date" type="datetime-local" class="input setting-input">
<input id="peer_assessment_due_date" type="text" class="input setting-input">
</div>
<p class="setting-help">{% trans "If desired, specify a due date for the peer assessment period. If no date is specified, peer assessment can run as long as the problem is open."%}</p>
</li>
......@@ -280,14 +272,14 @@
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="self_assessment_start_date" class="setting-label">{% trans "Start Date" %}</label>
<input id="self_assessment_start_date" type="datetime-local" class="input setting-input">
<input id="self_assessment_start_date" type="text" class="input setting-input">
</div>
<p class="setting-help">{% trans "If desired, specify a start date for the self assessment period. If no date is specified, self assessment can begin when submissions begin."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="self_assessment_due_date" class="setting-label">{% trans "Due Date" %}</label>
<input id="self_assessment_due_date" type="datetime-local" class="input setting-input">
<input id="self_assessment_due_date" type="text" class="input setting-input">
</div>
<p class="setting-help">{% trans "If desired, specify a due date for the self assessment period. If no date is specified, self assessment can run as long as the problem is open."%}</p>
</li>
......
......@@ -25,11 +25,11 @@ from openassessment.xblock.studio_mixin import StudioMixin
from openassessment.xblock.xml import parse_from_xml, serialize_content_to_xml
from openassessment.xblock.staff_info_mixin import StaffInfoMixin
from openassessment.xblock.workflow_mixin import WorkflowMixin
from openassessment.workflow import api as workflow_api
from openassessment.workflow.errors import AssessmentWorkflowError
from openassessment.xblock.student_training_mixin import StudentTrainingMixin
from openassessment.xblock.validation import validator
from openassessment.xblock.resolve_dates import resolve_dates, DISTANT_PAST, DISTANT_FUTURE
from openassessment.xblock.data_conversion import create_rubric_dict
logger = logging.getLogger(__name__)
......@@ -345,60 +345,26 @@ class OpenAssessmentBlock(
Inherited by XBlock core.
"""
block = runtime.construct_xblock_from_class(cls, keys)
config = parse_from_xml(node)
xblock_validator = validator(block, strict_post_release=False)
xblock_validator(
create_rubric_dict(config['prompt'], config['rubric_criteria']),
config['rubric_assessments'],
submission_start=config['submission_start'],
submission_due=config['submission_due']
)
block.rubric_criteria = config['rubric_criteria']
block.rubric_feedback_prompt = config['rubric_feedback_prompt']
block.rubric_assessments = config['rubric_assessments']
block.submission_start = config['submission_start']
block.submission_due = config['submission_due']
block.title = config['title']
block.prompt = config['prompt']
return block
@property
def valid_assessments(self):
......
"""
Schema for validating and sanitizing data received from the JavaScript client.
"""
import dateutil.parser
from pytz import utc
from voluptuous import Schema, Required, All, Any, Range, In, Invalid
from openassessment.xblock.xml import parse_examples_xml_str, UpdateFromXmlError
def utf8_validator(value):
"""Validate and sanitize unicode strings.
If we're given a bytestring, assume that the encoding is UTF-8
Args:
value: The value to validate
Returns:
unicode
Raises:
Invalid
"""
try:
if isinstance(value, str):
return value.decode('utf-8')
else:
return unicode(value)
except (ValueError, TypeError):
raise Invalid(u"Could not load unicode from value \"{val}\"".format(val=value))
def datetime_validator(value):
"""Validate and sanitize a datetime string in ISO format.
Args:
value: The value to validate
Returns:
unicode: ISO-formatted datetime string
Raises:
Invalid
"""
try:
# The dateutil parser defaults empty values to the current day,
# which is NOT what we want.
if value is None or value == '':
raise Invalid(u"Datetime value cannot be \"{val}\"".format(val=value))
# Parse the date and interpret it as UTC
value = dateutil.parser.parse(value).replace(tzinfo=utc)
return unicode(value.isoformat())
except (ValueError, TypeError):
raise Invalid(u"Could not parse datetime from value \"{val}\"".format(val=value))
def examples_xml_validator(value):
"""Parse and validate student training examples XML.
Args:
value: The value to parse.
Returns:
list of training examples, serialized as dictionaries.
Raises:
Invalid
"""
try:
return parse_examples_xml_str(value)
except UpdateFromXmlError:
raise Invalid(u"Could not parse examples from XML")
# Schema definition for an update from the Studio JavaScript editor.
EDITOR_UPDATE_SCHEMA = Schema({
Required('prompt'): utf8_validator,
Required('title'): utf8_validator,
Required('feedback_prompt'): utf8_validator,
Required('submission_start'): Any(datetime_validator, None),
Required('submission_due'): Any(datetime_validator, None),
Required('assessments'): [
Schema({
Required('name'): All(
utf8_validator,
In([
u'peer-assessment',
u'self-assessment',
u'example-based-assessment',
u'student-training'
])
),
Required('start', default=None): Any(datetime_validator, None),
Required('due', default=None): Any(datetime_validator, None),
'must_grade': All(int, Range(min=0)),
'must_be_graded_by': All(int, Range(min=0)),
'examples': All(utf8_validator, examples_xml_validator)
})
],
Required('feedbackprompt', default=u""): utf8_validator,
Required('criteria'): [
Schema({
Required('order_num'): All(int, Range(min=0)),
Required('name'): utf8_validator,
Required('prompt'): utf8_validator,
Required('feedback'): All(
utf8_validator,
In([
'disabled',
'optional',
'required',
])
),
Required('options'): [
Schema({
Required('order_num'): All(int, Range(min=0)),
Required('name'): utf8_validator,
Required('explanation'): utf8_validator,
Required('points'): All(int, Range(min=0)),
})
]
})
]
})
\ No newline at end of file
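For reference, a minimal sketch of how this schema might be exercised. The payload values are hypothetical but follow the same shapes as the update_editor_context fixtures later in this commit; voluptuous raises Invalid (or MultipleInvalid) on bad input:

from openassessment.xblock.schema import EDITOR_UPDATE_SCHEMA

# Hypothetical Studio editor payload.
clean = EDITOR_UPDATE_SCHEMA({
    'prompt': 'Write an essay.',
    'title': 'My new title.',
    'feedback_prompt': 'Feedback prompt',
    'submission_start': None,
    'submission_due': '4014-02-27T09:46',
    'assessments': [
        {'name': 'self-assessment', 'start': None, 'due': '4014-03-10T00:00'}
    ],
    'criteria': [
        {
            'order_num': 0,
            'name': 'Test criterion',
            'prompt': 'Test criterion prompt',
            'feedback': 'required',
            'options': [
                {'order_num': 0, 'points': 0, 'name': 'No', 'explanation': 'No explanation'}
            ]
        }
    ]
})

# Sanitized values come back as unicode; datetime strings are parsed and
# coerced to UTC, e.g.:
# clean['submission_due'] == u'4014-02-27T09:46:00+00:00'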
......@@ -31,6 +31,7 @@ describe("OpenAssessment.Server", function() {
};
var PROMPT = "Hello this is the prompt yes.";
var FEEDBACK_PROMPT = "Prompt for feedback";
var RUBRIC = '<rubric>'+
'<criterion>'+
......@@ -51,6 +52,14 @@ describe("OpenAssessment.Server", function() {
'</criterion>'+
'</rubric>';
var CRITERIA = [
'criteria',
'objects',
'would',
'be',
'here'
];
var ASSESSMENTS = [
{
"name": "peer-assessment",
......@@ -226,14 +235,25 @@ describe("OpenAssessment.Server", function() {
it("updates the XBlock's Context definition", function() {
stubAjax(true, { success: true });
server.updateEditorContext(
PROMPT, RUBRIC, TITLE, SUBMISSION_START, SUBMISSION_DUE, ASSESSMENTS
);
server.updateEditorContext({
prompt: PROMPT,
feedbackPrompt: FEEDBACK_PROMPT,
title: TITLE,
submissionStart: SUBMISSION_START,
submissionDue: SUBMISSION_DUE,
criteria: CRITERIA,
assessments: ASSESSMENTS
});
expect($.ajax).toHaveBeenCalledWith({
type: "POST", url: '/update_editor_context',
data: JSON.stringify({
prompt: PROMPT,
feedback_prompt: FEEDBACK_PROMPT,
title: TITLE,
submission_start: SUBMISSION_START,
submission_due: SUBMISSION_DUE,
criteria: CRITERIA,
assessments: ASSESSMENTS
})
});
});
......
......@@ -68,7 +68,6 @@ OpenAssessment.StudioView = function(runtime, element, server) {
this.numberOfOptions = [];
this.rubricCriteriaSelectors = [];
this.rubricFeedbackPrompt = $('#openassessment_rubric_feedback', liveElement);
this.hasRubricFeedbackPrompt = true;
$('#openassessment_criterion_list', liveElement).empty();
this.addNewCriterionToRubric();
......@@ -96,25 +95,6 @@ OpenAssessment.StudioView = function(runtime, element, server) {
view.addNewCriterionToRubric(liveElement);
});
// Adds a listener which removes rubric feedback
$("#openassessment_rubric_feedback_remove", liveElement). click( function(eventData){
$("#openassessment_rubric_feedback_header_open", liveElement).fadeOut();
$("#openassessment_rubric_feedback_input_wrapper", liveElement).fadeOut();
$("#openassessment_rubric_feedback_header_closed", liveElement).fadeIn();
view.hasRubricFeedbackPrompt = false;
});
// Adds a listener which adds rubric feedback if not already displayed.
$("#openassessment_rubric_feedback_header_closed", liveElement). click( function(eventData){
$("#openassessment_rubric_feedback_header_closed", liveElement).fadeOut();
$("#openassessment_rubric_feedback_header_open", liveElement).fadeIn();
$("#openassessment_rubric_feedback_input_wrapper", liveElement).fadeIn();
view.hasRubricFeedbackPrompt = true;
});
// Initially Hides the rubric "add rubric feedback" div
$("#openassessment_rubric_feedback_header_closed", liveElement).hide();
};
OpenAssessment.StudioView.prototype = {
......@@ -339,7 +319,6 @@ OpenAssessment.StudioView.prototype = {
// Hides the criterion header used for adding
$(".openassessment_criterion_feedback_header_closed", liveElement).hide();
},
/**
......@@ -444,7 +423,7 @@ OpenAssessment.StudioView.prototype = {
criterionID (string): The criterion ID that we are deleting from
optionToRemove (string): The option ID that we are "deleting"
*/
removeOptionFromCriterion: function(liveElement, criterionID, optionToRemove) {
var view = this;
var numberOfOptions = view.numberOfOptions[criterionID];
var optionSelectors = view.rubricCriteriaSelectors[criterionID].options;
......@@ -473,12 +452,6 @@ OpenAssessment.StudioView.prototype = {
// to save so it can show the "Saving..." notification
this.runtime.notify('save', {state: 'start'});
// Send the updated XML to the server
var prompt = this.settingsFieldSelectors.promptBox.prop('value');
var title = this.settingsFieldSelectors.titleField.prop('value');
var subStart = this.settingsFieldSelectors.submissionStartField.prop('value');
var subDue = this.settingsFieldSelectors.submissionDueField.prop('value');
// Grabs values from all of our fields, and stores them in a format which can be easily validated.
var rubricCriteria = [];
......@@ -497,77 +470,64 @@ OpenAssessment.StudioView.prototype = {
var optionSelectors = optionSelectorList[j];
optionValueList = optionValueList.concat([{
order_num: j-1,
points: this._getInt(optionSelectors.points),
name: optionSelectors.name.val(),
explanation: optionSelectors.explanation.val()
}]);
}
criterionValueDict.options = optionValueList;
rubricCriteria = rubricCriteria.concat([criterionValueDict]);
}
var rubric = { 'criteria': rubricCriteria };
if (this.hasRubricFeedbackPrompt){
rubric.feedbackprompt = this.rubricFeedbackPrompt.prop('value');
}
var assessments = [];
if (this.settingsFieldSelectors.hasTraining.prop('checked')){
assessments[assessments.length] = {
"name": "student-training",
"examples": this.studentTrainingExamplesCodeBox.getValue()
};
assessments.push({
name: "student-training",
examples: this.studentTrainingExamplesCodeBox.getValue()
});
}
if (this.settingsFieldSelectors.hasPeer.prop('checked')) {
var assessment = {
"name": "peer-assessment",
"must_grade": parseInt(this.settingsFieldSelectors.peerMustGrade.prop('value')),
"must_be_graded_by": parseInt(this.settingsFieldSelectors.peerGradedBy.prop('value'))
};
var startStr = this.settingsFieldSelectors.peerStart.prop('value');
var dueStr = this.settingsFieldSelectors.peerDue.prop('value');
if (startStr){
assessment = $.extend(assessment, {"start": startStr});
}
if (dueStr){
assessment = $.extend(assessment, {"due": dueStr});
}
assessments[assessments.length] = assessment;
assessments.push({
name: "peer-assessment",
must_grade: this._getInt(this.settingsFieldSelectors.peerMustGrade),
must_be_graded_by: this._getInt(this.settingsFieldSelectors.peerGradedBy),
start: this._getDateTime(this.settingsFieldSelectors.peerStart),
due: this._getDateTime(this.settingsFieldSelectors.peerDue)
});
}
if (this.settingsFieldSelectors.hasSelf.prop('checked')) {
var assessment = {
"name": "self-assessment"
};
var startStr = this.settingsFieldSelectors.selfStart.prop('value');
var dueStr = this.settingsFieldSelectors.selfDue.prop('value');
if (startStr){
assessment = $.extend(assessment, {"start": startStr});
}
if (dueStr){
assessment = $.extend(assessment, {"due": dueStr});
}
assessments[assessments.length] = assessment;
assessments.push({
name: "self-assessment",
start: this._getDateTime(this.settingsFieldSelectors.selfStart),
due: this._getDateTime(this.settingsFieldSelectors.selfDue)
});
}
if (this.settingsFieldSelectors.hasAI.prop('checked')) {
assessments[assessments.length] = {
"name": "example-based-assessment",
"examples": this.aiTrainingExamplesCodeBox.getValue()
};
assessments.push({
name: "example-based-assessment",
examples: this.aiTrainingExamplesCodeBox.getValue()
});
}
var view = this;
this.server.updateEditorContext(prompt, rubric, title, subStart, subDue, assessments).done(function () {
// Notify the client-side runtime that we finished saving
// so it can hide the "Saving..." notification.
view.runtime.notify('save', {state: 'end'});
// Reload the XML definition in the editor
view.load();
this.server.updateEditorContext({
title: this.settingsFieldSelectors.titleField.val(),
prompt: this.settingsFieldSelectors.promptBox.val(),
feedbackPrompt: this.rubricFeedbackPrompt.val(),
submissionStart: this._getDateTime(this.settingsFieldSelectors.submissionStartField),
submissionDue: this._getDateTime(this.settingsFieldSelectors.submissionDueField),
criteria: rubricCriteria,
assessments: assessments
}).done(
function () {
// Notify the client-side runtime that we finished saving
// so it can hide the "Saving..." notification.
// Then reload the view.
view.runtime.notify('save', {state: 'end'});
}).fail(function (msg) {
view.showError(msg);
});
......@@ -589,7 +549,55 @@ OpenAssessment.StudioView.prototype = {
**/
showError: function (errorMsg) {
this.runtime.notify('error', {msg: errorMsg});
},
/**
Retrieve a value from a datetime input.
Args:
selector: The JQuery selector for the datetime input.
Returns:
ISO-formatted datetime string or null
**/
_getDateTime: function(selector) {
var dateStr = selector.val();
// By convention, empty date strings are null,
// meaning choose the default date based on
// other dates set in the problem configuration.
if (dateStr === "") {
return null;
}
// Attempt to parse the date string
// TODO: invalid dates are currently also coerced to null,
// which is probably NOT what the user wants!
// We should add proper validation here.
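// Illustrative mapping (hypothetical inputs; exact parsing of
// zone-less strings depends on the browser's Date.parse):
//   ""                  -> null
//   "not a date"        -> null
//   "4014-03-10T00:00"  -> "4014-03-10T00:00:00.000Z" (when parsed as UTC)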
var timestamp = Date.parse(dateStr);
if (isNaN(timestamp)) {
return null;
}
// Send the datetime in ISO format
// This will also convert the timezone to UTC
return new Date(timestamp).toISOString();
},
/**
Retrieve an integer value from an input.
Args:
selector: The JQuery selector for the input.
Returns:
int
**/
_getInt: function(selector) {
return parseInt(selector.val(), 10);
}
};
......
......@@ -427,26 +427,30 @@ OpenAssessment.Server.prototype = {
/**
Update the XBlock's editor context definition on the server.
Kwargs:
title (string): The title of the problem.
prompt (string): The question prompt.
feedbackPrompt (string): The directions to the student for giving overall feedback on a submission.
submissionStart (ISO-formatted datetime string or null): The start date of the submission.
submissionDue (ISO-formatted datetime string or null): The date the submission is due.
criteria (list of object literals): The rubric criteria.
assessments (list of object literals): The assessments the student will be evaluated on.
Returns:
A JQuery promise, which resolves with no arguments
and fails with an error message.
Example usage:
server.updateEditorContext(kwargs).done(
function() {}
).fail(
function(err) { console.log(err); }
);
**/
updateEditorContext: function(prompt, rubric, title, sub_start, sub_due, assessments) {
updateEditorContext: function(kwargs) {
var url = this.url('update_editor_context');
var payload = JSON.stringify({
prompt: kwargs.prompt,
feedback_prompt: kwargs.feedbackPrompt,
title: kwargs.title,
submission_start: kwargs.submissionStart,
submission_due: kwargs.submissionDue,
criteria: kwargs.criteria,
assessments: kwargs.assessments
});
return $.Deferred(function(defer) {
$.ajax({
......
......@@ -120,7 +120,10 @@ class StudentTrainingMixin(object):
examples
)
context['training_essay'] = example['answer']
context['training_rubric'] = example['rubric']
context['training_rubric'] = {
'criteria': example['rubric']['criteria'],
'points_possible': example['rubric']['points_possible']
}
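# Illustrative shape of the trimmed context value (hypothetical numbers):
#   {'criteria': [...], 'points_possible': 12}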
template = 'openassessmentblock/student_training/student_training.html'
return template, context
......
{
"no-dates": {
"assessments_list": [
{
"name": "peer-assessment",
"start": "",
"due": "",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": "",
"start": ""
}
],
"results": [
{
"name": "peer-assessment",
"start": null,
"due": null,
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": null,
"start": null
}
]
},
"student-training": {
"assessments_list": [
{
"name": "student-training",
"start": "",
"due": "",
"examples": "<example><answer>ẗëṡẗ äṅṡẅëṛ</answer><select criterion=\"Test criterion\" option=\"Yes\" /><select criterion=\"Another test criterion\" option=\"No\" /></example><example><answer>äṅöẗḧëṛ ẗëṡẗ äṅṡẅëṛ</answer><select criterion=\"Another test criterion\" option=\"Yes\" /><select criterion=\"Test criterion\" option=\"No\" /></example>"
},
{
"name": "peer-assessment",
"start": "",
"due": "",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": "",
"start": ""
}
],
"results": [
{
"name": "student-training",
"due": null,
"start": null,
"examples": [
{
"answer": "ẗëṡẗ äṅṡẅëṛ",
"options_selected": [
{
"criterion": "Test criterion",
"option": "Yes"
},
{
"criterion": "Another test criterion",
"option": "No"
}
]
},
{
"answer": "äṅöẗḧëṛ ẗëṡẗ äṅṡẅëṛ",
"options_selected": [
{
"criterion": "Another test criterion",
"option": "Yes"
},
{
"criterion": "Test criterion",
"option": "No"
}
]
}
]
},
{
"name": "peer-assessment",
"start": null,
"due": null,
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": null,
"start": null
}
]
},
"date-parsing": {
"assessments_list": [
{
"name": "student-training",
"start": "2014-10-10T01:00:01",
"due": "",
"examples": "<example><answer>ẗëṡẗ äṅṡẅëṛ</answer><select criterion=\"Test criterion\" option=\"Yes\" /><select criterion=\"Another test criterion\" option=\"No\" /></example><example><answer>äṅöẗḧëṛ ẗëṡẗ äṅṡẅëṛ</answer><select criterion=\"Another test criterion\" option=\"Yes\" /><select criterion=\"Test criterion\" option=\"No\" /></example>"
},
{
"name": "peer-assessment",
"start": "",
"due": "2015-01-01T00:00:00",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": "2015-01-01T00:00:00",
"start": ""
}
],
"results": [
{
"name": "student-training",
"due": null,
"start": "2014-10-10T01:00:01",
"examples": [
{
"answer": "ẗëṡẗ äṅṡẅëṛ",
"options_selected": [
{
"criterion": "Test criterion",
"option": "Yes"
},
{
"criterion": "Another test criterion",
"option": "No"
}
]
},
{
"answer": "äṅöẗḧëṛ ẗëṡẗ äṅṡẅëṛ",
"options_selected": [
{
"criterion": "Another test criterion",
"option": "Yes"
},
{
"criterion": "Test criterion",
"option": "No"
}
]
}
]
},
{
"name": "peer-assessment",
"start": null,
"due": "2015-01-01T00:00:00",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": "2015-01-01T00:00:00",
"start": null
}
]
}
}
\ No newline at end of file
{
"date-parsing-due": {
"assessments_list": [
{
"name": "student-training",
"start": "2014-10-10T01:00:01",
"due": "",
"examples": "<examples><example><answer>ẗëṡẗ äṅṡẅëṛ</answer><select criterion=\"Test criterion\" option=\"Yes\" /><select criterion=\"Another test criterion\" option=\"No\" /></example><example><answer>äṅöẗḧëṛ ẗëṡẗ äṅṡẅëṛ</answer><select criterion=\"Another test criterion\" option=\"Yes\" /><select criterion=\"Test criterion\" option=\"No\" /></example></examples>"
},
{
"name": "peer-assessment",
"start": "",
"due": "2015-01-01T00:00:HI",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": "2015-014-01",
"start": ""
}
]
},
"date-parsing-start": {
"assessments_list": [
{
"name": "peer-assessment",
"start": "2014-13-13T00:00:00",
"due": "",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": "",
"start": ""
}
]
},
"no-answers-in-examples": {
"assessments_list": [
{
"name": "student-training",
"start": "",
"due": "",
"examples": "<example><select criterion=\"Test criterion\" option=\"Yes\" /><select criterion=\"Another test criterion\" option=\"No\" /></example><example><answer>äṅöẗḧëṛ ẗëṡẗ äṅṡẅëṛ</answer><select criterion=\"Another test criterion\" option=\"Yes\" /><select criterion=\"Test criterion\" option=\"No\" /></example>"
},
{
"name": "peer-assessment",
"start": "",
"due": "",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": "",
"start": ""
}
]
},
"must_grade": {
"assessments_list": [
{
"name": "peer-assessment",
"start": "",
"due": "",
"must_grade": "Not a number fool!",
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"due": "",
"start": ""
}
]
},
"must_be_graded_by": {
"assessments_list": [
{
"name": "peer-assessment",
"start": "",
"due": "",
"must_grade": 3,
"must_be_graded_by": "Not a number fool!"
},
{
"name": "self-assessment",
"due": "",
"start": ""
}
]
}
}
\ No newline at end of file
......@@ -7,9 +7,6 @@
"training_num_available": 2,
"training_essay": "This is my answer.",
"training_rubric": {
"id": 2,
"content_hash": "de2bb2b7e2c6e3df014e53b8c65f37d511cc4344",
"structure_hash": "a513b20d93487d6d80e31e1d974bf22519332567",
"criteria": [
{
"order_num": 0,
......
{
"simple": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "required"
}
]
},
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "required"
}
],
"prompt": "My new prompt.",
"submission_due": "4014-02-27T09:46:28",
"submission_start": "4014-02-10T09:46:28",
"feedback_prompt": "Feedback prompt",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
"assessments": [
{
"name": "peer-assessment",
"must_grade": 5,
"must_be_graded_by": 3,
"start": "",
"due": "4014-03-10T00:00:00"
"start": null,
"due": "4014-03-10T00:00"
},
{
"name": "self-assessment",
"start": "",
"due": ""
"start": null,
"due": null
}
]
},
"unicode": {
"rubric": {
"prompt": "Ṫëṡẗ ṗṛöṁṗẗ",
"criteria": [
{
"order_num": 0,
"name": "Ṫëṡẗ ċṛïẗëïṛöṅ",
"prompt": "Téśt ćŕítéíŕőń ṕŕőḿṕt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "Ṅö",
"explanation": "Ńő éxṕĺáńátíőń"
},
{
"order_num": 1,
"points": 2,
"name": "sǝʎ",
"explanation": "Чэѕ эхрlаиатіои"
}
],
"feedback": "required"
}
]
},
"criteria": [
{
"order_num": 0,
"name": "Ṫëṡẗ ċṛïẗëïṛöṅ",
"prompt": "Téśt ćŕítéíŕőń ṕŕőḿṕt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "Ṅö",
"explanation": "Ńő éxṕĺáńátíőń"
},
{
"order_num": 1,
"points": 2,
"name": "sǝʎ",
"explanation": "Чэѕ эхрlаиатіои"
}
],
"feedback": "required"
}
],
"prompt": "Ṁÿ ṅëẅ ṗṛöṁṗẗ.",
"submission_due": "4014-02-27T09:46:28",
"submission_start": "4014-02-10T09:46:28",
"feedback_prompt": "ḟëëḋḅäċḳ ṗṛöṁṗẗ",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "ɯʎ uǝʍ ʇıʇןǝ",
"assessments": [
{
"name": "peer-assessment",
"must_grade": 5,
"must_be_graded_by": 3,
"start": "",
"due": "4014-03-10T00:00:00"
"start": null,
"due": "4014-03-10T00:00"
},
{
"name": "self-assessment",
"start": "",
"due": ""
"start": null,
"due": null
}
]
}
}
......@@ -5,6 +5,7 @@ Tests for the student training step in the Open Assessment XBlock.
import datetime
import ddt
import json
import pprint
from mock import patch
import pytz
from django.db import DatabaseError
......@@ -196,7 +197,11 @@ class StudentTrainingAssessTest(XBlockHandlerTestCase):
iso_date = context['training_due'].isoformat()
self.assertEqual(iso_date, expected_context[key])
else:
self.assertEqual(context[key], expected_context[key])
msg = u"Expected \n {expected} \n but found \n {actual}".format(
actual=pprint.pformat(context[key]),
expected=pprint.pformat(expected_context[key])
)
self.assertEqual(context[key], expected_context[key], msg=msg)
# Verify that we render without error
resp = self.request(xblock, 'render_student_training', json.dumps({}))
......
......@@ -48,10 +48,11 @@ class StudioViewTest(XBlockHandlerTestCase):
@file_data('data/invalid_update_xblock.json')
@scenario('data/basic_scenario.xml')
def test_update_context_invalid_request_data(self, xblock, data):
expected_error = data.pop('expected_error')
xblock.published_date = None
resp = self.request(xblock, 'update_editor_context', json.dumps(data), response_format='json')
self.assertFalse(resp['success'])
self.assertIn(data['expected_error'], resp['msg'].lower())
self.assertIn(expected_error, resp['msg'].lower())
@file_data('data/invalid_rubric.json')
@scenario('data/basic_scenario.xml')
......@@ -67,7 +68,7 @@ class StudioViewTest(XBlockHandlerTestCase):
# Verify the response fails
resp = self.request(xblock, 'update_editor_context', request, response_format='json')
self.assertFalse(resp['success'])
self.assertIn("the following keys were missing", resp['msg'].lower())
self.assertIn("error updating xblock configuration", resp['msg'].lower())
# Check that the XBlock fields were NOT updated
# We don't need to be exhaustive here, because we have other unit tests
......
......@@ -235,11 +235,6 @@ class ValidationIntegrationTest(TestCase):
]
}
SUBMISSION = {
"start": None,
"due": None
}
EXAMPLES = [
{
"answer": "ẗëṡẗ äṅṡẅëṛ",
......@@ -293,7 +288,7 @@ class ValidationIntegrationTest(TestCase):
self.validator = validator(self.oa_block)
def test_validates_successfully(self):
is_valid, msg = self.validator(self.RUBRIC, self.SUBMISSION, self.ASSESSMENTS)
is_valid, msg = self.validator(self.RUBRIC, self.ASSESSMENTS)
self.assertTrue(is_valid, msg=msg)
self.assertEqual(msg, "")
......@@ -303,7 +298,7 @@ class ValidationIntegrationTest(TestCase):
mutated_assessments[0]['examples'][0]['options_selected'][0]['criterion'] = 'Invalid criterion!'
# Expect a validation error
is_valid, msg = self.validator(self.RUBRIC, self.SUBMISSION, mutated_assessments)
is_valid, msg = self.validator(self.RUBRIC, mutated_assessments)
self.assertFalse(is_valid)
self.assertEqual(msg, u'Example 1 has an extra option for "Invalid criterion!"; Example 1 is missing an option for "vocabulary"')
......@@ -313,7 +308,7 @@ class ValidationIntegrationTest(TestCase):
mutated_assessments[0]['examples'][0]['options_selected'][0]['option'] = 'Invalid option!'
# Expect a validation error
is_valid, msg = self.validator(self.RUBRIC, self.SUBMISSION, mutated_assessments)
is_valid, msg = self.validator(self.RUBRIC, mutated_assessments)
self.assertFalse(is_valid)
self.assertEqual(msg, u'Example 1 has an invalid option for "vocabulary": "Invalid option!"')
......@@ -327,12 +322,12 @@ class ValidationIntegrationTest(TestCase):
option['points'] = 1
# Expect a validation error
is_valid, msg = self.validator(mutated_rubric, self.SUBMISSION, self.ASSESSMENTS)
is_valid, msg = self.validator(mutated_rubric, self.ASSESSMENTS)
self.assertFalse(is_valid)
self.assertEqual(msg, u'Example-based assessments cannot have duplicate point values.')
# But it should be okay if we don't have example-based assessment
no_example_based = copy.deepcopy(self.ASSESSMENTS)[1:]
is_valid, msg = self.validator(mutated_rubric, self.SUBMISSION, no_example_based)
is_valid, msg = self.validator(mutated_rubric, no_example_based)
self.assertTrue(is_valid)
self.assertEqual(msg, u'')
......@@ -11,7 +11,6 @@ import dateutil.parser
from django.test import TestCase
import ddt
from openassessment.xblock.openassessmentblock import OpenAssessmentBlock
from openassessment.xblock.studio_mixin import parse_assessment_dictionaries
from openassessment.xblock.xml import (
serialize_content, parse_from_xml_str, parse_rubric_xml_str,
parse_examples_xml_str, parse_assessments_xml_str,
......@@ -359,25 +358,6 @@ class TestParseAssessmentsFromXml(TestCase):
self.assertEqual(assessments, data['assessments'])
@ddt.ddt
class TestParseAssessmentsFromDictionaries(TestCase):
@ddt.file_data('data/parse_assessment_dicts.json')
def test_parse_assessments_dictionary(self, data):
config = parse_assessment_dictionaries(data['assessments_list'])
if len(config) == 0:
# Prevents this test from passing benignly if parse_assessment_dictionaries returns []
self.assertTrue(False)
for config_assessment, correct_assessment in zip(config, data['results']):
self.assertEqual(config_assessment, correct_assessment)
@ddt.file_data('data/parse_assessment_dicts_error.json')
def test_parse_assessments_dictionary_error(self, data):
with self.assertRaises(UpdateFromXmlError):
parse_assessment_dictionaries(data['assessments_list'])
@ddt.ddt
class TestUpdateFromXml(TestCase):
......
......@@ -299,7 +299,7 @@ def validator(oa_block, strict_post_release=True):
callable, of a form that can be passed to `update_from_xml`.
"""
def _inner(rubric_dict, submission_dict, assessments):
def _inner(rubric_dict, assessments, submission_start=None, submission_due=None):
is_released = strict_post_release and oa_block.is_released()
......@@ -325,7 +325,7 @@ def validator(oa_block, strict_post_release=True):
return (False, msg)
# Dates
submission_dates = [(submission_dict['start'], submission_dict['due'])]
submission_dates = [(submission_start, submission_due)]
assessment_dates = [(asmnt.get('start'), asmnt.get('due')) for asmnt in assessments]
success, msg = validate_dates(oa_block.start, oa_block.due, submission_dates + assessment_dates)
if not success:
......
......@@ -304,9 +304,6 @@ def parse_rubric_xml(rubric_root):
Args:
rubric_root (lxml.etree.Element): The root of the <rubric> node in the tree.
validator (callable): Function that accepts a rubric dict and returns
a boolean indicating whether the rubric is semantically valid
and an error message string.
Returns:
dict, a serialized representation of a rubric, as defined by the peer grading serializers.
......
......@@ -22,6 +22,7 @@ loremipsum==1.0.2
python-dateutil==2.1
pytz==2012h
South==0.7.6
voluptuous==0.8.5
# AI grading
git+https://github.com/edx/ease.git@f9f47fb6b5c7c8b6c3360efa72eb56561e1a03b0#egg=ease