Commit ec889c39 by Will Daly

Add editing view for Open Assessment block in Studio

parent b7417383
@@ -30,6 +30,7 @@ pip-log.txt
.coverage
.tox
nosetests.xml
+htmlcov
# Translations
*.mo
......
{% load i18n %}
<div id="openassessment-edit">
<textarea class="openassessment-editor"></textarea>
<input type="button" class="openassessment-save-button" value="{% trans 'Save' %}"/>
<input type="button" class="openassessment-cancel-button" value="{% trans 'Cancel' %}"/>
</div>
@@ -54,7 +54,6 @@
<form id="peer-assessment--001__assessment" class="peer-assessment__assessment" method="post">
<fieldset class="assessment__fields">
-            <legend class="assessment__instruction">{{ rubric_instructions }}</legend>
<ol class="list list--fields assessment__rubric">
{% for criterion in rubric_criteria %}
......
@@ -72,7 +72,6 @@ class PeerAssessmentMixin(object):
peer_sub = self.get_peer_submission(self.get_student_item_dict(), assessment)
context_dict = {
"peer_submission": peer_sub,
"rubric_instructions": self.rubric_instructions,
"rubric_criteria": self.rubric_criteria
}
return self.render_assessment('openassessmentblock/oa_peer_assessment.html', context_dict)
......
@@ -69,7 +69,7 @@ class ScenarioParser(object):
)
rubric_criteria.append(crit)
-        return (e.text.strip(), rubric_criteria)
+        return rubric_criteria
def get_assessments(self, assessments):
"""<assessments>
@@ -113,8 +113,7 @@ class ScenarioParser(object):
if child.tag == 'prompt':
self.xblock.prompt = self.get_prompt(child)
elif child.tag == 'rubric':
-                (self.xblock.rubric_instructions,
-                 self.xblock.rubric_criteria) = self.get_rubric(child)
+                self.xblock.rubric_criteria = self.get_rubric(child)
elif child.tag == 'title':
self.xblock.title = self.get_title(child)
elif child.tag == 'assessments':
......
/* JavaScript for Studio editing view of Open Assessment XBlock */
function OpenAssessmentBlock(runtime, element) {
function displayError(errorMsg) {
runtime.notify('error', {msg: errorMsg});
}
// Update editor with the XBlock's current content
function updateEditorFromXBlock(editor) {
$.ajax({
type: "POST",
url: runtime.handlerUrl(element, 'xml'),
data: "\"\"",
success: function(data) {
if (data.success) {
editor.setValue(data.xml);
}
else {
displayError(data.msg);
}
}
});
}
function initializeEditor() {
var textAreas = $(element).find('.openassessment-editor');
if (textAreas.length < 1) {
console.warn("Could not find element for OpenAssessmentBlock XML editor");
return null;
}
else {
return CodeMirror.fromTextArea(
textAreas[0], {mode: "xml", lineNumbers: true, lineWrapping: true}
);
}
}
function initializeSaveButton(editor) {
        var saveButtons = $(element).find('.openassessment-save-button');
if (saveButtons.length < 1) {
console.warn("Could not find element for OpenAssessmentBlock save button");
}
else {
saveButtons.click(function (eventObject) {
// Notify the client-side runtime that we are starting
// to save so it can show the "Saving..." notification
runtime.notify('save', {state: 'start'});
// POST the updated description to the XBlock
// The server-side code is responsible for validating and persisting
// the updated content.
$.ajax({
type: "POST",
url: runtime.handlerUrl(element, 'update_xml'),
data: JSON.stringify({ xml: editor.getValue() }),
success: function(data) {
// Notify the client-side runtime that we finished saving
// so it can hide the "Saving..." notification.
if (data.success) {
runtime.notify('save', {state: 'end'});
}
// Display an error alert if any errors occurred
else {
displayError(data.msg);
}
}
});
});
}
}
function initializeCancelButton(editor) {
        var cancelButtons = $(element).find('.openassessment-cancel-button');
if (cancelButtons.length < 1) {
console.warn("Could not find element for OpenAssessmentBlock cancel button");
}
else {
cancelButtons.click(function (eventObject) {
// Revert to the XBlock's current content
updateEditorFromXBlock(editor);
// Notify the client-side runtime so it will close the editing modal.
runtime.notify('cancel', {});
});
}
}
$(function ($) {
        var editor = initializeEditor();
if (editor) {
updateEditorFromXBlock(editor);
initializeSaveButton(editor);
initializeCancelButton(editor);
}
});
}
"""
Studio editing view for OpenAssessment XBlock.
"""
import pkg_resources
import logging
import dateutil.parser
from django.template.context import Context
from django.template.loader import get_template
from django.utils.translation import ugettext as _
from xblock.core import XBlock
from xblock.fragment import Fragment
from openassessment.xblock.xml import (
serialize_content, update_from_xml,
UpdateFromXmlError, InvalidRubricError
)
from openassessment.peer.serializers import (
rubric_from_dict, AssessmentSerializer, InvalidRubric
)
logger = logging.getLogger(__name__)
class StudioMixin(object):
"""
Studio editing view for OpenAssessment XBlock.
"""
def studio_view(self, context=None):
"""
Render the OpenAssessment XBlock for editing in Studio.
Args:
context: Not actively used for this view.
Returns:
(Fragment): An HTML fragment for editing the configuration of this XBlock.
"""
rendered_template = get_template('openassessmentblock/oa_edit.html').render(Context({}))
frag = Fragment(rendered_template)
frag.add_javascript(pkg_resources.resource_string(__name__, "static/js/src/oa_edit.js"))
frag.initialize_js('OpenAssessmentBlock')
return frag
@XBlock.json_handler
def update_xml(self, data, suffix=''):
"""
Update the XBlock's XML.
Args:
data (dict): Data from the request; should have a value for the key 'xml'
containing the XML for this XBlock.
Kwargs:
suffix (str): Not used
Returns:
dict with keys 'success' (bool) and 'msg' (str)
"""
if 'xml' in data:
try:
update_from_xml(
self, data['xml'],
rubric_validator=self._validate_rubric,
assessment_validator=self._validate_assessment
)
except InvalidRubricError:
return {'success': False, 'msg': _('Rubric definition was not valid.')}
except UpdateFromXmlError as ex:
return {'success': False, 'msg': _('An error occurred while saving: {error}').format(error=ex.message)}
else:
return {'success': True, 'msg': _('Successfully updated OpenAssessment XBlock')}
else:
return {'success': False, 'msg': _('Must specify "xml" in request JSON dict.')}
@XBlock.json_handler
def xml(self, data, suffix=''):
"""
Retrieve the XBlock's content definition, serialized as XML.
Args:
data (dict): Not used
Kwargs:
suffix (str): Not used
Returns:
            dict with keys 'success' (bool), 'msg' (unicode), and 'xml' (unicode)
"""
try:
xml = serialize_content(self)
# We do not expect `serialize_content` to raise an exception,
# but if it does, handle it gracefully.
except Exception as ex:
msg = _('An unexpected error occurred while loading the problem: {error}').format(error=ex.message)
logger.error(msg)
return {'success': False, 'msg': msg, 'xml': u''}
else:
return {'success': True, 'msg': '', 'xml': xml}
def _validate_rubric(self, rubric_dict):
"""
Check that the rubric is semantically valid.
Args:
rubric_dict (dict): Serialized Rubric model from the peer grading app.
        Returns:
            tuple of (is_valid, msg): a boolean indicating whether the rubric
            is semantically valid, and an error message (empty if valid).
"""
try:
rubric_from_dict(rubric_dict)
except InvalidRubric as ex:
return (False, ex.message)
else:
return (True, u'')
def _validate_assessment(self, assessment_dict):
"""
Check that the assessment is semantically valid.
Args:
            assessment_dict (dict): Serialized Assessment model from the peer grading app.
Returns:
            tuple of (is_valid, msg): a boolean indicating whether the assessment
            is semantically valid, and an error message (empty if valid).
"""
        # Check that the assessment type is supported
        if assessment_dict.get('name') not in ['peer-assessment', 'self-assessment']:
            return (False, _("Assessment type is not supported"))

        # The number of peers a student must grade must be at least
        # the number of peers that must grade each submission.
        if assessment_dict.get('must_grade') < assessment_dict.get('must_be_graded_by'):
            return (False, _('"must_grade" must be greater than or equal to "must_be_graded_by"'))
# Due date is after start date, if both are specified.
start_datetime = assessment_dict.get('start_datetime')
due_datetime = assessment_dict.get('due_datetime')
if start_datetime is not None and due_datetime is not None:
            start = dateutil.parser.parse(start_datetime)
            due = dateutil.parser.parse(due_datetime)
if start > due:
return (False, _('Due date must be after start date'))
return (True, u'')
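
For illustration only (not part of the commit), a minimal sketch of the (is_valid, msg) contract these validators hand to update_from_xml. FakeBlock and the import path are assumptions, and the direct calls work only because _validate_assessment reads nothing from the XBlock instance:

# Hedged sketch: FakeBlock is a hypothetical stand-in and the import path
# is assumed; _validate_assessment uses no XBlock state, so it can be
# exercised without a real runtime.
from django.conf import settings
settings.configure()  # minimal Django setup so ugettext works standalone

from openassessment.xblock.studio_mixin import StudioMixin

class FakeBlock(StudioMixin):
    pass

block = FakeBlock()

# Supported type, must_grade >= must_be_graded_by, start <= due: valid.
print(block._validate_assessment({
    'name': 'peer-assessment',
    'must_grade': 5,
    'must_be_graded_by': 3,
    'start_datetime': '2014-02-27T09:46:28',
    'due_datetime': '2014-03-01T00:00:00',
}))  # -> (True, u'')

# A student would be graded by more peers than they must grade: invalid.
print(block._validate_assessment({
    'name': 'peer-assessment',
    'must_grade': 2,
    'must_be_graded_by': 3,
}))  # -> (False, u'"must_grade" must be greater than or equal to "must_be_graded_by"')
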
@@ -156,7 +156,6 @@ class SubmissionMixin(object):
assessment_ui_model["must_be_graded_by"]
)
context["peer_assessments"] = assessments
context["rubric_instructions"] = self.rubric_instructions
context["rubric_criteria"] = self.rubric_criteria
for criterion in context["rubric_criteria"]:
criterion["median_score"] = median_scores[criterion["name"]]
......
{
"simple": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"<assessment name=\"self-assessment\" start=\"2014-04-01T00:00:00\" due=\"2014-06-01T00:00:00\" must_grade=\"2\" must_be_graded_by=\"1\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
],
"title": "Foo",
"prompt": "Test prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
]
}
],
"assessments": [
{
"name": "peer-assessment",
"start_datetime": "2014-02-27T09:46:28",
"due_datetime": "2014-03-01T00:00:00",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"start_datetime": "2014-04-01T00:00:00",
"due_datetime": "2014-06-01T00:00:00",
"must_grade": 2,
"must_be_graded_by": 1
}
]
},
"unicode": {
"xml": [
"<openassessmentblock>",
"<title>िѻѻ</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"</assessments>",
"<rubric>",
"<prompt>ՇєรՇ קг๏๓קՇ</prompt>",
"<criterion>",
"<name>𝓣𝓮𝓼𝓽 𝓬𝓻𝓲𝓽𝓮𝓻𝓲𝓸𝓷</name>",
"<prompt>Ŧɇsŧ ȼɍɨŧɇɍɨøn ꝑɍømꝑŧ</prompt>",
"<option points=\"0\"><name>𝕹𝖔</name><explanation>𝕹𝖔 𝖊𝖝𝖕𝖑𝖆𝖓𝖆𝖙𝖎𝖔𝖓</explanation></option>",
"<option points=\"2\"><name>ﻉร</name><explanation>ﻉร ﻉซρɭคกคՇٱѻก</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
],
"title": "िѻѻ",
"prompt": "ՇєรՇ קг๏๓קՇ",
"criteria": [
{
"order_num": 0,
"name": "𝓣𝓮𝓼𝓽 𝓬𝓻𝓲𝓽𝓮𝓻𝓲𝓸𝓷",
"prompt": "Ŧɇsŧ ȼɍɨŧɇɍɨøn ꝑɍømꝑŧ",
"options": [
{
"order_num": 0,
"points": 0,
"name": "𝕹𝖔",
"explanation": "𝕹𝖔 𝖊𝖝𝖕𝖑𝖆𝖓𝖆𝖙𝖎𝖔𝖓"
},
{
"order_num": 1,
"points": 2,
"name": "ﻉร",
"explanation": "ﻉร ﻉซρɭคกคՇٱѻก"
}
]
}
],
"assessments": [
{
"name": "peer-assessment",
"start_datetime": "2014-02-27T09:46:28",
"due_datetime": "2014-03-01T00:00:00",
"must_grade": 5,
"must_be_graded_by": 3
}
]
},
"multiple_criteria": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"<criterion>",
"<name>Second criterion</name>",
"<prompt>Second criterion prompt</prompt>",
"<option points=\"1\"><name>Maybe</name><explanation>Maybe explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
],
"title": "Foo",
"prompt": "Test prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
]
},
{
"order_num": 1,
"name": "Second criterion",
"prompt": "Second criterion prompt",
"options": [
{
"order_num": 0,
"points": 1,
"name": "Maybe",
"explanation": "Maybe explanation"
}
]
}
],
"assessments": [
{
"name": "peer-assessment",
"start_datetime": "2014-02-27T09:46:28",
"due_datetime": "2014-03-01T00:00:00",
"must_grade": 5,
"must_be_graded_by": 3
}
]
},
"no_dates_specified": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
],
"title": "Foo",
"prompt": "Test prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
]
}
],
"assessments": [
{
"name": "peer-assessment",
"start_datetime": null,
"due_datetime": null,
"must_grade": 5,
"must_be_graded_by": 3
}
]
}
}
{
"empty_string": {"xml": [""]},
"invalid_syntax": {"xml": ["<openassessmentblock><div>no closing tag</openassessmentblock>"]},
"missing_root": {"xml": "<div>Incorrect</div>"},
"missing_assessment_name": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
]
},
"missing_title": {
"xml": [
"<openassessmentblock>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
]
},
"missing_rubric": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"</assessments>",
"</openassessmentblock>"
]
},
"missing_assessments": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
]
},
"non_numeric_points": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"non-numeric\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
]
},
"non_numeric_must_grade": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"non-numeric\" must_be_graded_by=\"3\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"5\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
]
},
"non_numeric_must_be_graded_by": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"2\" must_be_graded_by=\"non-numeric\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"5\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
]
},
"invalid_start_date": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"non-date\" due=\"2014-03-01T00:00:00\" must_grade=\"2\" must_be_graded_by=\"5\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"5\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
]
},
"invalid_due_date": {
"xml": [
"<openassessmentblock>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-03-01T00:00:00\" due=\"non-date\" must_grade=\"2\" must_be_graded_by=\"5\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"5\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessmentblock>"
]
}
}
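
These two fixture files pair each scenario name with its XML (stored as a list of lines) and, for the valid scenarios, the expected title, prompt, criteria, and assessments. They look designed for ddt's file_data decorator, which would explain the ddt upgrade to 0.7.0 below; a hedged sketch, with the test class and fixture filenames assumed:

# Hedged sketch: the test class and fixture filenames are assumptions.
# With ddt 0.7, @file_data passes each top-level JSON value to the test
# method as a single argument.
from ddt import ddt, file_data
from django.test import TestCase

@ddt
class UpdateFromXmlTest(TestCase):

    @file_data('data/update_from_xml.json')
    def test_update_from_xml(self, data):
        xml = "".join(data['xml'])  # the fixture stores XML as a list of lines
        self.assertTrue(xml.startswith("<openassessmentblock>"))
        # A full test would call update_from_xml(block, xml, ...) and compare
        # the block's title, prompt, and rubric_criteria against
        # data['title'], data['prompt'], and data['criteria'].

    @file_data('data/invalid_update_xml.json')
    def test_invalid_update_xml(self, data):
        xml = "".join(data['xml'])
        # A full test would assert that update_from_xml(block, xml, ...)
        # raises UpdateFromXmlError and leaves the block unmodified.
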
@@ -53,7 +53,7 @@ RUBRIC_CONFIG = """
due="2014-12-21T22:22"
must_grade="5"
must_be_graded_by="3" />
-            <self-assessment/>
+            <self-assessment name="self-assessment"/>
</assessments>
</openassessment>
"""
......
@@ -43,10 +43,9 @@ class TestScenarioParser(TestCase):
cit=criterion_prompt_text,
coet=criterion_option_explain_text)
rubric_xml = etree.fromstring(rubric_text)
-        rubric_prompt, rubric_criteria = self.test_parser.get_rubric(rubric_xml)
+        rubric_criteria = self.test_parser.get_rubric(rubric_xml)
# Basic shape of the rubric: prompt and criteria
-        self.assertEqual(rubric_prompt, rubric_prompt_text)
self.assertEqual(len(rubric_criteria), 1)
# Look inside the criterion to make sure it's shaped correctly
......
@@ -3,6 +3,7 @@ git+https://github.com/edx/XBlock.git@923978c5#egg=XBlock
git+https://github.com/ormsbee/xblock-sdk.git@295678ff#egg=xblock-sdk
# Third Party Requirements
+defusedxml==0.4.1
django==1.4.8
django-extensions==1.2.5
djangorestframework==2.3.5
......
# Grab everything in base requirements
-r base.txt
-ddt==0.4.0
+ddt==0.7.0
django-nose==1.2
mock==1.0.1
nose==1.3.0
......