Commit 3acf5609 by Stephen Sanchez

Allow modification of the peer feedback prompt

parent cef7c430
@@ -23,7 +23,6 @@ from openassessment.assessment.serializers import (
    full_assessment_dict, rubric_from_dict, serialize_assessments,
)
from submissions import api as sub_api
from submissions.api import get_submission_and_student

logger = logging.getLogger("openassessment.assessment.peer_api")
......
@@ -102,10 +102,9 @@
        </div>
    </li>
    {% endfor %}
    <li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback">
        <label class="question__title" for="assessment__rubric__question--feedback__value">
            <span class="question__title__copy">{% trans "(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?" %}</span>
            <span class="question__title__copy">{{ rubric_feedback_prompt }}</span>
        </label>
        <div class="wrapper--input">
......
@@ -87,10 +87,10 @@
    <li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback">
        <label class="question__title" for="assessment__rubric__question--feedback__value">
            <span class="question__title__copy">{% trans "(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?" %}</span>
            <span class="question__title__copy">{{ rubric_feedback_prompt }}</span>
        </label>
        <div class="wrapper--input">
            <textarea id="assessment__rubric__question--feedback__value" placeholder="{% trans "I felt this response was..." %}"></textarea>
            <textarea id="assessment__rubric__question--feedback__value" placeholder="{% trans "I noticed that this response..." %}"></textarea>
        </div>
    </li>
</ol>
......
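Both template hunks above replace the hard-coded {% trans %} copy with the new rubric_feedback_prompt context variable. A minimal sketch of that substitution using Django's template engine directly (the prompt text is illustrative, and this is not the block's actual rendering path):

    import django
    from django.conf import settings
    from django.template import Context, Template

    # One-off engine setup so Template() can be used outside a Django project.
    if not settings.configured:
        settings.configure(TEMPLATES=[{
            "BACKEND": "django.template.backends.django.DjangoTemplates",
        }])
        django.setup()

    tpl = Template('<span class="question__title__copy">{{ rubric_feedback_prompt }}</span>')
    # A missing context key renders as an empty string, so a block without a
    # prompt shows an empty span rather than the literal text "None".
    print(tpl.render(Context({"rubric_feedback_prompt": "How could this response improve?"})))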
@@ -57,6 +57,12 @@ DEFAULT_RUBRIC_CRITERIA = [
    },
]

# The rubric's feedback prompt is a set of instructions letting the student
# know they can provide additional free-form feedback in their assessment.
DEFAULT_RUBRIC_FEEDBACK_PROMPT = """
(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?
"""

# The Default Peer Assessment is created as an example of how this XBlock can be
# configured. If no configuration is specified, this is the default assessment
......
@@ -105,6 +105,12 @@ class OpenAssessmentBlock(
        help="The different parts of grading for students giving feedback."
    )

    rubric_feedback_prompt = String(
        default=DEFAULT_RUBRIC_FEEDBACK_PROMPT,
        scope=Scope.content,
        help="The rubric feedback prompt displayed to the student"
    )

    rubric_assessments = List(
        default=DEFAULT_ASSESSMENT_MODULES,
        scope=Scope.content,
......
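The new field follows the pattern of the block's other content-scoped fields. A standalone sketch (assuming the xblock package's usual field API; the demo class name is hypothetical) showing where the default value comes from:

    from xblock.core import XBlock
    from xblock.fields import Scope, String

    DEFAULT_RUBRIC_FEEDBACK_PROMPT = (
        "(Optional) What aspects of this response stood out to you? "
        "What did it do well? How could it improve?"
    )

    class FeedbackPromptDemoBlock(XBlock):
        # Mirrors the field added above: course authors may override the
        # prompt per block, otherwise the default text is served.
        rubric_feedback_prompt = String(
            default=DEFAULT_RUBRIC_FEEDBACK_PROMPT,
            scope=Scope.content,
            help="The rubric feedback prompt displayed to the student",
        )

    # Fields are collected on the class, so the default is inspectable
    # without a full XBlock runtime:
    print(FeedbackPromptDemoBlock.fields["rubric_feedback_prompt"].default)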
@@ -142,6 +142,9 @@ class PeerAssessmentMixin(object):
            "estimated_time": "20 minutes"  # TODO: Need to configure this.
        }

        if self.rubric_feedback_prompt is not None:
            context_dict["rubric_feedback_prompt"] = self.rubric_feedback_prompt

        # We display the due date whether the problem is open or closed.
        # If no date is set, it defaults to the distant future, in which
        # case we don't display the date.
......
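A hedged sketch of the guard above, using a hypothetical stand-in for the real block, to show why the key is only added when a prompt exists:

    class FakeBlock(object):
        # Hypothetical stand-in; rubric_feedback_prompt can be None, for
        # example when an imported <rubric> carries no <feedbackprompt>.
        rubric_feedback_prompt = None

    def build_peer_context(block):
        context_dict = {"estimated_time": "20 minutes"}
        # Same guard as the mixin: omit the key entirely rather than pass
        # None, so the template renders an empty prompt.
        if block.rubric_feedback_prompt is not None:
            context_dict["rubric_feedback_prompt"] = block.rubric_feedback_prompt
        return context_dict

    assert "rubric_feedback_prompt" not in build_peer_context(FakeBlock())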
@@ -57,6 +57,11 @@
                <explanation></explanation>
            </option>
        </criterion>
        <feedbackprompt>
            Please provide additional free-form feedback on this submission.
            This feedback is completely optional.
        </feedbackprompt>
    </rubric>
    <assessments>
        <assessment name="peer-assessment"
......
@@ -2,6 +2,7 @@
    "simple": {
        "title": "Foo",
        "prompt": "Test prompt",
        "rubric_feedback_prompt": "Test Feedback Prompt",
        "start": null,
        "due": null,
        "submission_start": null,
@@ -56,6 +57,7 @@
            "<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
            "<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
            "</criterion>",
            "<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
            "</rubric>",
            "</openassessment>"
        ]
@@ -64,6 +66,7 @@
    "unicode": {
        "title": "ƒσσ",
        "prompt": "Ṫëṡẗ ṗṛöṁṗẗ",
        "rubric_feedback_prompt": "†es† Feedbåck Prømp†",
        "start": null,
        "due": null,
        "submission_start": null,
@@ -112,14 +115,143 @@
            "<option points=\"0\"><name>ℕ𝕠</name><explanation>ℕ𝕠 𝕖𝕩𝕡𝕝𝕒𝕟𝕒𝕥𝕚𝕠𝕟</explanation></option>",
            "<option points=\"2\"><name>Чэѕ</name><explanation>Чэѕ эхрlаиатіои</explanation></option>",
            "</criterion>",
            "<feedbackprompt>†es† Feedbåck Prømp†</feedbackprompt>",
            "</rubric>",
            "</openassessment>"
        ]
    },
"empty_feedback_prompt": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": "",
"start": null,
"due": null,
"submission_start": null,
"submission_due": null,
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
]
}
],
"assessments": [
{
"name": "peer-assessment",
"start": "2014-02-27T09:46:28",
"due": "2014-03-01T00:00:00",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"start": "2014-04-01T00:00:00",
"due": "2014-06-01T00:00:00"
}
],
"expected_xml": [
"<openassessment>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"<assessment name=\"self-assessment\" start=\"2014-04-01T00:00:00\" due=\"2014-06-01T00:00:00\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"<feedbackprompt></feedbackprompt>",
"</rubric>",
"</openassessment>"
]
},
"no_feedback_prompt": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": null,
"start": null,
"due": null,
"submission_start": null,
"submission_due": null,
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
]
}
],
"assessments": [
{
"name": "peer-assessment",
"start": "2014-02-27T09:46:28",
"due": "2014-03-01T00:00:00",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"start": "2014-04-01T00:00:00",
"due": "2014-06-01T00:00:00"
}
],
"expected_xml": [
"<openassessment>",
"<title>Foo</title>",
"<assessments>",
"<assessment name=\"peer-assessment\" start=\"2014-02-27T09:46:28\" due=\"2014-03-01T00:00:00\" must_grade=\"5\" must_be_graded_by=\"3\" />",
"<assessment name=\"self-assessment\" start=\"2014-04-01T00:00:00\" due=\"2014-06-01T00:00:00\" />",
"</assessments>",
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>",
"</openassessment>"
]
},
"reverse_option_order": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": "Test Feedback Prompt",
"start": null,
"due": null,
"submission_start": null,
@@ -168,6 +300,7 @@
            "<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
            "<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
            "</criterion>",
            "<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
            "</rubric>",
            "</openassessment>"
        ]
@@ -176,6 +309,7 @@
    "reverse_criteria_order": {
        "title": "Foo",
        "prompt": "Test prompt",
        "rubric_feedback_prompt": "Test Feedback Prompt",
        "start": null,
        "due": null,
        "submission_start": null,
@@ -242,6 +376,7 @@
            "<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
            "<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
            "</criterion>",
            "<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
            "</rubric>",
            "</openassessment>"
        ]
@@ -250,6 +385,7 @@
    "default_dates": {
        "title": "Foo",
        "prompt": "Test prompt",
        "rubric_feedback_prompt": "Test Feedback Prompt",
        "start": null,
        "due": null,
        "submission_start": null,
@@ -306,6 +442,7 @@
            "<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
            "<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
            "</criterion>",
            "<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
            "</rubric>",
            "</openassessment>"
        ]
@@ -314,6 +451,7 @@
    "set_dates": {
        "title": "Foo",
        "prompt": "Test prompt",
        "rubric_feedback_prompt": "Test Feedback Prompt",
        "start": "2010-04-01T00:00:00",
        "due": "2030-05-01T00:00:00",
        "submission_start": null,
@@ -370,6 +508,7 @@
            "<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
            "<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
            "</criterion>",
            "<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
            "</rubric>",
            "</openassessment>"
        ]
......
@@ -80,6 +80,7 @@ class TestSerializeContent(TestCase):
    def test_serialize(self, data):
        self.oa_block.title = data['title']
        self.oa_block.prompt = data['prompt']
        self.oa_block.rubric_feedback_prompt = data['rubric_feedback_prompt']
        self.oa_block.start = _parse_date(data['start'])
        self.oa_block.due = _parse_date(data['due'])
        self.oa_block.submission_start = data['submission_start']
......
@@ -146,6 +146,9 @@ def _serialize_rubric(rubric_root, oa_block):
    if isinstance(criteria_list, list):
        _serialize_criteria(rubric_root, criteria_list)

    if oa_block.rubric_feedback_prompt is not None:
        feedback_prompt = etree.SubElement(rubric_root, 'feedbackprompt')
        feedback_prompt.text = unicode(oa_block.rubric_feedback_prompt)


def _parse_date(date_str):
    """
@@ -156,8 +159,8 @@ def _parse_date(date_str):
        date_str (str): The date string to parse.

    Returns:
        unicode in ISO format (without milliseconds) if the date string is parseable
        None if parsing fails.
        unicode in ISO format (without milliseconds) if the date string is
        parseable. None if parsing fails.
    """
    try:
        # Get the date into ISO format
@@ -292,6 +295,12 @@ def _parse_rubric_xml(rubric_root):
    else:
        raise UpdateFromXmlError(_('Every "criterion" element must contain a "prompt" element.'))

    feedback_prompt_el = rubric_root.find('feedbackprompt')
    if feedback_prompt_el is not None:
        rubric_dict['feedbackprompt'] = _safe_get_text(feedback_prompt_el)
    else:
        rubric_dict['feedbackprompt'] = None

    # Criteria
    rubric_dict['criteria'] = _parse_criteria_xml(rubric_root)
@@ -516,6 +525,7 @@ def update_from_xml(oa_block, root, validator=DEFAULT_VALIDATOR):
    oa_block.prompt = rubric['prompt']
    oa_block.rubric_criteria = rubric['criteria']
    oa_block.rubric_assessments = assessments
    oa_block.rubric_feedback_prompt = rubric['feedbackprompt']
    oa_block.submission_start = submission_start
    oa_block.submission_due = submission_due
......
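The serializer and parser changes are symmetric. A self-contained sketch of the round trip with lxml (Python 3 style, so the diff's unicode() call is dropped; the helper names are illustrative, not the module's API):

    from lxml import etree

    def serialize_feedback_prompt(rubric_root, prompt):
        # Mirrors _serialize_rubric: only emit the element when a prompt is set.
        if prompt is not None:
            etree.SubElement(rubric_root, "feedbackprompt").text = prompt

    def parse_feedback_prompt(rubric_root):
        # Mirrors _parse_rubric_xml: a missing element maps to None, while an
        # empty element maps to "", matching the empty_feedback_prompt fixture.
        el = rubric_root.find("feedbackprompt")
        if el is None:
            return None
        return el.text or ""

    root = etree.Element("rubric")
    serialize_feedback_prompt(root, "Test Feedback Prompt")
    assert parse_feedback_prompt(root) == "Test Feedback Prompt"
    assert parse_feedback_prompt(etree.Element("rubric")) is None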