Commit f9bdc85d by Stephen Sanchez

Merge pull request #289 from edx/sanchez/TIM-485

Renaming feedback to comments for a peer assessment.
parents 280d7478 73df4fa7
......@@ -104,7 +104,7 @@
{% endfor %}
<li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback">
<label class="question__title" for="assessment__rubric__question--feedback__value">
<span class="question__title__copy">{{ rubric_feedback_prompt }}</span>
<span class="question__title__copy">{{ rubric_comment_prompt }}</span>
</label>
<div class="wrapper--input">
......
......@@ -87,7 +87,7 @@
<li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback">
<label class="question__title" for="assessment__rubric__question--feedback__value">
<span class="question__title__copy">{{ rubric_feedback_prompt }}</span>
<span class="question__title__copy">{{ rubric_comment_prompt }}</span>
</label>
<div class="wrapper--input">
<textarea id="assessment__rubric__question--feedback__value" placeholder="{% trans "I noticed that this response..." %}"></textarea>
......
......@@ -59,7 +59,7 @@ DEFAULT_RUBRIC_CRITERIA = [
# The rubric's feedback prompt is a set of instructions letting the student
# know they can provide additional free form feedback in their assessment.
DEFAULT_RUBRIC_FEEDBACK_PROMPT = """
DEFAULT_RUBRIC_COMMENT_PROMPT = """
(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?
"""
......
......@@ -105,10 +105,10 @@ class OpenAssessmentBlock(
help="The different parts of grading for students giving feedback."
)
rubric_feedback_prompt = String(
default=DEFAULT_RUBRIC_FEEDBACK_PROMPT,
rubric_comment_prompt = String(
default=DEFAULT_RUBRIC_COMMENT_PROMPT,
scope=Scope.content,
help="The rubric feedback prompt displayed to the student"
help="The rubric comment prompt displayed to the student"
)
rubric_assessments = List(
......
......@@ -142,8 +142,8 @@ class PeerAssessmentMixin(object):
"estimated_time": "20 minutes" # TODO: Need to configure this.
}
if self.rubric_feedback_prompt is not None:
context_dict["rubric_feedback_prompt"] = self.rubric_feedback_prompt
if self.rubric_comment_prompt is not None:
context_dict["rubric_comment_prompt"] = self.rubric_comment_prompt
# We display the due date whether the problem is open or closed.
# If no date is set, it defaults to the distant future, in which
......
......@@ -57,11 +57,11 @@
<explanation></explanation>
</option>
</criterion>
<feedbackprompt>
Please provide additional free-form feedback on this submission.
<commentprompt>
Please provide additional free-form comments on this submission.
This feedback is completely optional.
</feedbackprompt>
These comments are completely optional.
</commentprompt>
</rubric>
<assessments>
<assessment name="peer-assessment"
......
......@@ -2,7 +2,7 @@
"simple": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": "Test Feedback Prompt",
"rubric_comment_prompt": "Test Comment Prompt",
"start": null,
"due": null,
"submission_start": null,
......@@ -57,7 +57,7 @@
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
"<commentprompt>Test Comment Prompt</commentprompt>",
"</rubric>",
"</openassessment>"
]
......@@ -66,7 +66,7 @@
"unicode": {
"title": "ƒσσ",
"prompt": "Ṫëṡẗ ṗṛöṁṗẗ",
"rubric_feedback_prompt": "†es† Feedbåck Prømp†",
"rubric_comment_prompt": "†es† çømmëñ† Prømp†",
"start": null,
"due": null,
"submission_start": null,
......@@ -115,16 +115,16 @@
"<option points=\"0\"><name>ℕ𝕠</name><explanation>ℕ𝕠 𝕖𝕩𝕡𝕝𝕒𝕟𝕒𝕥𝕚𝕠𝕟</explanation></option>",
"<option points=\"2\"><name>Чэѕ</name><explanation>Чэѕ эхрlаиатіои</explanation></option>",
"</criterion>",
"<feedbackprompt>†es† Feedbåck Prømp†</feedbackprompt>",
"<commentprompt>†es† çømmëñ† Prømp†</commentprompt>",
"</rubric>",
"</openassessment>"
]
},
"empty_feedback_prompt": {
"empty_comment_prompt": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": "",
"rubric_comment_prompt": "",
"start": null,
"due": null,
"submission_start": null,
......@@ -179,16 +179,16 @@
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"<feedbackprompt></feedbackprompt>",
"<commentprompt></commentprompt>",
"</rubric>",
"</openassessment>"
]
},
"no_feedback_prompt": {
"no_comment_prompt": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": null,
"rubric_comment_prompt": null,
"start": null,
"due": null,
"submission_start": null,
......@@ -251,7 +251,7 @@
"reverse_option_order": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": "Test Feedback Prompt",
"rubric_comment_prompt": "Test Comment Prompt",
"start": null,
"due": null,
"submission_start": null,
......@@ -300,7 +300,7 @@
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"</criterion>",
"<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
"<commentprompt>Test Comment Prompt</commentprompt>",
"</rubric>",
"</openassessment>"
]
......@@ -309,7 +309,7 @@
"reverse_criteria_order": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": "Test Feedback Prompt",
"rubric_comment_prompt": "Test Comment Prompt",
"start": null,
"due": null,
"submission_start": null,
......@@ -376,7 +376,7 @@
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"</criterion>",
"<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
"<commentprompt>Test Comment Prompt</commentprompt>",
"</rubric>",
"</openassessment>"
]
......@@ -385,7 +385,7 @@
"default_dates": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": "Test Feedback Prompt",
"rubric_comment_prompt": "Test Comment Prompt",
"start": null,
"due": null,
"submission_start": null,
......@@ -442,7 +442,7 @@
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
"<commentprompt>Test Comment Prompt</commentprompt>",
"</rubric>",
"</openassessment>"
]
......@@ -451,7 +451,7 @@
"set_dates": {
"title": "Foo",
"prompt": "Test prompt",
"rubric_feedback_prompt": "Test Feedback Prompt",
"rubric_comment_prompt": "Test Comment Prompt",
"start": "2010-04-01T00:00:00",
"due": "2030-05-01T00:00:00",
"submission_start": null,
......@@ -508,7 +508,7 @@
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"<feedbackprompt>Test Feedback Prompt</feedbackprompt>",
"<commentprompt>Test Comment Prompt</commentprompt>",
"</rubric>",
"</openassessment>"
]
......
......@@ -80,7 +80,7 @@ class TestSerializeContent(TestCase):
def test_serialize(self, data):
self.oa_block.title = data['title']
self.oa_block.prompt = data['prompt']
self.oa_block.rubric_feedback_prompt = data['rubric_feedback_prompt']
self.oa_block.rubric_comment_prompt = data['rubric_comment_prompt']
self.oa_block.start = _parse_date(data['start'])
self.oa_block.due = _parse_date(data['due'])
self.oa_block.submission_start = data['submission_start']
......
......@@ -146,9 +146,9 @@ def _serialize_rubric(rubric_root, oa_block):
if isinstance(criteria_list, list):
_serialize_criteria(rubric_root, criteria_list)
if oa_block.rubric_feedback_prompt is not None:
feedback_prompt = etree.SubElement(rubric_root, 'feedbackprompt')
feedback_prompt.text = unicode(oa_block.rubric_feedback_prompt)
if oa_block.rubric_comment_prompt is not None:
comment_prompt = etree.SubElement(rubric_root, 'commentprompt')
comment_prompt.text = unicode(oa_block.rubric_comment_prompt)
def _parse_date(date_str):
"""
......@@ -295,11 +295,11 @@ def _parse_rubric_xml(rubric_root):
else:
raise UpdateFromXmlError(_('Every "criterion" element must contain a "prompt" element.'))
feedback_prompt_el = rubric_root.find('feedbackprompt')
if feedback_prompt_el is not None:
rubric_dict['feedbackprompt'] = _safe_get_text(feedback_prompt_el)
comment_prompt_el = rubric_root.find('commentprompt')
if comment_prompt_el is not None:
rubric_dict['commentprompt'] = _safe_get_text(comment_prompt_el)
else:
rubric_dict['feedbackprompt'] = None
rubric_dict['commentprompt'] = None
# Criteria
rubric_dict['criteria'] = _parse_criteria_xml(rubric_root)
......@@ -525,7 +525,7 @@ def update_from_xml(oa_block, root, validator=DEFAULT_VALIDATOR):
oa_block.prompt = rubric['prompt']
oa_block.rubric_criteria = rubric['criteria']
oa_block.rubric_assessments = assessments
oa_block.rubric_feedback_prompt = rubric['feedbackprompt']
oa_block.rubric_comment_prompt = rubric['commentprompt']
oa_block.submission_start = submission_start
oa_block.submission_due = submission_due
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment