Commit 75ff564d by njdup

Instructors can edit default response feedback text

Previously, instructors were unable to edit the default feedback text ("I noticed that
this response...") shown in the text box where a student provides overall feedback on
the peer response they have just assessed.

To make this text editable, I've added an XML element (feedback_default_text). The text
within the element defaults to "I noticed that this response...", but an instructor can
change it to whatever they want by editing the text inside the element.

Support for editing this new element has also been added to the authoring GUI.
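
For reference, the new element lives inside the <rubric> definition in the problem XML. A minimal sketch, based on the example course XML updated in this change (criteria omitted for brevity; the custom text shown below is only an illustration, any text the instructor enters is used as-is):

    <openassessment>
        <rubric>
            ...
            <feedbackprompt>
                (Optional) What aspects of this response stood out to you? What did it do well? How could it improve?
            </feedbackprompt>
            <feedback_default_text>
                Describe one strength and one weakness you noticed in this response.
            </feedback_default_text>
        </rubric>
    </openassessment>

If the element is omitted, the block falls back to the built-in default text ("I noticed that this response...") for backwards compatibility.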
parent 5c6ee6b6
......@@ -41,8 +41,17 @@
{% trans "Encourage your students to provide feedback on the response they've graded. You can replace the sample text with your own." %}
</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting" id="openassessment_rubric_feedback_default_text_input_wrapper">
<label for="openassessment_rubric_feedback_default_text" class="setting-label"> {% trans "Default Feedback Text" %}</label>
<textarea id="openassessment_rubric_feedback_default_text" class="input setting-input">{{ feedback_default_text }}</textarea>
</div>
<p class="setting-help">
{% trans "Enter feedback text that students will see before they enter their own feedback. Use this text to show students a good example peer assessment." %}
</p>
</li>
</ul>
</div>
</div>
{% endspaceless %}
\ No newline at end of file
{% endspaceless %}
......@@ -63,7 +63,7 @@
<div class="wrapper--input">
<textarea
id="assessment__rubric__question--feedback__value"
placeholder="{% trans "I noticed that this response..." %}"
placeholder="{{ rubric_feedback_default_text }}"
maxlength="500"
>
</textarea>
......@@ -71,4 +71,4 @@
</li>
</ol>
</fieldset>
{% endspaceless %}
\ No newline at end of file
{% endspaceless %}
......@@ -127,7 +127,7 @@
<div class="wrapper--input">
<textarea
id="assessment__rubric__question--feedback__value"
placeholder="{% trans "I noticed that this response..." %}"
placeholder="{{ rubric_feedback_default_text }}"
maxlength="500"
>
</textarea>
......
......@@ -66,6 +66,12 @@ DEFAULT_RUBRIC_FEEDBACK_PROMPT = """
(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?
"""
# The rubric's feedback text is the default text displayed and used as
# the student's response to the feedback prompt
DEFAULT_RUBRIC_FEEDBACK_TEXT = """
I noticed that this response...
"""
DEFAULT_EXAMPLE_ANSWER = (
"Replace this text with your own sample response for this assignment. "
"Then, under Response Score to the right, select an option for each criterion. "
......
......@@ -152,6 +152,12 @@ class OpenAssessmentBlock(
help="The rubric feedback prompt displayed to the student"
)
rubric_feedback_default_text = String(
default=DEFAULT_RUBRIC_FEEDBACK_TEXT,
scope=Scope.content,
help="The default rubric feedback text displayed to the student"
)
rubric_assessments = List(
default=DEFAULT_ASSESSMENT_MODULES,
scope=Scope.content,
......@@ -386,6 +392,7 @@ class OpenAssessmentBlock(
block.rubric_criteria = config['rubric_criteria']
block.rubric_feedback_prompt = config['rubric_feedback_prompt']
block.rubric_feedback_default_text = config['rubric_feedback_default_text']
block.rubric_assessments = config['rubric_assessments']
block.submission_start = config['submission_start']
block.submission_due = config['submission_due']
......
......@@ -8,6 +8,7 @@ from openassessment.assessment.errors import (
PeerAssessmentRequestError, PeerAssessmentInternalError, PeerAssessmentWorkflowError
)
from openassessment.workflow.errors import AssessmentWorkflowError
from openassessment.xblock.defaults import DEFAULT_RUBRIC_FEEDBACK_TEXT
from .data_conversion import create_rubric_dict
from .resolve_dates import DISTANT_FUTURE
from .data_conversion import clean_criterion_feedback
......@@ -130,6 +131,12 @@ class PeerAssessmentMixin(object):
return Response(u"")
continue_grading = data.params.get('continue_grading', False)
path, context_dict = self.peer_path_and_context(continue_grading)
# For backwards compatibility, if no feedback default text has been
# set, use the default text
if 'rubric_feedback_default_text' not in context_dict:
context_dict['rubric_feedback_default_text'] = DEFAULT_RUBRIC_FEEDBACK_TEXT
return self.render_assessment(path, context_dict)
def peer_path_and_context(self, continue_grading):
......@@ -155,6 +162,9 @@ class PeerAssessmentMixin(object):
if self.rubric_feedback_prompt is not None:
context_dict["rubric_feedback_prompt"] = self.rubric_feedback_prompt
if self.rubric_feedback_default_text is not None:
context_dict['rubric_feedback_default_text'] = self.rubric_feedback_default_text
# We display the due date whether the problem is open or closed.
# If no date is set, it defaults to the distant future, in which
# case we don't display the date.
......
......@@ -69,6 +69,7 @@ EDITOR_UPDATE_SCHEMA = Schema({
Required('prompt'): utf8_validator,
Required('title'): utf8_validator,
Required('feedback_prompt'): utf8_validator,
Required('feedback_default_text'): utf8_validator,
Required('submission_start'): Any(datetime_validator, None),
Required('submission_due'): Any(datetime_validator, None),
Required('allow_file_upload'): bool,
......@@ -123,4 +124,4 @@ EDITOR_UPDATE_SCHEMA = Schema({
]
})
]
})
\ No newline at end of file
})
......@@ -32,6 +32,7 @@ describe("OpenAssessment.Server", function() {
var PROMPT = "Hello this is the prompt yes.";
var FEEDBACK_PROMPT = "Prompt for feedback";
var FEEDBACK_DEFAULT_TEXT = "Default feedback response text";
var RUBRIC = '<rubric>'+
'<criterion>'+
......@@ -234,6 +235,7 @@ describe("OpenAssessment.Server", function() {
server.updateEditorContext({
prompt: PROMPT,
feedbackPrompt: FEEDBACK_PROMPT,
feedback_default_text: FEEDBACK_DEFAULT_TEXT,
title: TITLE,
submissionStart: SUBMISSION_START,
submissionDue: SUBMISSION_DUE,
......@@ -248,6 +250,7 @@ describe("OpenAssessment.Server", function() {
data: JSON.stringify({
prompt: PROMPT,
feedback_prompt: FEEDBACK_PROMPT,
feedback_default_text: FEEDBACK_DEFAULT_TEXT,
title: TITLE,
submission_start: SUBMISSION_START,
submission_due: SUBMISSION_DUE,
......
......@@ -413,6 +413,7 @@ if (typeof OpenAssessment.Server == "undefined" || !OpenAssessment.Server) {
title (string): The title of the problem.
prompt (string): The question prompt.
feedbackPrompt (string): The directions to the student for giving overall feedback on a submission.
feedback_default_text (string): The default feedback text used as the student's feedback response.
submissionStart (ISO-formatted datetime string or null): The start date of the submission.
submissionDue (ISO-formatted datetime string or null): The date the submission is due.
criteria (list of object literals): The rubric criteria.
......@@ -430,6 +431,7 @@ if (typeof OpenAssessment.Server == "undefined" || !OpenAssessment.Server) {
var payload = JSON.stringify({
prompt: kwargs.prompt,
feedback_prompt: kwargs.feedbackPrompt,
feedback_default_text: kwargs.feedback_default_text,
title: kwargs.title,
submission_start: kwargs.submissionStart,
submission_due: kwargs.submissionDue,
......
......@@ -187,6 +187,7 @@ OpenAssessment.StudioView.prototype = {
this.server.updateEditorContext({
prompt: view.promptView.promptText(),
feedbackPrompt: view.rubricView.feedbackPrompt(),
feedback_default_text: view.rubricView.feedback_default_text(),
criteria: view.rubricView.criteriaDefinition(),
title: view.settingsView.displayName(),
submissionStart: view.settingsView.submissionStart(),
......
......@@ -93,6 +93,23 @@ OpenAssessment.EditRubricView.prototype = {
},
/**
Get or set the default feedback response text in the editor.
The text is used as a student's default response to the feedback
prompt.
Args:
text (string, optional): If provided, sets the default text to this value.
Returns:
string
**/
feedback_default_text: function(text) {
var sel = $("#openassessment_rubric_feedback_default_text", this.element);
return OpenAssessment.Fields.stringField(sel, text);
},
/**
Add a new criterion to the rubric.
Uses a client-side template to create the new criterion.
**/
......
......@@ -65,5 +65,8 @@
<feedbackprompt>
(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?
</feedbackprompt>
<feedback_default_text>
I noticed that this response...
</feedback_default_text>
</rubric>
</openassessment>
\ No newline at end of file
</openassessment>
......@@ -10,7 +10,7 @@ from voluptuous import MultipleInvalid
from xblock.core import XBlock
from xblock.fields import List, Scope
from xblock.fragment import Fragment
from openassessment.xblock.defaults import DEFAULT_EDITOR_ASSESSMENTS_ORDER
from openassessment.xblock.defaults import DEFAULT_EDITOR_ASSESSMENTS_ORDER, DEFAULT_RUBRIC_FEEDBACK_TEXT
from openassessment.xblock.validation import validator
from openassessment.xblock.data_conversion import create_rubric_dict, make_django_template_key
from openassessment.xblock.schema import EDITOR_UPDATE_SCHEMA
......@@ -105,6 +105,12 @@ class StudioMixin(object):
if not criteria:
criteria = self.DEFAULT_CRITERIA
# To maintain backwards compatibility, if there is no
# feedback_default_text configured for the xblock, use the default text
feedback_default_text = copy.deepcopy(self.rubric_feedback_default_text)
if not feedback_default_text:
feedback_default_text = DEFAULT_RUBRIC_FEEDBACK_TEXT
return {
'prompt': self.prompt,
'title': self.title,
......@@ -113,6 +119,7 @@ class StudioMixin(object):
'assessments': assessments,
'criteria': criteria,
'feedbackprompt': self.rubric_feedback_prompt,
'feedback_default_text': feedback_default_text,
'allow_file_upload': self.allow_file_upload,
'leaderboard_show': self.leaderboard_show,
'editor_assessments_order': [
......@@ -202,6 +209,7 @@ class StudioMixin(object):
self.rubric_assessments = data['assessments']
self.editor_assessments_order = data['editor_assessments_order']
self.rubric_feedback_prompt = data['feedback_prompt']
self.rubric_feedback_default_text = data['feedback_default_text']
self.submission_start = data['submission_start']
self.submission_due = data['submission_due']
self.allow_file_upload = bool(data['allow_file_upload'])
......
......@@ -3,6 +3,7 @@
"prompt": "My new prompt.",
"title": "My new title.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"allow_file_upload": false,
"leaderboard_show": 0,
"assessments": [
......@@ -26,6 +27,7 @@
"no_prompt": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"allow_file_upload": false,
"leaderboard_show": 0,
"criteria": [
......@@ -124,6 +126,7 @@
"no_submission_due": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -173,6 +176,7 @@
"invalid_dates": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -224,6 +228,7 @@
"invalid_dates_two": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -275,6 +280,7 @@
"order_num_is_string": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -318,6 +324,7 @@
"feedback_missing": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -360,6 +367,7 @@
"criterion_not_a_dict": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "prompty",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -381,6 +389,7 @@
"criteria_missing_keys": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -423,6 +432,7 @@
"options_not_a_list": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -451,6 +461,7 @@
"option_not_a_dictionary": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -488,6 +499,7 @@
"option_missing_keys": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -530,6 +542,7 @@
"option_points_must_be_int": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": false,
"leaderboard_show": 0,
......@@ -571,6 +584,7 @@
"allow_file_upload_must_be_boolean": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": 6,
"criteria": [
......@@ -611,6 +625,7 @@
"allow_file_upload_null": {
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"prompt": "Test Prompt",
"allow_file_upload": null,
"criteria": [
......@@ -679,6 +694,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -734,6 +750,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -787,6 +804,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -843,6 +861,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -899,6 +918,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -949,6 +969,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -1006,6 +1027,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -1055,6 +1077,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -1105,6 +1128,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -1134,6 +1158,7 @@
"no_criteria_name": {
"prompt": "Test Prompt",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"allow_file_upload": false,
"leaderboard_show": 0,
"criteria": [
......@@ -1183,6 +1208,7 @@
"no_option_name": {
"prompt": "Test Prompt",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"allow_file_upload": false,
"leaderboard_show": 0,
"criteria": [
......@@ -1232,6 +1258,7 @@
"criteria_names_not_unique": {
"prompt": "Test Prompt",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"allow_file_upload": false,
"leaderboard_show": 0,
"criteria": [
......@@ -1304,6 +1331,7 @@
"expected_error": "validation error: options",
"prompt": "Test Prompt",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"allow_file_upload": false,
"leaderboard_show": 0,
"criteria": [
......
......@@ -27,6 +27,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -77,6 +78,7 @@
],
"prompt": "Ṁÿ ṅëẅ ṗṛöṁṗẗ.",
"feedback_prompt": "ḟëëḋḅäċḳ ṗṛöṁṗẗ",
"feedback_default_text": "Ṫëṡẗ ḋëḟäüḷẗ ẗëẍẗ",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "ɯʎ uǝʍ ʇıʇןǝ",
......@@ -127,6 +129,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"title": "My new title.",
......@@ -189,6 +192,7 @@
],
"prompt": "My new prompt.",
"feedback_prompt": "Feedback prompt",
"feedback_default_text": "Feedback default text",
"submission_due": "4014-02-27T09:46",
"submission_start": "4014-02-10T09:46",
"allow_file_upload": false,
......
......@@ -19,6 +19,7 @@ class StudioViewTest(XBlockHandlerTestCase):
"title": "Test title",
"prompt": "Test prompt",
"feedback_prompt": "Test feedback prompt",
"feedback_default_text": "Test feedback default text",
"submission_start": "4014-02-10T09:46",
"submission_due": "4014-02-27T09:46",
"allow_file_upload": False,
......
......@@ -100,6 +100,7 @@ class TestSerializeContent(TestCase):
self.oa_block.title = data.get('title', '')
self.oa_block.prompt = data.get('prompt')
self.oa_block.rubric_feedback_prompt = data.get('rubric_feedback_prompt')
self.oa_block.rubric_feedback_default_text = data.get('rubric_feedback_default_text')
self.oa_block.start = _parse_date(data.get('start'))
self.oa_block.due = _parse_date(data.get('due'))
self.oa_block.submission_start = data.get('submission_start')
......
......@@ -6,6 +6,7 @@ import lxml.etree as etree
import pytz
import dateutil.parser
import defusedxml.ElementTree as safe_etree
from defaults import DEFAULT_RUBRIC_FEEDBACK_TEXT
class UpdateFromXmlError(Exception):
......@@ -173,6 +174,10 @@ def serialize_rubric(rubric_root, oa_block, include_prompt=True):
feedback_prompt = etree.SubElement(rubric_root, 'feedbackprompt')
feedback_prompt.text = unicode(oa_block.rubric_feedback_prompt)
if oa_block.rubric_feedback_default_text is not None:
feedback_text = etree.SubElement(rubric_root, 'feedback_default_text')
feedback_text.text = unicode(oa_block.rubric_feedback_default_text)
def parse_date(date_str, name=""):
"""
......@@ -376,6 +381,12 @@ def parse_rubric_xml(rubric_root):
else:
rubric_dict['feedbackprompt'] = None
feedback_text_el = rubric_root.find('feedback_default_text')
if feedback_text_el is not None:
rubric_dict['feedback_default_text'] = _safe_get_text(feedback_text_el)
else:
rubric_dict['feedback_default_text'] = None
# Criteria
rubric_dict['criteria'] = _parse_criteria_xml(rubric_root)
......@@ -770,6 +781,7 @@ def parse_from_xml(root):
'rubric_criteria': rubric['criteria'],
'rubric_assessments': assessments,
'rubric_feedback_prompt': rubric['feedbackprompt'],
'rubric_feedback_default_text': rubric['feedback_default_text'],
'submission_start': submission_start,
'submission_due': submission_due,
'allow_file_upload': allow_file_upload,
......