Commit 3ddd47ad by Will Daly

Merge pull request #602 from edx/authoring

Authoring!
parents 94888fc3 1bc17f9e
......@@ -8,4 +8,5 @@ Mark Hoeber <hoeber@edx.org>
Sylvia Pearce <spearce@edx.org>
Ned Batchelder <ned@nedbatchelder.com>
David Baumgold <david@davidbaumgold.com>
Grady Ward <gward@brandeis.edu>
Andrew Dekker <a.dekker@uq.edu.au>
......@@ -33,7 +33,8 @@ install-nltk-data:
STATIC_JS = openassessment/xblock/static/js
javascript:
node_modules/.bin/uglifyjs $(STATIC_JS)/src/oa_shared.js $(STATIC_JS)/src/*.js > "$(STATIC_JS)/openassessment.min.js"
node_modules/.bin/uglifyjs $(STATIC_JS)/src/oa_shared.js $(STATIC_JS)/src/*.js $(STATIC_JS)/src/lms/*.js > "$(STATIC_JS)/openassessment-lms.min.js"
node_modules/.bin/uglifyjs $(STATIC_JS)/src/oa_shared.js $(STATIC_JS)/src/*.js $(STATIC_JS)/src/studio/*.js > "$(STATIC_JS)/openassessment-studio.min.js"
install-test:
......
......@@ -25,7 +25,12 @@ module.exports = function(config) {
'lib/*.js',
'src/oa_shared.js',
'src/*.js',
'src/lms/*.js',
'src/studio/*.js',
'spec/test_shared.js',
'spec/*.js',
'spec/lms/*.js',
'spec/studio/*.js',
// fixtures
{
......@@ -44,7 +49,9 @@ module.exports = function(config) {
// preprocess matching files before serving them to the browser
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors: {
'src/*.js': 'coverage'
'src/*.js': 'coverage',
'src/lms/*.js': 'coverage',
'src/studio/*.js': 'coverage'
},
......
......@@ -26,7 +26,7 @@ class RubricAdmin(admin.ModelAdmin):
"""Short description of criteria for presenting in a list."""
rubric_data = RubricSerializer.serialized_from_cache(rubric_obj)
return u", ".join(
u"{}: {}".format(criterion["name"], criterion["points_possible"])
u"{} - {}: {}".format(criterion["name"], criterion['label'], criterion["points_possible"])
for criterion in rubric_data["criteria"]
)
......@@ -88,11 +88,13 @@ class AssessmentAdmin(admin.ModelAdmin):
def parts_summary(self, assessment_obj):
return "<br/>".join(
html.escape(
u"{}/{} - {}: {} - {}".format(
u"{}/{} - {} - {}: {} - {} - {}".format(
part.points_earned,
part.points_possible,
part.criterion.name,
part.criterion.label,
part.option.name if part.option else "None",
part.option.label if part.option else "None",
part.feedback,
)
)
......
......@@ -189,9 +189,9 @@ def validate_training_examples(rubric, examples):
]
if len(set(criteria_options) - set(criteria_without_options)) == 0:
return [_(
u"When you include a student training assessment, "
u"the rubric for the assessment must contain at least one criterion, "
u"and each criterion must contain at least two options."
"If your assignment includes a student training step, "
"the rubric must have at least one criterion, "
"and that criterion must have at least one option."
)]
# Check each example
......
......@@ -149,7 +149,13 @@ class Criterion(models.Model):
"""
rubric = models.ForeignKey(Rubric, related_name="criteria")
# Backwards compatibility: The "name" field was formerly
# used both as a display name and as a unique identifier.
# Now we're using it only as a unique identifier.
# We include the "label" (which is displayed to the user)
# in the data model so we can include it in analytics data packages.
name = models.CharField(max_length=100, blank=False)
label = models.CharField(max_length=100, blank=True)
# 0-based order in the Rubric
order_num = models.PositiveIntegerField()
......@@ -189,9 +195,13 @@ class CriterionOption(models.Model):
# How many points this option is worth. 0 is allowed.
points = models.PositiveIntegerField()
# Short name of the option. This is visible to the user.
# Examples: "Excellent", "Good", "Fair", "Poor"
# Backwards compatibility: The "name" field was formerly
# used both as a display name and as a unique identifier.
# Now we're using it only as a unique identifier.
# We include the "label" (which is displayed to the user)
# in the data model so we can include it in analytics data packages.
name = models.CharField(max_length=100)
label = models.CharField(max_length=100, blank=True)
# Longer text describing this option and why you should choose it.
# Example: "The response makes 3-5 Monty Python references and at least one
......
......@@ -63,7 +63,7 @@ class CriterionOptionSerializer(NestedModelSerializer):
"""Serializer for :class:`CriterionOption`"""
class Meta:
model = CriterionOption
fields = ('order_num', 'points', 'name', 'explanation')
fields = ('order_num', 'points', 'name', 'label', 'explanation')
class CriterionSerializer(NestedModelSerializer):
......@@ -73,7 +73,7 @@ class CriterionSerializer(NestedModelSerializer):
class Meta:
model = Criterion
fields = ('order_num', 'name', 'prompt', 'options', 'points_possible')
fields = ('order_num', 'name', 'label', 'prompt', 'options', 'points_possible')
class RubricSerializer(NestedModelSerializer):
......
......@@ -577,7 +577,7 @@
"options_selected": {}
}
],
"errors": ["When you include a student training assessment, the rubric for the assessment must contain at least one criterion, and each criterion must contain at least two options."]
"errors": ["If your assignment includes a student training step, the rubric must have at least one criterion, and that criterion must have at least one option."]
}
}
......@@ -28,7 +28,8 @@ class CsvWriter(object):
],
'assessment_part': [
'assessment_id', 'points_earned',
'criterion_name', 'option_name', 'feedback'
'criterion_name', 'criterion_label',
'option_name', 'option_label', 'feedback'
],
'assessment_feedback': [
'submission_uuid', 'feedback_text', 'options'
......@@ -230,7 +231,9 @@ class CsvWriter(object):
part.assessment.id,
part.points_earned,
part.criterion.name,
part.criterion.label,
part.option.name if part.option is not None else u"",
part.option.label if part.option is not None else u"",
part.feedback
])
......
{% load i18n %}
{% load tz %}
{% spaceless %}
<div id="openassessment-editor" class="editor-with-buttons editor-with-tabs">
<div class="openassessment_editor_content_and_tabs">
<div id="openassessment_editor_header">
<h6 id="oa_editor_window_title" class="title modal_window_title" >{% trans "Open Response Assessment" %}</h6>
<ul class="editor_modes action_list action_modes editor_tabs">
<li class="view-button oa_editor_tab"><a href="#oa_settings_editor_wrapper">{% trans "Settings" %}</a></li>
<li class="view-button oa_editor_tab"><a href="#oa_rubric_editor_wrapper">{% trans "Rubric" %}</a></li>
<li class="view-button oa_editor_tab"><a href="#oa_prompt_editor_wrapper">{% trans "Prompt" %}</a></li>
</ul>
</div>
<div id="openassessment_validation_alert" class="covered">
<i class="openassessment_alert_icon"></i>
<div class="openassessment_alert_header">
<h2 class="openassessment_alert_title">{% trans "Rubric Change Impacts Settings Section" %}</h2>
<p class="openassessment_alert_message">{% trans "A change that you made to this assessment's rubric has an impact on some examples laid out in the settings tab. For more information, go to the Settings section and fix areas highlighted in red." %}</p>
</div>
<a href="" rel="view" class="action openassessment_alert_close">
<i class="icon-remove-sign"></i>
<span class="label is--hidden">{% trans "close alert" %}</span>
</a>
</div>
<div id="oa_prompt_editor_wrapper" class="oa_editor_content_wrapper">
<textarea id="openassessment_prompt_editor" maxlength="10000">{{ prompt }}</textarea>
</div>
{% include "openassessmentblock/edit/oa_edit_rubric.html" %}
<div id="oa_settings_editor_wrapper" class="oa_editor_content_wrapper wrapper-comp-settings">
<ul id="oa_basic_settings_editor" class="list-input settings-list">
<li id="openassessment_title_editor_wrapper" class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_title_editor" class="setting-label">{% trans "Display Name "%}</label>
<input type="text" id="openassessment_title_editor" class="input setting-input" value="{{ title }}">
</div>
<p class="setting-help">{% trans "This name appears when you hover over the unit in the course ribbon at the top of the page." %}</p>
</li>
<li class="openassessment_date_editor field comp-setting-entry">
<div class="wrapper-comp-setting">
<label
for="openassessment_submission_start_date"
class="setting-label">
{% trans "Response Start Date" %}
</label>
<input
type="text"
class="input setting-input"
id="openassessment_submission_start_date"
value="{{ submission_start|utc|date:"Y-m-d" }}"
>
</div>
<div class="wrapper-comp-setting">
<label
for="openassessment_submission_start_time"
class="setting-label">
{% trans "Response Start Time" %}
</label>
<input
type="text"
class="input setting-input"
id="openassessment_submission_start_time"
value="{{ submission_start|utc|date:"H:i" }}"
>
</div>
<p class="setting-help">{% trans "The date and time when students can begin submitting responses." %}</p>
</li>
<li class="openassessment_date_editor field comp-setting-entry">
<div class="wrapper-comp-setting">
<label
for="openassessment_submission_due_date"
class="setting-label">
{% trans "Response Due Date" %}
</label>
<input
type="text"
class="input setting-input"
id="openassessment_submission_due_date"
value="{{ submission_due|utc|date:"Y-m-d" }}"
>
</div>
<div class="wrapper-comp-setting">
<label
for="openassessment_submission_due_time"
class="setting-label">
{% trans "Response Due Time" %}
</label>
<input
type="text"
class="input setting-input"
id="openassessment_submission_due_time"
value="{{ submission_due|utc|date:"H:i" }}"
>
</div>
<p class="setting-help">{% trans "The date and time when students can no longer submit responses." %}</p>
</li>
<li id="openassessment_submission_image_wrapper" class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_submission_image_editor" class="setting-label">{% trans "Allow Image Responses"%}</label>
<select id="openassessment_submission_image_editor" class="input setting-input" name="image submission">
<option value="0">{% trans "False"%}</option>
<option value="1" {% if allow_file_upload %} selected="true" {% endif %}>{% trans "True"%}</option>
</select>
</div>
<p class="setting-help">{% trans "Specify whether students can submit an image file along with their text response." %}</p>
</li>
<li id="openassessment_leaderboard_wrapper" class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_leaderboard_editor" class="setting-label">{% trans "Number of Leaderboard Scores" %}</label>
<input
id="openassessment_leaderboard_editor"
class="input setting-input"
type="number"
value="{{ leaderboard_show }}"
min="0" max="99"
/>
</div>
<p class="setting-help">{% trans "Set the number of scores to display on the leaderboard. If set to 0, the leaderboard will not be shown." %}</p>
</li>
</ul>
<p class="openassessment_description" id="openassessment_step_select_description">
{% if 'example_based_assessment' in editor_assessments_order %}
{% trans "In this assignment, you can include steps for student training, peer assessment, self assessment, and example based assessment. Select the steps that you want below, and then drag them into the order that you want. If you include an example based assessment step, it must precede all other steps. If you include a student training training step, it must precede peer and self assessment steps." %}
{% else %}
{% trans "In this assignment, you can include steps for student training, peer assessment, and self assessment. Select the steps that you want below, and then drag them into the order that you want. If you include a student training step, it must precede all other steps." %}
{% endif %}
</p>
<ol id="openassessment_assessment_module_settings_editors">
{% for assessment in editor_assessments_order %}
{% with "openassessmentblock/edit/oa_edit_"|add:assessment|add:".html" as template %}
{% include template %}
{% endwith %}
{% endfor %}
</ol>
</div>
</div>
<div class="openassessment_editor_buttons xblock-actions">
<h3 class="sr">Actions</h3>
<ul>
<li class="action-item">
<a href="#" class="button action-primary openassessment_save_button">{% trans "Save" %}</a>
</li>
<li class="action-item">
<a href="#" class="button openassessment_cancel_button">{% trans "Cancel" %}</a>
</li>
</ul>
</div>
</div>
{% endspaceless %}
{% load i18n %}
{% spaceless %}
<li class="openassessment_criterion is-collapsible" data-criterion="{{ criterion_name }}">
<div class="openassessment_criterion_header view-outline">
<a class="action expand-collapse collapse"><i class="icon-caret-down ui-toggle-expansion"></i></a>
<h6 class="openassessment_criterion_header_title">{% trans "Criterion" %}</h6>
<div class="openassessment_criterion_remove_button"><h2>{% trans "Remove" %}</h2></div>
</div>
<div class="openassessment_criterion_body wrapper-comp-settings">
<input type="hidden" class="openassessment_criterion_name" value="{{ criterion_name }}" />
<ul class="list-input settings-list openassessment_criterion_basic_editor">
<li class="field comp-setting-entry">
<div class="wrapper-comp-settings">
<label class="openassessment_criterion_name_label setting-label">
{% trans "Criterion Name" %}
<input
class="openassessment_criterion_label input setting-input"
type="text"
value="{{ criterion_label }}"
>
</label>
</div>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-settings">
<label class="openassessment_criterion_prompt_label setting-label">
{% trans "Criterion Prompt" %}
<textarea class="openassessment_criterion_prompt setting-input" maxlength="10000">{{ criterion_prompt }}</textarea>
</label>
</div>
</li>
</ul>
<ul class="openassessment_criterion_option_list">
{% for option in criterion_options %}
{% include "openassessmentblock/edit/oa_edit_option.html" with criterion_name=criterion_name option_name=option.name option_label=option.label option_points=option.points option_explanation=option.explanation %}
{% endfor %}
</ul>
<div class="openassessment_criterion_add_option openassessment_option_header">
<h2>{% trans "Add Option" %}</h2>
</div>
<div class="openassessment_criterion_feedback_wrapper wrapper-comp-settings">
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label class="setting-label">
{% trans "Feedback for This Criterion" %}
<select class="openassessment_criterion_feedback input setting-input">
<option value="disabled">{% trans "None" %}</option>
<option value="optional" {% if criterion_feedback == "optional" %} selected="true" {% endif %}>{% trans "Optional" %}</option>
<option value="required" {% if criterion_feedback == "required" %} selected="true" {% endif %}>{% trans "Required" %}</option>
</select>
</label>
</div>
<p class="setting-help">
{% trans "Select one of the options above. This describes whether or not the student will have to provide criterion feedback." %}
</p>
</li>
</ul>
</div>
</div>
</li>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% spaceless %}
<li class="openassessment_assessment_module_settings_editor" id="oa_ai_assessment_editor">
<div class="drag-handle action"></div>
<div class="openassessment_inclusion_wrapper">
<input id="include_ai_assessment" type="checkbox"
{% if assessments.example_based_assessment %} checked="true" {% endif %}>
<label for="include_ai_assessment">{% trans "Step: Example-Based Assessment" %}</label>
</div>
<div class="openassessment_assessment_module_editor">
<p id="ai_assessment_description_closed" class="openassessment_description_closed {% if assessments.example_based_assessment %} is--hidden {% endif %}">
{% trans "An algorithm assesses students' responses by comparing the responses to pre-assessed sample responses that the instructor provides."%}
</p>
<div id="ai_assessment_settings_editor" class="assessment_settings_wrapper {% if not assessments.example_based_assessment %} is--hidden {% endif %}">
<p class="openassessment_description">
{% trans "Enter one or more sample responses that you've created, and then specify the options that you would choose for each criterion in your rubric. Note that you must add your rubric to the Rubric tab before you can complete this step." %}
</p>
<textarea id="ai_training_examples">{{ assessments.example_based_assessment.examples }}</textarea>
</div>
</div>
</li>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% spaceless %}
<li class="openassessment_criterion_option" data-criterion="{{ criterion_name }}" data-option="{{ option_name }}">
<div class="openassessment_option_header">
<div class="openassessment_option_header_title">{% trans "Option" %}</div>
<div class="openassessment_criterion_option_remove_button">
<h2>{% trans "Remove" %}</h2>
</div>
</div>
<div class="wrapper-comp-settings">
<input type="hidden" class="openassessment_criterion_option_name" value="{{ option_name }}" />
<ul class="list-input settings-list">
<li class="field comp-setting-entry openassessment_criterion_option_name_wrapper">
<div class="wrapper-comp-setting">
<label class="openassessment_criterion_option_name_label setting-label">
{% trans "Option Name"%}
<input
class="openassessment_criterion_option_label input input-label"
type="text"
name="{{ option_name }}"
value="{{ option_label }}"
>
</label>
</div>
</li>
<li class="field comp-setting-entry openassessment_criterion_option_point_wrapper">
<div class="wrapper-comp-setting">
<label class="openassessment_criterion_option_points_label setting-label">
{% trans "Option Points"%}
<input
class="openassessment_criterion_option_points input setting-input"
type="number"
value="{{ option_points }}"
min="0" max="999"
>
</label>
</div>
</li>
<li class="field comp-setting-entry openassessment_criterion_option_explanation_wrapper">
<div class="wrapper-comp-setting">
<label class="openassessment_criterion_option_explanation_label setting-label">
{% trans "Option Explanation"%}
<textarea class="openassessment_criterion_option_explanation setting-input" maxlength="10000">{{ option_explanation }}</textarea>
</label>
</div>
</li>
</ul>
</div>
</li>
{% endspaceless %}
{% load i18n %}
{% load tz %}
{% spaceless %}
<li class="openassessment_assessment_module_settings_editor" id="oa_peer_assessment_editor">
<div class="drag-handle action"></div>
<div class="openassessment_inclusion_wrapper">
<input type="checkbox" id="include_peer_assessment"
{% if assessments.peer_assessment %} checked="true" {% endif %}>
<label for="include_peer_assessment">{% trans "Step: Peer Assessment" %}</label>
</div>
<div class="openassessment_assessment_module_editor">
<p id="peer_assessment_description_closed" class="openassessment_description_closed {% if assessments.peer_assessment %} is--hidden {% endif %}">
{% trans "Students assess a specified number of other students' responses using the rubric for the assignment." %}
</p>
<div id="peer_assessment_settings_editor" class="assessment_settings_wrapper {% if not assessments.peer_assessment %} is--hidden {% endif %}">
<p class="openassessment_description">
{% trans "Specify the following values for the peer assessment step." %}
</p>
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_must_grade" class="setting-label">{% trans "Must Grade" %}</label>
<input id="peer_assessment_must_grade" class="input setting-input" type="number" value="{{ assessments.peer_assessment.must_grade }}" min="0" max="99">
</div>
<p class="setting-help">{% trans "Specify the number of peer assessments that each student must complete."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_graded_by" class="setting-label"> {% trans "Graded By" %}</label>
<input id="peer_assessment_graded_by" class="input setting-input" type="number" value="{{ assessments.peer_assessment.must_be_graded_by }}" min="0" max="99">
</div>
<p class="setting-help">{% trans "Specify the number of students who must assess each response."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_start_date" class="setting-label">{% trans "Start Date" %}</label>
<input
id="peer_assessment_start_date"
type="text"
class="input setting-input"
value="{{ assessments.peer_assessment.start|utc|date:"Y-m-d" }}"
>
</div>
<div class="wrapper-comp-setting">
<label for="peer_assessment_start_time" class="setting-label">{% trans "Start Time" %}</label>
<input
id="peer_assessment_start_time"
type="text"
class="input setting-input"
value="{{ assessments.peer_assessment.start|utc|date:"H:i" }}"
>
</div>
<p class="setting-help">{% trans "Enter the date and time when students can begin assessing peer responses." %}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_due_date" class="setting-label">{% trans "Due Date" %}</label>
<input
id="peer_assessment_due_date"
type="text"
class="input setting-input"
value="{{ assessments.peer_assessment.due|utc|date:"Y-m-d" }}"
>
</div>
<div class="wrapper-comp-setting">
<label for="peer_assessment_due_time" class="setting-label">{% trans "Due Time" %}</label>
<input
id="peer_assessment_due_time"
type="text"
class="input setting-input"
value="{{ assessments.peer_assessment.due|utc|date:"H:i" }}"
>
</div>
<p class="setting-help">{% trans "Enter the date and time when all peer assessments must be complete." %}</p>
</li>
</ul>
</div>
</div>
</li>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% spaceless %}
<div id="oa_rubric_editor_wrapper" class="oa_editor_content_wrapper">
<div id="openassessment_criterion_template" class="is--hidden">
{% include "openassessmentblock/edit/oa_edit_criterion.html" with criterion_name="" criterion_label="" criterion_prompt="" criterion_options=False criterion_feedback="disabled" %}
</div>
<div id="openassessment_option_template" class="is--hidden">
{% include "openassessmentblock/edit/oa_edit_option.html" with option_name="" option_label="" option_points=1 option_explanation="" %}
</div>
<div id="openassessment_rubric_instructions">
<p class="openassessment_description">
{% trans "Rubrics are made up of criteria, which usually contain one or more options. Each option has a point value. This template contains two sample criteria and their options. Replace the sample text with your own text. For more information, see the ORA documentation." %}
</p>
</div>
<ul id="openassessment_criterion_list" >
{% for criterion in criteria %}
{% include "openassessmentblock/edit/oa_edit_criterion.html" with criterion_name=criterion.name criterion_label=criterion.label criterion_prompt=criterion.prompt criterion_options=criterion.options criterion_feedback=criterion.feedback %}
{% endfor %}
</ul>
<div id="openassessment_rubric_add_criterion">
<h6>
{% trans "Add Criterion" %}
</h6>
</div>
<div id="openassessment_rubric_feedback_wrapper" class="wrapper-comp-settings">
<div id="openassessment_rubric_feedback_header">
<span>{% trans "Feedback for This Response" %}</span>
</div>
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting" id="openassessment_rubric_feedback_input_wrapper">
<label for="openassessment_rubric_feedback" class="setting-label">{% trans "Feedback Instructions" %}</label>
<textarea id="openassessment_rubric_feedback" class="input setting-input">{{ feedbackprompt }}</textarea>
</div>
<p class="setting-help">
{% trans "Encourage your students to provide feedback on the response they've graded. You can replace the sample text with your own." %}
</p>
</li>
</ul>
</div>
</div>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% load tz %}
{% spaceless %}
<li class="openassessment_assessment_module_settings_editor" id="oa_self_assessment_editor">
<div class="drag-handle action"></div>
<div class="openassessment_inclusion_wrapper">
<input id="include_self_assessment" type="checkbox"
{% if assessments.self_assessment %} checked="true" {% endif %}>
<label for="include_self_assessment">{% trans "Step: Self Assessment" %}</label>
</div>
<div class="openassessment_assessment_module_editor">
<p id="self_assessment_description_closed" class="openassessment_description_closed {% if assessments.self_assessment %} is--hidden {% endif %}">
{% trans "Students assess their own responses using the rubric for the assignment." %}
</p>
<div id="self_assessment_settings_editor" class="assessment_settings_wrapper {% if not assessments.self_assessment %} is--hidden {% endif %}">
<p class="openassessment_description">
{% trans "Specify start and due dates for the self assessment step." %}
</p>
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="self_assessment_start_date" class="setting-label">{% trans "Start Date" %}</label>
<input
id="self_assessment_start_date"
type="text"
class="input setting-input"
value="{{ assessments.self_assessment.start|utc|date:"Y-m-d" }}"
>
</div>
<div class="wrapper-comp-setting">
<label for="self_assessment_start_time" class="setting-label">{% trans "Start Time" %}</label>
<input
id="self_assessment_start_time"
type="text"
class="input setting-input"
value="{{ assessments.self_assessment.start|utc|date:"H:i" }}"
>
</div>
<p class="setting-help">{% trans "Enter the date and time when students can begin assessing their responses." %}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="self_assessment_due_date" class="setting-label">{% trans "Due Date" %}</label>
<input
id="self_assessment_due_date"
type="text"
class="input setting-input"
value="{{ assessments.self_assessment.due|utc|date:"Y-m-d" }}"
>
</div>
<div class="wrapper-comp-setting">
<label for="self_assessment_due_time" class="setting-label">{% trans "Due Time" %}</label>
<input
id="self_assessment_due_time"
type="text"
class="input setting-input"
value="{{ assessments.self_assessment.due|utc|date:"H:i" }}"
>
</div>
<p class="setting-help">{% trans "Enter the date and time when all self assessments must be complete." %}</p>
</li>
</ul>
</div>
</div>
</li>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% spaceless %}
<li class="openassessment_assessment_module_settings_editor" id="oa_student_training_editor">
<div class="drag-handle action"></div>
<div class="openassessment_inclusion_wrapper">
<input type="checkbox" id="include_student_training"
{% if assessments.student_training %} checked="true" {% endif %}>
<label for="include_student_training">{% trans "Step: Student Training" %}</label>
</div>
<div class="openassessment_assessment_module_editor">
<p id="student_training_description_closed" class="openassessment_description_closed {% if assessments.student_training %} is--hidden {% endif %}">
{% trans "Students learn to assess responses by scoring pre-assessed sample responses that you provide. Students move to the next step when the scores they give match your scores. Note that if you add this step, you must also add a peer assessment step. This step must come before the peer assessment step." %}
</p>
<div id="student_training_settings_editor" class="assessment_settings_wrapper {% if not assessments.student_training %} is--hidden {% endif %}">
<p class="openassessment_description">
{% trans "Enter one or more sample responses that you've created, and then specify the options that you would choose for each criterion in your rubric. Note that you must add your rubric to the Rubric tab before you can complete this step." %}
</p>
<ol id="openassessment_training_example_list">
{% for example in assessments.training.examples %}
{% include "openassessmentblock/edit/oa_training_example.html" with example=example %}
{% endfor %}
</ol>
<div id="openassessment_add_training_example">
<h2 class="openassessment_add_training_example">{% trans "Add Sample Response" %}</h2>
</div>
</div>
<ol id="openassessment_training_example_template" class="is--hidden">
{% include "openassessmentblock/edit/oa_training_example.html" with example=assessments.training.template %}
</ol>
<ol id="openassessment_training_example_criterion_template" class="is--hidden">
{% include "openassessmentblock/edit/oa_training_example_criterion.html" %}
</ol>
</div>
</li>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% spaceless %}
<li class="openassessment_training_example is-collapsible">
<div class="openassessment_training_example_header view-outline">
<a class="action expand-collapse collapse">
<i class="icon-caret-down ui-toggle-expansion"></i>
</a>
<h6 class="openassessment_training_example_header_title">
{% trans "Scored Response" %}
</h6>
<div class="openassessment_training_example_remove">
<h2>{% trans "Remove" %}</h2>
</div>
</div>
<div class="openassessment_training_example_body">
<div class="openassessment_training_example_scored_rubric wrapper-comp-settings">
<h2>{% trans "Response Score" %}</h2>
<ol class="openassessment_training_example_criteria_selections list-input settings-list">
{% for criterion in example.criteria %}
{% if criterion.options %}
{% include "openassessmentblock/edit/oa_training_example_criterion.html" with criterion=criterion %}
{% endif %}
{% endfor %}
</ol>
</div>
<div class="openassessment_training_example_essay_wrapper">
<h2>{% trans "Response" %}</h2>
<textarea class="openassessment_training_example_essay" maxlength="100000">{{ example.answer }}</textarea>
</div>
</div>
</li>
{% endspaceless %}
\ No newline at end of file
{% load i18n %}
{% spaceless %}
<li class="field comp-setting-entry openassessment_training_example_criterion" data-criterion="{{ criterion.name }}">
<div class="wrapper-comp-setting">
<label class="openassessment_training_example_criterion_name setting-label">
<div class="openassessment_training_example_criterion_name_wrapper">
{{ criterion.label }}
</div>
<select class="openassessment_training_example_criterion_option setting-input" data-criterion="{{ criterion.name }}" data-option="{{ option.name }}">
<option value="">{% trans "Not Selected" %}</option>
{% for option in criterion.options %}
<option value="{{ option.name }}" data-points="{{ option.points }}" data-label="{{ option.label }}"
{% if criterion.option_selected == option.name %} selected {% endif %}
>
{{ option.label }} - {{ option.points }} {% trans "points" %}
</option>
{% endfor %}
</select>
</label>
</div>
</li>
{% endspaceless %}
\ No newline at end of file
......@@ -52,7 +52,7 @@
<li class="question question--{{ criterion_num }} ui-toggle-visibility">
<h4 class="question__title ui-toggle-visibility__control">
<i class="ico icon-caret-right"></i>
<span class="question__title__copy">{{ criterion.name }}</span>
<span class="question__title__copy">{{ criterion.label }}</span>
<span class="question__score">
<span class="label sr">{% trans "Overall Grade" %}</span>
......@@ -82,11 +82,11 @@
<span class="answer__value">
<span class="answer__value__label sr">{% trans "Peer's Assessment" %}: </span>
<span class="answer__value__value">
{{ part.option.name }}
{{ part.option.label }}
<span class="ui-hint hint--top" data-hint="{{ part.option.explanation }}">
<i class="ico icon-info-sign"
title="{% blocktrans with name=part.option.name %}More information about {{ name }}{% endblocktrans %}"></i>
title="{% blocktrans with name=part.option.label %}More information about {{ name }}{% endblocktrans %}"></i>
</span>
</span>
......@@ -113,11 +113,11 @@
<span class="answer__value">
<span class="answer__value__label sr">{% trans "Your Assessment" %}: </span>
<span class="answer__value__value">
{{ part.option.name }}
{{ part.option.label }}
<span class="ui-hint hint--top" data-hint="{{ part.option.explanation }}">
<i class="ico icon-info-sign"
title="{% blocktrans with name=part.option.name %}More information about {{ name }}{% endblocktrans %}"></i>
title="{% blocktrans with name=part.option.label %}More information about {{ name }}{% endblocktrans %}"></i>
</span>
</span>
</span>
......@@ -137,11 +137,11 @@
<span class="answer__value">
<span class="answer__value__label sr">{% trans "Example-Based Assessment" %}: </span>
<span class="answer__value__value">
{{ part.option.name }}
{{ part.option.label }}
<span class="ui-hint hint--top" data-hint="{{ part.option.explanation }}">
<i class="ico icon-info-sign"
title="{% blocktrans with name=part.option.name %}More information about {{ name }}{% endblocktrans %}"></i>
title="{% blocktrans with name=part.option.label %}More information about {{ name }}{% endblocktrans %}"></i>
</span>
</span>
</span>
......
{% load i18n %}
<div id="openassessment-edit" class="editor-with-buttons">
<textarea class="openassessment-editor"></textarea>
<div class="xblock-actions">
<h3 class="sr">Actions</h3>
<ul>
<li class="action-item">
<a href="#" class="button action-primary openassessment-save-button">{% trans "Save" %}</a>
</li>
<li class="action-item">
<a href="#" class="button openassessment-cancel-button">{% trans "Cancel" %}</a>
</li>
</ul>
</div>
</div>
......@@ -26,7 +26,7 @@
value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__label"
>{{ option.name }}</label>
>{{ option.label }}</label>
</div>
<div class="wrapper--metadata">
<span class="answer__tip">{{ option.explanation }}</span>
......
......@@ -90,7 +90,7 @@
value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__label"
>{{ option.name }}</label>
>{{ option.label }}</label>
</div>
<div class="wrapper--metadata">
<span class="answer__tip">{{ option.explanation }}</span>
......
......@@ -116,7 +116,7 @@
class="answer__value"
value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__label">{{ option.name }}</label>
class="answer__label">{{ option.label }}</label>
</div>
<div class="wrapper--metadata">
<span class="answer__tip">{{ option.explanation }}</span>
......
......@@ -13,7 +13,8 @@
"order_num": 0,
"prompt": "How concise is it?",
"rubric": 1,
"name": "concise"
"name": "concise",
"label": "concise label"
}
},
{
......@@ -23,7 +24,8 @@
"order_num": 1,
"prompt": "How clear is the thinking?",
"rubric": 1,
"name": "clear-headed"
"name": "clear-headed",
"label": "clear-headed label"
}
},
{
......@@ -33,183 +35,200 @@
"order_num": 2,
"prompt": "Lastly, how is its form? Punctuation, grammar, and spelling all count.",
"rubric": 1,
"name": "form"
"name": "form",
"label": "form label"
}
},
{
"pk": 1,
"model": "assessment.criterionoption",
"fields": {
"order_num": 0,
"name": "Neal Stephenson (late)",
"explanation": "\n In \"Cryptonomicon\", Stephenson spent multiple pages talking about breakfast cereal.\n While hilarious, in recent years his work has been anything but 'concise'.\n ",
"label": "Neal Stephenson (late) label",
"points": 0,
"criterion": 1,
"name": "Neal Stephenson (late)"
"order_num": 0
}
},
{
"pk": 2,
"model": "assessment.criterionoption",
"fields": {
"order_num": 1,
"name": "HP Lovecraft",
"explanation": "\n If the author wrote something cyclopean that staggers the mind, score it thus.\n ",
"label": "HP Lovecraft label",
"points": 1,
"criterion": 1,
"name": "HP Lovecraft"
"order_num": 1
}
},
{
"pk": 3,
"model": "assessment.criterionoption",
"fields": {
"order_num": 2,
"name": "Robert Heinlein",
"explanation": "\n Tight prose that conveys a wealth of information about the world in relatively\n few words. Example, \"The door irised open and he stepped inside.\"\n ",
"label": "Robert Heinlein label",
"points": 3,
"criterion": 1,
"name": "Robert Heinlein"
"order_num": 2
}
},
{
"pk": 4,
"model": "assessment.criterionoption",
"fields": {
"order_num": 3,
"name": "Neal Stephenson (early)",
"explanation": "\n When Stephenson still had an editor, his prose was dense, with anecdotes about\n nitrox abuse implying main characters' whole life stories.\n ",
"label": "Neal Stephenson (early) label",
"points": 4,
"criterion": 1,
"name": "Neal Stephenson (early)"
"order_num": 3
}
},
{
"pk": 5,
"model": "assessment.criterionoption",
"fields": {
"order_num": 4,
"name": "Earnest Hemingway",
"explanation": "\n Score the work this way if it makes you weep, and the removal of a single\n word would make you sneer.\n ",
"label": "Earnest Hemingway label",
"points": 5,
"criterion": 1,
"name": "Earnest Hemingway"
"order_num": 4
}
},
{
"pk": 6,
"model": "assessment.criterionoption",
"fields": {
"order_num": 0,
"name": "Yogi Berra",
"explanation": "",
"label": "Yogi Berra label",
"points": 0,
"criterion": 2,
"name": "Yogi Berra"
"order_num": 0
}
},
{
"pk": 7,
"model": "assessment.criterionoption",
"fields": {
"order_num": 1,
"name": "Hunter S. Thompson",
"explanation": "",
"label": "Hunter S. Thompson label",
"points": 1,
"criterion": 2,
"name": "Hunter S. Thompson"
"order_num": 1
}
},
{
"pk": 8,
"model": "assessment.criterionoption",
"fields": {
"order_num": 2,
"name": "Robert Heinlein",
"explanation": "",
"label": "Robert Heinlein label",
"points": 2,
"criterion": 2,
"name": "Robert Heinlein"
"order_num": 2
}
},
{
"pk": 9,
"model": "assessment.criterionoption",
"fields": {
"order_num": 3,
"name": "Isaac Asimov",
"explanation": "",
"label": "Isaac Asimov label",
"points": 3,
"criterion": 2,
"name": "Isaac Asimov"
"order_num": 3
}
},
{
"pk": 10,
"model": "assessment.criterionoption",
"fields": {
"order_num": 4,
"name": "Spock",
"explanation": "\n Coolly rational, with a firm grasp of the main topics, a crystal-clear train of thought,\n and unemotional examination of the facts. This is the only item explained in this category,\n to show that explained and unexplained items can be mixed.\n ",
"label": "Spock label",
"points": 10,
"criterion": 2,
"name": "Spock"
"order_num": 4
}
},
{
"pk": 11,
"model": "assessment.criterionoption",
"fields": {
"order_num": 0,
"name": "lolcats",
"explanation": "",
"label": "lolcats label",
"points": 0,
"criterion": 3,
"name": "lolcats"
"order_num": 0
}
},
{
"pk": 12,
"model": "assessment.criterionoption",
"fields": {
"order_num": 1,
"name": "Facebook",
"explanation": "",
"label": "Facebook label",
"points": 1,
"criterion": 3,
"name": "Facebook"
"order_num": 1
}
},
{
"pk": 13,
"model": "assessment.criterionoption",
"fields": {
"order_num": 2,
"name": "Reddit",
"explanation": "",
"label": "Reddit label",
"points": 2,
"criterion": 3,
"name": "Reddit"
"order_num": 2
}
},
{
"pk": 14,
"model": "assessment.criterionoption",
"fields": {
"order_num": 3,
"name": "metafilter",
"explanation": "",
"label": "metafilter label",
"points": 3,
"criterion": 3,
"name": "metafilter"
"order_num": 3
}
},
{
"pk": 15,
"model": "assessment.criterionoption",
"fields": {
"order_num": 4,
"name": "Usenet, 1996",
"explanation": "",
"label": "Usenet, 1996 label",
"points": 4,
"criterion": 3,
"name": "Usenet, 1996"
"order_num": 4
}
},
{
"pk": 16,
"model": "assessment.criterionoption",
"fields": {
"order_num": 5,
"name": "The Elements of Style",
"explanation": "",
"label": "The Elements of Style label",
"points": 5,
"criterion": 3,
"name": "The Elements of Style"
"order_num": 5
}
}
]
\ No newline at end of file
......@@ -13,7 +13,15 @@
["submission_uuid", "feedback_text", "options"]
],
"assessment_part": [
["assessment_id", "points_earned", "criterion_name", "option_name", "feedback"]
[
"assessment_id",
"points_earned",
"criterion_name",
"criterion_label",
"option_name",
"option_label",
"feedback"
]
],
"assessment_feedback_option": [
["id", "text"]
......@@ -62,10 +70,10 @@
]
],
"assessment_part": [
["assessment_id", "points_earned", "criterion_name", "option_name", "feedback"],
["1", "4", "concise", "Neal Stephenson (early)", "Praesent ac lorem ac nunc tincidunt ultricies sit amet ut magna."],
["1", "5", "form", "The Elements of Style", "Fusce varius, elit ut blandit consequat, odio ante mollis lectus"],
["1", "3", "clear-headed", "Isaac Asimov", ""]
["assessment_id", "points_earned", "criterion_name", "criterion_label", "option_name", "option_label", "feedback"],
["1", "4", "concise", "concise label", "Neal Stephenson (early)", "Neal Stephenson (early) label", "Praesent ac lorem ac nunc tincidunt ultricies sit amet ut magna."],
["1", "5", "form", "form label", "The Elements of Style", "The Elements of Style label", "Fusce varius, elit ut blandit consequat, odio ante mollis lectus"],
["1", "3", "clear-headed", "clear-headed label", "Isaac Asimov", "Isaac Asimov label", ""]
]
}
},
......@@ -97,13 +105,13 @@
]
],
"assessment_part": [
["assessment_id", "points_earned", "criterion_name", "option_name", "feedback"],
["1", "4", "concise", "Neal Stephenson (early)", "Praesent ac lorem ac nunc tincidunt ultricies sit amet ut magna."],
["1", "5", "form", "The Elements of Style", "Fusce varius, elit ut blandit consequat, odio ante mollis lectus"],
["1", "3", "clear-headed", "Isaac Asimov", ""],
["2", "5", "concise", "Earnest Hemingway", ""],
["2", "5", "form", "The Elements of Style", ""],
["2", "10", "clear-headed", "Spock", ""]
["assessment_id", "points_earned", "criterion_name", "criterion_label", "option_name", "option_label", "feedback"],
["1", "4", "concise", "concise label", "Neal Stephenson (early)", "Neal Stephenson (early) label", "Praesent ac lorem ac nunc tincidunt ultricies sit amet ut magna."],
["1", "5", "form", "form label", "The Elements of Style", "The Elements of Style label", "Fusce varius, elit ut blandit consequat, odio ante mollis lectus"],
["1", "3", "clear-headed", "clear-headed label", "Isaac Asimov", "Isaac Asimov label", ""],
["2", "5", "concise", "concise label", "Earnest Hemingway", "Earnest Hemingway label", ""],
["2", "5", "form", "form label", "The Elements of Style", "The Elements of Style label", ""],
["2", "10", "clear-headed", "clear-headed label", "Spock", "Spock label", ""]
]
}
},
......@@ -172,11 +180,11 @@
]
],
"assessment_part": [
["assessment_id", "points_earned", "criterion_name", "option_name", "feedback"],
["1", "4", "concise", "Neal Stephenson (early)", "Praesent ac lorem ac nunc tincidunt ultricies sit amet ut magna."],
["1", "5", "form", "The Elements of Style", "Fusce varius, elit ut blandit consequat, odio ante mollis lectus"],
["1", "3", "clear-headed", "Isaac Asimov", ""],
["1", "0", "feedback only", "", "Feedback!"]
["assessment_id", "points_earned", "criterion_name", "criterion_label", "option_name", "option_label", "feedback"],
["1", "4", "concise", "concise label", "Neal Stephenson (early)", "Neal Stephenson (early) label", "Praesent ac lorem ac nunc tincidunt ultricies sit amet ut magna."],
["1", "5", "form", "form label", "The Elements of Style", "The Elements of Style label", "Fusce varius, elit ut blandit consequat, odio ante mollis lectus"],
["1", "3", "clear-headed", "clear-headed label", "Isaac Asimov", "Isaac Asimov label", ""],
["1", "0", "feedback only", "feedback only label", "", "", "Feedback!"]
]
}
}
......
......@@ -74,6 +74,7 @@ def create_rubric_dict(prompt, criteria):
"criteria": criteria
}
def clean_criterion_feedback(rubric_criteria, criterion_feedback):
"""
Remove per-criterion feedback for criteria with feedback disabled
......@@ -93,3 +94,19 @@ def clean_criterion_feedback(rubric_criteria, criterion_feedback):
if criterion['name'] in criterion_feedback
and criterion.get('feedback', 'disabled') in ['optional', 'required']
}
def make_django_template_key(key):
"""
Django templates access dictionary items using dot notation,
which means that dictionary keys with hyphens don't work.
This function sanitizes a key for use in Django templates
by replacing hyphens with underscores.
Args:
key (basestring): The key to sanitize.
Returns:
basestring
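Example (illustrative):
    >>> make_django_template_key("clear-headed")
    'clear_headed'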
"""
return key.replace('-', '_')
......@@ -15,43 +15,45 @@ DEFAULT_PROMPT = """
DEFAULT_RUBRIC_CRITERIA = [
{
'name': "Ideas",
'label': "Ideas",
'prompt': "Determine if there is a unifying theme or main idea.",
'order_num': 0,
'feedback': 'optional',
'options': [
{
'order_num': 0, 'points': 0, 'name': 'Poor',
'order_num': 0, 'points': 0, 'name': 'Poor', 'label': 'Poor',
'explanation': """Difficult for the reader to discern the main idea. Too brief or too repetitive to establish or maintain a focus."""
},
{
'order_num': 1, 'points': 3, 'name': 'Fair',
'order_num': 1, 'points': 3, 'name': 'Fair', 'label': 'Fair',
'explanation': """Presents a unifying theme or main idea, but may include minor tangents. Stays somewhat focused on topic and task."""
},
{
'order_num': 2, 'points': 5, 'name': 'Good',
'order_num': 2, 'points': 5, 'name': 'Good', 'label': 'Good',
'explanation': """Presents a unifying theme or main idea without going off on tangents. Stays completely focused on topic and task."""
},
],
},
{
'name': "Content",
'label': "Content",
'prompt': "Assess the content of the submission",
'order_num': 1,
'options': [
{
'order_num': 0, 'points': 0, 'name': 'Poor',
'order_num': 0, 'points': 0, 'name': 'Poor', 'label': 'Poor',
'explanation': """Includes little information with few or no details or unrelated details. Unsuccessful in attempts to explore any facets of the topic."""
},
{
'order_num': 1, 'points': 1, 'name': 'Fair',
'order_num': 1, 'points': 1, 'name': 'Fair', 'label': 'Fair',
'explanation': """Includes little information and few or no details. Explores only one or two facets of the topic."""
},
{
'order_num': 2, 'points': 3, 'name': 'Good',
'order_num': 2, 'points': 3, 'name': 'Good', 'label': 'Good',
'explanation': """Includes sufficient information and supporting details. (Details may not be fully developed; ideas may be listed.) Explores some facets of the topic."""
},
{
'order_num': 3, 'points': 3, 'name': 'Excellent',
'order_num': 3, 'points': 3, 'name': 'Excellent', 'label': 'Excellent',
'explanation': """Includes in-depth information and exceptional supporting details that are fully developed. Explores all facets of the topic."""
},
],
......@@ -61,18 +63,20 @@ DEFAULT_RUBRIC_CRITERIA = [
# The rubric's feedback prompt is a set of instructions letting the student
# know they can provide additional free form feedback in their assessment.
DEFAULT_RUBRIC_FEEDBACK_PROMPT = """
(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?
(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?
"""
DEFAULT_EXAMPLE_ANSWER = """
(optional) Replace this text with your own sample response for this assignment. Below, list the names of the criteria for this assignment, and then specify the name of the option that you would select for this response. Students will learn to assess responses by assessing this response and comparing the rubric options that they select with the rubric options that you specified.
DEFAULT_EXAMPLE_ANSWER = (
"Replace this text with your own sample response for this assignment. "
"Then, under Response Score to the right, select an option for each criterion. "
"Students learn to assess responses by assessing this response and comparing "
"the options that they select in the rubric with the options that you specified."
)
If you don't want to provide sample responses and scores, delete the entire 'assessment name="student-training"' element.
"""
DEFAULT_EXAMPLE_ANSWER_2 = """
(optional) Replace this text with another sample response, and then specify the options that you would select for this response below. To provide more sample responses, copy an "example" element and paste as many as you want before the closing "assessment" tag.
"""
DEFAULT_EXAMPLE_ANSWER_2 = (
"Replace this text with another sample response, "
"and then specify the options that you would select for this response."
)
DEFAULT_STUDENT_TRAINING = {
"name": "student-training",
......@@ -108,20 +112,24 @@ DEFAULT_STUDENT_TRAINING = {
]
}
DEFAULT_START = "2001-01-01T00:00"
DEFAULT_DUE = "2029-01-01T00:00"
# The Default Peer Assessment is created as an example of how this XBlock can be
# configured. If no configuration is specified, this is the default assessment
# module(s) associated with the XBlock.
DEFAULT_PEER_ASSESSMENT = {
"name": "peer-assessment",
"start": None,
"due": None,
"start": DEFAULT_START,
"due": DEFAULT_DUE,
"must_grade": 5,
"must_be_graded_by": 3,
}
DEFAULT_SELF_ASSESSMENT = {
"name": "self-assessment",
"due": None,
"start": DEFAULT_START,
"due": DEFAULT_DUE,
}
DEFAULT_ASSESSMENT_MODULES = [
......@@ -130,3 +138,8 @@ DEFAULT_ASSESSMENT_MODULES = [
DEFAULT_SELF_ASSESSMENT,
]
DEFAULT_EDITOR_ASSESSMENTS_ORDER = [
"student-training",
"peer-assessment",
"self-assessment",
]
......@@ -3,8 +3,8 @@ Grade step in the OpenAssessment XBlock.
"""
import copy
from collections import defaultdict
from lazy import lazy
from django.utils.translation import ugettext as _
from xblock.core import XBlock
from openassessment.assessment.api import peer as peer_api
......@@ -58,7 +58,7 @@ class GradeMixin(object):
else: # status is 'self' or 'peer', which implies that the workflow is incomplete
path, context = self.render_grade_incomplete(workflow)
except (sub_api.SubmissionError, PeerAssessmentError, SelfAssessmentError):
return self.render_error(_(u"An unexpected error occurred."))
return self.render_error(self._(u"An unexpected error occurred."))
else:
return self.render_assessment(path, context)
......@@ -100,14 +100,21 @@ class GradeMixin(object):
if "peer-assessment" in assessment_steps:
feedback = peer_api.get_assessment_feedback(submission_uuid)
peer_assessments = peer_api.get_assessments(submission_uuid)
peer_assessments = [
self._assessment_grade_context(asmnt)
for asmnt in peer_api.get_assessments(submission_uuid)
]
has_submitted_feedback = feedback is not None
if "self-assessment" in assessment_steps:
self_assessment = self_api.get_assessment(submission_uuid)
self_assessment = self._assessment_grade_context(
self_api.get_assessment(submission_uuid)
)
if "example-based-assessment" in assessment_steps:
example_based_assessment = ai_api.get_latest_assessment(submission_uuid)
example_based_assessment = self._assessment_grade_context(
ai_api.get_latest_assessment(submission_uuid)
)
feedback_text = feedback.get('feedback', '') if feedback else ''
student_submission = sub_api.get_submission(submission_uuid)
......@@ -127,7 +134,7 @@ class GradeMixin(object):
'peer_assessments': peer_assessments,
'self_assessment': self_assessment,
'example_based_assessment': example_based_assessment,
'rubric_criteria': self._rubric_criteria_with_feedback(peer_assessments, self_assessment),
'rubric_criteria': self._rubric_criteria_grade_context(peer_assessments, self_assessment),
'has_submitted_feedback': has_submitted_feedback,
'allow_file_upload': self.allow_file_upload,
'file_url': self.get_download_url_from_submission(student_submission)
......@@ -177,9 +184,9 @@ class GradeMixin(object):
incomplete_steps = []
if _is_incomplete("peer"):
incomplete_steps.append(_("Peer Assessment"))
incomplete_steps.append(self._("Peer Assessment"))
if _is_incomplete("self"):
incomplete_steps.append(_("Self Assessment"))
incomplete_steps.append(self._("Self Assessment"))
return (
'openassessmentblock/grade/oa_grade_incomplete.html',
......@@ -212,7 +219,7 @@ class GradeMixin(object):
'options': feedback_options,
})
except (peer_api.PeerAssessmentInternalError, peer_api.PeerAssessmentRequestError):
return {'success': False, 'msg': _(u"Assessment feedback could not be saved.")}
return {'success': False, 'msg': self._(u"Assessment feedback could not be saved.")}
else:
self.runtime.publish(
self,
......@@ -223,12 +230,16 @@ class GradeMixin(object):
'options': feedback_options,
}
)
return {'success': True, 'msg': _(u"Feedback saved.")}
return {'success': True, 'msg': self._(u"Feedback saved.")}
def _rubric_criteria_with_feedback(self, peer_assessments, self_assessment):
def _rubric_criteria_grade_context(self, peer_assessments, self_assessment):
"""
Add per-criterion feedback from peer assessments to the rubric criteria.
Filters out empty feedback.
Sanitize the rubric criteria into a format that can be passed
into the grade complete Django template.
* Add per-criterion feedback from peer assessments to the rubric criteria.
* Filters out empty feedback.
* Assign a "label" for criteria/options if none is defined (backwards compatibility).
Args:
peer_assessments (list of dict): Serialized assessment models from the peer API.
......@@ -240,7 +251,8 @@ class GradeMixin(object):
Example:
[
{
'name': 'Test name',
'label': 'Test name',
'name': 'f78ac7d4ca1e4134b0ba4b40ca212e72',
'prompt': 'Test prompt',
'order_num': 2,
'options': [...]
......@@ -252,7 +264,7 @@ class GradeMixin(object):
...
]
"""
criteria = copy.deepcopy(self.rubric_criteria)
criteria = copy.deepcopy(self.rubric_criteria_with_labels)
peer_criteria_feedback = defaultdict(list)
self_criteria_feedback = {}
......@@ -274,3 +286,67 @@ class GradeMixin(object):
criterion['self_feedback'] = self_criteria_feedback.get(criterion_name)
return criteria
@lazy
def _criterion_and_option_labels(self):
"""
Retrieve criteria and option labels from the rubric in the XBlock problem definition,
defaulting to the name value if no label is available (backwards compatibility).
Evaluated lazily, so it will return a cached value if called repeatedly.
For the grade mixin, this should be okay, since we can't change the problem
definition in the LMS (the settings fields are read-only).
Returns:
Tuple of dictionaries:
`criterion_labels` maps criterion names to criterion labels.
`option_labels` maps (criterion name, option name) tuples to option labels.
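Example (illustrative, using names from the test fixtures):
    (
        {'clear-headed': 'clear-headed label'},
        {('clear-headed', 'Isaac Asimov'): 'Isaac Asimov label'}
    )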
"""
criterion_labels = {}
option_labels = {}
for criterion in self.rubric_criteria_with_labels:
criterion_labels[criterion['name']] = criterion['label']
for option in criterion['options']:
option_label_key = (criterion['name'], option['name'])
option_labels[option_label_key] = option['label']
return criterion_labels, option_labels
def _assessment_grade_context(self, assessment):
"""
Sanitize an assessment dictionary into a format that can be
passed into the grade complete Django template.
Args:
assessment (dict): The serialized assessment model.
Returns:
dict
"""
assessment = copy.deepcopy(assessment)
# Retrieve dictionaries mapping criteria/option names to the associated labels.
# This is a lazy property, so we can call it repeatedly for each assessment.
criterion_labels, option_labels = self._criterion_and_option_labels
# Backwards compatibility: We used to treat "name" as both a user-facing label
# and a unique identifier for criteria and options.
# Now we treat "name" as a unique identifier, and we've added an additional "label"
# field that we display to the user.
# If criteria/options in the problem definition do NOT have a "label" field
# (because they were created before this change),
# we create a new label that has the same value as "name".
for part in assessment['parts']:
criterion_label_key = part['criterion']['name']
part['criterion']['label'] = criterion_labels.get(criterion_label_key, part['criterion']['name'])
# We need to be a little bit careful here: some assessment parts
# have only written feedback, so they're not associated with any options.
# If that's the case, we don't need to add the label field.
if part.get('option') is not None:
option_label_key = (part['criterion']['name'], part['option']['name'])
part['option']['label'] = option_labels.get(option_label_key, part['option']['name'])
return assessment
......@@ -3,12 +3,14 @@
import datetime as dt
import logging
import pkg_resources
import copy
import pytz
from django.template.context import Context
from django.template.loader import get_template
from webob import Response
from lazy import lazy
from xblock.core import XBlock
from xblock.fields import List, Scope, String, Boolean, Integer
......@@ -22,13 +24,14 @@ from openassessment.xblock.lms_mixin import LmsCompatibilityMixin
from openassessment.xblock.self_assessment_mixin import SelfAssessmentMixin
from openassessment.xblock.submission_mixin import SubmissionMixin
from openassessment.xblock.studio_mixin import StudioMixin
from openassessment.xblock.xml import update_from_xml, serialize_content_to_xml
from openassessment.xblock.xml import parse_from_xml, serialize_content_to_xml
from openassessment.xblock.staff_info_mixin import StaffInfoMixin
from openassessment.xblock.workflow_mixin import WorkflowMixin
from openassessment.workflow.errors import AssessmentWorkflowError
from openassessment.xblock.student_training_mixin import StudentTrainingMixin
from openassessment.xblock.validation import validator
from openassessment.xblock.resolve_dates import resolve_dates, DISTANT_PAST, DISTANT_FUTURE
from openassessment.xblock.data_conversion import create_rubric_dict
logger = logging.getLogger(__name__)
......@@ -86,7 +89,7 @@ def load(path):
data = pkg_resources.resource_string(__name__, path)
return data.decode("utf8")
@XBlock.needs("i18n")
class OpenAssessmentBlock(
XBlock,
MessageMixin,
......@@ -104,12 +107,12 @@ class OpenAssessmentBlock(
"""Displays a prompt and provides an area where students can compose a response."""
submission_start = String(
default=None, scope=Scope.settings,
default=DEFAULT_START, scope=Scope.settings,
help="ISO-8601 formatted string representing the submission start date."
)
submission_due = String(
default=None, scope=Scope.settings,
default=DEFAULT_DUE, scope=Scope.settings,
help="ISO-8601 formatted string representing the submission due date."
)
......@@ -246,7 +249,6 @@ class OpenAssessmentBlock(
context_dict = {
"title": self.title,
"question": self.prompt,
"rubric_criteria": self.rubric_criteria,
"rubric_assessments": ui_models,
"show_staff_debug_info": self.is_course_staff and not self.in_studio_preview,
}
......@@ -254,10 +256,11 @@ class OpenAssessmentBlock(
context = Context(context_dict)
frag = Fragment(template.render(context))
frag.add_css(load("static/css/openassessment.css"))
frag.add_javascript(load("static/js/openassessment.min.js"))
frag.add_javascript(load("static/js/openassessment-lms.min.js"))
frag.initialize_js('OpenAssessmentBlock')
return frag
@property
def is_admin(self):
"""
......@@ -369,9 +372,34 @@ class OpenAssessmentBlock(
Inherited by XBlock core.
"""
config = parse_from_xml(node)
block = runtime.construct_xblock_from_class(cls, keys)
return update_from_xml(block, node, validator=validator(block, strict_post_release=False))
xblock_validator = validator(block, block._, strict_post_release=False)
xblock_validator(
create_rubric_dict(config['prompt'], config['rubric_criteria']),
config['rubric_assessments'],
submission_start=config['submission_start'],
submission_due=config['submission_due'],
leaderboard_show=config['leaderboard_show']
)
block.rubric_criteria = config['rubric_criteria']
block.rubric_feedback_prompt = config['rubric_feedback_prompt']
block.rubric_assessments = config['rubric_assessments']
block.submission_start = config['submission_start']
block.submission_due = config['submission_due']
block.title = config['title']
block.prompt = config['prompt']
block.allow_file_upload = config['allow_file_upload']
block.leaderboard_show = config['leaderboard_show']
return block
@property
def _(self):
i18nService = self.runtime.service(self, 'i18n')
return i18nService.ugettext
@property
def valid_assessments(self):
......@@ -394,6 +422,33 @@ class OpenAssessmentBlock(
def assessment_steps(self):
return [asmnt['name'] for asmnt in self.valid_assessments]
@lazy
def rubric_criteria_with_labels(self):
"""
Backwards compatibility: We used to treat "name" as both a user-facing label
and a unique identifier for criteria and options.
Now we treat "name" as a unique identifier, and we've added an additional "label"
field that we display to the user.
If criteria/options in the problem definition do NOT have a "label" field
(because they were created before this change),
we create a new label that has the same value as "name".
The result of this call is cached, so it should NOT be used in a runtime
that can modify the XBlock settings (in the LMS, settings are read-only).
Returns:
list of criteria dictionaries
"""
criteria = copy.deepcopy(self.rubric_criteria)
for criterion in criteria:
if 'label' not in criterion:
criterion['label'] = criterion['name']
for option in criterion['options']:
if 'label' not in option:
option['label'] = option['name']
return criteria
def render_assessment(self, path, context_dict=None):
"""Render an Assessment Module's HTML
......@@ -483,7 +538,7 @@ class OpenAssessmentBlock(
# Resolve unspecified dates and date strings to datetimes
start, due, date_ranges = resolve_dates(
self.start, self.due, [submission_range] + assessment_ranges
self.start, self.due, [submission_range] + assessment_ranges, self._
)
open_range = (start, due)
......
import logging
from django.utils.translation import ugettext as _
from webob import Response
from xblock.core import XBlock
......@@ -9,8 +8,9 @@ from openassessment.assessment.errors import (
PeerAssessmentRequestError, PeerAssessmentInternalError, PeerAssessmentWorkflowError
)
from openassessment.workflow.errors import AssessmentWorkflowError
from .data_conversion import create_rubric_dict
from .resolve_dates import DISTANT_FUTURE
from .data_conversion import create_rubric_dict, clean_criterion_feedback
from .data_conversion import clean_criterion_feedback
logger = logging.getLogger(__name__)
......@@ -49,16 +49,16 @@ class PeerAssessmentMixin(object):
"""
# Validate the request
if 'options_selected' not in data:
return {'success': False, 'msg': _('Must provide options selected in the assessment')}
return {'success': False, 'msg': self._('Must provide options selected in the assessment')}
if 'overall_feedback' not in data:
return {'success': False, 'msg': _('Must provide overall feedback in the assessment')}
return {'success': False, 'msg': self._('Must provide overall feedback in the assessment')}
if 'criterion_feedback' not in data:
return {'success': False, 'msg': _('Must provide feedback for criteria in the assessment')}
return {'success': False, 'msg': self._('Must provide feedback for criteria in the assessment')}
if self.submission_uuid is None:
return {'success': False, 'msg': _('You must submit a response before you can peer-assess.')}
return {'success': False, 'msg': self._('You must submit a response before you can peer-assess.')}
assessment_ui_model = self.get_assessment_module('peer-assessment')
if assessment_ui_model:
......@@ -68,9 +68,9 @@ class PeerAssessmentMixin(object):
self.submission_uuid,
self.get_student_item_dict()["student_id"],
data['options_selected'],
clean_criterion_feedback(self.rubric_criteria, data['criterion_feedback']),
clean_criterion_feedback(self.rubric_criteria_with_labels, data['criterion_feedback']),
data['overall_feedback'],
create_rubric_dict(self.prompt, self.rubric_criteria),
create_rubric_dict(self.prompt, self.rubric_criteria_with_labels),
assessment_ui_model['must_be_graded_by']
)
......@@ -82,12 +82,12 @@ class PeerAssessmentMixin(object):
u"Peer API error for submission UUID {}".format(self.submission_uuid),
exc_info=True
)
return {'success': False, 'msg': _(u"Your peer assessment could not be submitted.")}
return {'success': False, 'msg': self._(u"Your peer assessment could not be submitted.")}
except PeerAssessmentInternalError:
logger.exception(
u"Peer API internal error for submission UUID: {}".format(self.submission_uuid)
)
msg = _("Your peer assessment could not be submitted.")
msg = self._("Your peer assessment could not be submitted.")
return {'success': False, 'msg': msg}
# Update both the workflow that the submission we're assessing
......@@ -101,7 +101,7 @@ class PeerAssessmentMixin(object):
u"Workflow error occurred when submitting peer assessment "
u"for submission {}".format(self.submission_uuid)
)
msg = _('Could not update workflow status.')
msg = self._('Could not update workflow status.')
return {'success': False, 'msg': msg}
# Temp kludge until we fix JSON serialization for datetime
......@@ -110,7 +110,7 @@ class PeerAssessmentMixin(object):
return {'success': True, 'msg': u''}
else:
return {'success': False, 'msg': _('Could not load peer assessment.')}
return {'success': False, 'msg': self._('Could not load peer assessment.')}
@XBlock.handler
def render_peer_assessment(self, data, suffix=''):
......@@ -148,7 +148,7 @@ class PeerAssessmentMixin(object):
problem_closed, reason, start_date, due_date = self.is_closed(step="peer-assessment")
context_dict = {
"rubric_criteria": self.rubric_criteria,
"rubric_criteria": self.rubric_criteria_with_labels,
"estimated_time": "20 minutes" # TODO: Need to configure this.
}
......@@ -177,15 +177,15 @@ class PeerAssessmentMixin(object):
context_dict["review_num"] = count + 1
if continue_grading:
context_dict["submit_button_text"] = _(
context_dict["submit_button_text"] = self._(
"Submit your assessment & review another response"
)
elif assessment["must_grade"] - count == 1:
context_dict["submit_button_text"] = _(
context_dict["submit_button_text"] = self._(
"Submit your assessment & move onto next step"
)
else:
context_dict["submit_button_text"] = _(
context_dict["submit_button_text"] = self._(
"Submit your assessment & move to response #{response_number}"
).format(response_number=(count + 2))
......
......@@ -4,7 +4,6 @@ Resolve unspecified dates and date strings to datetimes.
import datetime as dt
import pytz
from dateutil.parser import parse as parse_date
from django.utils.translation import ugettext as _
class InvalidDateFormat(Exception):
......@@ -25,12 +24,14 @@ DISTANT_PAST = dt.datetime(dt.MINYEAR, 1, 1, tzinfo=pytz.utc)
DISTANT_FUTURE = dt.datetime(dt.MAXYEAR, 1, 1, tzinfo=pytz.utc)
def _parse_date(value):
def _parse_date(value, _):
"""
Parse an ISO formatted datestring into a datetime object with timezone set to UTC.
Args:
value (str or datetime): The ISO formatted date string or datetime object.
_ (function): The i18n service function used to get the appropriate
text for a message.
Returns:
datetime.datetime
......@@ -51,7 +52,7 @@ def _parse_date(value):
raise InvalidDateFormat(_("'{date}' must be a date string or datetime").format(date=value))
def resolve_dates(start, end, date_ranges):
def resolve_dates(start, end, date_ranges, _):
"""
Resolve date strings (including "default" dates) to datetimes.
The basic rules are:
......@@ -124,6 +125,8 @@ def resolve_dates(start, end, date_ranges):
end (str, ISO date format, or datetime): When the problem closes. A value of None indicates that the problem never closes.
date_ranges (list of tuples): list of (start, end) ISO date string tuples indicating
the start/end timestamps (date string or datetime) of each submission/assessment.
_ (function): An i18n service function to use for retrieving the
proper text.
Returns:
start (datetime): The resolved start date
......@@ -135,8 +138,8 @@ def resolve_dates(start, end, date_ranges):
InvalidDateFormat
"""
# Resolve problem start and end dates to minimum and maximum dates
start = _parse_date(start) if start is not None else DISTANT_PAST
end = _parse_date(end) if end is not None else DISTANT_FUTURE
start = _parse_date(start, _) if start is not None else DISTANT_PAST
end = _parse_date(end, _) if end is not None else DISTANT_FUTURE
resolved_starts = []
resolved_ends = []
......@@ -162,11 +165,11 @@ def resolve_dates(start, end, date_ranges):
# defaults. See the docstring above for a more detailed justification.
for step_start, step_end in date_ranges:
if step_start is not None:
parsed_start = _parse_date(step_start)
parsed_start = _parse_date(step_start, _)
start = min(start, parsed_start)
end = max(end, parsed_start + dt.timedelta(milliseconds=1))
if step_end is not None:
parsed_end = _parse_date(step_end)
parsed_end = _parse_date(step_end, _)
end = max(end, parsed_end)
start = min(start, parsed_end - dt.timedelta(milliseconds=1))
......@@ -182,13 +185,13 @@ def resolve_dates(start, end, date_ranges):
# If I set a start date for peer-assessment, but don't set a start date for the following self-assessment,
# then the self-assessment should default to the same start date as the peer-assessment.
step_start, __ = date_ranges[index]
step_start = _parse_date(step_start) if step_start is not None else prev_start
step_start = _parse_date(step_start, _) if step_start is not None else prev_start
# Resolve "default" end dates to the following end date.
# If I set a due date for self-assessment, but don't set a due date for the previous peer-assessment,
# then the peer-assessment should default to the same due date as the self-assessment.
__, step_end = date_ranges[reverse_index]
step_end = _parse_date(step_end) if step_end is not None else prev_end
step_end = _parse_date(step_end, _) if step_end is not None else prev_end
if step_start < prev_start:
msg = _(u"This step's start date '{start}' cannot be earlier than the previous step's start date '{prev}'.").format(
......
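The defaulting rules described in the resolve_dates docstring can be sketched in isolation. The snippet below is a simplified, hypothetical illustration (not the function above, and it ignores the min/max expansion of the problem-level dates): an unspecified step start inherits the previous step's start, and an unspecified due date inherits the following step's due date.

import datetime as dt
import pytz

DISTANT_PAST = dt.datetime(dt.MINYEAR, 1, 1, tzinfo=pytz.utc)
DISTANT_FUTURE = dt.datetime(dt.MAXYEAR, 1, 1, tzinfo=pytz.utc)

def fill_default_dates(date_ranges, problem_start=DISTANT_PAST, problem_end=DISTANT_FUTURE):
    """Forward-fill missing step starts, backward-fill missing step due dates."""
    starts = []
    prev_start = problem_start
    for step_start, _unused in date_ranges:
        prev_start = step_start if step_start is not None else prev_start
        starts.append(prev_start)
    ends = []
    prev_end = problem_end
    for _unused, step_end in reversed(date_ranges):
        prev_end = step_end if step_end is not None else prev_end
        ends.append(prev_end)
    return zip(starts, reversed(ends))

submission = (dt.datetime(2014, 3, 1, tzinfo=pytz.utc), None)
peer = (None, None)   # no dates set: inherits from its neighbors
self_step = (None, dt.datetime(2014, 3, 7, tzinfo=pytz.utc))

for start, due in fill_default_dates([submission, peer, self_step]):
    print(start, due)
# The peer step resolves to start 2014-03-01 (from the submission step)
# and due 2014-03-07 (from the self-assessment step).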
"""
Schema for validating and sanitizing data received from the JavaScript client.
"""
import dateutil
from pytz import utc
from voluptuous import Schema, Required, All, Any, Range, In, Invalid
def utf8_validator(value):
"""Validate and sanitize unicode strings.
If we're given a bytestring, assume that the encoding is UTF-8
Args:
value: The value to validate
Returns:
unicode
Raises:
Invalid
"""
try:
if isinstance(value, str):
return value.decode('utf-8')
else:
return unicode(value)
except (ValueError, TypeError):
raise Invalid(u"Could not load unicode from value \"{val}\"".format(val=value))
def datetime_validator(value):
"""Validate and sanitize a datetime string in ISO format.
Args:
value: The value to validate
Returns:
unicode: ISO-formatted datetime string
Raises:
Invalid
"""
try:
# The dateutil parser defaults empty values to the current day,
# which is NOT what we want.
if value is None or value == '':
raise Invalid(u"Datetime value cannot be \"{val}\"".format(val=value))
# Parse the date and interpret it as UTC
value = dateutil.parser.parse(value).replace(tzinfo=utc)
return unicode(value.isoformat())
except (ValueError, TypeError):
raise Invalid(u"Could not parse datetime from value \"{val}\"".format(val=value))
VALID_ASSESSMENT_TYPES = [
u'peer-assessment',
u'self-assessment',
u'example-based-assessment',
u'student-training'
]
# Schema definition for an update from the Studio JavaScript editor.
EDITOR_UPDATE_SCHEMA = Schema({
Required('prompt'): utf8_validator,
Required('title'): utf8_validator,
Required('feedback_prompt'): utf8_validator,
Required('submission_start'): Any(datetime_validator, None),
Required('submission_due'): Any(datetime_validator, None),
Required('allow_file_upload'): bool,
Required('leaderboard_show'): int,
Required('assessments'): [
Schema({
Required('name'): All(utf8_validator, In(VALID_ASSESSMENT_TYPES)),
Required('start', default=None): Any(datetime_validator, None),
Required('due', default=None): Any(datetime_validator, None),
'must_grade': All(int, Range(min=0)),
'must_be_graded_by': All(int, Range(min=0)),
'examples': [
Schema({
Required('answer'): utf8_validator,
Required('options_selected'): [
Schema({
Required('criterion'): utf8_validator,
Required('option'): utf8_validator
})
]
})
],
'examples_xml': utf8_validator,
})
],
Required('editor_assessments_order'): [
All(utf8_validator, In(VALID_ASSESSMENT_TYPES))
],
Required('feedbackprompt', default=u""): utf8_validator,
Required('criteria'): [
Schema({
Required('order_num'): All(int, Range(min=0)),
Required('name'): utf8_validator,
Required('label'): utf8_validator,
Required('prompt'): utf8_validator,
Required('feedback'): All(
utf8_validator,
In([
'disabled',
'optional',
'required',
])
),
Required('options'): [
Schema({
Required('order_num'): All(int, Range(min=0)),
Required('name'): utf8_validator,
Required('label'): utf8_validator,
Required('explanation'): utf8_validator,
Required('points'): All(int, Range(min=0)),
})
]
})
]
})
\ No newline at end of file
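EDITOR_UPDATE_SCHEMA is a voluptuous schema, so validating a client payload is a single call. The fragment below is a hedged, self-contained sketch of the same pattern on a much smaller, made-up schema; it is not the real editor payload.

from voluptuous import Schema, Required, All, Range, In, Invalid

EXAMPLE_SCHEMA = Schema({
    Required('leaderboard_show', default=0): All(int, Range(min=0)),
    Required('feedback'): In(['disabled', 'optional', 'required']),
})

# Valid data comes back sanitized, with defaults filled in.
clean = EXAMPLE_SCHEMA({'feedback': 'optional'})
print(clean)  # feedback kept, leaderboard_show filled in with its default (0)

# Invalid data raises Invalid, which the handler can report back to the client.
try:
    EXAMPLE_SCHEMA({'feedback': 'sometimes', 'leaderboard_show': -1})
except Invalid as err:
    print(err)  # explains which key failed validation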
import logging
from django.utils.translation import ugettext as _
from xblock.core import XBlock
from webob import Response
......@@ -7,8 +6,9 @@ from webob import Response
from openassessment.assessment.api import self as self_api
from openassessment.workflow import api as workflow_api
from submissions import api as submission_api
from .data_conversion import create_rubric_dict
from .resolve_dates import DISTANT_FUTURE
from .data_conversion import create_rubric_dict, clean_criterion_feedback
from .data_conversion import clean_criterion_feedback
logger = logging.getLogger(__name__)
......@@ -35,7 +35,7 @@ class SelfAssessmentMixin(object):
except:
msg = u"Could not retrieve self assessment for submission {}".format(self.submission_uuid)
logger.exception(msg)
return self.render_error(_(u"An unexpected error occurred."))
return self.render_error(self._(u"An unexpected error occurred."))
else:
return self.render_assessment(path, context)
......@@ -82,7 +82,7 @@ class SelfAssessmentMixin(object):
path = 'openassessmentblock/self/oa_self_closed.html'
else:
submission = submission_api.get_submission(self.submission_uuid)
context["rubric_criteria"] = self.rubric_criteria
context["rubric_criteria"] = self.rubric_criteria_with_labels
context["estimated_time"] = "20 minutes" # TODO: Need to configure this.
context["self_submission"] = submission
......@@ -111,16 +111,16 @@ class SelfAssessmentMixin(object):
and "msg" (unicode) containing additional information if an error occurs.
"""
if 'options_selected' not in data:
return {'success': False, 'msg': _(u"Missing options_selected key in request")}
return {'success': False, 'msg': self._(u"Missing options_selected key in request")}
if 'overall_feedback' not in data:
return {'success': False, 'msg': _('Must provide overall feedback in the assessment')}
return {'success': False, 'msg': self._('Must provide overall feedback in the assessment')}
if 'criterion_feedback' not in data:
return {'success': False, 'msg': _('Must provide feedback for criteria in the assessment')}
return {'success': False, 'msg': self._('Must provide feedback for criteria in the assessment')}
if self.submission_uuid is None:
return {'success': False, 'msg': _(u"You must submit a response before you can perform a self-assessment.")}
return {'success': False, 'msg': self._(u"You must submit a response before you can perform a self-assessment.")}
try:
assessment = self_api.create_assessment(
......@@ -129,7 +129,7 @@ class SelfAssessmentMixin(object):
data['options_selected'],
clean_criterion_feedback(self.rubric_criteria, data['criterion_feedback']),
data['overall_feedback'],
create_rubric_dict(self.prompt, self.rubric_criteria)
create_rubric_dict(self.prompt, self.rubric_criteria_with_labels)
)
self.publish_assessment_event("openassessmentblock.self_assess", assessment)
......@@ -141,14 +141,14 @@ class SelfAssessmentMixin(object):
u"for the submission {}".format(self.submission_uuid),
exc_info=True
)
msg = _(u"Your self assessment could not be submitted.")
msg = self._(u"Your self assessment could not be submitted.")
return {'success': False, 'msg': msg}
except (self_api.SelfAssessmentInternalError, workflow_api.AssessmentWorkflowInternalError):
logger.exception(
u"An error occurred while submitting a self assessment "
u"for the submission {}".format(self.submission_uuid),
)
msg = _(u"Your self assessment could not be submitted.")
msg = self._(u"Your self assessment could not be submitted.")
return {'success': False, 'msg': msg}
else:
return {'success': True, 'msg': u""}
......@@ -5,8 +5,6 @@ determine the flow of the problem.
import copy
from functools import wraps
import logging
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy
from xblock.core import XBlock
from openassessment.assessment.errors.ai import AIError
......@@ -24,13 +22,13 @@ from openassessment.fileupload import api as file_api
logger = logging.getLogger(__name__)
def require_global_admin(error_msg):
def require_global_admin(error_key):
"""
Method decorator to restrict access to an XBlock handler
to only global staff.
Args:
error_msg (unicode): The error message to display to the user
error_key (str): The key to the error message to display to the user
if they do not have sufficient permissions.
Returns:
......@@ -40,22 +38,26 @@ def require_global_admin(error_msg):
def _decorator(func): # pylint: disable=C0111
@wraps(func)
def _wrapped(xblock, *args, **kwargs): # pylint: disable=C0111
permission_errors = {
"SCHEDULE_TRAINING": xblock._(u"You do not have permission to schedule training"),
"RESCHEDULE_TASKS": xblock._(u"You do not have permission to reschedule tasks."),
}
if not xblock.is_admin or xblock.in_studio_preview:
return {'success': False, 'msg': unicode(error_msg)}
return {'success': False, 'msg': permission_errors[error_key]}
else:
return func(xblock, *args, **kwargs)
return _wrapped
return _decorator
def require_course_staff(error_msg):
def require_course_staff(error_key):
"""
Method decorator to restrict access to an XBlock render
method to only course staff.
Args:
error_msg (unicode): The error message to display to the user
if they do not have sufficient permissions.
error_key (str): The key for the error message to display to the
user if they do not have sufficient permissions.
Returns:
decorated function
......@@ -64,8 +66,13 @@ def require_course_staff(error_msg):
def _decorator(func): # pylint: disable=C0111
@wraps(func)
def _wrapped(xblock, *args, **kwargs): # pylint: disable=C0111
permission_errors = {
"STAFF_INFO": xblock._(u"You do not have permission to access staff information"),
"STUDENT_INFO": xblock._(u"You do not have permission to access student information."),
}
if not xblock.is_course_staff or xblock.in_studio_preview:
return xblock.render_error(unicode(error_msg))
return xblock.render_error(permission_errors[error_key])
else:
return func(xblock, *args, **kwargs)
return _wrapped
......@@ -78,7 +85,7 @@ class StaffInfoMixin(object):
"""
@XBlock.handler
@require_course_staff(ugettext_lazy(u"You do not have permission to access staff information"))
@require_course_staff("STAFF_INFO")
def render_staff_info(self, data, suffix=''): # pylint: disable=W0613
"""
Template context dictionary for course staff debug panel.
......@@ -121,7 +128,7 @@ class StaffInfoMixin(object):
context['display_reschedule_unfinished_tasks'] = display_ai_staff_info
if display_ai_staff_info:
context['classifierset'] = ai_api.get_classifier_set_info(
create_rubric_dict(self.prompt, self.rubric_criteria),
create_rubric_dict(self.prompt, self.rubric_criteria_with_labels),
example_based_assessment['algorithm_id'],
student_item['course_id'],
student_item['item_id']
......@@ -147,7 +154,7 @@ class StaffInfoMixin(object):
return path, context
@XBlock.json_handler
@require_global_admin(ugettext_lazy(u"You do not have permission to schedule training"))
@require_global_admin("SCHEDULE_TRAINING")
def schedule_training(self, data, suffix=''): # pylint: disable=W0613
"""
Schedule a new training task for example-based grading.
......@@ -159,7 +166,7 @@ class StaffInfoMixin(object):
examples = assessment["examples"]
try:
workflow_uuid = ai_api.train_classifiers(
create_rubric_dict(self.prompt, self.rubric_criteria),
create_rubric_dict(self.prompt, self.rubric_criteria_with_labels),
convert_training_examples_list_to_dict(examples),
student_item_dict.get('course_id'),
student_item_dict.get('item_id'),
......@@ -168,22 +175,22 @@ class StaffInfoMixin(object):
return {
'success': True,
'workflow_uuid': workflow_uuid,
'msg': _(u"Training scheduled with new Workflow UUID: {uuid}".format(uuid=workflow_uuid))
'msg': self._(u"Training scheduled with new Workflow UUID: {uuid}".format(uuid=workflow_uuid))
}
except AIError as err:
return {
'success': False,
'msg': _(u"An error occurred scheduling classifier training: {error}".format(error=err))
'msg': self._(u"An error occurred scheduling classifier training: {error}".format(error=err))
}
else:
return {
'success': False,
'msg': _(u"Example Based Assessment is not configured for this location.")
'msg': self._(u"Example Based Assessment is not configured for this location.")
}
@XBlock.handler
@require_course_staff(ugettext_lazy(u"You do not have permission to access student information."))
@require_course_staff("STUDENT_INFO")
def render_student_info(self, data, suffix=''): # pylint: disable=W0613
"""
Renders all relative information for a specific student's workflow.
......@@ -258,7 +265,7 @@ class StaffInfoMixin(object):
'submitted_assessments': submitted_assessments,
'self_assessment': self_assessment,
'example_based_assessment': example_based_assessment,
'rubric_criteria': copy.deepcopy(self.rubric_criteria),
'rubric_criteria': copy.deepcopy(self.rubric_criteria_with_labels),
}
if peer_assessments or self_assessment or example_based_assessment:
......@@ -270,7 +277,7 @@ class StaffInfoMixin(object):
return path, context
@XBlock.json_handler
@require_global_admin(ugettext_lazy(u"You do not have permission to reschedule tasks."))
@require_global_admin("RESCHEDULE_TASKS")
def reschedule_unfinished_tasks(self, data, suffix=''): # pylint: disable=W0613
"""
Wrapper which invokes the API call for rescheduling grading tasks.
......@@ -300,10 +307,10 @@ class StaffInfoMixin(object):
ai_api.reschedule_unfinished_tasks(course_id=course_id, item_id=item_id, task_type=u"grade")
return {
'success': True,
'msg': _(u"All AI tasks associated with this item have been rescheduled successfully.")
'msg': self._(u"All AI tasks associated with this item have been rescheduled successfully.")
}
except AIError as ex:
return {
'success': False,
'msg': _(u"An error occurred while rescheduling tasks: {}".format(ex))
'msg': self._(u"An error occurred while rescheduling tasks: {}".format(ex))
}
......@@ -66,7 +66,6 @@ describe("OpenAssessment.BaseView", function() {
beforeEach(function() {
// Load the DOM fixture
jasmine.getFixtures().fixturesPath = 'base/fixtures';
loadFixtures('oa_base.html');
// Create a new stub server
......
/**
Tests for the Openassessment Container Object.
**/
describe("OpenAssessment.Container", function () {
var counter = 0;
var StubContainerItem = function(element) {
this.element = element;
// Assign an ID to the item if it doesn't already have one.
if ($(element).attr("test_id") === "") {
$(element).attr("test_id", counter);
counter += 1;
}
this.getFieldValues = function() {
var testIdNum = parseInt($(element).attr("test_id"), 10);
return { id: testIdNum };
};
this.addHandler = function() {};
this.removeHandler = function() {};
this.updateHandler = function() {};
this.addEventListeners = function() {};
};
var container = null;
var createContainer = function() {
return new OpenAssessment.Container(
StubContainerItem, {
containerElement: $("#container").get(0),
templateElement: $("#template").get(0),
addButtonElement: $("#add_button").get(0),
removeButtonClass: "remove_button",
containerItemClass: "container_item",
}
);
};
beforeEach(function () {
// Reset the counter before each test
counter = 0;
// Install a minimal fixture
// We don't need to use a full ORA2 template for this,
// so we just define the fixture inline.
setFixtures(
'<div id="container" />' +
'<div id="template">' +
'<div class="container_item" test_id="">' +
'<div class="remove_button" />' +
'</div>' +
'</div>' +
'<div id="add_button" />'
);
// Create the container and configure it
// to use the stub container item.
container = createContainer();
// Explicitly add event listeners.
container.addEventListeners();
});
it("adds and removes items", function() {
// Initially, there should be no items
expect(container.getItemValues()).toEqual([]);
// Add an item
container.add();
expect(container.getItemValues()).toEqual([
{ id: 0 }
]);
// Add a second item
container.add();
expect(container.getItemValues()).toEqual([
{ id: 0 },
{ id: 1 }
]);
// Add a third item
container.add();
expect(container.getItemValues()).toEqual([
{ id: 0 },
{ id: 1 },
{ id: 2 }
]);
// Remove the second item
container.remove(container.getItem(1));
expect(container.getItemValues()).toEqual([
{ id: 0 },
{ id: 2 },
]);
// Remove the first item
container.remove(container.getItem(0));
expect(container.getItemValues()).toEqual([
{ id: 2 },
]);
// Remove the last item
container.remove(container.getItem(0));
expect(container.getItemValues()).toEqual([]);
});
it("ignores unrecognized DOM elements", function() {
// Add some items to the container
container.add();
container.add();
expect(container.getItemValues().length).toEqual(2);
// Add an extra element to the container in the DOM
$("<p>Not a container item!</p>").appendTo("#parent_element");
// Expect the count to remain the same
expect(container.getItemValues().length).toEqual(2);
// Add another element
container.add();
expect(container.getItemValues().length).toEqual(3);
// Remove the first element
container.remove(container.getItem(0));
expect(container.getItemValues().length).toEqual(2);
});
it("adds an element when the add button is pressed", function() {
// Press the add button
expect(container.getItemValues().length).toEqual(0);
$("#add_button").click();
expect(container.getItemValues().length).toEqual(1);
});
it("only creates one item when add is fired", function() {
// Initialize multiple containers without explicitly adding event
// listeners
createContainer();
createContainer();
createContainer();
// Press the add button
expect(container.getItemValues().length).toEqual(0);
$("#add_button").click();
expect(container.getItemValues().length).toEqual(1);
});
it("removes an element when the remove button is pressed", function() {
// Add some items
container.add();
container.add();
container.add();
expect(container.getItemValues().length).toEqual(3);
// Press the button to delete the second item
$(".remove_button", container.getItem(1).element).click();
expect(container.getItemValues().length).toEqual(2);
expect(container.getItemValues()).toEqual([
{ id: 0 },
{ id: 2 }
]);
});
it("configures remove buttons for pre-existing items", function() {
// Add an item directly to the container element in the DOM,
// before initializing the container object.
$("#container").append(
'<div class="container_item" test_id="0">' +
'<div class="remove_button" />' +
'</div>'
);
// Initialize the container object
container = createContainer();
// Explicitly add event listeners.
container.addEventListeners();
// Verify that the container recognizes the pre-existing item
expect(container.getItemValues()).toEqual([{ id: 0 }]);
// Expect that we can click the "remove" button
// to remove the item.
$(".remove_button", container.getItem(0).element).click();
expect(container.getItemValues().length).toEqual(0);
});
});
\ No newline at end of file
......@@ -42,7 +42,6 @@ describe("OpenAssessment.GradeView", function() {
beforeEach(function() {
// Load the DOM fixture
jasmine.getFixtures().fixturesPath = 'base/fixtures';
loadFixtures('oa_grade_complete.html');
// Create the stub server
......
......@@ -44,7 +44,6 @@ describe("OpenAssessment.PeerView", function() {
beforeEach(function() {
// Load the DOM fixture
jasmine.getFixtures().fixturesPath = 'base/fixtures';
loadFixtures('oa_peer_assessment.html');
// Create a new stub server
......
......@@ -96,7 +96,6 @@ describe("OpenAssessment.ResponseView", function() {
beforeEach(function() {
// Load the DOM fixture
jasmine.getFixtures().fixturesPath = 'base/fixtures';
loadFixtures('oa_response.html');
// Create stub objects
......
......@@ -39,7 +39,6 @@ describe("OpenAssessment.SelfView", function() {
beforeEach(function() {
// Load the DOM fixture
jasmine.getFixtures().fixturesPath = 'base/fixtures';
loadFixtures('oa_self_assessment.html');
// Create a new stub server
......
......@@ -69,9 +69,6 @@ describe("OpenAssessment.StaffInfoView", function() {
};
beforeEach(function() {
// Configure the Jasmine fixtures path
jasmine.getFixtures().fixturesPath = 'base/fixtures';
// Create a new stub server
server = new StubServer();
......
......@@ -45,7 +45,6 @@ describe("OpenAssessment.StudentTrainingView", function() {
beforeEach(function() {
// Load the DOM fixture
jasmine.getFixtures().fixturesPath = 'base/fixtures';
loadFixtures('oa_student_training.html');
// Create a new stub server
......
/**
Tests for OA XBlock editing.
**/
describe("OpenAssessment.StudioView", function() {
var runtime = {
notify: function(type, data) {}
};
// Stub server that returns dummy data or reports errors.
var StubServer = function() {
this.loadError = false;
this.updateError = false;
this.xml = '<openassessment></openassessment>';
this.isReleased = false;
this.errorPromise = $.Deferred(function(defer) {
defer.rejectWith(this, ['Test error']);
}).promise();
this.loadXml = function() {
var xml = this.xml;
if (!this.loadError) {
return $.Deferred(function(defer) {
defer.resolveWith(this, [xml]);
}).promise();
}
else {
return this.errorPromise;
}
};
this.updateXml = function(xml) {
if (!this.updateError) {
this.xml = xml;
return $.Deferred(function(defer) {
defer.resolve();
}).promise();
}
else {
return this.errorPromise;
}
};
this.checkReleased = function() {
var server = this;
return $.Deferred(function(defer) {
defer.resolveWith(this, [server.isReleased]);
}).promise();
};
};
var server = null;
var view = null;
beforeEach(function() {
// Load the DOM fixture
jasmine.getFixtures().fixturesPath = 'base/fixtures';
loadFixtures('oa_edit.html');
// Create the stub server
server = new StubServer();
// Mock the runtime
spyOn(runtime, 'notify');
// Create the object under test
var el = $('#openassessment-edit').get(0);
view = new OpenAssessment.StudioView(runtime, el, server);
});
it("loads the XML definition", function() {
// Initialize the view
view.load();
// Expect that the XML definition was loaded
var contents = view.codeBox.getValue();
expect(contents).toEqual('<openassessment></openassessment>');
});
it("saves the XML definition", function() {
// Update the XML
view.codeBox.setValue('<openassessment>test!</openassessment>');
// Save the updated XML
view.save();
// Expect the saving notification to start/end
expect(runtime.notify).toHaveBeenCalledWith('save', {state: 'start'});
expect(runtime.notify).toHaveBeenCalledWith('save', {state: 'end'});
// Expect the server's XML to have been updated
expect(server.xml).toEqual('<openassessment>test!</openassessment>');
});
it("confirms changes for a released problem", function() {
// Simulate an XBlock that has been released
server.isReleased = true;
// Stub the confirmation step (avoid showing the dialog)
spyOn(view, 'confirmPostReleaseUpdate').andCallFake(
function(onConfirm) { onConfirm(); }
);
// Save the updated XML
view.save();
// Verify that the user was asked to confirm the changes
expect(view.confirmPostReleaseUpdate).toHaveBeenCalled();
});
it("cancels editing", function() {
view.cancel();
expect(runtime.notify).toHaveBeenCalledWith('cancel', {});
});
it("displays an error when server reports a load XML error", function() {
server.loadError = true;
view.load();
expect(runtime.notify).toHaveBeenCalledWith('error', {msg: 'Test error'});
});
it("displays an error when server reports an update XML error", function() {
server.updateError = true;
view.save('<openassessment>test!</openassessment>');
expect(runtime.notify).toHaveBeenCalledWith('error', {msg: 'Test error'});
});
});
......@@ -7,7 +7,6 @@ describe("OpenAssessment.Rubric", function() {
var rubric = null;
beforeEach(function() {
jasmine.getFixtures().fixturesPath = 'base/fixtures';
loadFixtures('oa_rubric.html');
var el = $("#peer-assessment--001__assessment").get(0);
......
......@@ -30,6 +30,62 @@ describe("OpenAssessment.Server", function() {
);
};
var PROMPT = "Hello this is the prompt yes.";
var FEEDBACK_PROMPT = "Prompt for feedback";
var RUBRIC = '<rubric>'+
'<criterion>'+
'<name>𝓒𝓸𝓷𝓬𝓲𝓼𝓮</name>'+
'<prompt>How concise is it?</prompt>'+
'<option points="3">'+
'<name>ﻉซƈﻉɭɭﻉกՇ</name>'+
'<explanation>Extremely concise</explanation>'+
'</option>'+
'<option points="2">'+
'<name>Ġööḋ</name>'+
'<explanation>Concise</explanation>'+
'</option>'+
'<option points="1">'+
'<name>ק๏๏г</name>'+
'<explanation>Wordy</explanation>'+
'</option>'+
'</criterion>'+
'</rubric>';
var CRITERIA = [
'criteria',
'objects',
'would',
'be',
'here'
];
var ASSESSMENTS = [
{
"name": "peer-assessment",
"must_grade": 5,
"must_be_graded_by": 3,
"start": "",
"due": "4014-03-10T00:00:00"
},
{
"name": "self-assessment",
"start": "",
"due": ""
}
];
var EDITOR_ASSESSMENTS_ORDER = [
"student_training",
"peer_assessment",
"self_assessment",
"example_based_assessment"
];
var TITLE = 'This is the title.';
var SUBMISSION_START = '2012-10-09T00:00:00';
var SUBMISSION_DUE = '2015-10-10T00:00:00';
beforeEach(function() {
// Create the server
// Since the runtime is a stub implementation that ignores the element passed to it,
......@@ -173,27 +229,34 @@ describe("OpenAssessment.Server", function() {
});
});
it("loads the XBlock's XML definition", function() {
stubAjax(true, { success: true, xml: "<openassessment />" });
var loadedXml = "";
server.loadXml().done(function(xml) {
loadedXml = xml;
});
expect(loadedXml).toEqual('<openassessment />');
expect($.ajax).toHaveBeenCalledWith({
url: '/xml', type: "POST", data: '""'
});
});
it("updates the XBlock's XML definition", function() {
it("updates the XBlock's editor context definition", function() {
stubAjax(true, { success: true });
server.updateXml('<openassessment />');
server.updateEditorContext({
prompt: PROMPT,
feedbackPrompt: FEEDBACK_PROMPT,
title: TITLE,
submissionStart: SUBMISSION_START,
submissionDue: SUBMISSION_DUE,
criteria: CRITERIA,
assessments: ASSESSMENTS,
editorAssessmentsOrder: EDITOR_ASSESSMENTS_ORDER,
imageSubmissionEnabled: true,
leaderboardNum: 15
});
expect($.ajax).toHaveBeenCalledWith({
url: '/update_xml', type: "POST",
data: JSON.stringify({xml: '<openassessment />'})
type: "POST", url: '/update_editor_context',
data: JSON.stringify({
prompt: PROMPT,
feedback_prompt: FEEDBACK_PROMPT,
title: TITLE,
submission_start: SUBMISSION_START,
submission_due: SUBMISSION_DUE,
criteria: CRITERIA,
assessments: ASSESSMENTS,
editor_assessments_order: EDITOR_ASSESSMENTS_ORDER,
allow_file_upload: true,
leaderboard_show: 15
})
});
});
......@@ -275,44 +338,22 @@ describe("OpenAssessment.Server", function() {
expect(receivedMsg).toEqual('test error');
});
it("informs the caller of an Ajax error when loading XML", function() {
it("informs the caller of an Ajax error when updating the editor context", function() {
stubAjax(false, null);
var receivedMsg = null;
server.loadXml().fail(function(msg) {
receivedMsg = msg;
});
expect(receivedMsg).toContain("This problem could not be loaded");
});
it("informs the caller of an Ajax error when updating XML", function() {
stubAjax(false, null);
var receivedMsg = null;
server.updateXml('test').fail(function(msg) {
server.updateEditorContext('prompt', 'rubric', 'title', 'start', 'due', 'assessments').fail(function(msg) {
receivedMsg = msg;
});
expect(receivedMsg).toContain("This problem could not be saved");
});
it("informs the caller of a server error when loading XML", function() {
stubAjax(true, { success: false, msg: "Test error" });
var receivedMsg = null;
server.updateXml('test').fail(function(msg) {
receivedMsg = msg;
});
expect(receivedMsg).toEqual("Test error");
});
it("informs the caller of a server error when updating XML", function() {
it("informs the caller of a server error when loading the editor context", function() {
stubAjax(true, { success: false, msg: "Test error" });
var receivedMsg = null;
server.loadXml().fail(function(msg) {
server.updateEditorContext('prompt', 'rubric', 'title', 'start', 'due', 'assessments').fail(function(msg) {
receivedMsg = msg;
});
......
/**
Tests for OA XBlock editing.
**/
describe("OpenAssessment.StudioView", function() {
var runtime = {
notify: function(type, data) {}
};
// Stub server that returns dummy data or reports errors.
var StubServer = function() {
this.updateError = false;
this.isReleased = false;
this.receivedData = null;
this.successPromise = $.Deferred(function(defer) {
defer.resolve();
});
this.errorPromise = $.Deferred(function(defer) {
defer.rejectWith(this, ['Test error']);
}).promise();
this.updateEditorContext = function(kwargs) {
if (this.updateError) {
return this.errorPromise;
}
else {
this.receivedData = kwargs;
return this.successPromise;
}
};
this.checkReleased = function() {
var server = this;
return $.Deferred(function(defer) {
defer.resolveWith(this, [server.isReleased]);
}).promise();
};
};
var server = null;
var view = null;
var EXPECTED_SERVER_DATA = {
title: "The most important of all questions.",
prompt: "How much do you like waffles?",
feedbackPrompt: "",
submissionStart: "2014-01-02T12:15",
submissionDue: "2014-10-01T04:53",
imageSubmissionEnabled: false,
leaderboardNum: 12,
criteria: [
{
order_num: 0,
label: "Criterion with two options",
name: "criterion_1",
prompt: "Prompt for criterion with two options",
feedback: "disabled",
options: [
{
order_num: 0,
points: 1,
name: "option_1",
label: "Fair",
explanation: "Fair explanation"
},
{
order_num: 1,
points: 2,
name: "option_2",
label: "Good",
explanation: "Good explanation"
}
]
},
{
name: "criterion_2",
label: "Criterion with no options",
prompt: "Prompt for criterion with no options",
order_num: 1,
options: [],
feedback: "required",
},
{
name: "criterion_3",
label: "Criterion with optional feedback",
prompt: "Prompt for criterion with optional feedback",
order_num: 2,
feedback: "optional",
options: [
{
order_num: 0,
points: 2,
name: "option_1",
label: "Good",
explanation: "Good explanation"
}
],
}
],
assessments: [
{
name: "peer-assessment",
start: "2014-01-02T00:00",
due: "2014-01-03T00:00",
must_grade: 5,
must_be_graded_by: 3
},
{
name: "self-assessment",
start: "2014-01-04T00:00",
due: "2014-01-05T00:00"
}
],
editorAssessmentsOrder: [
"student-training",
"peer-assessment",
"self-assessment",
"example-based-assessment"
]
};
beforeEach(function() {
// Load the DOM fixture
loadFixtures('oa_edit.html');
// Create the stub server
server = new StubServer();
// Mock the runtime
spyOn(runtime, 'notify');
// Create the object under test
var el = $('#openassessment-editor').get(0);
view = new OpenAssessment.StudioView(runtime, el, server);
});
it("sends the editor context to the server", function() {
// Save the current state of the problem
// (defined by the current state of the DOM),
// and verify that the correct information was sent
// to the server. This depends on the HTML fixture
// used for this test.
view.save();
// Top-level attributes
expect(server.receivedData.title).toEqual(EXPECTED_SERVER_DATA.title);
expect(server.receivedData.prompt).toEqual(EXPECTED_SERVER_DATA.prompt);
expect(server.receivedData.feedbackPrompt).toEqual(EXPECTED_SERVER_DATA.feedbackPrompt);
expect(server.receivedData.submissionStart).toEqual(EXPECTED_SERVER_DATA.submissionStart);
expect(server.receivedData.submissionDue).toEqual(EXPECTED_SERVER_DATA.submissionDue);
expect(server.receivedData.imageSubmissionEnabled).toEqual(EXPECTED_SERVER_DATA.imageSubmissionEnabled);
expect(server.receivedData.leaderboardNum).toEqual(EXPECTED_SERVER_DATA.leaderboardNum);
// Criteria
for (var criterion_idx = 0; criterion_idx < EXPECTED_SERVER_DATA.criteria.length; criterion_idx++) {
var actual_criterion = server.receivedData.criteria[criterion_idx];
var expected_criterion = EXPECTED_SERVER_DATA.criteria[criterion_idx];
expect(actual_criterion).toEqual(expected_criterion);
}
// Assessments
for (var asmnt_idx = 0; asmnt_idx < EXPECTED_SERVER_DATA.assessments.length; asmnt_idx++) {
var actual_asmnt = server.receivedData.assessments[asmnt_idx];
var expected_asmnt = EXPECTED_SERVER_DATA.assessments[asmnt_idx];
expect(actual_asmnt).toEqual(expected_asmnt);
}
// Editor assessment order
expect(server.receivedData.editorAssessmentsOrder).toEqual(EXPECTED_SERVER_DATA.editorAssessmentsOrder);
});
it("confirms changes for a released problem", function() {
// Simulate an XBlock that has been released
server.isReleased = true;
// Stub the confirmation step (avoid showing the dialog)
spyOn(view, 'confirmPostReleaseUpdate').andCallFake(
function(onConfirm) { onConfirm(); }
);
// Save the updated context
view.save();
// Verify that the user was asked to confirm the changes
expect(view.confirmPostReleaseUpdate).toHaveBeenCalled();
});
it("cancels editing", function() {
view.cancel();
expect(runtime.notify).toHaveBeenCalledWith('cancel', {});
});
it("displays an error when server reports an error", function() {
server.updateError = true;
view.save();
expect(runtime.notify).toHaveBeenCalledWith('error', {msg: 'Test error'});
});
it("displays the correct tab on initialization", function() {
$(".oa_editor_tab", view.element).each(function(){
if ($(this).attr('aria-controls') == "oa_prompt_editor_wrapper"){
expect($(this).hasClass('ui-state-active')).toBe(true);
} else {
expect($(this).hasClass('ui-state-active')).toBe(false);
}
});
});
it("validates fields before saving", function() {
// Initially, there should not be a validation alert
expect(view.alert.isVisible()).toBe(false);
// Introduce a validation error (the date field format is invalid)
view.settingsView.submissionStart("Not a valid date!", "00:00");
// Try to save the view
view.save();
// Since there was an invalid field, expect that data was NOT sent to the server.
// Also expect that an error is displayed
expect(server.receivedData).toBe(null);
expect(view.alert.isVisible()).toBe(true);
// Expect that individual fields were highlighted
expect(view.validationErrors()).toContain(
"Submission start is invalid"
);
// Fix the error and try to save again
view.settingsView.submissionStart("2014-04-01", "00:00");
view.save();
// Expect that the validation errors were cleared
// and that data was successfully sent to the server.
expect(view.validationErrors()).toEqual([]);
expect(view.alert.isVisible()).toBe(false);
expect(server.receivedData).not.toBe(null);
});
});
/**
Tests for assessment editing views.
**/
describe("OpenAssessment edit assessment views", function() {
var testEnableAndDisable = function(view) {
view.isEnabled(false);
expect(view.isEnabled()).toBe(false);
view.isEnabled(true);
expect(view.isEnabled()).toBe(true);
};
var testValidateDate = function(view, datetimeControl, expectedError) {
// Test an invalid datetime
datetimeControl.datetime("invalid", "invalid");
expect(view.validate()).toBe(false);
expect(view.validationErrors()).toContain(expectedError);
// Clear validation errors (simulate re-saving)
view.clearValidationErrors();
// Test a valid datetime
datetimeControl.datetime("2014-04-05", "00:00");
expect(view.validate()).toBe(true);
expect(view.validationErrors()).toEqual([]);
};
var testAlertOnDisable = function(view) {
var alert = new OpenAssessment.ValidationAlert();
expect(alert.isVisible()).toBe(false);
// Set the assessment to enabled initially
view.isEnabled(true);
// Disable an assessment, which should display an alert
view.toggleEnabled();
expect(alert.isVisible()).toBe(true);
// Enable an assessment, which dismisses the alert
view.toggleEnabled();
expect(alert.isVisible()).toBe(false);
};
var testLoadXMLExamples = function(view) {
var xml = "XML DEFINITIONS WOULD BE HERE";
view.exampleDefinitions(xml);
expect(view.description()).toEqual({ examples_xml: xml });
};
beforeEach(function() {
loadFixtures('oa_edit.html');
});
describe("OpenAssessment.EditPeerAssessmentView", function() {
var view = null;
beforeEach(function() {
var element = $("#oa_peer_assessment_editor").get(0);
view = new OpenAssessment.EditPeerAssessmentView(element);
view.startDatetime("2014-01-01", "00:00");
view.dueDatetime("2014-01-01", "00:00");
});
it("enables and disables", function() { testEnableAndDisable(view); });
it("loads a description", function() {
view.mustGradeNum(1);
view.mustBeGradedByNum(2);
view.startDatetime("2014-01-01", "00:00");
view.dueDatetime("2014-03-04", "00:00");
expect(view.description()).toEqual({
must_grade: 1,
must_be_graded_by: 2,
start: "2014-01-01T00:00",
due: "2014-03-04T00:00"
});
});
it("validates the start date and time", function() {
testValidateDate(
view, view.startDatetimeControl,
"Peer assessment start is invalid"
);
});
it("validates the due date and time", function() {
testValidateDate(
view, view.dueDatetimeControl,
"Peer assessment due is invalid"
);
});
it("validates the must grade field", function() {
// Invalid value (not a number)
view.mustGradeNum("123abc");
expect(view.validate()).toBe(false);
expect(view.validationErrors()).toContain("Peer assessment must grade is invalid");
view.clearValidationErrors();
// Valid value
view.mustGradeNum("34");
expect(view.validate()).toBe(true);
expect(view.validationErrors()).toEqual([]);
});
it("validates the must be graded by field", function() {
// Invalid value (not a number)
view.mustBeGradedByNum("123abc");
expect(view.validate()).toBe(false);
expect(view.validationErrors()).toContain("Peer assessment must be graded by is invalid");
view.clearValidationErrors();
// Valid value
view.mustBeGradedByNum("34");
expect(view.validate()).toBe(true);
expect(view.validationErrors()).toEqual([]);
});
it("shows an alert when disabled", function() { testAlertOnDisable(view); });
});
describe("OpenAssessment.EditSelfAssessmentView", function() {
var view = null;
beforeEach(function() {
var element = $("#oa_self_assessment_editor").get(0);
view = new OpenAssessment.EditSelfAssessmentView(element);
view.startDatetime("2014-01-01", "00:00");
view.dueDatetime("2014-01-01", "00:00");
});
it("enables and disables", function() { testEnableAndDisable(view); });
it("loads a description", function() {
view.startDatetime("2014-01-01", "00:00");
view.dueDatetime("2014-03-04", "00:00");
expect(view.description()).toEqual({
start: "2014-01-01T00:00",
due: "2014-03-04T00:00"
});
});
it("validates the start date and time", function() {
testValidateDate(
view, view.startDatetimeControl,
"Self assessment start is invalid"
);
});
it("validates the due date and time", function() {
testValidateDate(
view, view.dueDatetimeControl,
"Self assessment due is invalid"
);
});
it("shows an alert when disabled", function() { testAlertOnDisable(view); });
});
describe("OpenAssessment.EditStudentTrainingView", function() {
var view = null;
beforeEach(function() {
// We need to load the student-training specific editing view
// so that the student training example template is properly initialized.
loadFixtures('oa_edit_student_training.html');
var element = $("#oa_student_training_editor").get(0);
view = new OpenAssessment.EditStudentTrainingView(element);
});
it("enables and disables", function() { testEnableAndDisable(view); });
it("loads a description", function () {
// This assumes a particular structure of the DOM,
// which is set by the HTML fixture.
expect(view.description()).toEqual({
examples: [
{
answer: 'Test answer',
options_selected: [
{
criterion: 'criterion_with_two_options',
option: 'option_1'
}
]
}
]
});
});
it("modifies a description", function () {
view.exampleContainer.add();
expect(view.description()).toEqual({
examples: [
{
answer: 'Test answer',
options_selected: [
{
criterion: 'criterion_with_two_options',
option: 'option_1'
}
]
},
{
answer: '',
options_selected: [
{
criterion: 'criterion_with_two_options',
option: ''
}
]
}
]
});
});
it("shows an alert when disabled", function() { testAlertOnDisable(view); });
it("validates selected options", function() {
// On page load, the examples should be valid
expect(view.validate()).toBe(true);
expect(view.validationErrors()).toEqual([]);
// Add a new training example (default no option selected)
view.exampleContainer.add();
// Now there should be a validation error
expect(view.validate()).toBe(false);
expect(view.validationErrors()).toContain("Student training example is invalid.");
// Clear validation errors
view.clearValidationErrors();
expect(view.validationErrors()).toEqual([]);
});
});
describe("OpenAssessment.EditExampleBasedAssessmentView", function() {
var view = null;
beforeEach(function() {
var element = $("#oa_ai_assessment_editor").get(0);
view = new OpenAssessment.EditExampleBasedAssessmentView(element);
});
it("Enables and disables", function() { testEnableAndDisable(view); });
it("Loads a description", function() { testLoadXMLExamples(view); });
it("shows an alert when disabled", function() { testAlertOnDisable(view); });
});
});
describe("OpenAssessment.DatetimeControl", function() {
var datetimeControl = null;
beforeEach(function() {
// Install a minimal HTML fixture
// containing text fields for the date and time
setFixtures(
'<div id="datetime_parent">' +
'<input type="text" class="date_field" />' +
'<input type="text" class="time_field" />' +
'</div>'
);
// Create the datetime control, which uses elements
// available in the fixture.
datetimeControl = new OpenAssessment.DatetimeControl(
$("#datetime_parent").get(0),
".date_field",
".time_field"
);
datetimeControl.install();
});
// Set the date and time values, then check whether
// the datetime control has the expected validation status
var testValidateDate = function(control, dateValue, timeValue, isValid, expectedError) {
control.datetime(dateValue, timeValue);
var actualIsValid = control.validate();
expect(actualIsValid).toBe(isValid);
if (isValid) { expect(control.validationErrors()).toEqual([]); }
else { expect(control.validationErrors()).toContain(expectedError); }
};
it("validates invalid dates", function() {
var expectedError = "Date is invalid";
testValidateDate(datetimeControl, "", "00:00", false, expectedError);
testValidateDate(datetimeControl, "1", "00:00", false, expectedError);
testValidateDate(datetimeControl, "123abcd", "00:00", false, expectedError);
testValidateDate(datetimeControl, "2014-", "00:00", false, expectedError);
testValidateDate(datetimeControl, "99999999-01-01", "00:00", false, expectedError);
testValidateDate(datetimeControl, "2014-99999-01", "00:00", false, expectedError);
testValidateDate(datetimeControl, "2014-01-99999", "00:00", false, expectedError);
});
it("validates invalid times", function() {
var expectedError = "Time is invalid";
testValidateDate(datetimeControl, "2014-04-01", "", false, expectedError);
testValidateDate(datetimeControl, "2014-04-01", "00:00abcd", false, expectedError);
testValidateDate(datetimeControl, "2014-04-01", "1", false, expectedError);
testValidateDate(datetimeControl, "2014-04-01", "1.23", false, expectedError);
testValidateDate(datetimeControl, "2014-04-01", "1:1", false, expectedError);
testValidateDate(datetimeControl, "2014-04-01", "000:00", false, expectedError);
testValidateDate(datetimeControl, "2014-04-01", "00:000", false, expectedError);
});
it("validates valid dates and times", function() {
testValidateDate(datetimeControl, "2014-04-01", "00:00", true);
testValidateDate(datetimeControl, "9999-01-01", "00:00", true);
testValidateDate(datetimeControl, "2001-12-31", "00:00", true);
testValidateDate(datetimeControl, "2014-04-01", "12:34", true);
testValidateDate(datetimeControl, "2014-04-01", "23:59", true);
});
it("clears validation errors", function() {
// Set an invalid state
datetimeControl.datetime("invalid", "invalid");
datetimeControl.validate();
expect(datetimeControl.validationErrors().length).toEqual(2);
// Clear validation errors
datetimeControl.clearValidationErrors();
expect(datetimeControl.validationErrors()).toEqual([]);
});
});
describe("OpenAssessment.ToggleControl", function() {
var StubNotifier = function() {
this.receivedNotifications = [];
this.notificationFired = function(name, data) {
this.receivedNotifications.push({
name: name,
data: data
});
};
};
var notifier = null;
var toggleControl = null;
beforeEach(function() {
setFixtures(
'<div id="toggle_test">' +
'<div id="shown_section" />' +
'<div id="hidden_section" class="is--hidden"/>' +
'</div>' +
'<input type="checkbox" id="checkbox" checked />'
);
notifier = new StubNotifier();
toggleControl = new OpenAssessment.ToggleControl(
$("#checkbox"),
$("#shown_section"),
$("#hidden_section"),
notifier
).install();
});
it("shows and hides elements", function() {
var assertIsVisible = function(isVisible) {
expect(toggleControl.hiddenSection.hasClass('is--hidden')).toBe(isVisible);
expect(toggleControl.shownSection.hasClass('is--hidden')).toBe(!isVisible);
};
// Initially, the section is visible (default from the fixture)
assertIsVisible(true);
// Simulate clicking the checkbox, hiding the section
toggleControl.checkbox.click();
assertIsVisible(false);
// Click it again to show it
toggleControl.checkbox.click();
assertIsVisible(true);
});
it("fires notifications", function() {
// Toggle off notification
toggleControl.checkbox.click();
expect(notifier.receivedNotifications).toContain({
name: "toggleOff",
data: {}
});
// Toggle back on
toggleControl.checkbox.click();
expect(notifier.receivedNotifications).toContain({
name: "toggleOn",
data: {}
});
// ... and toggle off
toggleControl.checkbox.click();
expect(notifier.receivedNotifications).toContain({
name: "toggleOff",
data: {}
});
});
});
describe("OpenAssessment.Notifier", function() {
var notifier = null;
var listeners = [];
var StubListener = function() {
this.receivedData = null;
this.testNotification = function(data) {
this.receivedData = data;
};
};
beforeEach(function() {
listeners = [ new StubListener(), new StubListener() ];
notifier = new OpenAssessment.Notifier(listeners);
});
it("notifies listeners when a notification fires", function() {
// Fire a notification that the listeners don't respond to
notifier.notificationFired("ignore this!", {});
expect(listeners[0].receivedData).toBe(null);
expect(listeners[1].receivedData).toBe(null);
// Fire a notification that the listeners care about
var testData = { foo: "bar" };
notifier.notificationFired("testNotification", testData);
// Check that the listeners were notified
expect(listeners[0].receivedData).toBe(testData);
expect(listeners[1].receivedData).toBe(testData);
});
});
/**
Tests for OpenAssessment prompt editing view.
**/
describe("OpenAssessment.EditPromptView", function() {
var view = null;
beforeEach(function() {
// Load the DOM fixture
loadFixtures('oa_edit.html');
// Create the view
var element = $("#oa_prompt_editor_wrapper").get(0);
view = new OpenAssessment.EditPromptView(element);
});
it("sets and loads prompt text", function() {
view.promptText("");
expect(view.promptText()).toEqual("");
view.promptText("This is a test prompt!");
expect(view.promptText()).toEqual("This is a test prompt!");
});
});
/**
Tests for the rubric editing view.
**/
describe("OpenAssessment.EditRubricView", function() {
// Use a stub notifier implementation that simply stores
// the notifications it receives.
var notifier = null;
var StubNotifier = function() {
this.notifications = [];
this.notificationFired = function(name, data) {
this.notifications.push({
name: name,
data: data
});
};
};
var view = null;
beforeEach(function() {
loadFixtures('oa_edit.html');
var el = $("#oa_rubric_editor_wrapper").get(0);
notifier = new StubNotifier();
view = new OpenAssessment.EditRubricView(el, notifier);
});
it("reads a criteria definition from the editor", function() {
// This assumes a particular structure of the DOM,
// which is set by the HTML fixture.
var criteria = view.criteriaDefinition();
expect(criteria.length).toEqual(3);
// Criterion with two options, feedback disabled
expect(criteria[0]).toEqual({
name: "criterion_1",
label: "Criterion with two options",
prompt: "Prompt for criterion with two options",
order_num: 0,
feedback: "disabled",
options: [
{
order_num: 0,
points: 1,
name: "option_1",
label: "Fair",
explanation: "Fair explanation"
},
{
order_num: 1,
points: 2,
name: "option_2",
label: "Good",
explanation: "Good explanation"
}
],
});
// Criterion with no options, feedback required
expect(criteria[1]).toEqual({
name: "criterion_2",
label: "Criterion with no options",
prompt: "Prompt for criterion with no options",
order_num: 1,
feedback: "required",
options: []
});
// Criterion with one option, feedback optional
expect(criteria[2]).toEqual({
name: "criterion_3",
label: "Criterion with optional feedback",
prompt: "Prompt for criterion with optional feedback",
order_num: 2,
feedback: "optional",
options: [
{
order_num: 0,
points: 2,
name: "option_1",
label: "Good",
explanation: "Good explanation"
}
]
});
});
it("creates new criteria and options", function() {
// Delete all existing criteria from the rubric
// Then add new criteria (created from a client-side template)
$.each(view.getAllCriteria(), function() { view.removeCriterion(this); });
view.addCriterion();
view.addCriterion();
// Add an option to the second criterion
view.addOption(1);
// Since no criteria/option names are set, leave them out of the description.
// This will cause the server to assign them unique names.
var criteria = view.criteriaDefinition();
expect(criteria.length).toEqual(2);
expect(criteria[0]).toEqual({
order_num: 0,
name: "0",
label: "",
prompt: "",
feedback: "disabled",
options: []
});
expect(criteria[1]).toEqual({
name: "1",
order_num: 1,
label: "",
prompt: "",
feedback: "disabled",
options: [
{
label: "",
points: 1,
explanation: "",
name: "0",
order_num: 0
}
]
});
});
it("reads the feedback prompt from the editor", function() {
view.feedbackPrompt("");
expect(view.feedbackPrompt()).toEqual("");
var prompt = "How do you think the student did overall?";
view.feedbackPrompt(prompt);
expect(view.feedbackPrompt()).toEqual(prompt);
});
it("fires a notification when an option is added", function() {
view.addOption();
expect(notifier.notifications).toContain({
name: "optionAdd",
data: {
criterionName: 'criterion_1',
criterionLabel: 'Criterion with two options',
name:'0',
label: '',
points : 1
}
});
// Add a second option and ensure that it is given a unique name
view.addOption();
expect(notifier.notifications).toContain({
name: "optionAdd",
data: {
criterionName: 'criterion_1',
criterionLabel: 'Criterion with two options',
name:'1',
label: '',
points : 1
}
});
});
it("fires a notification when an option is removed", function() {
view.removeOption(0, view.getOptionItem(0, 0));
expect(notifier.notifications).toContain({
name: "optionRemove",
data: {
criterionName: 'criterion_1',
name: 'option_1'
}
});
});
it("fires a notification when an option's label or points are updated", function() {
// Simulate what happens when the option's label or points are updated
view.getOptionItem(0, 0).updateHandler();
expect(notifier.notifications).toContain({
name: "optionUpdated",
data: {
criterionName: 'criterion_1',
name: 'option_1',
label: 'Fair',
points: 1
}
});
});
it("fires a notification when a criterion's label is updated", function() {
// Simulate what happens when a criterion label is updated
view.getCriterionItem(0).updateHandler();
expect(notifier.notifications).toContain({
name: "criterionUpdated",
data: {
criterionName: 'criterion_1',
criterionLabel: 'Criterion with two options'
}
});
});
it("fires a notification when a criterion is removed", function() {
view.criteriaContainer.remove(view.getCriterionItem(0));
expect(notifier.notifications).toContain({
name: "criterionRemove",
data: {criterionName : 'criterion_1'}
});
});
it("validates option points", function () {
// Test that a particular value is marked as valid/invalid
var testValidateOptionPoints = function(value, isValid) {
var option = view.getOptionItem(0, 0);
option.points(value);
expect(view.validate()).toBe(isValid);
};
// Invalid option point values
testValidateOptionPoints("", false);
testValidateOptionPoints("123abcd", false);
testValidateOptionPoints("-1", false);
testValidateOptionPoints("1000", false);
testValidateOptionPoints("0.5", false);
// Valid option point values
testValidateOptionPoints("0", true);
testValidateOptionPoints("1", true);
testValidateOptionPoints("2", true);
testValidateOptionPoints("998", true);
testValidateOptionPoints("999", true);
});
it("validates the criterion prompt field", function() {
// Filled in prompt should be valid
$.each(view.getAllCriteria(), function() {
this.prompt("This is a prompt.");
});
expect(view.validate()).toBe(true);
// Change one of the prompts to an empty string
view.getCriterionItem(0).prompt("");
// Now the view should be invalid
expect(view.validate()).toBe(false);
expect(view.validationErrors()).toContain("Criterion prompt is invalid.");
// Clear validation errors
view.clearValidationErrors();
expect(view.validationErrors()).toEqual([]);
});
it("validates the number of criteria in the rubric", function() {
// Starting with three criteria, we should be valid.
expect(view.validate()).toBe(true);
// Removes the rubric criteria
$.each(view.getAllCriteria(), function() {
view.removeCriterion(this);
});
// Now we should be invalid (# Criteria == 0)
expect(view.validate()).toBe(false);
expect(view.validationErrors()).toContain("The rubric must contain at least one criterion");
view.clearValidationErrors();
expect(view.validationErrors()).toEqual([]);
});
});
/**
Tests for the edit settings view.
**/
describe("OpenAssessment.EditSettingsView", function() {
var StubView = function(name, descriptionText) {
this.name = name;
this.isValid = true;
var validationErrors = [];
this.description = function() {
return { dummy: descriptionText };
};
this._enabled = true;
this.isEnabled = function(isEnabled) {
if (typeof(isEnabled) !== "undefined") { this._enabled = isEnabled; }
return this._enabled;
};
this.validate = function() {
return this.isValid;
};
this.setValidationErrors = function(errors) { validationErrors = errors; };
this.validationErrors = function() { return validationErrors; };
this.clearValidationErrors = function() { validationErrors = []; };
};
var testValidateDate = function(datetimeControl, expectedError) {
// Test an invalid datetime
datetimeControl.datetime("invalid", "invalid");
expect(view.validate()).toBe(false);
expect(view.validationErrors()).toContain(expectedError);
view.clearValidationErrors();
// Test a valid datetime
datetimeControl.datetime("2014-04-05", "00:00");
expect(view.validate()).toBe(true);
expect(view.validationErrors()).toEqual([]);
};
var view = null;
var assessmentViews = null;
// The editor IDs for the peer, self, example-based (AI), and student training steps
var PEER = "oa_peer_assessment_editor";
var SELF = "oa_self_assessment_editor";
var AI = "oa_ai_assessment_editor";
var TRAINING = "oa_student_training_editor";
beforeEach(function() {
// Load the DOM fixture
loadFixtures('oa_edit.html');
// Create the stub assessment views
assessmentViews = {};
assessmentViews[SELF] = new StubView("self-assessment", "Self assessment description");
assessmentViews[PEER] = new StubView("peer-assessment", "Peer assessment description");
assessmentViews[AI] = new StubView("ai-assessment", "Example Based assessment description");
assessmentViews[TRAINING] = new StubView("student-training", "Student Training description");
// Create the view
var element = $("#oa_basic_settings_editor").get(0);
view = new OpenAssessment.EditSettingsView(element, assessmentViews);
view.submissionStart("2014-01-01", "00:00");
view.submissionDue("2014-03-04", "00:00");
});
it("sets and loads display name", function() {
view.displayName("");
expect(view.displayName()).toEqual("");
view.displayName("This is the name of the problem!");
expect(view.displayName()).toEqual("This is the name of the problem!");
});
it("sets and loads the submission start/due dates", function() {
view.submissionStart("2014-04-01", "12:34");
expect(view.submissionStart()).toEqual("2014-04-01T12:34");
view.submissionDue("2014-05-02", "12:34");
expect(view.submissionDue()).toEqual("2014-05-02T12:34");
});
it("sets and loads the image enabled state", function() {
view.imageSubmissionEnabled(true);
expect(view.imageSubmissionEnabled()).toBe(true);
view.imageSubmissionEnabled(false);
expect(view.imageSubmissionEnabled()).toBe(false);
});
it("sets and loads the leaderboard number", function() {
view.leaderboardNum(18);
expect(view.leaderboardNum()).toEqual(18);
view.leaderboardNum(0);
expect(view.leaderboardNum()).toEqual(0);
});
it("builds a description of enabled assessments", function() {
// Depends on the template having an original order
// of training --> peer --> self --> ai
// Disable all assessments, and expect an empty description
assessmentViews[PEER].isEnabled(false);
assessmentViews[SELF].isEnabled(false);
assessmentViews[AI].isEnabled(false);
assessmentViews[TRAINING].isEnabled(false);
expect(view.assessmentsDescription()).toEqual([]);
// Enable the first assessment only
assessmentViews[PEER].isEnabled(false);
assessmentViews[SELF].isEnabled(true);
assessmentViews[AI].isEnabled(false);
assessmentViews[TRAINING].isEnabled(false);
expect(view.assessmentsDescription()).toEqual([
{
name: "self-assessment",
dummy: "Self assessment description"
}
]);
// Enable the second assessment only
assessmentViews[PEER].isEnabled(true);
assessmentViews[SELF].isEnabled(false);
assessmentViews[AI].isEnabled(false);
assessmentViews[TRAINING].isEnabled(false);
expect(view.assessmentsDescription()).toEqual([
{
name: "peer-assessment",
dummy: "Peer assessment description"
}
]);
// Enable both assessments
assessmentViews[PEER].isEnabled(true);
assessmentViews[SELF].isEnabled(true);
assessmentViews[AI].isEnabled(false);
assessmentViews[TRAINING].isEnabled(false);
expect(view.assessmentsDescription()).toEqual([
{
name: "peer-assessment",
dummy: "Peer assessment description"
},
{
name: "self-assessment",
dummy: "Self assessment description"
}
]);
});
it("validates submission start datetime fields", function() {
testValidateDate(
view.startDatetimeControl,
"Submission start is invalid"
);
});
it("validates submission due datetime fields", function() {
testValidateDate(
view.dueDatetimeControl,
"Submission due is invalid"
);
});
it("validates the leaderboard number field", function() {
// Valid value for the leaderboard number
view.leaderboardNum(0);
expect(view.validate()).toBe(true);
expect(view.validationErrors()).toEqual([]);
// Below the minimum
view.leaderboardNum(-1);
expect(view.validate()).toBe(false);
expect(view.validationErrors()).toContain(
"Leaderboard number is invalid"
);
// Clear validation errors
view.clearValidationErrors();
expect(view.validationErrors()).toEqual([]);
// Valid, near the maximum
view.leaderboardNum(100);
expect(view.validate()).toBe(true);
// Above the maximum
view.leaderboardNum(101);
expect(view.validate()).toBe(false);
});
it("validates assessment views", function() {
// Simulate one of the assessment views being invalid
assessmentViews[PEER].isValid = false;
assessmentViews[PEER].setValidationErrors(["test error"]);
assessmentViews[PEER].isEnabled(true);
// Expect that the parent view is also invalid
expect(view.validate()).toBe(false);
expect(view.validationErrors()).toContain("test error");
});
it("validates only assessments that are enabled", function() {
// Simulate one of the assessment views being invalid but disabled
assessmentViews[PEER].isValid = false;
assessmentViews[PEER].setValidationErrors(["test error"]);
assessmentViews[PEER].isEnabled(false);
// Spy on the assessment view's validate() method so we can
// verify that it doesn't get called (and therefore doesn't
// mark anything in the DOM as invalid)
spyOn(assessmentViews[PEER], 'validate').andCallThrough();
// Expect that the parent view is still valid
expect(view.validate()).toBe(true);
// Check that the assessment view didn't get a chance
// to mark anything as invalid
expect(assessmentViews[PEER].validate).not.toHaveBeenCalled();
});
});
describe("OpenAssessment.ValidationAlert", function() {
var alert = null;
beforeEach(function() {
loadFixtures('oa_edit.html');
alert = new OpenAssessment.ValidationAlert().install();
});
it("shows and hides an alert", function() {
// Initially, the alert should be hidden
expect(alert.isVisible()).toBe(false);
// Show the alert
alert.show();
expect(alert.isVisible()).toBe(true);
// Hide the alert
alert.hide();
expect(alert.isVisible()).toBe(false);
});
it("sets the alert title and message", function() {
alert.setMessage("new title", "new message");
expect(alert.getTitle()).toEqual("new title");
expect(alert.getMessage()).toEqual("new message");
});
it("hides when the user dismisses the alert", function() {
// Show the alert
alert.show();
expect(alert.isVisible()).toBe(true);
// Simulate a user click on the close button
alert.closeButton.click();
// The alert should be hidden
expect(alert.isVisible()).toBe(false);
});
});
\ No newline at end of file
/**
Common test configuration, loaded before any of the spec files.
**/
// Set the fixture path
jasmine.getFixtures().fixturesPath = 'base/fixtures';
/**
Interface for editing view in Studio.
The constructor initializes the DOM for editing.
Args:
runtime (Runtime): an XBlock runtime instance.
element (DOM element): The DOM element representing this XBlock.
server (OpenAssessment.Server): The interface to the XBlock server.
Returns:
OpenAssessment.StudioView
**/
OpenAssessment.StudioView = function(runtime, element, server) {
this.runtime = runtime;
this.server = server;
// Initialize the code box
this.codeBox = CodeMirror.fromTextArea(
$(element).find('.openassessment-editor').first().get(0),
{mode: "xml", lineNumbers: true, lineWrapping: true}
);
// Install click handlers
var view = this;
$(element).find('.openassessment-save-button').click(
function(eventData) {
view.save();
});
$(element).find('.openassessment-cancel-button').click(
function(eventData) {
view.cancel();
});
};
OpenAssessment.StudioView.prototype = {
/**
Load the XBlock XML definition from the server and display it in the view.
**/
load: function() {
var view = this;
this.server.loadXml().done(
function(xml) {
view.codeBox.setValue(xml);
}).fail(function(msg) {
view.showError(msg);
}
);
},
/**
Save the problem's XML definition to the server.
If the problem has been released, make the user confirm the save.
**/
save: function() {
var view = this;
// Check whether the problem has been released; if not,
// warn the user and allow them to cancel.
this.server.checkReleased().done(
function(isReleased) {
if (isReleased) { view.confirmPostReleaseUpdate($.proxy(view.updateXml, view)); }
else { view.updateXml(); }
}
).fail(function(errMsg) {
view.showError(errMsg);
});
},
/**
Make the user confirm that he/she wants to update a problem
that has already been released.
Args:
onConfirm (function): A function that accepts no arguments,
executed if the user confirms the update.
**/
confirmPostReleaseUpdate: function(onConfirm) {
var msg = gettext("This problem has already been released. Any changes will apply only to future assessments.");
// TODO: classier confirm dialog
if (confirm(msg)) { onConfirm(); }
},
/**
Save the updated XML definition to the server.
**/
updateXml: function() {
// Notify the client-side runtime that we are starting
// to save so it can show the "Saving..." notification
this.runtime.notify('save', {state: 'start'});
// Send the updated XML to the server
var xml = this.codeBox.getValue();
var view = this;
this.server.updateXml(xml).done(function() {
// Notify the client-side runtime that we finished saving
// so it can hide the "Saving..." notification.
view.runtime.notify('save', {state: 'end'});
// Reload the XML definition in the editor
view.load();
}).fail(function(msg) {
view.showError(msg);
});
},
/**
Cancel editing.
**/
cancel: function() {
// Notify the client-side runtime so it will close the editing modal.
this.runtime.notify('cancel', {});
},
/**
Display an error message to the user.
Args:
errorMsg (string): The error message to display.
**/
showError: function(errorMsg) {
this.runtime.notify('error', {msg: errorMsg});
}
};
/* XBlock entry point for Studio view */
function OpenAssessmentEditor(runtime, element) {
/**
Initialize the editing interface on page load.
**/
var server = new OpenAssessment.Server(runtime, element);
var view = new OpenAssessment.StudioView(runtime, element, server);
view.load();
}
......@@ -18,6 +18,19 @@ if (typeof window.gettext === 'undefined') {
window.gettext = function(text) { return text; };
}
// If ngettext isn't found (workbench, testing, etc.), return the simplistic english version
if (typeof window.ngettext === 'undefined') {
window.ngettext = function (singular_text, plural_text, n) {
if (n > 1) {
return plural_text;
} else {
return singular_text;
}
};
}
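// For example, with this stub, ngettext("point", "points", 1) returns "point"
// and ngettext("point", "points", 5) returns "points"; a real runtime would
// also apply the locale's plural rules.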
// Stub event logging if the runtime doesn't provide it
if (typeof window.Logger === 'undefined') {
window.Logger = {
......
/**
Container that handles addition / deletion of arbitrary items.
An item is any object that has a `getFieldValues()` method,
which should return a JSON-serializable representation
of the item.
Containers copy "template" elements to create new items.
For example, to create a container for an item called "test_item",
the DOM should look something like:
<div id="test_container" />
<div id="test_item_template">
<div class="test_item">
<div class="test_item_remove_button">Remove</div>
<p>This is the default value for the item.</p>
</div>
</div>
<div id="test_item_add_button">Add</div>
A critical property of this setup is that the element you want to
include/duplicate is wrapped inside a template element; the template
element is the one you reference when constructing the container. In the
example above, $("#test_item_template") is the appropriate reference
to the template.
You can then initialize the container:
>>> var container = $("#test_container").get(0);
>>> var template = $("#test_item_template").get(0);
>>> var addButton = $("#test_item_add_button").get(0);
>>>
>>> container = OpenAssessment.Container(
>>> ContainerItem, {
>>> containerElement: container,
>>> templateElement: template,
>>> addButtonElement: addButton,
>>> removeButtonClass: "test_item_remove_button",
>>> containerItemClass: "test_item"
>>> }
>>> );
The container is responsible for initializing the "add" and "remove" buttons,
including for pre-existing items in the container element.
Templates elements are never deleted, so they should be hidden using CSS styles.
Args:
containerItem (constructor): The constructor of the container item object
used to access the contents of items in the container.
Kwargs:
containerElement (DOM element): The element representing the container.
templateElement (DOM element): The element containing the template for creating new items.
addButtonElement (DOM element): The element of the button used to add new items to the container.
removeButtonClass (string): The CSS class of the button that removes an item from the container.
There may be one of these for each item in the container.
containerItemClass (string): The CSS class of items in the container.
New items will be assigned this class.
notifier (OpenAssessment.Notifier): Used to send notifications of updates to container items.
**/
OpenAssessment.Container = function(containerItem, kwargs) {
this.containerElement = kwargs.containerElement;
this.templateElement = kwargs.templateElement;
this.addButtonElement = kwargs.addButtonElement;
this.removeButtonClass = kwargs.removeButtonClass;
this.containerItemClass = kwargs.containerItemClass;
this.notifier = kwargs.notifier;
// Since every container item should be instantiated with
// the notifier we were given, create a helper method
// that does this automatically.
var container = this;
this.createContainerItem = function(element) {
return new containerItem(element, container.notifier);
};
};
OpenAssessment.Container.prototype = {
/**
Adds event listeners to the container and its children. Must be
called explicitly when the container is initially created.
*/
addEventListeners: function() {
var container = this;
// Install a click handler for the add button
$(this.addButtonElement).click($.proxy(this.add, this));
// Find items already in the container and install click
// handlers for the delete buttons.
$("." + this.removeButtonClass, this.containerElement).click(
function(eventData) {
var item = container.createContainerItem(eventData.target);
container.remove(item);
}
);
// Initialize existing items, in case they need to install their
// own event handlers.
$("." + this.containerItemClass, this.containerElement).each(
function(index, element) {
var item = container.createContainerItem(element);
item.addEventListeners();
}
);
},
/**
Adds a new item to the container.
**/
add: function() {
// Copy the template into the container
// Remove any CSS IDs (since now the element is not unique)
// and add the item class so we can find it later.
// Note that the element we add is the first child of the template element.
// For more on the template structure expected, see the class comment
$(this.templateElement)
.children().first()
.clone()
.removeAttr('id')
.toggleClass('is--hidden', false)
.toggleClass(this.containerItemClass, true)
.appendTo($(this.containerElement));
// Install a click handler for the delete button
// Since we just added the new element to the container,
// it should be the last one.
var container = this;
var containerItem = $("." + this.containerItemClass, this.containerElement).last();
containerItem.find('.' + this.removeButtonClass)
.click(function(eventData) {
var containerItem = container.createContainerItem(eventData.target);
container.remove(containerItem);
} );
// Initialize the item, allowing it to install event handlers.
// Fire event handler for adding a new element
var handlerItem = container.createContainerItem(containerItem);
handlerItem.addEventListeners();
handlerItem.addHandler();
},
/**
Remove the item associated with an element.
If the element is not itself an item, traverse up the
DOM tree until an item is found.
Args:
item: The container item object to remove.
**/
remove: function(item) {
var itemElement = $(item.element).closest("." + this.containerItemClass);
var containerItem = this.createContainerItem(itemElement);
containerItem.removeHandler();
itemElement.remove();
},
/**
Retrieves the values that each container item defines for itself, in the order
in which the items are presented in the DOM.
Returns:
array: The values representing each container item.
**/
getItemValues: function () {
var values = [];
var container = this;
$("." + this.containerItemClass, this.containerElement).each(
function(index, element) {
var containerItem = container.createContainerItem(element);
var fieldValues = containerItem.getFieldValues();
values.push(fieldValues);
}
);
return values;
},
/**
Retrieve the container item at a particular index in this container.
Args:
index (int): The index of the item, starting from 0.
Returns:
Container item object or null.
**/
getItem: function(index) {
var element = $("." + this.containerItemClass, this.containerElement).get(index);
return (element !== undefined) ? this.createContainerItem(element) : null;
},
/**
Retrieve all items in this container.
Returns:
array of container item objects
**/
getAllItems: function() {
var container = this;
return $("." + this.containerItemClass, this.containerElement)
.map(function() { return container.createContainerItem(this); });
}
};
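/**
A sketch of the interface a container item is expected to implement,
based on the methods the container above invokes on its items.
The name "ExampleItem" and the field it reads are illustrative only:
>>> var ExampleItem = function(element, notifier) {
>>>     this.element = element;
>>>     this.notifier = notifier;
>>>
>>>     // Called so the item can wire up its own event handlers,
>>>     // both for pre-existing items and for newly added ones.
>>>     this.addEventListeners = function() {};
>>>
>>>     // Called immediately after the item is added to the container.
>>>     this.addHandler = function() {};
>>>
>>>     // Called just before the item's element is removed from the DOM.
>>>     this.removeHandler = function() {};
>>>
>>>     // Return a JSON-serializable representation of the item.
>>>     this.getFieldValues = function() {
>>>         return { label: $(element).find('.example_label').val() };
>>>     };
>>> };
**/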
/**
Interface for editing view in Studio.
The constructor initializes the DOM for editing.
Args:
runtime (Runtime): an XBlock runtime instance.
element (DOM element): The DOM element representing this XBlock.
server (OpenAssessment.Server): The interface to the XBlock server.
Returns:
OpenAssessment.StudioView
**/
OpenAssessment.StudioView = function(runtime, element, server) {
this.element = element;
this.runtime = runtime;
this.server = server;
// Resize the editing modal
this.fixModalHeight();
// Initializes the tabbing functionality and activates the last used.
this.initializeTabs();
// Initialize the validation alert
this.alert = new OpenAssessment.ValidationAlert().install();
// Initialize the prompt tab view
this.promptView = new OpenAssessment.EditPromptView(
$("#oa_prompt_editor_wrapper", this.element).get(0)
);
// Initialize the settings tab view
var studentTrainingView = new OpenAssessment.EditStudentTrainingView(
$("#oa_student_training_editor", this.element).get(0)
);
var peerAssessmentView = new OpenAssessment.EditPeerAssessmentView(
$("#oa_peer_assessment_editor", this.element).get(0)
);
var selfAssessmentView = new OpenAssessment.EditSelfAssessmentView(
$("#oa_self_assessment_editor", this.element).get(0)
);
var exampleBasedAssessmentView = new OpenAssessment.EditExampleBasedAssessmentView(
$("#oa_ai_assessment_editor", this.element).get(0)
);
var assessmentLookupDictionary = {};
assessmentLookupDictionary[studentTrainingView.getID()] = studentTrainingView;
assessmentLookupDictionary[peerAssessmentView.getID()] = peerAssessmentView;
assessmentLookupDictionary[selfAssessmentView.getID()] = selfAssessmentView;
assessmentLookupDictionary[exampleBasedAssessmentView.getID()] = exampleBasedAssessmentView;
this.settingsView = new OpenAssessment.EditSettingsView(
$("#oa_basic_settings_editor", this.element).get(0), assessmentLookupDictionary
);
// Initialize the rubric tab view
this.rubricView = new OpenAssessment.EditRubricView(
$("#oa_rubric_editor_wrapper", this.element).get(0),
new OpenAssessment.Notifier([
new OpenAssessment.StudentTrainingListener()
])
);
// Install the save and cancel buttons
$(".openassessment_save_button", this.element).click($.proxy(this.save, this));
$(".openassessment_cancel_button", this.element).click($.proxy(this.cancel, this));
};
OpenAssessment.StudioView.prototype = {
/**
Adjusts the modal's height, position and padding to be larger for OA editing only (Does not impact other modals)
**/
fixModalHeight: function () {
// Add the full height class to every element from the XBlock
// to the modal window in Studio.
$(this.element)
.addClass('openassessment_full_height')
.parentsUntil('.modal-window')
.addClass('openassessment_full_height');
// Add the modal window class to the modal window
$(this.element)
.closest('.modal-window')
.addClass('openassessment_modal_window');
},
/**
Initializes the tabs that separate the sections of the editor.
Because this function stores its state on the OpenAssessment namespace, the tab
that is initially active will be the one the user last viewed, regardless of which
editor they were using. For example, if the user leaves Editor A on the Settings tab
and then opens Editor B, Editor B will open on the Settings tab.
**/
initializeTabs: function() {
// If this is the first editor that the user has opened, default to the prompt view.
if (typeof(OpenAssessment.lastOpenEditingTab) === "undefined") {
OpenAssessment.lastOpenEditingTab = 2;
}
// Initialize JQuery UI Tabs, and activates the appropriate tab.
$(".openassessment_editor_content_and_tabs", this.element)
.tabs({
active: OpenAssessment.lastOpenEditingTab
});
},
/**
Saves the state of the editing tabs in a variable outside the scope of this editor.
When the user reopens the editing view, they will see the same tab they left open.
This is called on both paths out of the modal: saving and canceling.
**/
saveTabState: function() {
var tabElement = $(".openassessment_editor_content_and_tabs", this.element);
OpenAssessment.lastOpenEditingTab = tabElement.tabs('option', 'active');
},
/**
Save the problem's XML definition to the server.
If the problem has been released, make the user confirm the save.
**/
save: function () {
var view = this;
this.saveTabState();
// Perform client-side validation:
// * Clear errors from any field marked as invalid.
// * Mark invalid fields in the UI.
// * If there are any validation errors, show an alert.
//
// The `validate()` method calls `validate()` on any subviews,
// so that each subview has the opportunity to validate
// its fields.
this.clearValidationErrors();
if (!this.validate()) {
this.alert.setMessage(
gettext("Couldn't Save This Assignment"),
gettext("Please correct the outlined fields.")
).show();
}
else {
// At this point, we know that all fields are valid,
// so we can dismiss the validation alert.
this.alert.hide();
// Check whether the problem has been released; if not,
// warn the user and allow them to cancel.
this.server.checkReleased().done(
function (isReleased) {
if (isReleased) {
view.confirmPostReleaseUpdate($.proxy(view.updateEditorContext, view));
}
else {
view.updateEditorContext();
}
}
).fail(function (errMsg) {
view.showError(errMsg);
});
}
},
/**
Make the user confirm that he/she wants to update a problem
that has already been released.
Args:
onConfirm (function): A function that accepts no arguments,
executed if the user confirms the update.
**/
confirmPostReleaseUpdate: function (onConfirm) {
var msg = gettext("This problem has already been released. Any changes will apply only to future assessments.");
// TODO: classier confirm dialog
if (confirm(msg)) { onConfirm(); }
},
/**
Save the updated problem definition to the server.
**/
updateEditorContext: function () {
// Notify the client-side runtime that we are starting
// to save so it can show the "Saving..." notification
this.runtime.notify('save', {state: 'start'});
var view = this;
this.server.updateEditorContext({
prompt: view.promptView.promptText(),
feedbackPrompt: view.rubricView.feedbackPrompt(),
criteria: view.rubricView.criteriaDefinition(),
title: view.settingsView.displayName(),
submissionStart: view.settingsView.submissionStart(),
submissionDue: view.settingsView.submissionDue(),
assessments: view.settingsView.assessmentsDescription(),
imageSubmissionEnabled: view.settingsView.imageSubmissionEnabled(),
leaderboardNum: view.settingsView.leaderboardNum(),
editorAssessmentsOrder: view.settingsView.editorAssessmentsOrder()
}).done(
// Notify the client-side runtime that we finished saving
// so it can hide the "Saving..." notification.
// Then reload the view.
function () { view.runtime.notify('save', {state: 'end'}); }
).fail(
function (msg) { view.showError(msg); }
);
},
/**
Cancel editing.
**/
cancel: function () {
// Notify the client-side runtime so it will close the editing modal
this.saveTabState();
this.runtime.notify('cancel', {});
},
/**
Display an error message to the user.
Args:
errorMsg (string): The error message to display.
**/
showError: function (errorMsg) {
this.runtime.notify('error', {msg: errorMsg});
},
/**
Mark validation errors.
Returns:
Boolean indicating whether the view is valid.
**/
validate: function() {
var settingsValid = this.settingsView.validate();
var rubricValid = this.rubricView.validate();
return settingsValid && rubricValid;
},
/**
Return a list of validation errors visible in the UI.
Mainly useful for testing.
Returns:
list of string
**/
validationErrors: function() {
return this.settingsView.validationErrors().concat(
this.rubricView.validationErrors()
);
},
/**
Clear all validation errors from the UI.
**/
clearValidationErrors: function() {
this.settingsView.clearValidationErrors();
this.rubricView.clearValidationErrors();
}
};
/* XBlock entry point for Studio view */
function OpenAssessmentEditor(runtime, element) {
/**
Initialize the editing interface on page load.
**/
var server = new OpenAssessment.Server(runtime, element);
var view = new OpenAssessment.StudioView(runtime, element, server);
}