Commit 237fdd76 by Will Daly

Merge pull request #279 from edx/rc/2014-04-15

Rc/2014 04 15
parents 418da06d 88e8633d
...@@ -138,7 +138,7 @@ class Rubric(models.Model): ...@@ -138,7 +138,7 @@ class Rubric(models.Model):
# Validate: are options selected for each criterion in the rubric? # Validate: are options selected for each criterion in the rubric?
if len(options_selected) != len(rubric_criteria_dict): if len(options_selected) != len(rubric_criteria_dict):
msg = _("Incorrect number of options for this rubric ({actual} instead of {expected}").format( msg = _("Incorrect number of options for this rubric ({actual} instead of {expected})").format(
actual=len(options_selected), expected=len(rubric_criteria_dict)) actual=len(options_selected), expected=len(rubric_criteria_dict))
raise InvalidOptionSelection(msg) raise InvalidOptionSelection(msg)
......
...@@ -49,15 +49,15 @@ def create_assessment(submission_uuid, user_id, options_selected, rubric_dict, s ...@@ -49,15 +49,15 @@ def create_assessment(submission_uuid, user_id, options_selected, rubric_dict, s
""" """
# Check that there are not any assessments for this submission # Check that there are not any assessments for this submission
if Assessment.objects.filter(submission_uuid=submission_uuid, score_type=SELF_TYPE).exists(): if Assessment.objects.filter(submission_uuid=submission_uuid, score_type=SELF_TYPE).exists():
raise SelfAssessmentRequestError(_("Self assessment already exists for this submission")) raise SelfAssessmentRequestError(_("You've already completed your self assessment for this response."))
# Check that the student is allowed to assess this submission # Check that the student is allowed to assess this submission
try: try:
submission = get_submission_and_student(submission_uuid) submission = get_submission_and_student(submission_uuid)
if submission['student_item']['student_id'] != user_id: if submission['student_item']['student_id'] != user_id:
raise SelfAssessmentRequestError(_("Cannot self-assess this submission")) raise SelfAssessmentRequestError(_("You can only complete a self assessment on your own response."))
except SubmissionNotFoundError: except SubmissionNotFoundError:
raise SelfAssessmentRequestError(_("Could not retrieve the submission.")) raise SelfAssessmentRequestError(_("Could not retrieve the response."))
# Get or create the rubric # Get or create the rubric
try: try:
......
...@@ -90,7 +90,7 @@ class Command(BaseCommand): ...@@ -90,7 +90,7 @@ class Command(BaseCommand):
# Retrieve the submission we want to score # Retrieve the submission we want to score
# Note that we are NOT using the priority queue here, since we know # Note that we are NOT using the priority queue here, since we know
# exactly which submission we want to score. # exactly which submission we want to score.
peer_api.create_peer_workflow_item(scorer_id, submission_uuid) peer_api.create_peer_workflow_item(scorer_submission_uuid, submission_uuid)
# Create the peer assessment # Create the peer assessment
assessment = { assessment = {
......
{% load i18n %}
{% spaceless %} {% spaceless %}
<li id="openassessment__grade" class="openassessment__steps__step step--grade is--complete"> <li id="openassessment__grade" class="openassessment__steps__step step--grade is--complete">
<header class="step__header ui-toggle-visibility__control"> <header class="step__header ui-toggle-visibility__control">
<h2 class="step__title"> <h2 class="step__title">
<span class="wrapper--copy"> <span class="wrapper--copy">
<span class="step__label">Your Grade: </span> <span class="step__label">{% trans "Your Grade" %}: </span>
<span class="grade__value"> <span class="grade__value">
<span class="grade__value__title">Incomplete</span> <span class="grade__value__title">{% trans "Incomplete" %}</span>
<p class="grade__value__description">You have not completed the {% for step in incomplete_steps %}<span class="step">{{ step }} step </span> {% if not forloop.last %} and {% endif %}{% endfor %} of this problem.</p> <p class="grade__value__description">{% trans "You have not completed all the steps of this problem." %}</p>
</span> </span>
</span> </span>
</h2> </h2>
......
{% load i18n %}
{% spaceless %} {% spaceless %}
<li id="openassessment__grade" class="openassessment__steps__step step--grade"> <li id="openassessment__grade" class="openassessment__steps__step step--grade">
<header class="step__header ui-toggle-visibility__control"> <header class="step__header ui-toggle-visibility__control">
<h2 class="step__title"> <h2 class="step__title">
<span class="wrapper--copy"> <span class="wrapper--copy">
<span class="step__label">Your Grade: </span> <span class="step__label">{% trans "Your Grade" %}: </span>
<span class="grade__value"> <span class="grade__value">
<span class="grade__value__title">Not Completed</span> <span class="grade__value__title">{% trans "Not Completed" %}</span>
</span> </span>
</span> </span>
</h2> </h2>
...@@ -15,7 +16,7 @@ ...@@ -15,7 +16,7 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__content"> <div class="step__content">
<div class="grade__value__description"> <div class="grade__value__description">
<p>You have not completed the {% for step in incomplete_steps %}<span class="step">{{ step }} step </span> {% if not forloop.last %} and {% endif %}{% endfor %} of this problem.</p> <p>{% trans "You have not completed all the steps of this problem." %}</p>
</div> </div>
</div> </div>
</div> </div>
......
{% load i18n %}
{% spaceless %} {% spaceless %}
<li id="openassessment__grade" class="openassessment__steps__step step--grade is--unstarted"> <li id="openassessment__grade" class="openassessment__steps__step step--grade is--unstarted">
<header class="step__header ui-toggle-visibility__control"> <header class="step__header ui-toggle-visibility__control">
<h2 class="step__title"> <h2 class="step__title">
<span class="wrapper--copy"> <span class="wrapper--copy">
<span class="step__label">Your Grade:</span> <span class="step__label">{% trans "Your Grade" %}:</span>
<span class="grade__value"> <span class="grade__value">
<span class="grade__value__title">Not Started</span> <span class="grade__value__title">{% trans "Not Started" %}</span>
</span> </span>
</span> </span>
</h2> </h2>
...@@ -15,7 +16,7 @@ ...@@ -15,7 +16,7 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__content"> <div class="step__content">
<div class="grade__value__description"> <div class="grade__value__description">
<p>You have not started this problem yet.</p> <p>{% trans "You have not started this problem yet." %}</p>
</div> </div>
</div> </div>
</div> </div>
......
{% load i18n %}
{% spaceless %} {% spaceless %}
<li id="openassessment__grade" class="openassessment__steps__step step--grade"> <li id="openassessment__grade" class="openassessment__steps__step step--grade">
<header class="step__header ui-toggle-visibility__control"> <header class="step__header ui-toggle-visibility__control">
<h2 class="step__title"> <h2 class="step__title">
<span class="wrapper--copy"> <span class="wrapper--copy">
<span class="step__label">Your Grade:</span> <span class="step__label">{% trans "Your Grade" %}:</span>
<span class="grade__value"> <span class="grade__value">
<span class="grade__value__title">Waiting for Peer Assessment</span> <span class="grade__value__title">{% trans "Waiting for Peer Assessment" %}</span>
</span> </span>
</span> </span>
</h2> </h2>
...@@ -15,7 +16,7 @@ ...@@ -15,7 +16,7 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__content"> <div class="step__content">
<div class="grade__value__description"> <div class="grade__value__description">
<p>Your response is still undergoing peer assessment. After your peers have assessed your response, you'll see their comments and receive your final grade.</p> <p>{% trans "Your response is still undergoing peer assessment. After your peers have assessed your response, you'll see their comments and receive your final grade." %}</p>
</div> </div>
</div> </div>
</div> </div>
......
{% load i18n %}
{% spaceless %} {% spaceless %}
<div class="wrapper wrapper--xblock wrapper--openassessment theme--basic"> <div class="wrapper wrapper--xblock wrapper--openassessment theme--basic">
<div class="openassessment problem" id="openassessment"> <div class="openassessment problem" id="openassessment">
...@@ -5,7 +6,7 @@ ...@@ -5,7 +6,7 @@
<h2 class="openassessment__title problem__header">{{ title }}</h2> <h2 class="openassessment__title problem__header">{{ title }}</h2>
<nav class="nav--contents"> <nav class="nav--contents">
<h2 class="title">Skip to a specific step:</h2> <h2 class="title">{% trans "Skip to a specific step" %}</h2>
<ol class="list list--nav"> <ol class="list list--nav">
{% for assessment in rubric_assessments %} {% for assessment in rubric_assessments %}
...@@ -14,7 +15,7 @@ ...@@ -14,7 +15,7 @@
</li> </li>
{% endfor %} {% endfor %}
<li class="list--nav__item"> <li class="list--nav__item">
<a class="action" href="#openassessment__grade">Your grade for this problem</a> <a class="action" href="#openassessment__grade">{% trans "Your grade for this problem" %}</a>
</li> </li>
</ol> </ol>
</nav> </nav>
...@@ -23,14 +24,14 @@ ...@@ -23,14 +24,14 @@
<!-- if the problem is unstarted or response hasn't been submitted --> <!-- if the problem is unstarted or response hasn't been submitted -->
<div id="openassessment__message" class="openassessment__message message"> <div id="openassessment__message" class="openassessment__message message">
<div class="message__content"> <div class="message__content">
<p>This problem requires that you work through multiple parts. <strong>You can begin by reading the question below and providing your response.</strong></p> <p>{% trans "This assignment has several steps. In the first step, you'll provide a response to the question. The other steps appear below the <strong>Your Response</strong> field." %}</p>
</div> </div>
</div> </div>
{% endblock %} {% endblock %}
<div class="wrapper--openassessment__prompt"> <div class="wrapper--openassessment__prompt">
<article class="openassessment__prompt ui-toggle-visibility"> <article class="openassessment__prompt ui-toggle-visibility">
<h2 class="openassessment__prompt__title">This Problem's Prompt/Question</h2> <h2 class="openassessment__prompt__title">{% trans "The prompt for this assignment" %}</h2>
<div class="openassessment__prompt__copy ui-toggle-visibility__content"> <div class="openassessment__prompt__copy ui-toggle-visibility__content">
{{ question|linebreaks }} {{ question|linebreaks }}
...@@ -51,10 +52,10 @@ ...@@ -51,10 +52,10 @@
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status:" %}</span>
<span class="step__status__value"> <span class="step__status__value">
<i class="ico icon-refresh icon-spin"></i> <i class="ico icon-refresh icon-spin"></i>
<span class="copy">Loading</span> <span class="copy">{% trans "Loading" %}</span>
</span> </span>
</span> </span>
</header> </header>
......
{% load i18n %} {% load i18n %}
<div id="openassessment-edit"> <div id="openassessment-edit" class="editor-with-buttons">
<script id="metadata-editor-tpl" type="text/template"><!-- empty --></script>
<textarea class="openassessment-editor"></textarea> <textarea class="openassessment-editor"></textarea>
<input type="button" class="openassessment-save-button" value="{% trans 'Save' %}"/> <div class="xblock-actions">
<input type="button" class="openassessment-cancel-button" value="{% trans 'Cancel' %}"/> <h3 class="sr">Actions</h3>
<ul>
<li class="action-item">
<a href="#" class="button action-primary openassessment-save-button">{% trans "Save" %}</a>
</li>
<li class="action-item">
<a href="#" class="button openassessment-cancel-button">{% trans "Cancel" %}</a>
</li>
</ul>
</div>
</div> </div>
{% load tz %} {% load tz %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment ui-toggle-visibility"> <li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment ui-toggle-visibility">
{% endblock %} {% endblock %}
{% spaceless %} {% spaceless %}
<span class="system__element" id="peer_submission_uuid">
{{ peer_submission.uuid }}
</span>
<header class="step__header ui-toggle-visibility__control"> <header class="step__header ui-toggle-visibility__control">
<h2 class="step__title"> <h2 class="step__title">
<span class="step__counter"></span> <span class="step__counter"></span>
<span class="wrapper--copy"> <span class="wrapper--copy">
<span class="step__label">Assess Peers</span> <span class="step__label">{% trans "Assess Peers" %}</span>
{% if peer_start %} {% if peer_start %}
<span class="step__deadline">available <span class="step__deadline">{% trans "available" %}
<span class="date"> <span class="date">
{{ peer_start|utc|date:"N j, Y H:i e" }} {{ peer_start|utc|date:"N j, Y H:i e" }}
(in {{ peer_start|timeuntil }}) (in {{ peer_start|timeuntil }})
</span> </span>
</span> </span>
{% elif peer_due %} {% elif peer_due %}
<span class="step__deadline">due <span class="step__deadline">{% trans "due" %}
<span class="date"> <span class="date">
{{ peer_due|utc|date:"N j, Y H:i e" }} {{ peer_due|utc|date:"N j, Y H:i e" }}
(in {{ peer_due|timeuntil }}) (in {{ peer_due|timeuntil }})
...@@ -32,10 +30,10 @@ ...@@ -32,10 +30,10 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<span class="copy"> <span class="copy">
In Progress {% trans "In Progress" %}
(<span class="step__status__value--completed">{{ graded }}</span> of (<span class="step__status__value--completed">{{ graded }}</span> of
<span class="step__status__value--required">{{ must_grade }}</span>) <span class="step__status__value--required">{{ must_grade }}</span>)
</span> </span>
...@@ -48,7 +46,7 @@ ...@@ -48,7 +46,7 @@
<div class="ui-toggle-visibility__content"> <div class="ui-toggle-visibility__content">
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__instruction"> <div class="step__instruction">
<p>Read and assess the following response from one of your peers.</p> <p>{% trans "Read and assess the following response from one of your peers." %}</p>
</div> </div>
<div class="step__content"> <div class="step__content">
...@@ -57,7 +55,7 @@ ...@@ -57,7 +55,7 @@
<article class="peer-assessment" id="peer-assessment--001"> <article class="peer-assessment" id="peer-assessment--001">
<div class="peer-assessment__display"> <div class="peer-assessment__display">
<header class="peer-assessment__display__header"> <header class="peer-assessment__display__header">
<h3 class="peer-assessment__display__title">Assessment # <h3 class="peer-assessment__display__title">{% trans "Assessment #" %}
<span class="peer-assessment__number--current">{{ review_num }}</span> of <span class="peer-assessment__number--current">{{ review_num }}</span> of
<span class="peer-assessment__number--required">{{ must_grade }}</span> <span class="peer-assessment__number--required">{{ must_grade }}</span>
</h3> </h3>
...@@ -73,11 +71,11 @@ ...@@ -73,11 +71,11 @@
<ol class="list list--fields assessment__rubric"> <ol class="list list--fields assessment__rubric">
{% for criterion in rubric_criteria %} {% for criterion in rubric_criteria %}
<li class="field field--radio is--required assessment__rubric__question ui-toggle-visibility" id="assessment__rubric__question--{{ criterion.name }}"> <li class="field field--radio is--required assessment__rubric__question ui-toggle-visibility" id="assessment__rubric__question--{{ criterion.order_num }}">
<h4 class="question__title ui-toggle-visibility__control"> <h4 class="question__title ui-toggle-visibility__control">
<i class="ico icon-caret-right"></i> <i class="ico icon-caret-right"></i>
<span class="ui-toggle-visibility__control__copy question__title__copy">{{ criterion.prompt }}</span> <span class="ui-toggle-visibility__control__copy question__title__copy">{{ criterion.prompt }}</span>
<span class="label--required sr">* (Required)</span> <span class="label--required sr">* ({% trans "Required" %})</span>
</h4> </h4>
<div class="ui-toggle-visibility__content"> <div class="ui-toggle-visibility__content">
...@@ -87,16 +85,16 @@ ...@@ -87,16 +85,16 @@
<div class="wrapper--input"> <div class="wrapper--input">
<input type="radio" <input type="radio"
name="{{ criterion.name }}" name="{{ criterion.name }}"
id="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}" id="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__value" class="answer__value"
value="{{ option.name }}" /> value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}" <label for="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__label" class="answer__label"
>{{ option.name }}</label> >{{ option.name }}</label>
</div> </div>
<div class="wrapper--metadata"> <div class="wrapper--metadata">
<span class="answer__tip">{{ option.explanation }}</span> <span class="answer__tip">{{ option.explanation }}</span>
<span class="answer__points">{{option.points}} <span class="answer__points__label">points</span></span> <span class="answer__points">{{option.points}} <span class="answer__points__label">{% trans "points" %}</span></span>
</div> </div>
</li> </li>
{% endfor %} {% endfor %}
...@@ -107,11 +105,11 @@ ...@@ -107,11 +105,11 @@
<li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback"> <li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback">
<label class="question__title" for="assessment__rubric__question--feedback__value"> <label class="question__title" for="assessment__rubric__question--feedback__value">
<span class="question__title__copy">(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?</span> <span class="question__title__copy">{% trans "(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?" %}</span>
</label> </label>
<div class="wrapper--input"> <div class="wrapper--input">
<textarea id="assessment__rubric__question--feedback__value" placeholder="I noticed that this response..."></textarea> <textarea id="assessment__rubric__question--feedback__value" placeholder="{% trans "I noticed that this response..." %}"></textarea>
</div> </div>
</li> </li>
</ol> </ol>
...@@ -124,7 +122,7 @@ ...@@ -124,7 +122,7 @@
<div class="step__actions"> <div class="step__actions">
<div class="message message--inline message--error message--error-server"> <div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not submit your assessment</h3> <h3 class="message__title">{% trans "We could not submit your assessment" %}</h3>
<div class="message__content"></div> <div class="message__content"></div>
</div> </div>
......
{% extends "openassessmentblock/peer/oa_peer_assessment.html" %} {% extends "openassessmentblock/peer/oa_peer_assessment.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__peer-assessment"class="openassessment__steps__step step--peer-assessment is--incomplete ui-toggle-visibility"> <li id="openassessment__peer-assessment"class="openassessment__steps__step step--peer-assessment is--incomplete ui-toggle-visibility">
...@@ -6,11 +7,11 @@ ...@@ -6,11 +7,11 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<span class="copy"> <span class="copy">
<i class="ico icon-warning-sign"></i> <i class="ico icon-warning-sign"></i>
Incomplete {% trans "Incomplete" %}
(<span class="step__status__value--completed">{{ graded }}</span> of (<span class="step__status__value--completed">{{ graded }}</span> of
<span class="step__status__value--required">{{ must_grade }}</span>) <span class="step__status__value--required">{{ must_grade }}</span>)
</span> </span>
...@@ -23,9 +24,9 @@ ...@@ -23,9 +24,9 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__message message message--incomplete"> <div class="step__message message message--incomplete">
<h3 class="message__title">The Due Date for This Step Has Passed</h3> <h3 class="message__title">{% trans "The Due Date for This Step Has Passed" %}</h3>
<div class="message__content"> <div class="message__content">
<p>This step is now closed. You can no longer complete peer assessments or continue with this assignment, and your grade will be Incomplete.</p> <p>{% trans "This step is now closed. You can no longer complete peer assessments or continue with this assignment, and you'll receive a grade of Incomplete." %}</p>
</div> </div>
</div> </div>
</div> </div>
......
{% extends "openassessmentblock/peer/oa_peer_assessment.html" %} {% extends "openassessmentblock/peer/oa_peer_assessment.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment ui-toggle-visibility is--empty is--complete is--collapsed"> <li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment ui-toggle-visibility is--empty is--complete is--collapsed">
...@@ -6,11 +7,11 @@ ...@@ -6,11 +7,11 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<i class="ico icon-ok"></i> <i class="ico icon-ok"></i>
<span class="copy"> <span class="copy">
Complete {% trans "Complete" %}
(<span class="step__status__value--completed">{{ graded }}</span> of (<span class="step__status__value--completed">{{ graded }}</span> of
<span class="step__status__value--required">{{ must_grade }}</span>) <span class="step__status__value--required">{{ must_grade }}</span>)
</span> </span>
......
{% extends "openassessmentblock/peer/oa_peer_assessment.html" %} {% extends "openassessmentblock/peer/oa_peer_assessment.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__peer-assessment"class="openassessment__steps__step step--peer-assessment ui-toggle-visibility is--complete"> <li id="openassessment__peer-assessment"class="openassessment__steps__step step--peer-assessment ui-toggle-visibility is--complete">
...@@ -6,11 +7,11 @@ ...@@ -6,11 +7,11 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<i class="ico icon-ok"></i> <i class="ico icon-ok"></i>
<span class="copy"> <span class="copy">
Complete {% trans "Complete" %}
(<span class="step__status__value--completed">{{ graded }}</span>) (<span class="step__status__value--completed">{{ graded }}</span>)
</span> </span>
</span> </span>
...@@ -22,12 +23,12 @@ ...@@ -22,12 +23,12 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__message message message--complete"> <div class="step__message message message--complete">
<h3 class="message__title">Peer Assessments Complete</h3> <h3 class="message__title">{% trans "Peer Assessments Complete" %}</h3>
<div class="message__content">You have successfully completed all of the required peer assessments for this assignment. You may assess additional peer responses if you want to. Completing additional assessments will not affect your final grade.</div> <div class="message__content">{% trans "You have successfully completed all of the required peer assessments for this assignment. You may assess additional peer responses if you want to. Completing additional assessments will not affect your final grade." %}</div>
</div> </div>
<div class="step__instruction"> <div class="step__instruction">
<p>Read and assess the following response from one of your peers.</p> <p>{% trans "Read and assess the following response from one of your peers." %}</p>
</div> </div>
<div class="step__content"> <div class="step__content">
...@@ -36,7 +37,7 @@ ...@@ -36,7 +37,7 @@
<article class="peer-assessment" id="peer-assessment--001"> <article class="peer-assessment" id="peer-assessment--001">
<div class="peer-assessment__display"> <div class="peer-assessment__display">
<header class="peer-assessment__display__header"> <header class="peer-assessment__display__header">
<h3 class="peer-assessment__display__title">Assessment # <h3 class="peer-assessment__display__title">{% trans "Assessment #" %}
<span class="peer-assessment__number--current">{{ review_num }}</span> of <span class="peer-assessment__number--current">{{ review_num }}</span> of
<span class="peer-assessment__number--required">{{ must_grade }}</span> <span class="peer-assessment__number--required">{{ must_grade }}</span>
</h3> </h3>
...@@ -52,11 +53,11 @@ ...@@ -52,11 +53,11 @@
<ol class="list list--fields assessment__rubric"> <ol class="list list--fields assessment__rubric">
{% for criterion in rubric_criteria %} {% for criterion in rubric_criteria %}
<li class="field field--radio is--required assessment__rubric__question ui-toggle-visibility" id="assessment__rubric__question--{{ criterion.name }}"> <li class="field field--radio is--required assessment__rubric__question ui-toggle-visibility" id="assessment__rubric__question--{{ criterion.order_num }}">
<h4 class="question__title ui-toggle-visibility__control"> <h4 class="question__title ui-toggle-visibility__control">
<i class="ico icon-caret-right"></i> <i class="ico icon-caret-right"></i>
<span class="ui-toggle-visibility__control__copy question__title__copy">{{ criterion.prompt }}</span> <span class="ui-toggle-visibility__control__copy question__title__copy">{{ criterion.prompt }}</span>
<span class="label--required sr">* (Required)</span> <span class="label--required sr">* ({% trans "Required" %})</span>
</h4> </h4>
<div class="ui-toggle-visibility__content"> <div class="ui-toggle-visibility__content">
...@@ -66,16 +67,16 @@ ...@@ -66,16 +67,16 @@
<div class="wrapper--input"> <div class="wrapper--input">
<input type="radio" <input type="radio"
name="{{ criterion.name }}" name="{{ criterion.name }}"
id="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}" id="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__value" class="answer__value"
value="{{ option.name }}" /> value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}" <label for="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__label" class="answer__label"
>{{ option.name }}</label> >{{ option.name }}</label>
</div> </div>
<div class="wrapper--metadata"> <div class="wrapper--metadata">
<span class="answer__tip">{{ option.explanation }}</span> <span class="answer__tip">{{ option.explanation }}</span>
<span class="answer__points">{{option.points}} <span class="answer__points__label">points</span></span> <span class="answer__points">{{option.points}} <span class="answer__points__label">{% trans "points" %}</span></span>
</div> </div>
</li> </li>
{% endfor %} {% endfor %}
...@@ -86,10 +87,10 @@ ...@@ -86,10 +87,10 @@
<li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback"> <li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback">
<label class="question__title" for="assessment__rubric__question--feedback__value"> <label class="question__title" for="assessment__rubric__question--feedback__value">
<span class="question__title__copy">(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?</span> <span class="question__title__copy">{% trans "(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?" %}</span>
</label> </label>
<div class="wrapper--input"> <div class="wrapper--input">
<textarea id="assessment__rubric__question--feedback__value" placeholder="I felt this response was..."></textarea> <textarea id="assessment__rubric__question--feedback__value" placeholder="{% trans "I felt this response was..." %}"></textarea>
</div> </div>
</li> </li>
</ol> </ol>
...@@ -102,7 +103,7 @@ ...@@ -102,7 +103,7 @@
<div class="step__actions"> <div class="step__actions">
<div class="message message--inline message--error message--error-server"> <div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not submit your assessment</h3> <h3 class="message__title">{% trans "We could not submit your assessment" %}</h3>
</div> </div>
<ul class="list list--actions"> <ul class="list list--actions">
......
{% extends "openassessmentblock/peer/oa_peer_assessment.html" %} {% extends "openassessmentblock/peer/oa_peer_assessment.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment ui-toggle-visibility is--complete"> <li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment ui-toggle-visibility is--complete">
...@@ -6,7 +7,7 @@ ...@@ -6,7 +7,7 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<i class="ico icon-ok"></i> <i class="ico icon-ok"></i>
<span class="copy"> <span class="copy">
...@@ -22,11 +23,11 @@ ...@@ -22,11 +23,11 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__message message message--complete"> <div class="step__message message message--complete">
<h3 class="message__title">Congratulations!</h3> <h3 class="message__title">{% trans "Peer Assessments Complete" %}</h3>
<div class="message__content"> <div class="message__content">
<p>You have successfully completed all of the peer assessment that you have been asked to do for this step. If you would like to continue providing feedback to your peers you may do so here, but it will not influence your final grade.</p> <p>{% trans "You have successfully completed all of the required peer assessments for this assignment. You may assess additional peer responses if you want to. Completing additional assessments will not affect your final grade." %}</p>
<p><strong>Currently there are no responses for you to assess. This should change momentarily. Check back shortly to provide feedback on more of your peers' responses.</strong></p> <p><strong>{% trans "All submitted peer responses have been assessed. Check back later to see if more students have submitted responses." %}</strong></p>
</div> </div>
</div> </div>
</div> </div>
......
{% extends "openassessmentblock/peer/oa_peer_assessment.html" %} {% extends "openassessmentblock/peer/oa_peer_assessment.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment is--unavailable is--empty is--collapsed"> <li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment is--unavailable is--empty is--collapsed">
...@@ -6,9 +7,9 @@ ...@@ -6,9 +7,9 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<span class="copy">Not Available</span> <span class="copy">{% trans "Not Available" %}</span>
</span> </span>
</span> </span>
{% endblock %} {% endblock %}
......
{% extends "openassessmentblock/peer/oa_peer_assessment.html" %} {% extends "openassessmentblock/peer/oa_peer_assessment.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment"> <li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment">
...@@ -6,10 +7,10 @@ ...@@ -6,10 +7,10 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<span class="copy"> <span class="copy">
In Progress {% trans "In Progress" %}
(<span class="step__status__value--completed">{{ graded }}</span> of (<span class="step__status__value--completed">{{ graded }}</span> of
<span class="step__status__value--required">{{ must_grade }}</span>) <span class="step__status__value--required">{{ must_grade }}</span>)
</span> </span>
...@@ -22,10 +23,10 @@ ...@@ -22,10 +23,10 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__message message message--incomplete"> <div class="step__message message message--incomplete">
<h3 class="message__title">Waiting for Peer Responses</h3> <h3 class="message__title">{% trans "Waiting for Peer Responses" %}</h3>
<div class="message__content"> <div class="message__content">
<p>All submitted peer responses have been assessed. Check back later to see if more students have submitted responses. You'll receive your grade after you complete the <a data-behavior="ui-scroll" href="#openassessment__peer-assessment">peer assessment</a> and <a data-behavior="ui-scroll" href="#openassessment__self-assessment">self assessment</a> steps, and after your peers have assessed your response.</p> <p>{% blocktrans %}All submitted peer responses have been assessed. Check back later to see if more students have submitted responses. You'll receive your grade after you complete the <a data-behavior="ui-scroll" href="#openassessment__peer-assessment">peer assessment</a> and <a data-behavior="ui-scroll" href="#openassessment__self-assessment">self assessment</a> steps, and after your peers have assessed your response.{% endblocktrans %}</p>
</div> </div>
</div> </div>
</div> </div>
......
{% load tz %} {% load tz %}
{% load i18n %}
{% spaceless %} {% spaceless %}
{% block list_item %} {% block list_item %}
<li id="openassessment__response" class="openassessment__steps__step step--response ui-toggle-visibility"> <li id="openassessment__response" class="openassessment__steps__step step--response ui-toggle-visibility">
...@@ -8,7 +9,7 @@ ...@@ -8,7 +9,7 @@
<h2 class="step__title"> <h2 class="step__title">
<span class="step__counter"></span> <span class="step__counter"></span>
<span class="wrapper--copy"> <span class="wrapper--copy">
<span class="step__label">Your Response</span> <span class="step__label">{% trans "Your Response" %}</span>
{% if submission_start %} {% if submission_start %}
<span class="step__deadline">available <span class="step__deadline">available
<span class="date"> <span class="date">
...@@ -29,9 +30,9 @@ ...@@ -29,9 +30,9 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<span class="copy">In Progress</span> <span class="copy">{% trans "In Progress" %}</span>
</span> </span>
</span> </span>
{% endblock %} {% endblock %}
...@@ -42,13 +43,13 @@ ...@@ -42,13 +43,13 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__instruction"> <div class="step__instruction">
<p> <p>
Please provide your response below. {% trans "Enter your response to the question." %}
{% if submission_due %} {% if submission_due %}
You can save your progress and return to complete your response at any time before the due date of <span class="step__deadline"><span class="date">{{ submission_due|utc|date:"l, N j, Y H:i e" }}</span></span>. {% trans "You can save your progress and return to complete your response at any time before the due date" %} (<span class="step__deadline"><span class="date">{{ submission_due|utc|date:"l, N j, Y H:i e" }}</span></span>).
{% else %} {% else %}
You can save your progress and return to complete your response at any time. {% trans "You can save your progress and return to complete your response at any time." %}
{% endif %} {% endif %}
<strong class="emphasis">After you submit your response, you cannot edit it</strong>. <strong class="emphasis">{% trans "After you submit your response, you cannot edit it" %}</strong>.
</p> </p>
</div> </div>
...@@ -56,15 +57,15 @@ ...@@ -56,15 +57,15 @@
<form id="response__submission" class="response__submission"> <form id="response__submission" class="response__submission">
<ol class="list list--fields response__submission__content"> <ol class="list list--fields response__submission__content">
<li class="field field--textarea submission__answer" id="submission__answer"> <li class="field field--textarea submission__answer" id="submission__answer">
<label class="sr" for="submission__answer__value">Provide your response to the question.</label> <label class="sr" for="submission__answer__value">{% trans "Enter your response to the question." %}</label>
<textarea id="submission__answer__value" placeholder="">{{ saved_response }}</textarea> <textarea id="submission__answer__value" placeholder="">{{ saved_response }}</textarea>
<span class="tip">You may continue to work on your response until you submit it.</span> <span class="tip">{% trans "You may continue to work on your response until you submit it." %}</span>
</li> </li>
</ol> </ol>
<div class="response__submission__actions"> <div class="response__submission__actions">
<div class="message message--inline message--error message--error-server"> <div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not save your progress</h3> <h3 class="message__title">{% trans "We could not save your progress" %}</h3>
<div class="message__content"></div> <div class="message__content"></div>
</div> </div>
...@@ -74,7 +75,7 @@ ...@@ -74,7 +75,7 @@
<div id="response__save_status" class="response__submission__status"> <div id="response__save_status" class="response__submission__status">
<h3 class="response__submission__status__title"> <h3 class="response__submission__status__title">
<span class="sr">Your Working Submission Status:</span> <span class="sr">{% trans "Your Submission Status" %}:</span>
{{ save_status }} {{ save_status }}
</h3> </h3>
</div> </div>
...@@ -86,7 +87,7 @@ ...@@ -86,7 +87,7 @@
<div class="step__actions"> <div class="step__actions">
<div class="message message--inline message--error message--error-server"> <div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not submit your response</h3> <h3 class="message__title">{% trans "We could not submit your response" %}</h3>
<div class="message__content"></div> <div class="message__content"></div>
</div> </div>
...@@ -94,7 +95,7 @@ ...@@ -94,7 +95,7 @@
<li class="list--actions__item"> <li class="list--actions__item">
<a aria-role="button" href="#" id="step--response__submit" <a aria-role="button" href="#" id="step--response__submit"
class="action action--submit step--response__submit {{ submit_enabled|yesno:",is--disabled" }}"> class="action action--submit step--response__submit {{ submit_enabled|yesno:",is--disabled" }}">
<span class="copy">Submit your response and move to the next step</span> <span class="copy">{% trans "Submit your response and move to the next step" %}</span>
<i class="ico icon-caret-right"></i> <i class="ico icon-caret-right"></i>
</a> </a>
</li> </li>
......
{% extends "openassessmentblock/response/oa_response.html" %} {% extends "openassessmentblock/response/oa_response.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__response" class="openassessment__steps__step step--response is--incomplete ui-toggle-visibility"> <li id="openassessment__response" class="openassessment__steps__step step--response is--incomplete ui-toggle-visibility">
{% endblock %} {% endblock %}
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<i class="ico icon-warning-sign"></i> <i class="ico icon-warning-sign"></i>
<span class="copy">Incomplete</span> <span class="copy">{% trans "Incomplete" %}</span>
</span> </span>
</span> </span>
{% endblock %} {% endblock %}
...@@ -18,10 +19,10 @@ ...@@ -18,10 +19,10 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__message message message--incomplete"> <div class="step__message message message--incomplete">
<h3 class="message__title">The Due Date for This Step Has Passed</h3> <h3 class="message__title">{% trans "The Due Date for This Step Has Passed" %}</h3>
<div class="message__content"> <div class="message__content">
<p>This step is now closed. You can no longer submit a response or continue with this problem, and your grade will be Incomplete. If you saved but did not submit a response, the response appears in the course records.</p> <p>{% trans "This step is now closed. You can no longer submit a response or continue with this problem, and you'll receive a grade of Incomplete. If you saved but did not submit a response, the response appears in the course records." %}</p>
</div> </div>
</div> </div>
</div> </div>
......
{% extends "openassessmentblock/response/oa_response.html" %} {% extends "openassessmentblock/response/oa_response.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__response" class="openassessment__steps__step step--response is--complete ui-toggle-visibility is--collapsed"> <li id="openassessment__response" class="openassessment__steps__step step--response is--complete ui-toggle-visibility is--collapsed">
...@@ -6,10 +7,10 @@ ...@@ -6,10 +7,10 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<i class="ico icon-ok"></i> <i class="ico icon-ok"></i>
<span class="copy">Complete</span> <span class="copy">{% trans "Complete" %}</span>
</span> </span>
</span> </span>
{% endblock %} {% endblock %}
...@@ -19,7 +20,7 @@ ...@@ -19,7 +20,7 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__content"> <div class="step__content">
<article class="submission__answer__display"> <article class="submission__answer__display">
<h3 class="submission__answer__display__title">Your Submitted Response</h3> <h3 class="submission__answer__display__title">{% trans "Your Response" %}</h3>
<div class="submission__answer__display__content"> <div class="submission__answer__display__content">
{{ student_submission.answer.text|linebreaks }} {{ student_submission.answer.text|linebreaks }}
......
{% extends "openassessmentblock/response/oa_response.html" %} {% extends "openassessmentblock/response/oa_response.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__response" class="openassessment__steps__step step--response is--complete ui-toggle-visibility is--collapsed"> <li id="openassessment__response" class="openassessment__steps__step step--response is--complete ui-toggle-visibility is--collapsed">
...@@ -6,10 +7,10 @@ ...@@ -6,10 +7,10 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<i class="ico icon-ok"></i> <i class="ico icon-ok"></i>
<span class="copy">Complete</span> <span class="copy">{% trans "Complete" %}</span>
</span> </span>
</span> </span>
{% endblock %} {% endblock %}
...@@ -19,13 +20,13 @@ ...@@ -19,13 +20,13 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__message message message--complete"> <div class="step__message message message--complete">
<h3 class="message__title">Your Response Has Been Submitted</h3> <h3 class="message__title">{% trans "Your Response Has Been Submitted" %}</h3>
<div class="message__content">You'll receive your grade after some of your peers have assessed your response and you complete the <a data-behavior="ui-scroll" href="#openassessment__peer-assessment">peer assessment</a> and <a data-behavior="ui-scroll" href="#openassessment__self-assessment">self assessment</a> steps.</div> <div class="message__content">{% trans "You'll receive your grade after some of your peers have assessed your response and you complete the <a data-behavior=\"ui-scroll\" href=\"#openassessment__peer-assessment\">peer assessment</a> and <a data-behavior=\"ui-scroll\" href=\"#openassessment__self-assessment\">self assessment</a> steps" %}.</div>
</div> </div>
<div class="step__content"> <div class="step__content">
<article class="submission__answer__display"> <article class="submission__answer__display">
<h3 class="submission__answer__display__title">Your Response</h3> <h3 class="submission__answer__display__title">{% trans "Your Response" %}</h3>
<div class="submission__answer__display__content"> <div class="submission__answer__display__content">
{{ student_submission.answer.text|linebreaks }} {{ student_submission.answer.text|linebreaks }}
......
...@@ -4,17 +4,14 @@ ...@@ -4,17 +4,14 @@
{% block list_item %} {% block list_item %}
<li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment ui-toggle-visibility"> <li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment ui-toggle-visibility">
{% endblock %} {% endblock %}
<span class="system__element" id="self_submission_uuid">
{{ self_submission.uuid }}
</span>
<header class="step__header ui-toggle-visibility__control"> <header class="step__header ui-toggle-visibility__control">
<h2 class="step__title"> <h2 class="step__title">
<span class="step__counter"></span> <span class="step__counter"></span>
<span class="wrapper--copy"> <span class="wrapper--copy">
<span class="step__label">Assess Yourself</span> <span class="step__label">{% trans "Assess Your Response" %}</span>
{% if self_start %} {% if self_start %}
<span class="step__deadline">available <span class="step__deadline">{% trans "available" %}
<span class="date"> <span class="date">
{{ self_start|utc|date:"N j, Y H:i e" }} {{ self_start|utc|date:"N j, Y H:i e" }}
(in {{ self_start|timeuntil }}) (in {{ self_start|timeuntil }})
...@@ -33,9 +30,9 @@ ...@@ -33,9 +30,9 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<span class="copy">In Progress</span> <span class="copy">{% trans "In Progress" %}</span>
</span> </span>
</span> </span>
{% endblock %} {% endblock %}
...@@ -47,7 +44,7 @@ ...@@ -47,7 +44,7 @@
<div class="step__content"> <div class="step__content">
<article class="self-assessment__display" id="self-assessment"> <article class="self-assessment__display" id="self-assessment">
<header class="self-assessment__display__header"> <header class="self-assessment__display__header">
<h3 class="self-assessment__display__title">Your Submitted Response</h3> <h3 class="self-assessment__display__title">{% trans "Your Response" %}</h3>
</header> </header>
<div class="self-assessment__display__response"> <div class="self-assessment__display__response">
...@@ -59,11 +56,11 @@ ...@@ -59,11 +56,11 @@
<fieldset class="assessment__fields"> <fieldset class="assessment__fields">
<ol class="list list--fields assessment__rubric"> <ol class="list list--fields assessment__rubric">
{% for criterion in rubric_criteria %} {% for criterion in rubric_criteria %}
<li class="field field--radio is--required assessment__rubric__question ui-toggle-visibility" id="assessment__rubric__question--{{ criterion.name }}"> <li class="field field--radio is--required assessment__rubric__question ui-toggle-visibility" id="assessment__rubric__question--{{ criterion.order_num }}">
<h4 class="question__title ui-toggle-visibility__control"> <h4 class="question__title ui-toggle-visibility__control">
<i class="ico icon-caret-right"></i> <i class="ico icon-caret-right"></i>
<span class="question__title__copy">{{ criterion.prompt }}</span> <span class="question__title__copy">{{ criterion.prompt }}</span>
<span class="label--required sr">* (Required)</span> <span class="label--required sr">* ({% trans "Required" %})</span>
</h4> </h4>
<div class="ui-toggle-visibility__content"> <div class="ui-toggle-visibility__content">
...@@ -73,15 +70,15 @@ ...@@ -73,15 +70,15 @@
<div class="wrapper--input"> <div class="wrapper--input">
<input type="radio" <input type="radio"
name="{{ criterion.name }}" name="{{ criterion.name }}"
id="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}" id="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__value" class="answer__value"
value="{{ option.name }}" /> value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}" <label for="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__label">{{ option.name }}</label> class="answer__label">{{ option.name }}</label>
</div> </div>
<div class="wrapper--metadata"> <div class="wrapper--metadata">
<span class="answer__tip">{{ option.explanation }}</span> <span class="answer__tip">{{ option.explanation }}</span>
<span class="answer__points">{{option.points}} <span class="answer__points__label">points</span></span> <span class="answer__points">{{option.points}} <span class="answer__points__label">{% trans "points" %}</span></span>
</div> </div>
</li> </li>
{% endfor %} {% endfor %}
...@@ -96,7 +93,7 @@ ...@@ -96,7 +93,7 @@
<div class="step__actions"> <div class="step__actions">
<div class="message message--inline message--error message--error-server"> <div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not submit your assessment</h3> <h3 class="message__title">{% trans "We could not submit your assessment" %}</h3>
<div class="message__content"></div> <div class="message__content"></div>
</div> </div>
......
{% extends "openassessmentblock/self/oa_self_assessment.html" %} {% extends "openassessmentblock/self/oa_self_assessment.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment is--incomplete ui-toggle-visibility"> <li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment is--incomplete ui-toggle-visibility">
...@@ -6,10 +7,10 @@ ...@@ -6,10 +7,10 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<i class="ico icon-warning-sign"></i> <i class="ico icon-warning-sign"></i>
<span class="copy">Incomplete</span> <span class="copy">{% trans "Incomplete" %}</span>
</span> </span>
</span> </span>
{% endblock %} {% endblock %}
...@@ -19,9 +20,9 @@ ...@@ -19,9 +20,9 @@
<div class="wrapper--step__content"> <div class="wrapper--step__content">
<div class="step__message message message--incomplete"> <div class="step__message message message--incomplete">
<h3 class="message__title">The Due Date for This Step Has Passed</h3> <h3 class="message__title">{% trans "The Due Date for This Step Has Passed" %}</h3>
<div class="message__content"> <div class="message__content">
<p>This step is now closed. You can no longer complete a self assessment or continue with this assignment, and your grade will be Incomplete.</p> <p>{% trans "This step is now closed. You can no longer complete a self assessment or continue with this assignment, and you'll receive a grade of Incomplete." %}</p>
</div> </div>
</div> </div>
</div> </div>
......
{% extends "openassessmentblock/self/oa_self_assessment.html" %} {% extends "openassessmentblock/self/oa_self_assessment.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment is--complete is--empty is--collapsed"> <li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment is--complete is--empty is--collapsed">
...@@ -6,10 +7,10 @@ ...@@ -6,10 +7,10 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<i class="ico icon-ok"></i> <i class="ico icon-ok"></i>
<span class="copy">Complete</span> <span class="copy">{% trans "Complete" %}</span>
</span> </span>
</span> </span>
{% endblock %} {% endblock %}
......
{% extends "openassessmentblock/self/oa_self_assessment.html" %} {% extends "openassessmentblock/self/oa_self_assessment.html" %}
{% load i18n %}
{% block list_item %} {% block list_item %}
<li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment is--empty is--unavailable is--collapsed"> <li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment is--empty is--unavailable is--collapsed">
...@@ -6,9 +7,9 @@ ...@@ -6,9 +7,9 @@
{% block title %} {% block title %}
<span class="step__status"> <span class="step__status">
<span class="step__status__label">This step's status:</span> <span class="step__status__label">{% trans "This step's status" %}:</span>
<span class="step__status__value"> <span class="step__status__value">
<span class="copy">Not Available</span> <span class="copy">{% trans "Not Available" %}</span>
</span> </span>
</span> </span>
{% endblock %} {% endblock %}
......
{% load i18n %}
<div class="wrapper--staff-info wrapper--ui-staff"> <div class="wrapper--staff-info wrapper--ui-staff">
<div class="staff-info ui-staff ui-toggle-visibility is--collapsed"> <div class="staff-info ui-staff ui-toggle-visibility is--collapsed">
<h2 class="staff-info__title ui-staff__title ui-toggle-visibility__control"> <h2 class="staff-info__title ui-staff__title ui-toggle-visibility__control">
<i class="ico icon-caret-right"></i> <i class="ico icon-caret-right"></i>
<span class="staff-info__title__copy">Course Staff Information</span> <span class="staff-info__title__copy">{% trans "Course Staff Information" %}</span>
</h2> </h2>
<div class="staff-info__content ui-staff__content ui-toggle-visibility__content"> <div class="staff-info__content ui-staff__content ui-toggle-visibility__content">
<div class="staff-info__summary ui-staff__content__section"> <div class="staff-info__summary ui-staff__content__section">
<dl class="submissions--total"> <dl class="submissions--total">
<dt class="label">Total number of submissions:</dt> <dt class="label">{% trans "Response total" %}:</dt>
<dd class="value">{{ num_submissions }}</dd> <dd class="value">{{ num_submissions }}</dd>
</dl> </dl>
</div> </div>
<div class="staff-info__status ui-staff__content__section"> <div class="staff-info__status ui-staff__content__section">
<table class="staff-info__status__table" summary="Where are your students currently in this problem"> <table class="staff-info__status__table" summary="{% trans "Where are your students currently in this problem" %}">
<caption class="title">Student Progress/Step Status</caption> <caption class="title">{% trans "Student Progress" %}</caption>
<thead> <thead>
<tr> <tr>
<th abbr="Step" scope="col">Problem Step</th> <th abbr="Step" scope="col">{% trans "Problem Step" %}</th>
<th abbr="# of Students" scope="col">Number of Students Actively in Step</th> <th abbr="# of Students" scope="col">{% trans "Active Students in Step" %}</th>
</tr> </tr>
</thead> </thead>
...@@ -38,7 +39,7 @@ ...@@ -38,7 +39,7 @@
</div> </div>
<div class="staff-info__status ui-staff__content__section"> <div class="staff-info__status ui-staff__content__section">
Location: {{ item_id }} {% trans "Location" %}: {{ item_id }}
</div> </div>
</div> </div>
</div> </div>
......
...@@ -154,4 +154,4 @@ class GradeMixin(object): ...@@ -154,4 +154,4 @@ class GradeMixin(object):
'options': feedback_options, 'options': feedback_options,
} }
) )
return {'success': True, 'msg': _(u"Feedback saved!")} return {'success': True, 'msg': _(u"Feedback saved.")}
...@@ -161,7 +161,7 @@ class PeerAssessmentMixin(object): ...@@ -161,7 +161,7 @@ class PeerAssessmentMixin(object):
if assessment: if assessment:
context_dict["must_grade"] = assessment["must_grade"] context_dict["must_grade"] = assessment["must_grade"]
finished, count = peer_api.has_finished_required_evaluating( finished, count = peer_api.has_finished_required_evaluating(
student_item, self.submission_uuid,
assessment["must_grade"] assessment["must_grade"]
) )
context_dict["graded"] = count context_dict["graded"] = count
...@@ -212,7 +212,7 @@ class PeerAssessmentMixin(object): ...@@ -212,7 +212,7 @@ class PeerAssessmentMixin(object):
peer_submission = False peer_submission = False
try: try:
peer_submission = peer_api.get_submission_to_assess( peer_submission = peer_api.get_submission_to_assess(
student_item_dict, self.submission_uuid,
assessment["must_be_graded_by"], assessment["must_be_graded_by"],
True True
) )
......
...@@ -45,7 +45,7 @@ def _parse_date(value): ...@@ -45,7 +45,7 @@ def _parse_date(value):
try: try:
return parse_date(value).replace(tzinfo=pytz.utc) return parse_date(value).replace(tzinfo=pytz.utc)
except ValueError: except ValueError:
raise InvalidDateFormat(_("Could not parse date '{date}'").format(date=value)) raise InvalidDateFormat(_("'{date}' is an invalid date format. Make sure the date is formatted as YYYY-MM-DDTHH:MM:SS.").format(date=value))
else: else:
raise InvalidDateFormat(_("'{date}' must be a date string or datetime").format(date=value)) raise InvalidDateFormat(_("'{date}' must be a date string or datetime").format(date=value))
...@@ -185,13 +185,13 @@ def resolve_dates(start, end, date_ranges): ...@@ -185,13 +185,13 @@ def resolve_dates(start, end, date_ranges):
step_end = _parse_date(step_end) if step_end is not None else prev_end step_end = _parse_date(step_end) if step_end is not None else prev_end
if step_start < prev_start: if step_start < prev_start:
msg = _(u"The start date '{start}' must be after the previous start date '{prev}'.").format( msg = _(u"This step's start date '{start}' cannot be earlier than the previous step's start date '{prev}'.").format(
start=step_start, prev=prev_start start=step_start, prev=prev_start
) )
raise DateValidationError(msg) raise DateValidationError(msg)
if step_end > prev_end: if step_end > prev_end:
msg = _(u"The due date '{due}' must be before the following due date '{prev}'.").format( msg = _(u"This step's due date '{due}' cannot be later than the next step's due date '{prev}'.").format(
due=step_end, prev=prev_end due=step_end, prev=prev_end
) )
raise DateValidationError(msg) raise DateValidationError(msg)
...@@ -207,7 +207,7 @@ def resolve_dates(start, end, date_ranges): ...@@ -207,7 +207,7 @@ def resolve_dates(start, end, date_ranges):
# Now that we have resolved both start and end dates, we can safely compare them # Now that we have resolved both start and end dates, we can safely compare them
for resolved_start, resolved_end in resolved_ranges: for resolved_start, resolved_end in resolved_ranges:
if resolved_start >= resolved_end: if resolved_start >= resolved_end:
msg = _(u"Start date '{start}' cannot be later than the due date '{due}'").format( msg = _(u"The start date '{start}' cannot be later than the due date '{due}'").format(
start=resolved_start, due=resolved_end start=resolved_start, due=resolved_end
) )
raise DateValidationError(msg) raise DateValidationError(msg)
......
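For reference, a minimal sketch of the date handling that the new error message describes, assuming `parse_date` is `dateutil.parser.parse` (the import is not shown in this hunk) and that dates are normalized to UTC with `pytz`; the example date string is invented.

from dateutil.parser import parse as parse_date
import pytz

def to_utc(value):
    # Parse an ISO-8601 string such as "2014-04-15T00:00:00" and attach UTC,
    # mirroring parse_date(value).replace(tzinfo=pytz.utc) in the hunk above.
    # A malformed string raises ValueError, which the code above converts
    # into InvalidDateFormat with the reworded message.
    return parse_date(value).replace(tzinfo=pytz.utc)

print(to_utc("2014-04-15T00:00:00"))   # 2014-04-15 00:00:00+00:00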
...@@ -99,8 +99,6 @@ class SelfAssessmentMixin(object): ...@@ -99,8 +99,6 @@ class SelfAssessmentMixin(object):
Dict with keys "success" (bool) indicating success/failure Dict with keys "success" (bool) indicating success/failure
and "msg" (unicode) containing additional information if an error occurs. and "msg" (unicode) containing additional information if an error occurs.
""" """
if 'submission_uuid' not in data:
return {'success': False, 'msg': _(u"Missing submission_uuid key in request")}
if 'options_selected' not in data: if 'options_selected' not in data:
return {'success': False, 'msg': _(u"Missing options_selected key in request")} return {'success': False, 'msg': _(u"Missing options_selected key in request")}
......
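A hedged sketch of the self-assessment request payload after this change: only `options_selected` is required, since the submission UUID now comes from the block's own state rather than the request. The criterion names and option values are placeholders echoing the test fixtures, not part of this hunk.

import json

# Hypothetical payload for the self_assess handler; "options_selected" is the
# only required key now that the "submission_uuid" check above is removed.
payload = json.dumps({
    "options_selected": {
        "clarity": "Very clear",
        "precision": "Somewhat precise",
    }
})
print(payload)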
...@@ -38,7 +38,7 @@ ...@@ -38,7 +38,7 @@
"template": "openassessmentblock/response/oa_response.html", "template": "openassessmentblock/response/oa_response.html",
"context": { "context": {
"saved_response": "", "saved_response": "",
"save_status": "Unsaved draft", "save_status": "This response has not been saved.",
"submit_enabled": false, "submit_enabled": false,
"submission_due": "" "submission_due": ""
}, },
......
...@@ -15,11 +15,11 @@ describe("OpenAssessment.BaseView", function() { ...@@ -15,11 +15,11 @@ describe("OpenAssessment.BaseView", function() {
grade: readFixtures("oa_grade_complete.html") grade: readFixtures("oa_grade_complete.html")
}; };
this.peerAssess = function(submissionId, optionsSelected, feedback) { this.peerAssess = function(optionsSelected, feedback) {
return $.Deferred(function(defer) { defer.resolve(); }).promise(); return $.Deferred(function(defer) { defer.resolve(); }).promise();
}; };
this.selfAssess = function(submissionId, optionsSelected) { this.selfAssess = function(optionsSelected) {
return $.Deferred(function(defer) { defer.resolve(); }).promise(); return $.Deferred(function(defer) { defer.resolve(); }).promise();
}; };
...@@ -90,7 +90,7 @@ describe("OpenAssessment.BaseView", function() { ...@@ -90,7 +90,7 @@ describe("OpenAssessment.BaseView", function() {
var testError = 'Test failure contacting server message'; var testError = 'Test failure contacting server message';
loadSubviews(function() { loadSubviews(function() {
/* stub our selfAssess to fail */ /* stub our selfAssess to fail */
spyOn(server, 'selfAssess').andCallFake(function(submissionId, optionsSelected) { spyOn(server, 'selfAssess').andCallFake(function(optionsSelected) {
return $.Deferred(function(defer) { defer.rejectWith(server, [testError]); }).promise(); return $.Deferred(function(defer) { defer.rejectWith(server, [testError]); }).promise();
}); });
view.selfAssess(); view.selfAssess();
......
...@@ -69,7 +69,7 @@ describe("OpenAssessment.GradeView", function() { ...@@ -69,7 +69,7 @@ describe("OpenAssessment.GradeView", function() {
expect(server.feedbackText).toEqual('I disliked the feedback I received'); expect(server.feedbackText).toEqual('I disliked the feedback I received');
expect(server.feedbackOptions).toEqual([ expect(server.feedbackOptions).toEqual([
'These assessments were not useful.', 'These assessments were not useful.',
'I disagree with the ways that my peers assessed me.' 'I disagree with one or more of the peer assessments of my response.'
]); ]);
}); });
......
...@@ -63,21 +63,21 @@ describe("OpenAssessment.ResponseView", function() { ...@@ -63,21 +63,21 @@ describe("OpenAssessment.ResponseView", function() {
view.responseChanged(); view.responseChanged();
expect(view.submitEnabled()).toBe(false); expect(view.submitEnabled()).toBe(false);
expect(view.saveEnabled()).toBe(false); expect(view.saveEnabled()).toBe(false);
expect(view.saveStatus()).toContain('Unsaved draft'); expect(view.saveStatus()).toContain('This response has not been saved.');
// Response is whitespace --> save/submit buttons disabled // Response is whitespace --> save/submit buttons disabled
view.response(' \n \n '); view.response(' \n \n ');
view.responseChanged(); view.responseChanged();
expect(view.submitEnabled()).toBe(false); expect(view.submitEnabled()).toBe(false);
expect(view.saveEnabled()).toBe(false); expect(view.saveEnabled()).toBe(false);
expect(view.saveStatus()).toContain('Unsaved draft'); expect(view.saveStatus()).toContain('This response has not been saved.');
// Response is not blank --> submit button enabled // Response is not blank --> submit button enabled
view.response('Test response'); view.response('Test response');
view.responseChanged(); view.responseChanged();
expect(view.submitEnabled()).toBe(true); expect(view.submitEnabled()).toBe(true);
expect(view.saveEnabled()).toBe(true); expect(view.saveEnabled()).toBe(true);
expect(view.saveStatus()).toContain('Unsaved draft'); expect(view.saveStatus()).toContain('This response has not been saved.');
}); });
it("updates submit/save buttons and save status when the user saves a response", function() { it("updates submit/save buttons and save status when the user saves a response", function() {
...@@ -86,14 +86,14 @@ describe("OpenAssessment.ResponseView", function() { ...@@ -86,14 +86,14 @@ describe("OpenAssessment.ResponseView", function() {
view.save(); view.save();
expect(view.submitEnabled()).toBe(false); expect(view.submitEnabled()).toBe(false);
expect(view.saveEnabled()).toBe(false); expect(view.saveEnabled()).toBe(false);
expect(view.saveStatus()).toContain('Saved but not submitted'); expect(view.saveStatus()).toContain('saved but not submitted');
// Response is not blank --> submit button enabled // Response is not blank --> submit button enabled
view.response('Test response'); view.response('Test response');
view.save(); view.save();
expect(view.submitEnabled()).toBe(true); expect(view.submitEnabled()).toBe(true);
expect(view.saveEnabled()).toBe(false); expect(view.saveEnabled()).toBe(false);
expect(view.saveStatus()).toContain('Saved but not submitted'); expect(view.saveStatus()).toContain('saved but not submitted');
}); });
it("shows unsaved draft only when response text has changed", function() { it("shows unsaved draft only when response text has changed", function() {
...@@ -101,21 +101,21 @@ describe("OpenAssessment.ResponseView", function() { ...@@ -101,21 +101,21 @@ describe("OpenAssessment.ResponseView", function() {
view.response('Lorem ipsum'); view.response('Lorem ipsum');
view.save(); view.save();
expect(view.saveEnabled()).toBe(false); expect(view.saveEnabled()).toBe(false);
expect(view.saveStatus()).toContain('Saved but not submitted'); expect(view.saveStatus()).toContain('saved but not submitted');
// Keep the text the same, but trigger an update // Keep the text the same, but trigger an update
// Should still be saved // Should still be saved
view.response('Lorem ipsum'); view.response('Lorem ipsum');
view.responseChanged(); view.responseChanged();
expect(view.saveEnabled()).toBe(false); expect(view.saveEnabled()).toBe(false);
expect(view.saveStatus()).toContain('Saved but not submitted'); expect(view.saveStatus()).toContain('saved but not submitted');
// Change the text // Change the text
// This should cause it to change to unsaved draft // This should cause it to change to unsaved draft
view.response('changed '); view.response('changed ');
view.responseChanged(); view.responseChanged();
expect(view.saveEnabled()).toBe(true); expect(view.saveEnabled()).toBe(true);
expect(view.saveStatus()).toContain('Unsaved draft'); expect(view.saveStatus()).toContain('This response has not been saved.');
}); });
it("sends the saved submission to the server", function() { it("sends the saved submission to the server", function() {
......
...@@ -35,13 +35,7 @@ describe("OpenAssessment.Server", function() { ...@@ -35,13 +35,7 @@ describe("OpenAssessment.Server", function() {
var testString = ''; var testString = '';
for (i = 0; i < (testStringSize); i++) { testString += 'x'; } for (i = 0; i < (testStringSize); i++) { testString += 'x'; }
return testString; return testString;
} };
var getHugeStringError = function() {
// return a string that can be used with .toContain()
// "Response text is too large. Please reduce the size of your response and try to submit again.";
return "text is too large"
}
beforeEach(function() { beforeEach(function() {
// Create the server // Create the server
...@@ -103,7 +97,7 @@ describe("OpenAssessment.Server", function() { ...@@ -103,7 +97,7 @@ describe("OpenAssessment.Server", function() {
var success = false; var success = false;
var options = {clarity: "Very clear", precision: "Somewhat precise"}; var options = {clarity: "Very clear", precision: "Somewhat precise"};
server.peerAssess("abc1234", options, "Excellent job!").done(function() { server.peerAssess(options, "Excellent job!").done(function() {
success = true; success = true;
}); });
...@@ -112,7 +106,6 @@ describe("OpenAssessment.Server", function() { ...@@ -112,7 +106,6 @@ describe("OpenAssessment.Server", function() {
url: '/peer_assess', url: '/peer_assess',
type: "POST", type: "POST",
data: JSON.stringify({ data: JSON.stringify({
submission_uuid: "abc1234",
options_selected: options, options_selected: options,
feedback: "Excellent job!" feedback: "Excellent job!"
}) })
...@@ -185,7 +178,7 @@ describe("OpenAssessment.Server", function() { ...@@ -185,7 +178,7 @@ describe("OpenAssessment.Server", function() {
receivedMsg = msg; receivedMsg = msg;
}); });
expect(receivedMsg).toEqual("Could not contact server."); expect(receivedMsg).toContain("This section could not be loaded");
}); });
it("informs the caller of an Ajax error when sending a submission", function() { it("informs the caller of an Ajax error when sending a submission", function() {
...@@ -201,7 +194,7 @@ describe("OpenAssessment.Server", function() { ...@@ -201,7 +194,7 @@ describe("OpenAssessment.Server", function() {
); );
expect(receivedErrorCode).toEqual("AJAX"); expect(receivedErrorCode).toEqual("AJAX");
expect(receivedErrorMsg).toEqual("Could not contact server."); expect(receivedErrorMsg).toContain("This response could not be submitted");
}); });
it("confirms that very long submissions fail with an error without ajax", function() { it("confirms that very long submissions fail with an error without ajax", function() {
...@@ -215,7 +208,7 @@ describe("OpenAssessment.Server", function() { ...@@ -215,7 +208,7 @@ describe("OpenAssessment.Server", function() {
} }
); );
expect(receivedErrorCode).toEqual("submit"); expect(receivedErrorCode).toEqual("submit");
expect(receivedErrorMsg).toContain(getHugeStringError()); expect(receivedErrorMsg).toContain("This response is too long");
}); });
it("informs the caller of an server error when sending a submission", function() { it("informs the caller of an server error when sending a submission", function() {
...@@ -240,21 +233,21 @@ describe("OpenAssessment.Server", function() { ...@@ -240,21 +233,21 @@ describe("OpenAssessment.Server", function() {
server.save(testString).fail( server.save(testString).fail(
function(errorMsg) { receivedErrorMsg = errorMsg; } function(errorMsg) { receivedErrorMsg = errorMsg; }
); );
expect(receivedErrorMsg).toContain(getHugeStringError()); expect(receivedErrorMsg).toContain("This response is too long");
}); });
it("informs the caller of an AJAX error when sending a submission", function() { it("informs the caller of an AJAX error when saving a submission", function() {
stubAjax(false, null); stubAjax(false, null);
var receivedMsg = null; var receivedMsg = null;
server.save("Test").fail(function(errorMsg) { receivedMsg = errorMsg; }); server.save("Test").fail(function(errorMsg) { receivedMsg = errorMsg; });
expect(receivedMsg).toEqual('Could not contact server.'); expect(receivedMsg).toContain('This response could not be saved');
}); });
it("informs the caller of an AJAX error when sending a self assessment", function() { it("informs the caller of an AJAX error when sending a self assessment", function() {
stubAjax(false, null); stubAjax(false, null);
var receivedMsg = null; var receivedMsg = null;
server.selfAssess("Test").fail(function(errorMsg) { receivedMsg = errorMsg; }); server.selfAssess("Test").fail(function(errorMsg) { receivedMsg = errorMsg; });
expect(receivedMsg).toEqual('Could not contact server.'); expect(receivedMsg).toContain('This assessment could not be submitted');
}); });
it("informs the caller of a server error when sending a submission", function() { it("informs the caller of a server error when sending a submission", function() {
...@@ -272,7 +265,7 @@ describe("OpenAssessment.Server", function() { ...@@ -272,7 +265,7 @@ describe("OpenAssessment.Server", function() {
receivedMsg = msg; receivedMsg = msg;
}); });
expect(receivedMsg).toEqual("Could not contact server."); expect(receivedMsg).toContain("This problem could not be loaded");
}); });
it("informs the caller of an Ajax error when updating XML", function() { it("informs the caller of an Ajax error when updating XML", function() {
...@@ -283,7 +276,7 @@ describe("OpenAssessment.Server", function() { ...@@ -283,7 +276,7 @@ describe("OpenAssessment.Server", function() {
receivedMsg = msg; receivedMsg = msg;
}); });
expect(receivedMsg).toEqual("Could not contact server."); expect(receivedMsg).toContain("This problem could not be saved");
}); });
it("informs the caller of a server error when loading XML", function() { it("informs the caller of a server error when loading XML", function() {
...@@ -312,12 +305,12 @@ describe("OpenAssessment.Server", function() { ...@@ -312,12 +305,12 @@ describe("OpenAssessment.Server", function() {
var options = {clarity: "Very clear", precision: "Somewhat precise"}; var options = {clarity: "Very clear", precision: "Somewhat precise"};
var receivedErrorMsg = ""; var receivedErrorMsg = "";
var testString = getHugeTestString(); var testString = getHugeTestString();
server.peerAssess("abc1234", options, testString).fail( server.peerAssess(options, testString).fail(
function(errorMsg) { function(errorMsg) {
receivedErrorMsg = errorMsg; receivedErrorMsg = errorMsg;
} }
); );
expect(receivedErrorMsg).toContain(getHugeStringError()); expect(receivedErrorMsg).toContain("The comments on this assessment are too long");
}); });
it("informs the caller of a server error when sending a peer assessment", function() { it("informs the caller of a server error when sending a peer assessment", function() {
...@@ -325,7 +318,7 @@ describe("OpenAssessment.Server", function() { ...@@ -325,7 +318,7 @@ describe("OpenAssessment.Server", function() {
var receivedMsg = null; var receivedMsg = null;
var options = {clarity: "Very clear", precision: "Somewhat precise"}; var options = {clarity: "Very clear", precision: "Somewhat precise"};
server.peerAssess("abc1234", options, "Excellent job!").fail(function(msg) { server.peerAssess(options, "Excellent job!").fail(function(msg) {
receivedMsg = msg; receivedMsg = msg;
}); });
...@@ -337,11 +330,11 @@ describe("OpenAssessment.Server", function() { ...@@ -337,11 +330,11 @@ describe("OpenAssessment.Server", function() {
var receivedMsg = null; var receivedMsg = null;
var options = {clarity: "Very clear", precision: "Somewhat precise"}; var options = {clarity: "Very clear", precision: "Somewhat precise"};
server.peerAssess("abc1234", options, "Excellent job!").fail(function(msg) { server.peerAssess(options, "Excellent job!").fail(function(msg) {
receivedMsg = msg; receivedMsg = msg;
}); });
expect(receivedMsg).toEqual("Could not contact server."); expect(receivedMsg).toContain("This assessment could not be submitted");
}); });
it("informs the caller of an AJAX error when checking whether the XBlock has been released", function() { it("informs the caller of an AJAX error when checking whether the XBlock has been released", function() {
...@@ -352,7 +345,7 @@ describe("OpenAssessment.Server", function() { ...@@ -352,7 +345,7 @@ describe("OpenAssessment.Server", function() {
receivedMsg = errMsg; receivedMsg = errMsg;
}); });
expect(receivedMsg).toEqual("Could not contact server."); expect(receivedMsg).toContain("The server could not be contacted");
}); });
...@@ -376,7 +369,7 @@ describe("OpenAssessment.Server", function() { ...@@ -376,7 +369,7 @@ describe("OpenAssessment.Server", function() {
receivedErrorMsg = errorMsg; receivedErrorMsg = errorMsg;
} }
); );
expect(receivedErrorMsg).toContain(getHugeStringError()); expect(receivedErrorMsg).toContain("This feedback is too long");
}); });
it("informs the caller of an AJAX error when sending feedback on submission", function() { it("informs the caller of an AJAX error when sending feedback on submission", function() {
...@@ -387,7 +380,7 @@ describe("OpenAssessment.Server", function() { ...@@ -387,7 +380,7 @@ describe("OpenAssessment.Server", function() {
server.submitFeedbackOnAssessment("test feedback", options).fail( server.submitFeedbackOnAssessment("test feedback", options).fail(
function(errMsg) { receivedMsg = errMsg; } function(errMsg) { receivedMsg = errMsg; }
); );
expect(receivedMsg).toEqual("Could not contact server."); expect(receivedMsg).toContain("This feedback could not be submitted");
}); });
it("informs the caller of a server error when sending feedback on submission", function() { it("informs the caller of a server error when sending feedback on submission", function() {
......
...@@ -245,7 +245,6 @@ OpenAssessment.BaseView.prototype = { ...@@ -245,7 +245,6 @@ OpenAssessment.BaseView.prototype = {
*/ */
peerAssessRequest: function(successFunction) { peerAssessRequest: function(successFunction) {
// Retrieve assessment info from the DOM // Retrieve assessment info from the DOM
var submissionId = $("#peer_submission_uuid", this.element)[0].innerHTML.trim();
var optionsSelected = {}; var optionsSelected = {};
$("#peer-assessment--001__assessment input[type=radio]:checked", this.element).each( $("#peer-assessment--001__assessment input[type=radio]:checked", this.element).each(
function(index, sel) { function(index, sel) {
...@@ -257,7 +256,7 @@ OpenAssessment.BaseView.prototype = { ...@@ -257,7 +256,7 @@ OpenAssessment.BaseView.prototype = {
// Send the assessment to the server // Send the assessment to the server
var view = this; var view = this;
this.toggleActionError('peer', null); this.toggleActionError('peer', null);
this.server.peerAssess(submissionId, optionsSelected, feedback).done( this.server.peerAssess(optionsSelected, feedback).done(
successFunction successFunction
).fail(function(errMsg) { ).fail(function(errMsg) {
view.toggleActionError('peer', errMsg); view.toggleActionError('peer', errMsg);
...@@ -269,7 +268,6 @@ OpenAssessment.BaseView.prototype = { ...@@ -269,7 +268,6 @@ OpenAssessment.BaseView.prototype = {
**/ **/
selfAssess: function() { selfAssess: function() {
// Retrieve self-assessment info from the DOM // Retrieve self-assessment info from the DOM
var submissionId = $("#self_submission_uuid", this.element)[0].innerHTML.trim();
var optionsSelected = {}; var optionsSelected = {};
$("#self-assessment--001__assessment input[type=radio]:checked", this.element).each( $("#self-assessment--001__assessment input[type=radio]:checked", this.element).each(
function(index, sel) { function(index, sel) {
...@@ -280,7 +278,7 @@ OpenAssessment.BaseView.prototype = { ...@@ -280,7 +278,7 @@ OpenAssessment.BaseView.prototype = {
// Send the assessment to the server // Send the assessment to the server
var view = this; var view = this;
this.toggleActionError('self', null); this.toggleActionError('self', null);
this.server.selfAssess(submissionId, optionsSelected).done( this.server.selfAssess(optionsSelected).done(
function() { function() {
view.renderPeerAssessmentStep(); view.renderPeerAssessmentStep();
view.renderSelfAssessmentStep(); view.renderSelfAssessmentStep();
......
...@@ -87,7 +87,7 @@ OpenAssessment.StudioView.prototype = { ...@@ -87,7 +87,7 @@ OpenAssessment.StudioView.prototype = {
executed if the user confirms the update. executed if the user confirms the update.
**/ **/
confirmPostReleaseUpdate: function(onConfirm) { confirmPostReleaseUpdate: function(onConfirm) {
var msg = "This problem has already been released. Any changes will apply only to future assessments."; var msg = "This problem has already been released. Any changes will apply only to future assessments.";
// TODO: classier confirm dialog // TODO: classier confirm dialog
if (confirm(msg)) { onConfirm(); } if (confirm(msg)) { onConfirm(); }
}, },
......
...@@ -141,7 +141,7 @@ OpenAssessment.ResponseView.prototype = { ...@@ -141,7 +141,7 @@ OpenAssessment.ResponseView.prototype = {
} else { } else {
// Setting the HTML will overwrite the screen reader tag, // Setting the HTML will overwrite the screen reader tag,
// so prepend it to the message. // so prepend it to the message.
sel.html('<span class="sr">Your Working Submission Status:</span>\n' + msg); sel.html('<span class="sr">Status of Your Response:</span>\n' + msg);
} }
}, },
...@@ -177,7 +177,7 @@ OpenAssessment.ResponseView.prototype = { ...@@ -177,7 +177,7 @@ OpenAssessment.ResponseView.prototype = {
// Update the save button and status only if the response has changed // Update the save button and status only if the response has changed
if ($.trim(this.savedResponse) !== currentResponse) { if ($.trim(this.savedResponse) !== currentResponse) {
this.saveEnabled(isBlank); this.saveEnabled(isBlank);
this.saveStatus('Unsaved draft'); this.saveStatus('This response has not been saved.');
} }
}, },
...@@ -201,7 +201,7 @@ OpenAssessment.ResponseView.prototype = { ...@@ -201,7 +201,7 @@ OpenAssessment.ResponseView.prototype = {
view.submitEnabled(currentResponse !== ''); view.submitEnabled(currentResponse !== '');
if (currentResponse == savedResponse) { if (currentResponse == savedResponse) {
view.saveEnabled(false); view.saveEnabled(false);
view.saveStatus("Saved but not submitted"); view.saveStatus("This response has been saved but not submitted.");
} }
}).fail(function(errMsg) { }).fail(function(errMsg) {
view.saveStatus('Error'); view.saveStatus('Error');
......
...@@ -69,7 +69,7 @@ OpenAssessment.Server.prototype = { ...@@ -69,7 +69,7 @@ OpenAssessment.Server.prototype = {
}).done(function(data) { }).done(function(data) {
defer.resolveWith(this, [data]); defer.resolveWith(this, [data]);
}).fail(function(data) { }).fail(function(data) {
defer.rejectWith(this, ['Could not contact server.']); defer.rejectWith(this, ['This section could not be loaded.']);
}); });
}).promise(); }).promise();
}, },
...@@ -100,7 +100,7 @@ OpenAssessment.Server.prototype = { ...@@ -100,7 +100,7 @@ OpenAssessment.Server.prototype = {
}).done(function(data) { }).done(function(data) {
defer.resolveWith(this, [data]); defer.resolveWith(this, [data]);
}).fail(function(data) { }).fail(function(data) {
defer.rejectWith(this, ['Could not contact server.']); defer.rejectWith(this, ['This section could not be loaded.']);
}); });
}).promise(); }).promise();
}, },
...@@ -119,7 +119,7 @@ OpenAssessment.Server.prototype = { ...@@ -119,7 +119,7 @@ OpenAssessment.Server.prototype = {
var url = this.url('submit'); var url = this.url('submit');
if (submission.length > this.maxInputSize) { if (submission.length > this.maxInputSize) {
return $.Deferred(function(defer) { return $.Deferred(function(defer) {
defer.rejectWith(this, ["submit", "Response text is too large. Please reduce the size of your response and try to submit again."]); defer.rejectWith(this, ["submit", "This response is too long. Please shorten the response and try to submit it again."]);
}).promise(); }).promise();
} }
return $.Deferred(function(defer) { return $.Deferred(function(defer) {
...@@ -140,7 +140,7 @@ OpenAssessment.Server.prototype = { ...@@ -140,7 +140,7 @@ OpenAssessment.Server.prototype = {
defer.rejectWith(this, [errorNum, errorMsg]); defer.rejectWith(this, [errorNum, errorMsg]);
} }
}).fail(function(data) { }).fail(function(data) {
defer.rejectWith(this, ["AJAX", "Could not contact server."]); defer.rejectWith(this, ["AJAX", "This response could not be submitted."]);
}); });
}).promise(); }).promise();
}, },
...@@ -159,7 +159,7 @@ OpenAssessment.Server.prototype = { ...@@ -159,7 +159,7 @@ OpenAssessment.Server.prototype = {
var url = this.url('save_submission'); var url = this.url('save_submission');
if (submission.length > this.maxInputSize) { if (submission.length > this.maxInputSize) {
return $.Deferred(function(defer) { return $.Deferred(function(defer) {
defer.rejectWith(this, ["Response text is too large. Please reduce the size of your response and try to submit again."]); defer.rejectWith(this, ["This response is too long. Please shorten the response and try to save it again."]);
}).promise(); }).promise();
} }
return $.Deferred(function(defer) { return $.Deferred(function(defer) {
...@@ -171,7 +171,7 @@ OpenAssessment.Server.prototype = { ...@@ -171,7 +171,7 @@ OpenAssessment.Server.prototype = {
if (data.success) { defer.resolve(); } if (data.success) { defer.resolve(); }
else { defer.rejectWith(this, [data.msg]); } else { defer.rejectWith(this, [data.msg]); }
}).fail(function(data) { }).fail(function(data) {
defer.rejectWith(this, ["Could not contact server."]); defer.rejectWith(this, ["This response could not be saved."]);
}); });
}).promise(); }).promise();
}, },
...@@ -199,7 +199,7 @@ OpenAssessment.Server.prototype = { ...@@ -199,7 +199,7 @@ OpenAssessment.Server.prototype = {
var url = this.url('submit_feedback'); var url = this.url('submit_feedback');
if (text.length > this.maxInputSize) { if (text.length > this.maxInputSize) {
return $.Deferred(function(defer) { return $.Deferred(function(defer) {
defer.rejectWith(this, ["Response text is too large. Please reduce the size of your response and try to submit again."]); defer.rejectWith(this, ["This feedback is too long. Please shorten your feedback and try to submit it again."]);
}).promise(); }).promise();
} }
var payload = JSON.stringify({ var payload = JSON.stringify({
...@@ -213,7 +213,7 @@ OpenAssessment.Server.prototype = { ...@@ -213,7 +213,7 @@ OpenAssessment.Server.prototype = {
else { defer.rejectWith(this, [data.msg]); } else { defer.rejectWith(this, [data.msg]); }
} }
).fail(function(data) { ).fail(function(data) {
defer.rejectWith(this, ['Could not contact server.']); defer.rejectWith(this, ['This feedback could not be submitted.']);
}); });
}).promise(); }).promise();
}, },
...@@ -221,7 +221,6 @@ OpenAssessment.Server.prototype = { ...@@ -221,7 +221,6 @@ OpenAssessment.Server.prototype = {
/** /**
Send a peer assessment to the XBlock. Send a peer assessment to the XBlock.
Args: Args:
submissionId (string): The UUID of the submission.
optionsSelected (object literal): Keys are criteria names, optionsSelected (object literal): Keys are criteria names,
values are the option text the user selected for the criterion. values are the option text the user selected for the criterion.
feedback (string): Written feedback on the submission. feedback (string): Written feedback on the submission.
...@@ -233,21 +232,20 @@ OpenAssessment.Server.prototype = { ...@@ -233,21 +232,20 @@ OpenAssessment.Server.prototype = {
Example: Example:
var options = { clarity: "Very clear", precision: "Somewhat precise" }; var options = { clarity: "Very clear", precision: "Somewhat precise" };
var feedback = "Good job!"; var feedback = "Good job!";
server.peerAssess("abc123", options, feedback).done( server.peerAssess(options, feedback).done(
function() { console.log("Success!"); } function() { console.log("Success!"); }
).fail( ).fail(
function(errorMsg) { console.log(errorMsg); } function(errorMsg) { console.log(errorMsg); }
); );
**/ **/
peerAssess: function(submissionId, optionsSelected, feedback) { peerAssess: function(optionsSelected, feedback) {
var url = this.url('peer_assess'); var url = this.url('peer_assess');
if (feedback.length > this.maxInputSize) { if (feedback.length > this.maxInputSize) {
return $.Deferred(function(defer) { return $.Deferred(function(defer) {
defer.rejectWith(this, ["Response text is too large. Please reduce the size of your response and try to submit again."]); defer.rejectWith(this, ["The comments on this assessment are too long. Please shorten your comments and try to submit them again."]);
}).promise(); }).promise();
} }
var payload = JSON.stringify({ var payload = JSON.stringify({
submission_uuid: submissionId,
options_selected: optionsSelected, options_selected: optionsSelected,
feedback: feedback feedback: feedback
}); });
...@@ -262,7 +260,7 @@ OpenAssessment.Server.prototype = { ...@@ -262,7 +260,7 @@ OpenAssessment.Server.prototype = {
} }
} }
).fail(function(data) { ).fail(function(data) {
defer.rejectWith(this, ['Could not contact server.']); defer.rejectWith(this, ['This assessment could not be submitted.']);
}); });
}).promise(); }).promise();
}, },
...@@ -271,7 +269,6 @@ OpenAssessment.Server.prototype = { ...@@ -271,7 +269,6 @@ OpenAssessment.Server.prototype = {
Send a self-assessment to the XBlock. Send a self-assessment to the XBlock.
Args: Args:
submissionId (string): The UUID of the submission.
optionsSelected (object literal): Keys are criteria names, optionsSelected (object literal): Keys are criteria names,
values are the option text the user selected for the criterion. values are the option text the user selected for the criterion.
...@@ -281,16 +278,15 @@ OpenAssessment.Server.prototype = { ...@@ -281,16 +278,15 @@ OpenAssessment.Server.prototype = {
Example: Example:
var options = { clarity: "Very clear", precision: "Somewhat precise" }; var options = { clarity: "Very clear", precision: "Somewhat precise" };
server.selfAssess("abc123", options).done( server.selfAssess(options).done(
function() { console.log("Success!"); } function() { console.log("Success!"); }
).fail( ).fail(
function(errorMsg) { console.log(errorMsg); } function(errorMsg) { console.log(errorMsg); }
); );
**/ **/
selfAssess: function(submissionId, optionsSelected) { selfAssess: function(optionsSelected) {
var url = this.url('self_assess'); var url = this.url('self_assess');
var payload = JSON.stringify({ var payload = JSON.stringify({
submission_uuid: submissionId,
options_selected: optionsSelected options_selected: optionsSelected
}); });
return $.Deferred(function(defer) { return $.Deferred(function(defer) {
...@@ -304,7 +300,7 @@ OpenAssessment.Server.prototype = { ...@@ -304,7 +300,7 @@ OpenAssessment.Server.prototype = {
} }
} }
).fail(function(data) { ).fail(function(data) {
defer.rejectWith(this, ['Could not contact server.']); defer.rejectWith(this, ['This assessment could not be submitted.']);
}); });
}); });
}, },
...@@ -332,7 +328,7 @@ OpenAssessment.Server.prototype = { ...@@ -332,7 +328,7 @@ OpenAssessment.Server.prototype = {
if (data.success) { defer.resolveWith(this, [data.xml]); } if (data.success) { defer.resolveWith(this, [data.xml]); }
else { defer.rejectWith(this, [data.msg]); } else { defer.rejectWith(this, [data.msg]); }
}).fail(function(data) { }).fail(function(data) {
defer.rejectWith(this, ['Could not contact server.']); defer.rejectWith(this, ['This problem could not be loaded.']);
}); });
}).promise(); }).promise();
}, },
...@@ -361,7 +357,7 @@ OpenAssessment.Server.prototype = { ...@@ -361,7 +357,7 @@ OpenAssessment.Server.prototype = {
if (data.success) { defer.resolve(); } if (data.success) { defer.resolve(); }
else { defer.rejectWith(this, [data.msg]); } else { defer.rejectWith(this, [data.msg]); }
}).fail(function(data) { }).fail(function(data) {
defer.rejectWith(this, ['Could not contact server.']); defer.rejectWith(this, ['This problem could not be saved.']);
}); });
}).promise(); }).promise();
}, },
...@@ -391,7 +387,7 @@ OpenAssessment.Server.prototype = { ...@@ -391,7 +387,7 @@ OpenAssessment.Server.prototype = {
if (data.success) { defer.resolveWith(this, [data.is_released]); } if (data.success) { defer.resolveWith(this, [data.is_released]); }
else { defer.rejectWith(this, [data.msg]); } else { defer.rejectWith(this, [data.msg]); }
}).fail(function(data) { }).fail(function(data) {
defer.rejectWith(this, ["Could not contact server."]); defer.rejectWith(this, ["The server could not be contacted."]);
}); });
}).promise(); }).promise();
} }
......
...@@ -74,7 +74,7 @@ class SubmissionMixin(object): ...@@ -74,7 +74,7 @@ class SubmissionMixin(object):
status_tag = 'EBADFORM' status_tag = 'EBADFORM'
status_text = unicode(err.field_errors) status_text = unicode(err.field_errors)
except (api.SubmissionError, workflow_api.AssessmentWorkflowError): except (api.SubmissionError, workflow_api.AssessmentWorkflowError):
logger.exception("Error occurred while submitting.") logger.exception("This response was not submitted.")
status_tag = 'EUNKNOWN' status_tag = 'EUNKNOWN'
else: else:
status = True status = True
...@@ -111,11 +111,11 @@ class SubmissionMixin(object): ...@@ -111,11 +111,11 @@ class SubmissionMixin(object):
{"saved_response": self.saved_response} {"saved_response": self.saved_response}
) )
except: except:
return {'success': False, 'msg': _(u"Could not save response submission")} return {'success': False, 'msg': _(u"This response could not be saved.")}
else: else:
return {'success': True, 'msg': u''} return {'success': True, 'msg': u''}
else: else:
return {'success': False, 'msg': _(u"Missing required key 'submission'")} return {'success': False, 'msg': _(u"This response was not submitted.")}
def create_submission(self, student_item_dict, student_sub): def create_submission(self, student_item_dict, student_sub):
...@@ -173,7 +173,7 @@ class SubmissionMixin(object): ...@@ -173,7 +173,7 @@ class SubmissionMixin(object):
Returns: Returns:
unicode unicode
""" """
return _(u'Saved but not submitted') if self.has_saved else _(u'Unsaved draft') return _(u'This response has been saved but not submitted.') if self.has_saved else _(u'This response has not been saved.')
@XBlock.handler @XBlock.handler
def render_submission(self, data, suffix=''): def render_submission(self, data, suffix=''):
......
...@@ -80,7 +80,6 @@ class TestGrade(XBlockHandlerTestCase): ...@@ -80,7 +80,6 @@ class TestGrade(XBlockHandlerTestCase):
# Verify that we're on the right template # Verify that we're on the right template
self.assertIn(u'not completed', resp.decode('utf-8').lower()) self.assertIn(u'not completed', resp.decode('utf-8').lower())
self.assertIn(u'self assessment', resp.decode('utf-8').lower())
@scenario('data/grade_incomplete_scenario.xml', user_id='Daniels') @scenario('data/grade_incomplete_scenario.xml', user_id='Daniels')
def test_grade_incomplete_missing_peer(self, xblock): def test_grade_incomplete_missing_peer(self, xblock):
...@@ -92,7 +91,6 @@ class TestGrade(XBlockHandlerTestCase): ...@@ -92,7 +91,6 @@ class TestGrade(XBlockHandlerTestCase):
# Verify that we're on the right template # Verify that we're on the right template
self.assertIn(u'not completed', resp.decode('utf-8').lower()) self.assertIn(u'not completed', resp.decode('utf-8').lower())
self.assertIn(u'peer assessment', resp.decode('utf-8').lower())
@scenario('data/grade_scenario.xml', user_id='Greggs') @scenario('data/grade_scenario.xml', user_id='Greggs')
def test_submit_feedback(self, xblock): def test_submit_feedback(self, xblock):
...@@ -190,7 +188,7 @@ class TestGrade(XBlockHandlerTestCase): ...@@ -190,7 +188,7 @@ class TestGrade(XBlockHandlerTestCase):
scorer_sub = sub_api.create_submission(scorer, {'text': submission_text}) scorer_sub = sub_api.create_submission(scorer, {'text': submission_text})
workflow_api.create_workflow(scorer_sub['uuid']) workflow_api.create_workflow(scorer_sub['uuid'])
submission = peer_api.get_submission_to_assess(scorer, len(peers)) submission = peer_api.get_submission_to_assess(scorer_sub['uuid'], len(peers))
# Store the scorer's submission so our user can assess it later # Store the scorer's submission so our user can assess it later
scorer_submissions.append(scorer_sub) scorer_submissions.append(scorer_sub)
...@@ -205,7 +203,7 @@ class TestGrade(XBlockHandlerTestCase): ...@@ -205,7 +203,7 @@ class TestGrade(XBlockHandlerTestCase):
# Have our user make assessments (so she can get a score) # Have our user make assessments (so she can get a score)
for asmnt in peer_assessments: for asmnt in peer_assessments:
new_submission = peer_api.get_submission_to_assess(student_item, len(peers)) new_submission = peer_api.get_submission_to_assess(submission['uuid'], len(peers))
peer_api.create_assessment( peer_api.create_assessment(
submission['uuid'], student_id, asmnt, {'criteria': xblock.rubric_criteria}, submission['uuid'], student_id, asmnt, {'criteria': xblock.rubric_criteria},
xblock.get_assessment_module('peer-assessment')['must_be_graded_by'] xblock.get_assessment_module('peer-assessment')['must_be_graded_by']
......
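For orientation, a hedged sketch of the peer API flow these tests now exercise: the scorer is identified by the UUID of their own submission rather than by a student_item dict. The call signatures follow the lines above; the input values, assessment dict, and rubric are placeholders, not taken from the diff.

from submissions import api as sub_api
from openassessment.workflow import api as workflow_api
from openassessment.assessment import peer_api

# Placeholder student item and response text (illustrative only).
scorer = {"student_id": "Greggs", "item_id": "item", "course_id": "course", "item_type": "openassessment"}
scorer_sub = sub_api.create_submission(scorer, {"text": "my response"})
workflow_api.create_workflow(scorer_sub["uuid"])

# Fetch a peer submission using the scorer's own submission UUID, then assess it.
peer_api.get_submission_to_assess(scorer_sub["uuid"], 3)
peer_api.create_assessment(
    scorer_sub["uuid"],            # scorer's own submission UUID
    scorer["student_id"],          # scorer's student id
    {"options_selected": {}},      # placeholder assessment dict
    {"criteria": []},              # placeholder rubric definition
    3,                             # must_be_graded_by
)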
...@@ -6,9 +6,6 @@ from collections import namedtuple ...@@ -6,9 +6,6 @@ from collections import namedtuple
import copy import copy
import json import json
import mock
import submissions.api as sub_api
from openassessment.workflow import api as workflow_api
from openassessment.assessment import peer_api from openassessment.assessment import peer_api
from .base import XBlockHandlerTestCase, scenario from .base import XBlockHandlerTestCase, scenario
...@@ -37,7 +34,7 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -37,7 +34,7 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Now Hal will assess Sally. # Now Hal will assess Sally.
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
sub = peer_api.get_submission_to_assess(hal_student_item, 1) peer_api.get_submission_to_assess(hal_submission['uuid'], 1)
peer_api.create_assessment( peer_api.create_assessment(
hal_submission['uuid'], hal_submission['uuid'],
hal_student_item['student_id'], hal_student_item['student_id'],
...@@ -48,7 +45,7 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -48,7 +45,7 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Now Sally will assess Hal. # Now Sally will assess Hal.
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
sub = peer_api.get_submission_to_assess(sally_student_item, 1) peer_api.get_submission_to_assess(sally_submission['uuid'], 1)
peer_api.create_assessment( peer_api.create_assessment(
sally_submission['uuid'], sally_submission['uuid'],
sally_student_item['student_id'], sally_student_item['student_id'],
...@@ -86,8 +83,8 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -86,8 +83,8 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Create a submission for the scorer (required before assessing another student) # Create a submission for the scorer (required before assessing another student)
another_student = copy.deepcopy(student_item) another_student = copy.deepcopy(student_item)
another_student['student_id'] = "Bob" another_student['student_id'] = "Bob"
xblock.create_submission(another_student, self.SUBMISSION) another_submission = xblock.create_submission(another_student, self.SUBMISSION)
peer_api.get_submission_to_assess(another_student, 3) peer_api.get_submission_to_assess(another_submission['uuid'], 3)
# Submit an assessment and expect a successful response # Submit an assessment and expect a successful response
...@@ -126,8 +123,8 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -126,8 +123,8 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Create a submission for the scorer (required before assessing another student) # Create a submission for the scorer (required before assessing another student)
another_student = copy.deepcopy(student_item) another_student = copy.deepcopy(student_item)
another_student['student_id'] = "Bob" another_student['student_id'] = "Bob"
xblock.create_submission(another_student, self.SUBMISSION) another_sub = xblock.create_submission(another_student, self.SUBMISSION)
peer_api.get_submission_to_assess(another_student, 3) peer_api.get_submission_to_assess(another_sub['uuid'], 3)
# Submit an assessment and expect a successful response # Submit an assessment and expect a successful response
...@@ -160,7 +157,7 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -160,7 +157,7 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Create a submission for the scorer (required before assessing another student) # Create a submission for the scorer (required before assessing another student)
another_student = copy.deepcopy(student_item) another_student = copy.deepcopy(student_item)
another_student['student_id'] = "Bob" another_student['student_id'] = "Bob"
another_submission = xblock.create_submission(another_student, self.SUBMISSION) xblock.create_submission(another_student, self.SUBMISSION)
# Submit an assessment, but mutate the options selected so they do NOT match the rubric # Submit an assessment, but mutate the options selected so they do NOT match the rubric
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
...@@ -211,7 +208,8 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -211,7 +208,8 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Now Hal will assess Sally. # Now Hal will assess Sally.
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
sally_sub = peer_api.get_submission_to_assess(hal_student_item, 1) sally_sub = peer_api.get_submission_to_assess(hal_submission['uuid'], 1)
assessment['submission_uuid'] = sally_sub['uuid']
peer_api.create_assessment( peer_api.create_assessment(
hal_submission['uuid'], hal_submission['uuid'],
hal_student_item['student_id'], hal_student_item['student_id'],
...@@ -222,7 +220,8 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -222,7 +220,8 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Now Sally will assess Hal. # Now Sally will assess Hal.
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
hal_sub = peer_api.get_submission_to_assess(sally_student_item, 1) hal_sub = peer_api.get_submission_to_assess(sally_submission['uuid'], 1)
assessment['submission_uuid'] = hal_sub['uuid']
peer_api.create_assessment( peer_api.create_assessment(
sally_submission['uuid'], sally_submission['uuid'],
sally_student_item['student_id'], sally_student_item['student_id'],
...@@ -243,9 +242,6 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -243,9 +242,6 @@ class TestPeerAssessment(XBlockHandlerTestCase):
self.assertIsNotNone(peer_response) self.assertIsNotNone(peer_response)
self.assertNotIn(submission["answer"]["text"].encode('utf-8'), peer_response.body) self.assertNotIn(submission["answer"]["text"].encode('utf-8'), peer_response.body)
hal_response = "Hal".encode('utf-8') in peer_response.body
sally_response = "Sally".encode('utf-8') in peer_response.body
peer_api.create_assessment( peer_api.create_assessment(
submission['uuid'], submission['uuid'],
student_item['student_id'], student_item['student_id'],
...@@ -275,7 +271,7 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -275,7 +271,7 @@ class TestPeerAssessment(XBlockHandlerTestCase):
peer_response = xblock.render_peer_assessment(request) peer_response = xblock.render_peer_assessment(request)
self.assertIsNotNone(peer_response) self.assertIsNotNone(peer_response)
self.assertNotIn(submission["answer"]["text"].encode('utf-8'), peer_response.body) self.assertNotIn(submission["answer"]["text"].encode('utf-8'), peer_response.body)
self.assertIn("Congratulations".encode('utf-8'), peer_response.body) self.assertIn("Peer Assessments Complete", peer_response.body)
@scenario('data/peer_assessment_scenario.xml', user_id='Bob') @scenario('data/peer_assessment_scenario.xml', user_id='Bob')
def test_peer_unavailable(self, xblock): def test_peer_unavailable(self, xblock):
......
...@@ -14,7 +14,7 @@ class SaveResponseTest(XBlockHandlerTestCase): ...@@ -14,7 +14,7 @@ class SaveResponseTest(XBlockHandlerTestCase):
def test_default_saved_response_blank(self, xblock): def test_default_saved_response_blank(self, xblock):
resp = self.request(xblock, 'render_submission', json.dumps({})) resp = self.request(xblock, 'render_submission', json.dumps({}))
self.assertIn('<textarea id="submission__answer__value" placeholder=""></textarea>', resp) self.assertIn('<textarea id="submission__answer__value" placeholder=""></textarea>', resp)
self.assertIn('Unsaved draft', resp) self.assertIn('response has not been saved', resp)
@ddt.file_data('data/save_responses.json') @ddt.file_data('data/save_responses.json')
@scenario('data/save_scenario.xml', user_id="Perleman") @scenario('data/save_scenario.xml', user_id="Perleman")
...@@ -32,7 +32,7 @@ class SaveResponseTest(XBlockHandlerTestCase): ...@@ -32,7 +32,7 @@ class SaveResponseTest(XBlockHandlerTestCase):
submitted=submission_text submitted=submission_text
) )
self.assertIn(expected_html, resp.decode('utf-8')) self.assertIn(expected_html, resp.decode('utf-8'))
self.assertIn('Saved but not submitted', resp) self.assertIn('saved but not submitted', resp.lower())
@scenario('data/save_scenario.xml', user_id="Valchek") @scenario('data/save_scenario.xml', user_id="Valchek")
def test_overwrite_saved_response(self, xblock): def test_overwrite_saved_response(self, xblock):
...@@ -57,4 +57,4 @@ class SaveResponseTest(XBlockHandlerTestCase): ...@@ -57,4 +57,4 @@ class SaveResponseTest(XBlockHandlerTestCase):
def test_missing_submission_key(self, xblock): def test_missing_submission_key(self, xblock):
resp = self.request(xblock, 'save_submission', json.dumps({}), response_format="json") resp = self.request(xblock, 'save_submission', json.dumps({}), response_format="json")
self.assertFalse(resp['success']) self.assertFalse(resp['success'])
self.assertIn('submission', resp['msg']) self.assertIn('not submitted', resp['msg'])
...@@ -95,13 +95,6 @@ class TestSelfAssessment(XBlockHandlerTestCase): ...@@ -95,13 +95,6 @@ class TestSelfAssessment(XBlockHandlerTestCase):
@scenario('data/self_assessment_scenario.xml', user_id='Bob') @scenario('data/self_assessment_scenario.xml', user_id='Bob')
def test_self_assess_handler_missing_keys(self, xblock): def test_self_assess_handler_missing_keys(self, xblock):
# Missing submission_uuid
assessment = copy.deepcopy(self.ASSESSMENT)
del assessment['submission_uuid']
resp = self.request(xblock, 'self_assess', json.dumps(assessment), response_format='json')
self.assertFalse(resp['success'])
self.assertIn('submission_uuid', resp['msg'])
# Missing options_selected # Missing options_selected
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
del assessment['options_selected'] del assessment['options_selected']
......
...@@ -112,7 +112,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase): ...@@ -112,7 +112,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
xblock, 'openassessmentblock/response/oa_response.html', xblock, 'openassessmentblock/response/oa_response.html',
{ {
'saved_response': '', 'saved_response': '',
'save_status': 'Unsaved draft', 'save_status': 'This response has not been saved.',
'submit_enabled': False, 'submit_enabled': False,
'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc), 'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
} }
...@@ -124,7 +124,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase): ...@@ -124,7 +124,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
xblock, 'openassessmentblock/response/oa_response.html', xblock, 'openassessmentblock/response/oa_response.html',
{ {
'saved_response': '', 'saved_response': '',
'save_status': 'Unsaved draft', 'save_status': 'This response has not been saved.',
'submit_enabled': False, 'submit_enabled': False,
} }
) )
...@@ -140,7 +140,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase): ...@@ -140,7 +140,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
xblock, 'openassessmentblock/response/oa_response.html', xblock, 'openassessmentblock/response/oa_response.html',
{ {
'saved_response': 'A man must have a code', 'saved_response': 'A man must have a code',
'save_status': 'Saved but not submitted', 'save_status': 'This response has been saved but not submitted.',
'submit_enabled': True, 'submit_enabled': True,
'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc), 'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
} }
...@@ -231,7 +231,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase): ...@@ -231,7 +231,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
def test_integration(self, xblock): def test_integration(self, xblock):
# Expect that the response step is open and displays the deadline # Expect that the response step is open and displays the deadline
resp = self.request(xblock, 'render_submission', json.dumps(dict())) resp = self.request(xblock, 'render_submission', json.dumps(dict()))
self.assertIn('Please provide your response below', resp) self.assertIn('Enter your response to the question', resp)
self.assertIn('Monday, May 6, 2999 00:00 UTC', resp) self.assertIn('Monday, May 6, 2999 00:00 UTC', resp)
# Create a submission for the user # Create a submission for the user
......
...@@ -61,19 +61,19 @@ def validate_assessments(assessments, enforce_peer_then_self=False): ...@@ -61,19 +61,19 @@ def validate_assessments(assessments, enforce_peer_then_self=False):
""" """
if enforce_peer_then_self: if enforce_peer_then_self:
if len(assessments) != 2: if len(assessments) != 2:
return (False, _("Problem must have exactly two assessments")) return (False, _("This problem must have exactly two assessments."))
if assessments[0].get('name') != 'peer-assessment': if assessments[0].get('name') != 'peer-assessment':
return (False, _("The first assessment must be a peer-assessment")) return (False, _("The first assessment must be a peer assessment."))
if assessments[1].get('name') != 'self-assessment': if assessments[1].get('name') != 'self-assessment':
return (False, _("The second assessment must be a self-assessment")) return (False, _("The second assessment must be a self assessment."))
if len(assessments) == 0: if len(assessments) == 0:
return (False, _("Problem must include at least one assessment")) return (False, _("This problem must include at least one assessment."))
for assessment_dict in assessments: for assessment_dict in assessments:
# Supported assessment # Supported assessment
if not assessment_dict.get('name') in ['peer-assessment', 'self-assessment']: if not assessment_dict.get('name') in ['peer-assessment', 'self-assessment']:
return (False, _("Assessment type is not supported")) return (False, _('The "name" value must be "peer-assessment" or "self-assessment".'))
# Number you need to grade is >= the number of people that need to grade you # Number you need to grade is >= the number of people that need to grade you
if assessment_dict.get('name') == 'peer-assessment': if assessment_dict.get('name') == 'peer-assessment':
...@@ -81,13 +81,13 @@ def validate_assessments(assessments, enforce_peer_then_self=False): ...@@ -81,13 +81,13 @@ def validate_assessments(assessments, enforce_peer_then_self=False):
must_be_graded_by = assessment_dict.get('must_be_graded_by') must_be_graded_by = assessment_dict.get('must_be_graded_by')
if must_grade is None or must_grade < 1: if must_grade is None or must_grade < 1:
return (False, _('"must_grade" must be a positive integer')) return (False, _('The "must_grade" value must be a positive integer.'))
if must_be_graded_by is None or must_be_graded_by < 1: if must_be_graded_by is None or must_be_graded_by < 1:
return (False, _('"must_be_graded_by" must be a positive integer')) return (False, _('The "must_be_graded_by" value must be a positive integer.'))
if must_grade < must_be_graded_by: if must_grade < must_be_graded_by:
return (False, _('"must_grade" should be greater than or equal to "must_be_graded_by"')) return (False, _('The "must_grade" value must be greater than or equal to the "must_be_graded_by" value.'))
return (True, u'') return (True, u'')
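As a usage sketch only (the module path is an assumption; the argument shapes and return value follow the hunks above), the validator can be exercised like this:

# Hypothetical usage sketch -- the import path is assumed, not shown in this diff.
from openassessment.xblock.validation import validate_assessments

assessments = [
    {'name': 'peer-assessment', 'must_grade': 5, 'must_be_graded_by': 3},
    {'name': 'self-assessment'},
]

success, msg = validate_assessments(assessments, enforce_peer_then_self=True)
# success is True: exactly two assessments, peer first, self second, and
# must_grade (5) >= must_be_graded_by (3); otherwise msg carries one of the
# messages introduced above.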
...@@ -109,7 +109,7 @@ def validate_rubric(rubric_dict, current_rubric, is_released): ...@@ -109,7 +109,7 @@ def validate_rubric(rubric_dict, current_rubric, is_released):
try: try:
rubric_from_dict(rubric_dict) rubric_from_dict(rubric_dict)
except InvalidRubric: except InvalidRubric:
return (False, u'Rubric definition is not valid') return (False, u'This rubric definition is not valid.')
# No duplicate criteria names # No duplicate criteria names
duplicates = _duplicates([criterion['name'] for criterion in rubric_dict['criteria']]) duplicates = _duplicates([criterion['name'] for criterion in rubric_dict['criteria']])
...@@ -134,12 +134,12 @@ def validate_rubric(rubric_dict, current_rubric, is_released): ...@@ -134,12 +134,12 @@ def validate_rubric(rubric_dict, current_rubric, is_released):
# Number of criteria must be the same # Number of criteria must be the same
if len(rubric_dict['criteria']) != len(current_rubric['criteria']): if len(rubric_dict['criteria']) != len(current_rubric['criteria']):
return (False, u'Number of criteria cannot be changed after a problem is released.') return (False, u'The number of criteria cannot be changed after a problem is released.')
# Number of options for each criterion must be the same # Number of options for each criterion must be the same
for new_criterion, old_criterion in _match_by_order(rubric_dict['criteria'], current_rubric['criteria']): for new_criterion, old_criterion in _match_by_order(rubric_dict['criteria'], current_rubric['criteria']):
if len(new_criterion['options']) != len(old_criterion['options']): if len(new_criterion['options']) != len(old_criterion['options']):
return (False, u'Number of options cannot be changed after a problem is released.') return (False, u'The number of options cannot be changed after a problem is released.')
else: else:
for new_option, old_option in _match_by_order(new_criterion['options'], old_criterion['options']): for new_option, old_option in _match_by_order(new_criterion['options'], old_criterion['options']):
......
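A similar sketch for the released-problem checks (abbreviated dictionaries, assumed module path, and a configured Django environment, since the new rubric is parsed and hashed before the comparison):

# Hypothetical sketch -- module path and Django setup are assumptions.
from openassessment.xblock.validation import validate_rubric

options = [
    {'name': 'Poor', 'points': 0, 'explanation': 'Hard to follow.'},
    {'name': 'Good', 'points': 2, 'explanation': 'Easy to follow.'},
]
current = {'prompt': 'Summarize the reading.', 'criteria': [
    {'name': 'Clarity', 'prompt': 'How clear is it?', 'options': options},
]}
updated = dict(current, criteria=current['criteria'] + [
    {'name': 'Accuracy', 'prompt': 'Is it accurate?', 'options': options},
])

success, msg = validate_rubric(updated, current, is_released=True)
# Expected: success is False, since a criterion was added after release:
# 'The number of criteria cannot be changed after a problem is released.'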
...@@ -196,23 +196,23 @@ def _parse_options_xml(options_root): ...@@ -196,23 +196,23 @@ def _parse_options_xml(options_root):
try: try:
option_dict['points'] = int(option.get('points')) option_dict['points'] = int(option.get('points'))
except ValueError: except ValueError:
raise UpdateFromXmlError(_("XML option points must be an integer.")) raise UpdateFromXmlError(_('The value for "points" must be an integer.'))
else: else:
raise UpdateFromXmlError(_("XML option definition must contain a 'points' attribute.")) raise UpdateFromXmlError(_('Every "option" element must contain a "points" attribute.'))
# Option name # Option name
option_name = option.find('name') option_name = option.find('name')
if option_name is not None: if option_name is not None:
option_dict['name'] = _safe_get_text(option_name) option_dict['name'] = _safe_get_text(option_name)
else: else:
raise UpdateFromXmlError(_("XML option definition must contain a 'name' element.")) raise UpdateFromXmlError(_('Every "option" element must contain a "name" element.'))
# Option explanation # Option explanation
option_explanation = option.find('explanation') option_explanation = option.find('explanation')
if option_explanation is not None: if option_explanation is not None:
option_dict['explanation'] = _safe_get_text(option_explanation) option_dict['explanation'] = _safe_get_text(option_explanation)
else: else:
raise UpdateFromXmlError(_("XML option definition must contain an 'explanation' element.")) raise UpdateFromXmlError(_('Every "option" element must contain an "explanation" element.'))
# Add the options dictionary to the list # Add the options dictionary to the list
options_list.append(option_dict) options_list.append(option_dict)
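For reference, an option shaped to pass all three checks above nests like this (illustrative markup; parsing it directly with ElementTree is only to show the expected structure):

# Illustrative markup -- only the "points" attribute and the <name> and
# <explanation> children are required by the checks in this hunk.
import xml.etree.ElementTree as etree

criterion_el = etree.fromstring(
    '<criterion>'
    '<option points="2">'
    '<name>Good</name>'
    '<explanation>Addresses the prompt with supporting detail.</explanation>'
    '</option>'
    '</criterion>'
)
# Dropping "points", <name>, or <explanation> triggers the corresponding
# UpdateFromXmlError introduced above.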
...@@ -248,14 +248,14 @@ def _parse_criteria_xml(criteria_root): ...@@ -248,14 +248,14 @@ def _parse_criteria_xml(criteria_root):
if criterion_name is not None: if criterion_name is not None:
criterion_dict['name'] = _safe_get_text(criterion_name) criterion_dict['name'] = _safe_get_text(criterion_name)
else: else:
raise UpdateFromXmlError(_("XML criterion definition must contain a 'name' element.")) raise UpdateFromXmlError(_('Every "criterion" element must contain a "name" element.'))
# Criterion prompt # Criterion prompt
criterion_prompt = criterion.find('prompt') criterion_prompt = criterion.find('prompt')
if criterion_prompt is not None: if criterion_prompt is not None:
criterion_dict['prompt'] = _safe_get_text(criterion_prompt) criterion_dict['prompt'] = _safe_get_text(criterion_prompt)
else: else:
raise UpdateFromXmlError(_("XML criterion definition must contain a 'prompt' element.")) raise UpdateFromXmlError(_('Every "criterion" element must contain a "prompt" element.'))
# Criterion options # Criterion options
criterion_dict['options'] = _parse_options_xml(criterion) criterion_dict['options'] = _parse_options_xml(criterion)
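A criterion adds its own <name> and <prompt> around those options (illustrative markup only):

# Illustrative markup for a single criterion; each nested <option> is handed
# to _parse_options_xml as shown above.
criterion_markup = (
    '<criterion>'
    '<name>Clarity</name>'
    '<prompt>How clear is the response?</prompt>'
    '<option points="0"><name>Poor</name><explanation>Hard to follow.</explanation></option>'
    '<option points="2"><name>Good</name><explanation>Easy to follow.</explanation></option>'
    '</criterion>'
)
# Omitting <name> or <prompt> raises the "criterion" errors above.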
...@@ -290,7 +290,7 @@ def _parse_rubric_xml(rubric_root): ...@@ -290,7 +290,7 @@ def _parse_rubric_xml(rubric_root):
if prompt_el is not None: if prompt_el is not None:
rubric_dict['prompt'] = _safe_get_text(prompt_el) rubric_dict['prompt'] = _safe_get_text(prompt_el)
else: else:
raise UpdateFromXmlError(_("XML rubric definition must contain a 'prompt' element.")) raise UpdateFromXmlError(_('Every "criterion" element must contain a "prompt" element.'))
# Criteria # Criteria
rubric_dict['criteria'] = _parse_criteria_xml(rubric_root) rubric_dict['criteria'] = _parse_criteria_xml(rubric_root)
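The rubric's own <prompt> is separate from each criterion's <prompt>, so a minimal rubric wraps both (illustrative markup only):

# Illustrative markup; a missing rubric-level <prompt> raises the error above,
# while criterion-level prompts are checked in _parse_criteria_xml.
rubric_markup = (
    '<rubric>'
    '<prompt>Summarize the reading in your own words.</prompt>'
    '<criterion>'
    '<name>Clarity</name>'
    '<prompt>How clear is the response?</prompt>'
    '<option points="2"><name>Good</name><explanation>Easy to follow.</explanation></option>'
    '</criterion>'
    '</rubric>'
)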
...@@ -323,7 +323,7 @@ def _parse_assessments_xml(assessments_root, start, due): ...@@ -323,7 +323,7 @@ def _parse_assessments_xml(assessments_root, start, due):
if 'name' in assessment.attrib: if 'name' in assessment.attrib:
assessment_dict['name'] = unicode(assessment.get('name')) assessment_dict['name'] = unicode(assessment.get('name'))
else: else:
raise UpdateFromXmlError(_('XML assessment definition must have a "name" attribute')) raise UpdateFromXmlError(_('Every "assessment" element must contain a "name" attribute.'))
# Assessment start # Assessment start
if 'start' in assessment.attrib: if 'start' in assessment.attrib:
...@@ -331,7 +331,7 @@ def _parse_assessments_xml(assessments_root, start, due): ...@@ -331,7 +331,7 @@ def _parse_assessments_xml(assessments_root, start, due):
if parsed_start is not None: if parsed_start is not None:
assessment_dict['start'] = parsed_start assessment_dict['start'] = parsed_start
else: else:
raise UpdateFromXmlError(_("Could not parse 'start' attribute as a valid date time")) raise UpdateFromXmlError(_('The date format in the "start" attribute is invalid. Make sure the date is formatted as YYYY-MM-DDTHH:MM:SS.'))
else: else:
assessment_dict['start'] = None assessment_dict['start'] = None
...@@ -341,7 +341,7 @@ def _parse_assessments_xml(assessments_root, start, due): ...@@ -341,7 +341,7 @@ def _parse_assessments_xml(assessments_root, start, due):
if parsed_start is not None: if parsed_start is not None:
assessment_dict['due'] = parsed_start assessment_dict['due'] = parsed_start
else: else:
raise UpdateFromXmlError(_("Could not parse 'due' attribute as a valid date time")) raise UpdateFromXmlError(_('The date format in the "due" attribute is invalid. Make sure the date is formatted as YYYY-MM-DDTHH:MM:SS.'))
else: else:
assessment_dict['due'] = None assessment_dict['due'] = None
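Both dates go through the same parser, so an explicitly scheduled peer step would be declared like this (illustrative markup; the date format matches the one named in the error messages above):

# Illustrative markup; _parse_date returns None for any other format, which
# raises the "start"/"due" errors shown above.
assessment_markup = (
    '<assessment name="peer-assessment"'
    ' start="2014-04-01T00:00:00" due="2014-04-15T00:00:00"'
    ' must_grade="5" must_be_graded_by="3" />'
)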
...@@ -350,14 +350,14 @@ def _parse_assessments_xml(assessments_root, start, due): ...@@ -350,14 +350,14 @@ def _parse_assessments_xml(assessments_root, start, due):
try: try:
assessment_dict['must_grade'] = int(assessment.get('must_grade')) assessment_dict['must_grade'] = int(assessment.get('must_grade'))
except ValueError: except ValueError:
raise UpdateFromXmlError(_('Assessment "must_grade" attribute must be an integer.')) raise UpdateFromXmlError(_('The "must_grade" value must be a positive integer.'))
# Assessment must_be_graded_by # Assessment must_be_graded_by
if 'must_be_graded_by' in assessment.attrib: if 'must_be_graded_by' in assessment.attrib:
try: try:
assessment_dict['must_be_graded_by'] = int(assessment.get('must_be_graded_by')) assessment_dict['must_be_graded_by'] = int(assessment.get('must_be_graded_by'))
except ValueError: except ValueError:
raise UpdateFromXmlError(_('Assessment "must_be_graded_by" attribute must be an integer.')) raise UpdateFromXmlError(_('The "must_be_graded_by" value must be a positive integer.'))
# Update the list of assessments # Update the list of assessments
assessments_list.append(assessment_dict) assessments_list.append(assessment_dict)
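The int() conversions above are what turn a malformed attribute into a user-facing error; calling the private helper directly (an assumption) makes the failure path easy to see:

# Illustrative failure path -- calling the private helper is an assumption.
import xml.etree.ElementTree as etree

bad_root = etree.fromstring(
    '<assessments><assessment name="peer-assessment" must_grade="five" /></assessments>'
)
# _parse_assessments_xml(bad_root, None, None) hits the ValueError branch and
# raises: 'The "must_grade" value must be a positive integer.'
# (Positivity itself is enforced later by validate_assessments.)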
...@@ -466,7 +466,7 @@ def update_from_xml(oa_block, root, validator=DEFAULT_VALIDATOR): ...@@ -466,7 +466,7 @@ def update_from_xml(oa_block, root, validator=DEFAULT_VALIDATOR):
# Check that the root has the correct tag # Check that the root has the correct tag
if root.tag != 'openassessment': if root.tag != 'openassessment':
raise UpdateFromXmlError(_("XML content must contain an 'openassessment' root element.")) raise UpdateFromXmlError(_('Every open assessment problem must contain an "openassessment" element.'))
# Retrieve the start date for the submission # Retrieve the start date for the submission
# Set it to None by default; we will update it to the latest start date later on # Set it to None by default; we will update it to the latest start date later on
...@@ -474,7 +474,7 @@ def update_from_xml(oa_block, root, validator=DEFAULT_VALIDATOR): ...@@ -474,7 +474,7 @@ def update_from_xml(oa_block, root, validator=DEFAULT_VALIDATOR):
if 'submission_start' in root.attrib: if 'submission_start' in root.attrib:
submission_start = _parse_date(unicode(root.attrib['submission_start'])) submission_start = _parse_date(unicode(root.attrib['submission_start']))
if submission_start is None: if submission_start is None:
raise UpdateFromXmlError(_("Invalid date format for submission start date")) raise UpdateFromXmlError(_('The format for the submission start date is invalid. Make sure the date is formatted as YYYY-MM-DDTHH:MM:SS.'))
# Retrieve the due date for the submission # Retrieve the due date for the submission
# Set it to None by default; we will update it to the earliest deadline later on # Set it to None by default; we will update it to the earliest deadline later on
...@@ -482,26 +482,26 @@ def update_from_xml(oa_block, root, validator=DEFAULT_VALIDATOR): ...@@ -482,26 +482,26 @@ def update_from_xml(oa_block, root, validator=DEFAULT_VALIDATOR):
if 'submission_due' in root.attrib: if 'submission_due' in root.attrib:
submission_due = _parse_date(unicode(root.attrib['submission_due'])) submission_due = _parse_date(unicode(root.attrib['submission_due']))
if submission_due is None: if submission_due is None:
raise UpdateFromXmlError(_("Invalid date format for submission due date")) raise UpdateFromXmlError(_('The format for the submission due date is invalid. Make sure the date is formatted as YYYY-MM-DDTHH:MM:SS.'))
# Retrieve the title # Retrieve the title
title_el = root.find('title') title_el = root.find('title')
if title_el is None: if title_el is None:
raise UpdateFromXmlError(_("XML content must contain a 'title' element.")) raise UpdateFromXmlError(_('Every assessment must contain a "title" element.'))
else: else:
title = _safe_get_text(title_el) title = _safe_get_text(title_el)
# Retrieve the rubric # Retrieve the rubric
rubric_el = root.find('rubric') rubric_el = root.find('rubric')
if rubric_el is None: if rubric_el is None:
raise UpdateFromXmlError(_("XML content must contain a 'rubric' element.")) raise UpdateFromXmlError(_('Every assessment must contain a "rubric" element.'))
else: else:
rubric = _parse_rubric_xml(rubric_el) rubric = _parse_rubric_xml(rubric_el)
# Retrieve the assessments # Retrieve the assessments
assessments_el = root.find('assessments') assessments_el = root.find('assessments')
if assessments_el is None: if assessments_el is None:
raise UpdateFromXmlError(_("XML content must contain an 'assessments' element.")) raise UpdateFromXmlError(_('Every assessment must contain an "assessments" element.'))
else: else:
assessments = _parse_assessments_xml(assessments_el, oa_block.start, oa_block.due) assessments = _parse_assessments_xml(assessments_el, oa_block.start, oa_block.due)
......
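Putting the pieces together, a problem definition that clears every check in update_from_xml is shaped roughly as follows (illustrative content; element and attribute names are taken from the hunks above):

# Illustrative problem definition as a plain string.
openassessment_markup = """
<openassessment submission_start="2014-04-01T00:00:00" submission_due="2014-05-01T00:00:00">
  <title>Example Open Response Assessment</title>
  <rubric>
    <prompt>Summarize the reading in your own words.</prompt>
    <criterion>
      <name>Clarity</name>
      <prompt>How clear is the response?</prompt>
      <option points="0"><name>Poor</name><explanation>Hard to follow.</explanation></option>
      <option points="2"><name>Good</name><explanation>Easy to follow.</explanation></option>
    </criterion>
  </rubric>
  <assessments>
    <assessment name="peer-assessment" must_grade="5" must_be_graded_by="3" />
    <assessment name="self-assessment" />
  </assessments>
</openassessment>
"""
# A wrong root tag, or a missing <title>, <rubric>, or <assessments> element,
# raises the corresponding UpdateFromXmlError shown above.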