Commit 8981b3e3 by Andy Armstrong

Complete changes

parent d2664543
......@@ -53,8 +53,10 @@ install: install-wheels install-python install-js install-nltk-data install-test
quality:
jshint openassessment/xblock/static/js/src -c .jshintrc --verbose
test: quality
./scripts/test.sh
test: quality test-python test-js
test-python:
./scripts/test-python.sh
render-templates:
./scripts/render-templates.sh
......@@ -65,6 +67,8 @@ test-js: render-templates
test-js-debug: render-templates
./scripts/js-debugger.sh
test-sandbox: test-acceptance test-a11y
test-acceptance:
./scripts/test-acceptance.sh tests
......
......@@ -6,11 +6,11 @@
{% for criterion in rubric_criteria %}
<li
class="field field--radio is--required assessment__rubric__question ui-toggle-visibility {% if criterion.options %}has--options{% endif %}"
id="assessment__rubric__question--{{ criterion.order_num }}"
id="{{ rubric_type }}__assessment__rubric__question--{{ criterion.order_num }}"
>
<h4 class="question__title ui-toggle-visibility__control">
<i class="icon fa fa-caret-right" aria-hidden="true"></i>
<span id="assessment__rubric__prompt--{{ criterion.order_num }}" class="ui-toggle-visibility__control__copy question__title__copy">{{ criterion.prompt }}</span>
<span id="{{ rubric_type }}__assessment__rubric__prompt--{{ criterion.order_num }}" class="ui-toggle-visibility__control__copy question__title__copy">{{ criterion.prompt }}</span>
<span class="label--required sr">* ({% trans "Required" %})</span>
</h4>
......@@ -21,11 +21,11 @@
<div class="wrapper--input">
<input type="radio"
name="{{ criterion.name }}"
id="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
id="{{ rubric_type }}__assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__value"
value="{{ option.name }}"
aria-labelledby="assessment__rubric__prompt--{{ criterion.order_num }}"/>
<label for="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
aria-labelledby="{{ rubric_type }}__assessment__rubric__prompt--{{ criterion.order_num }}"/>
<label for="{{ rubric_type }}__assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__label"
>{{ option.label }}</label>
</div>
......@@ -39,9 +39,9 @@
{% if criterion.feedback == 'optional' or criterion.feedback == 'required' %}
<li class="answer--feedback">
<div class="wrapper--input">
<label for="assessment__rubric__question--{{ criterion.order_num }}__feedback" class="answer__label">{% trans "Comments" %}</label>
<label for="{{ rubric_type }}__assessment__rubric__question--{{ criterion.order_num }}__feedback" class="answer__label">{% trans "Comments" %}</label>
<textarea
id="assessment__rubric__question--{{ criterion.order_num }}__feedback"
id="{{ rubric_type }}__assessment__rubric__question--{{ criterion.order_num }}__feedback"
class="answer__value"
value="{{ criterion.name }}"
name="{{ criterion.name }}"
......@@ -56,14 +56,15 @@
</div>
</li>
{% endfor %}
<li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback">
<label class="question__title" for="assessment__rubric__question--feedback__value">
<li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback">
<label class="question__title" for="{{ rubric_type }}__assessment__rubric__question--feedback__value">
<span class="question__title__copy">{{ rubric_feedback_prompt }}</span>
</label>
<div class="wrapper--input">
<textarea
id="assessment__rubric__question--feedback__value"
id="{{ rubric_type }}__assessment__rubric__question--feedback__value"
class="assessment__rubric__question--feedback__value"
placeholder="{{ rubric_feedback_default_text }}"
maxlength="500"
>
......
......@@ -72,7 +72,7 @@
</div>
<form id="peer-assessment--001__assessment" class="peer-assessment__assessment" method="post">
{% include "openassessmentblock/oa_rubric.html" %}
{% include "openassessmentblock/oa_rubric.html" with rubric_type="peer" %}
</form>
</article>
</li>
......
......@@ -62,7 +62,7 @@
{% for criterion in rubric_criteria %}
<li
class="field field--radio is--required assessment__rubric__question ui-toggle-visibility {% if criterion.options %}has--options{% endif %}"
id="assessment__rubric__question--{{ criterion.order_num }}"
id="peer__assessment__rubric__question--{{ criterion.order_num }}"
>
<h4 class="question__title ui-toggle-visibility__control">
<i class="icon fa fa-caret-right" aria-hidden="true"></i>
......@@ -77,10 +77,10 @@
<div class="wrapper--input">
<input type="radio"
name="{{ criterion.name }}"
id="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
id="peer__assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__value"
value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
<label for="peer__assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__label"
>{{ option.label }}</label>
</div>
......@@ -94,9 +94,9 @@
{% if criterion.feedback == 'optional' or criterion.feedback == 'required' %}
<li class="answer--feedback">
<div class="wrapper--input">
<label for="assessment__rubric__question--{{ criterion.order_num }}__feedback" class="answer__label">{% trans "Comments" %}</label>
<label for="peer__assessment__rubric__question--{{ criterion.order_num }}__feedback" class="answer__label">{% trans "Comments" %}</label>
<textarea
id="assessment__rubric__question--{{ criterion.order_num }}__feedback"
id="peer__assessment__rubric__question--{{ criterion.order_num }}__feedback"
class="answer__value"
value="{{ criterion.name }}"
name="{{ criterion.name }}"
......@@ -112,13 +112,13 @@
</li>
{% endfor %}
<li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback" id="assessment__rubric__question--feedback">
<label class="question__title" for="assessment__rubric__question--feedback__value">
<li class="wrapper--input field field--textarea assessment__rubric__question assessment__rubric__question--feedback">
<label class="question__title" for="peer__assessment__rubric__question--feedback__value">
<span class="question__title__copy">{{ rubric_feedback_prompt }}</span>
</label>
<div class="wrapper--input">
<textarea
id="assessment__rubric__question--feedback__value"
id="peer__assessment__rubric__question--feedback__value"
placeholder="{{ rubric_feedback_default_text }}"
maxlength="500"
>
......
......@@ -25,7 +25,7 @@
<div class="message__content">
<p>
{% blocktrans with removed_datetime=workflow_cancellation.created_at|utc|date:"N j, Y H:i e" removed_by_username=workflow_cancellation.cancelled_by %}
{% blocktrans with removed_datetime=workflow_cancellation.cancelled_at|utc|date:"N j, Y H:i e" removed_by_username=workflow_cancellation.cancelled_by %}
Your submission has been cancelled by {{ removed_by_username }} on {{ removed_datetime }}
{% endblocktrans %}
<br>
......
......@@ -49,7 +49,7 @@
</article>
<form id="self-assessment--001__assessment" class="self-assessment__assessment" method="post">
{% include "openassessmentblock/oa_rubric.html" %}
{% include "openassessmentblock/oa_rubric.html" with rubric_type="self" %}
</form>
</div>
......
......@@ -26,7 +26,7 @@
</div>
<form class="staff-assessment__assessment" method="post">
{% include "openassessmentblock/oa_rubric.html" with rubric_feedback_prompt="(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?" rubric_feedback_default_text="I noticed that this response..." %}
{% include "openassessmentblock/oa_rubric.html" with rubric_type="staff" rubric_feedback_prompt="(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?" rubric_feedback_default_text="I noticed that this response..." %}
</form>
</article>
</div>
......
......@@ -21,7 +21,7 @@
<div class="ui-toggle-visibility__content">
{% if workflow_cancellation %}
<p>
{% blocktrans with removed_by_username=workflow_cancellation.cancelled_by removed_datetime=workflow_cancelled_at|utc|date:"F j, Y H:i e" %}
{% blocktrans with removed_by_username=workflow_cancellation.cancelled_by removed_datetime=workflow_cancellation.cancelled_at|utc|date:"F j, Y H:i e" %}
Learner submission removed by {{ removed_by_username }} on {{ removed_datetime }}
{% endblocktrans %}
</p>
......
......@@ -83,7 +83,7 @@
{% if criterion.options %}
<li
class="field field--radio is--required assessment__rubric__question ui-toggle-visibility has--options"
id="assessment__rubric__question--{{ criterion.order_num }}"
id="training__assessment__rubric__question--{{ criterion.order_num }}"
>
<h4 class="question__title ui-toggle-visibility__control">
<i class="icon fa fa-caret-right" aria-hidden="true"></i>
......@@ -111,7 +111,7 @@
<div class="wrapper--input">
<input type="radio"
name="{{ criterion.name }}"
id="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
id="training__assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
class="answer__value"
value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.order_num }}__{{ option.order_num }}"
......
......@@ -393,10 +393,10 @@ def cancel_workflow(submission_uuid, comments, cancelled_by_id, assessment_requi
def get_assessment_workflow_cancellation(submission_uuid):
"""
Get cancellation information for a assessment workflow.
Get cancellation information for an assessment workflow.
Args:
submission_uuid (str): The UUID of assessment workflow.
submission_uuid (str): The UUID of the submission.
"""
try:
workflow_cancellation = AssessmentWorkflowCancellation.get_latest_workflow_cancellation(submission_uuid)
......
......@@ -24,7 +24,6 @@ from openassessment.assessment.api import self as self_api
from openassessment.assessment.api import ai as ai_api
from openassessment.fileupload import api as file_api
from openassessment.workflow import api as workflow_api
from openassessment.workflow.models import AssessmentWorkflowCancellation
from openassessment.fileupload import exceptions as file_exceptions
......@@ -285,22 +284,13 @@ class StaffAreaMixin(object):
workflow = self.get_workflow_info(submission_uuid=submission_uuid)
workflow_cancellation = workflow_api.get_assessment_workflow_cancellation(submission_uuid)
if workflow_cancellation:
workflow_cancellation['cancelled_by'] = self.get_username(workflow_cancellation['cancelled_by_id'])
# Get the date that the workflow was cancelled to use in preference to the serialized date string
cancellation_model = AssessmentWorkflowCancellation.get_latest_workflow_cancellation(submission_uuid)
workflow_cancelled_at = cancellation_model.created_at
else:
workflow_cancelled_at = None
workflow_cancellation = self.get_workflow_cancellation_info(submission_uuid)
context = {
'submission': create_submission_dict(submission, self.prompts) if submission else None,
'score': workflow.get('score'),
'workflow_status': workflow.get('status'),
'workflow_cancellation': workflow_cancellation,
'workflow_cancelled_at': workflow_cancelled_at,
'peer_assessments': peer_assessments,
'submitted_assessments': submitted_assessments,
'self_assessment': self_assessment,
......
......@@ -86,11 +86,13 @@
{
"template": "openassessmentblock/response/oa_response.html",
"context": {
"saved_response": {"answer":
{"parts": [
{ "text": "", "prompt": { "description": "Prompt 1" }},
{ "text": "", "prompt": { "description": "Prompt 2" }}
]}
"saved_response": {
"answer": {
"parts": [
{ "text": "", "prompt": { "description": "Prompt 1" }},
{ "text": "", "prompt": { "description": "Prompt 2" }}
]
}
},
"save_status": "This response has not been saved.",
"submit_enabled": false,
......@@ -585,12 +587,12 @@
}
],
"template": {
"answer": {
"parts": [
{ "text": ""},
{ "text": ""}
]
},
"answer": {
"parts": [
{ "text": ""},
{ "text": ""}
]
},
"criteria": [
{
"name": "criterion_with_two_options",
......@@ -684,6 +686,50 @@
{
"template": "openassessmentblock/staff_area/oa_student_info.html",
"context": {
"rubric_criteria": [
{
"name": "vocabulary",
"prompt": "vocabulary",
"order_num": 0,
"feedback": "optional",
"options": [
{
"order_num": 0,
"points": 0,
"name": "Bad"
},
{
"order_num": 1,
"points": 1,
"name": "Good"
}
]
},
{
"name": "grammar",
"prompt": "grammar",
"order_num": 1,
"options": [
{
"order_num": 0,
"points": 0,
"name": "Bad"
},
{
"order_num": 1,
"points": 1,
"name": "Good"
}
]
},
{
"name": "feedback_only",
"prompt": "Feedback only, no options!",
"order_num": 2,
"feedback": "required",
"options": []
}
],
"submission": {
"image_url": "/test-url",
"answer":{
......@@ -695,6 +741,23 @@
"output": "oa_student_info.html"
},
{
"template": "openassessmentblock/staff_area/oa_student_info.html",
"context": {
"submission": {
"image_url": "/test-url",
"answer": {
"text": "testing response text"
}
},
"workflow_cancellation": {
"cancelled_by": "staff",
"cancelled_at": "2015-10-01T04:53",
"comments": "Cancelled!"
}
},
"output": "oa_staff_cancelled_submission.html"
},
{
"template": "openassessmentblock/peer/oa_peer_assessment.html",
"context": {
"rubric_criteria": [
......
......@@ -6,6 +6,7 @@ describe('OpenAssessment.StaffAreaView', function() {
// Stub server that returns dummy data for the staff info view
var StubServer = function() {
this.studentTemplate = 'oa_student_info.html';
// Remember which fragments have been loaded
this.fragmentsLoaded = [];
......@@ -23,7 +24,7 @@ describe('OpenAssessment.StaffAreaView', function() {
this.studentInfo = function() {
var server = this;
return $.Deferred(function(defer) {
var fragment = readFixtures('oa_student_info.html');
var fragment = readFixtures(server.studentTemplate);
defer.resolveWith(server, [fragment]);
});
};
......@@ -50,6 +51,10 @@ describe('OpenAssessment.StaffAreaView', function() {
return successPromise;
};
this.staffAssess = function() {
return successPromise;
};
this.data = {};
};
......@@ -162,67 +167,6 @@ describe('OpenAssessment.StaffAreaView', function() {
});
});
describe('Student Info', function() {
var chooseStudent = function(view, studentName) {
var studentNameField = $('.openassessment__student_username', view.element),
submitButton = $('.action--submit-username', view.element);
studentNameField.val(studentName);
submitButton.click();
};
beforeEach(function() {
loadFixtures('oa_base_course_staff.html');
appendLoadFixtures('oa_student_info.html');
});
it('shows an error when clicking "Submit" with no student name chosen', function() {
var staffArea = createStaffArea();
chooseStudent(staffArea, '');
expect($('.openassessment_student_info_form .form--error', staffArea.element).text().trim())
.toBe('A learner name must be provided.');
});
describe('Submission Management', function() {
it('updates submission cancellation button when comments changes', function() {
// Prevent the server's response from resolving,
// so we can see what happens before view gets re-rendered.
spyOn(server, 'cancelSubmission').and.callFake(function() {
return $.Deferred(function() {}).promise();
});
var staffArea = createStaffArea();
chooseStudent(staffArea, 'testStudent');
// comments is blank --> cancel submission button disabled
staffArea.comment('');
staffArea.handleCommentChanged();
expect(staffArea.cancelSubmissionEnabled()).toBe(false);
// Response is whitespace --> cancel submission button disabled
staffArea.comment(' \n \n ');
staffArea.handleCommentChanged();
expect(staffArea.cancelSubmissionEnabled()).toBe(false);
// Response is not blank --> cancel submission button enabled
staffArea.comment('Cancellation reason.');
staffArea.handleCommentChanged();
expect(staffArea.cancelSubmissionEnabled()).toBe(true);
});
it('submits the cancel submission comments to the server', function() {
spyOn(server, 'cancelSubmission').and.callThrough();
var staffArea = createStaffArea();
chooseStudent(staffArea, 'testStudent');
staffArea.comment('Cancellation reason.');
staffArea.cancelSubmission('Bob');
expect(server.cancelSubmission).toHaveBeenCalledWith('Bob', 'Cancellation reason.');
});
});
});
describe('Staff Toolbar', function() {
beforeEach(function() {
loadFixtures('oa_base_course_staff.html');
......@@ -281,6 +225,13 @@ describe('OpenAssessment.StaffAreaView', function() {
});
describe('Staff Tools', function() {
var chooseStudent = function(view, studentName) {
var studentNameField = $('.openassessment__student_username', view.element),
submitButton = $('.action--submit-username', view.element);
studentNameField.val(studentName);
submitButton.click();
};
beforeEach(function() {
loadFixtures('oa_base_course_staff.html');
});
......@@ -296,6 +247,101 @@ describe('OpenAssessment.StaffAreaView', function() {
expect($staffToolsButton).not.toHaveClass('is--active');
expect($staffToolsPanel).toHaveClass('is--hidden');
});
it('shows an error when clicking "Submit" with no student name chosen', function() {
var staffArea = createStaffArea();
chooseStudent(staffArea, '');
expect($('.openassessment_student_info_form .form--error', staffArea.element).text().trim())
.toBe('A learner name must be provided.');
});
describe('Submission Management', function() {
it('updates submission cancellation button when comments changes', function() {
// Prevent the server's response from resolving,
// so we can see what happens before view gets re-rendered.
spyOn(server, 'cancelSubmission').and.callFake(function() {
return $.Deferred(function() {}).promise();
});
var staffArea = createStaffArea();
chooseStudent(staffArea, 'testStudent');
// comments is blank --> cancel submission button disabled
staffArea.comment('');
staffArea.handleCommentChanged();
expect(staffArea.cancelSubmissionEnabled()).toBe(false);
// Response is whitespace --> cancel submission button disabled
staffArea.comment(' \n \n ');
staffArea.handleCommentChanged();
expect(staffArea.cancelSubmissionEnabled()).toBe(false);
// Response is not blank --> cancel submission button enabled
staffArea.comment('Cancellation reason.');
staffArea.handleCommentChanged();
expect(staffArea.cancelSubmissionEnabled()).toBe(true);
});
it('submits the cancel submission comments to the server', function() {
// Show the staff area for the test student
var staffArea = createStaffArea();
chooseStudent(staffArea, 'testStudent');
// Cancel the student's submission
staffArea.comment('Cancellation reason.');
server.studentTemplate = 'oa_staff_cancelled_submission.html';
staffArea.cancelSubmission('Bob');
// Verify that the student view reflects the cancellation
expect($($('.staff-info__student__response p', staffArea.element)[0]).text().trim()).toBe(
'Learner submission removed by staff on October 1, 2015 04:53 UTC'
);
expect($($('.staff-info__student__response p', staffArea.element)[1]).text().trim()).toBe(
'Comments: Cancelled!'
);
});
});
describe('Staff Grade Override', function() {
var fillAssessment = function($assessment) {
$('#staff__assessment__rubric__question--2__feedback', $assessment).val('Text response');
$('.question__answers', $assessment).each(function(element) {
$('input[type="radio"]', this).first().click();
});
};
it('enables the submit button when all required fields are specified', function() {
var staffArea = createStaffArea(),
$assessment, $submitButton;
chooseStudent(staffArea, 'testStudent');
$assessment = $('.wrapper--staff-assessment', staffArea.element)
$submitButton = $('.action--submit', $assessment);
expect($submitButton).toHaveClass('is--disabled');
fillAssessment($assessment);
expect($submitButton).not.toHaveClass('is--disabled');
});
it('can submit a staff grade override', function() {
var staffArea = createStaffArea(),
$assessment, $submitButton;
chooseStudent(staffArea, 'testStudent');
$assessment = $('.wrapper--staff-assessment', staffArea.element)
$submitButton = $('.action--submit', $assessment);
fillAssessment($assessment);
// Submit the assessment
server.studentTemplate = 'oa_staff_cancelled_submission.html';
$submitButton.click();
// Verify that the student info reflects the update
expect($($('.staff-info__student__response p', staffArea.element)[0]).text().trim()).toBe(
'Learner submission removed by staff on October 1, 2015 04:53 UTC'
);
expect($($('.staff-info__student__response p', staffArea.element)[1]).text().trim()).toBe(
'Comments: Cancelled!'
);
});
});
});
describe('Staff Info', function() {
......
......@@ -62,7 +62,7 @@ OpenAssessment.Rubric.prototype = {
**/
overallFeedback: function(overallFeedback) {
var selector = '#assessment__rubric__question--feedback__value';
var selector = '.assessment__rubric__question--feedback__value';
if (typeof overallFeedback === 'undefined') {
return $(selector, this.element).val();
}
......
......@@ -8,6 +8,7 @@ from openassessment.fileupload import api as file_upload_api
from openassessment.fileupload.exceptions import FileUploadError
from openassessment.workflow import api as workflow_api
from openassessment.workflow.errors import AssessmentWorkflowError
from .resolve_dates import DISTANT_FUTURE
from data_conversion import create_submission_dict, prepare_submission_for_serialization
......@@ -421,18 +422,12 @@ class SubmissionMixin(object):
context['save_status'] = self.save_status
context['submit_enabled'] = self.saved_response != ''
path = "openassessmentblock/response/oa_response.html"
elif workflow["status"] == "cancelled":
workflow_cancellation = workflow_api.get_assessment_workflow_cancellation(self.submission_uuid)
if workflow_cancellation:
workflow_cancellation['cancelled_by'] = self.get_username(workflow_cancellation['cancelled_by_id'])
context['workflow_cancellation'] = workflow_cancellation
context["workflow_cancellation"] = self.get_workflow_cancellation_info(self.submission_uuid)
context["student_submission"] = self.get_user_submission(
workflow["submission_uuid"]
)
path = 'openassessmentblock/response/oa_response_cancelled.html'
elif workflow["status"] == "done":
student_submission = self.get_user_submission(
workflow["submission_uuid"]
......
......@@ -16,7 +16,7 @@ from submissions import api as sub_api
from submissions.api import SubmissionRequestError, SubmissionInternalError
from openassessment.fileupload import api
from openassessment.workflow import api as workflow_api
from openassessment.workflow.models import AssessmentWorkflowCancellation
from openassessment.xblock.openassessmentblock import OpenAssessmentBlock
from openassessment.xblock.data_conversion import create_submission_dict, prepare_submission_for_serialization
......@@ -329,7 +329,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
xblock.get_username = Mock(return_value='Bob')
workflow_api.get_assessment_workflow_cancellation = Mock(return_value={
AssessmentWorkflowCancellation.get_latest_workflow_cancellation = Mock(return_value={
'comments': 'Inappropriate language',
'cancelled_by_id': 'Bob',
'created_at': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
......@@ -348,7 +348,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
'workflow_cancellation': {
'comments': 'Inappropriate language',
'cancelled_by_id': 'Bob',
'created_at': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
'cancelled_at': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
'cancelled_by': 'Bob'
}
}
......@@ -498,6 +498,7 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
"""
path, context = xblock.submission_path_and_context()
self.maxDiff = None # Show a full diff
self.assertEqual(path, expected_path)
self.assertEqual(context, expected_context)
......
......@@ -3,7 +3,9 @@ Handle OpenAssessment XBlock requests to the Workflow API.
"""
from xblock.core import XBlock
from openassessment.workflow import api as workflow_api
from openassessment.workflow.models import AssessmentWorkflowCancellation
from openassessment.xblock.data_conversion import create_rubric_dict
......@@ -178,3 +180,26 @@ class WorkflowMixin(object):
for ra in self.valid_assessments
if ra['name'] in self.ASSESSMENT_STEP_NAMES
]
def get_workflow_cancellation_info(self, submission_uuid):
"""
Returns cancellation information for a particular submission.
:param submission_uuid: The submission to return information for.
:return: The cancellation information, or None if the submission has
not been cancelled.
"""
cancellation_info = workflow_api.get_assessment_workflow_cancellation(submission_uuid)
if not cancellation_info:
return None
# Add the username of the staff member who cancelled the submission
cancellation_info['cancelled_by'] = self.get_username(cancellation_info['cancelled_by_id'])
# Add the date that the workflow was cancelled (in preference to the serialized date string)
del cancellation_info['created_at']
cancellation_model = AssessmentWorkflowCancellation.get_latest_workflow_cancellation(submission_uuid)
if cancellation_model:
cancellation_info['cancelled_at'] = cancellation_model.created_at
return cancellation_info
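For reference, a minimal Python sketch (not part of this commit) of the dictionary that get_workflow_cancellation_info is expected to return; the keys follow the fixtures and tests in this change, while the concrete values (username, comments, timestamp) are invented examples:
# Minimal sketch only: keys mirror the fixtures and tests in this change,
# while the concrete values below are invented examples.
import datetime
import pytz

cancellation_info = {
    "comments": "Inappropriate language",
    "cancelled_by_id": "Bob",
    "cancelled_by": "Bob",  # resolved via self.get_username(cancelled_by_id)
    # 'created_at' is dropped in favour of the model's datetime:
    "cancelled_at": datetime.datetime(2015, 10, 1, 4, 53, tzinfo=pytz.utc),
}

# Templates such as oa_student_info.html and oa_response_cancelled.html
# receive this dict (or None) as the 'workflow_cancellation' context value.
context = {"workflow_cancellation": cancellation_info}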
#!/usr/bin/env bash
# Need to exit with an error code to fail the Travis build
set -e
cd `dirname $BASH_SOURCE` && cd ..
export DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-"settings.test_with_coverage"}
./scripts/test-python.sh $1
./scripts/render-templates.sh
./scripts/test-js.sh
......@@ -167,7 +167,7 @@ class AssessmentMixin(object):
"""
Mixin for interacting with the assessment rubric.
"""
def assess(self, options_selected):
def assess(self, assessment_type, options_selected):
"""
Create an assessment.
......@@ -183,7 +183,8 @@ class AssessmentMixin(object):
"""
for criterion_num, option_num in enumerate(options_selected):
sel = "#assessment__rubric__question--{criterion_num}__{option_num}".format(
sel = "#{assessment_type}__assessment__rubric__question--{criterion_num}__{option_num}".format(
assessment_type=assessment_type,
criterion_num=criterion_num,
option_num=option_num
)
......
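For illustration, a minimal sketch of the namespaced selector that the updated assess method builds; the assessment type and the criterion/option numbers below are assumed example values, while the format string is taken verbatim from the change above:
# Minimal sketch: the prefix matches the rubric IDs rendered by the
# templates in this change ("peer", "self", "staff", or "training");
# the criterion and option numbers are arbitrary examples.
assessment_type = "peer"
criterion_num, option_num = 0, 1
sel = "#{assessment_type}__assessment__rubric__question--{criterion_num}__{option_num}".format(
    assessment_type=assessment_type,
    criterion_num=criterion_num,
    option_num=option_num,
)
assert sel == "#peer__assessment__rubric__question--0__1"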
......@@ -103,7 +103,7 @@ class OpenAssessmentTest(WebAppTest):
# Submit a self-assessment
self.self_asmnt_page.wait_for_page().wait_for_response()
self.assertIn(self.SUBMISSION, self.self_asmnt_page.response_text)
self.self_asmnt_page.assess(self.OPTIONS_SELECTED).wait_for_complete()
self.self_asmnt_page.assess("self", self.OPTIONS_SELECTED).wait_for_complete()
self.assertTrue(self.self_asmnt_page.is_complete)
# Verify the grade
......@@ -170,7 +170,7 @@ class PeerAssessmentTest(OpenAssessmentTest):
self.submission_page.visit().submit_response(self.SUBMISSION)
# Assess the submission (there should be at least one available)
self.peer_asmnt_page.wait_for_page().wait_for_response().assess(self.OPTIONS_SELECTED)
self.peer_asmnt_page.wait_for_page().wait_for_response().assess("peer", self.OPTIONS_SELECTED)
# Check that the status indicates we've assessed one submission
try:
......@@ -208,7 +208,7 @@ class StudentTrainingTest(OpenAssessmentTest):
msg = "Did not complete at least {num} student training example(s).".format(num=example_num)
self.fail(msg)
self.student_training_page.wait_for_page().wait_for_response().assess(options_selected)
self.student_training_page.wait_for_page().wait_for_response().assess("training", options_selected)
# Check browser scrolled back to top only on first example
......@@ -365,7 +365,7 @@ class StaffAreaTest(OpenAssessmentTest):
self.staff_area_page.verify_learner_final_score("Final grade: 6 out of 8")
# Do staff override and wait for final score to change.
self.staff_area_page.assess([0, 1])
self.staff_area_page.assess("staff", [0, 1])
# Verify that the new student score is different from the original one.
# TODO: uncomment this after hooked up to the API. Also verify other state if appropriate.
......