Commit 509d4185 by cahrens

Staff tool UI for full staff grading.

Part 2 (TNL-3493)
parent f7c00a2b
......@@ -167,7 +167,6 @@
</div>
</div>
<!-- Conditional not really required because button will be hidden. -->
{% if staff_assessment_required %}
<div class="openassessment__staff-grading wrapper--staff-grading wrapper--ui-staff is--hidden">
<div class="staff-grading ui-staff">
......
......@@ -5,7 +5,7 @@
<header class="staff__grade__header ui-toggle-visibility__control">
<h3 class="staff__grade__title">
<span class="wrapper--copy">
<span class="staff__grade__label">{% trans "Instructor Assessment" %}</span>
<button class="staff__grade__show-form">{% trans "Staff Assessment" %}</button>
</span>
</h3>
......
......@@ -4,11 +4,11 @@
{% block body %}
<div class="staff__grade__form ui-toggle-visibility__content" data-submission-uuid="{{ submission.uuid }}">
<div class="wrapper--staff-assessment">
<div class="step__instruction">
<div>
<p>{% trans "Give this learner a grade using the problem's rubric." %}</p>
</div>
<div class="step__content">
<div>
<article class="staff-assessment">
<div class="staff-assessment__display">
<header class="staff-assessment__display__header">
......@@ -25,12 +25,12 @@
</div>
<form class="staff-assessment__assessment" method="post">
{% include "openassessmentblock/oa_rubric.html" with rubric_type="staff" rubric_feedback_prompt="(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?" rubric_feedback_default_text="I noticed that this response..." %}
{% include "openassessmentblock/oa_rubric.html" with rubric_type="staff" %}
</form>
</article>
</div>
<div class="step__actions">
<div>
<div class="message message--inline message--error message--error-server">
<h4 class="message__title">{% trans "We could not submit your assessment" %}</h4>
<div class="message__content"></div>
......@@ -39,13 +39,13 @@
<ul class="list list--actions">
<li class="list--actions__item submit_assessment--action">
<button type="submit" class="action action--submit is--disabled">
<span class="copy">{% trans "Submit Assessment" %}</span>
<span class="copy">{% trans "Submit assessment" %}</span>
</button>
</li>
<li class="list--actions__item submit_assessment--action">
<button type="submit" class="action action--submit is--disabled continue_grading--action">
<span class="copy">{% trans "Submit Assessment and Grade Another Learner" %}</span>
<span class="copy">{% trans "Submit assessment and continue grading" %}</span>
</button>
</li>
</ul>
......
......@@ -25,7 +25,7 @@
</div>
<form class="staff-assessment__assessment" method="post">
{% include "openassessmentblock/oa_rubric.html" with rubric_type="staff" rubric_feedback_prompt="(Optional) What aspects of this response stood out to you? What did it do well? How could it improve?" rubric_feedback_default_text="I noticed that this response..." %}
{% include "openassessmentblock/oa_rubric.html" with rubric_type="staff" %}
</form>
</article>
</div>
......@@ -39,7 +39,7 @@
<ul class="list list--actions">
<li class="list--actions__item">
<button type="submit" class="action action--submit is--disabled">
<span class="copy">{% trans "Submit your assessment" %}</span>
<span class="copy">{% trans "Submit assessment" %}</span>
</button>
<div class="staff-override-error"></div>
......
......@@ -265,7 +265,7 @@
<div class="staff-info__staff-override__content ui-toggle-visibility__content">
<div class="wrapper--input">
{% include "openassessmentblock/staff_area/oa_staff_assessment.html" %}
{% include "openassessmentblock/staff_area/oa_staff_override_assessment.html" %}
</div>
</div>
</div>
......
......@@ -78,6 +78,7 @@ def require_course_staff(error_key, with_json_handler=False):
permission_errors = {
"STAFF_AREA": xblock._(u"You do not have permission to access the ORA staff area"),
"STUDENT_INFO": xblock._(u"You do not have permission to access ORA learner information."),
"STUDENT_GRADE": xblock._(u"You do not have permission to access ORA staff grading."),
}
if not xblock.is_course_staff and with_json_handler:
......@@ -234,19 +235,15 @@ class StaffAreaMixin(object):
return self.render_assessment(path, context)
except PeerAssessmentInternalError:
return self.render_error(self._(u"Error finding assessment workflow cancellation.")) # TODO: this error is too specific
return self.render_error(self._(u"Error getting learner information."))
@XBlock.handler
@require_course_staff("STUDENT_INFO") # TODO: should this be a different "permission"?
@require_course_staff("STUDENT_GRADE")
def render_staff_grade_form(self, data, suffix=''): # pylint: disable=W0613
"""
Renders all relative information for a specific student's workflow. TODO update
Given a student's username, we can render a staff-only section of the page
with submissions and assessments specific to the student.
Renders a form to staff-grade the next available learner submission.
Must be course staff to render this view.
"""
try:
student_item_dict = self.get_student_item_dict()
......@@ -254,7 +251,10 @@ class StaffAreaMixin(object):
item_id = student_item_dict.get('item_id')
staff_id = student_item_dict['student_id']
# Note that this will check out a submission for grading by the specified staff member.
# If no submissions are available for grading, None will be returned.
submission_to_assess = staff_api.get_submission_to_assess(course_id, item_id, staff_id)
if submission_to_assess is not None:
submission = submission_api.get_submission_and_student(submission_to_assess['uuid'])
if submission:
......@@ -264,36 +264,40 @@ class StaffAreaMixin(object):
)
path = 'openassessmentblock/staff_area/oa_staff_grade_learners_assessment.html'
return self.render_assessment(path, submission_context)
else:
return self.render_error(self._(u"Error loading the checked out learner response."))
else:
return self.render_error(self._(u"No more assessments can be graded at this time."))
return self.render_error(self._(u"No other learner responses are available for grading at this time."))
except PeerAssessmentInternalError:
return self.render_error(self._(u"Error finding assessment workflow cancellation.")) # TODO Update!
return self.render_error(self._(u"Error getting staff grade information."))
def get_student_submission_context(self, student_username, submission):
"""
TODO: update!
Get the proper path and context for rendering the student info
section of the staff area.
Get a context dict for rendering a student submission and associated rubric (for staff grading).
Includes submission (populating submitted file information if relevant), rubric_criteria,
and student_username.
Args:
student_username (unicode): The username of the student to report.
submission (object): A submission, as returned by the submission_api.
Returns:
A context dict for rendering a student submission and associated rubric (for staff grading).
"""
if submission:
if 'file_key' in submission.get('answer', {}):
file_key = submission['answer']['file_key']
try:
submission['file_url'] = file_api.get_download_url(file_key)
except file_exceptions.FileUploadError:
# Log the error, but do not prevent the rest of the student info
# from being displayed.
msg = (
u"Could not retrieve image URL for staff debug page. "
u"The learner username is '{student_username}', and the file key is {file_key}"
).format(student_username=student_username, file_key=file_key)
logger.exception(msg)
if submission and 'file_key' in submission.get('answer', {}):
file_key = submission['answer']['file_key']
try:
submission['file_url'] = file_api.get_download_url(file_key)
except file_exceptions.FileUploadError:
# Log the error, but do not prevent the rest of the student info
# from being displayed.
msg = (
u"Could not retrieve image URL for staff debug page. "
u"The learner username is '{student_username}', and the file key is {file_key}"
).format(student_username=student_username, file_key=file_key)
logger.exception(msg)
context = {
'submission': create_submission_dict(submission, self.prompts) if submission else None,
......@@ -301,6 +305,12 @@ class StaffAreaMixin(object):
'student_username': student_username,
}
if self.rubric_feedback_prompt is not None:
context["rubric_feedback_prompt"] = self.rubric_feedback_prompt
if self.rubric_feedback_default_text is not None:
context['rubric_feedback_default_text'] = self.rubric_feedback_default_text
return context
def get_student_info_path_and_context(self, student_username, expanded_view=None):
......@@ -315,8 +325,9 @@ class StaffAreaMixin(object):
"""
anonymous_user_id = None
student_item = None
submission_uuid = None
submissions = None
submission = None
submission_uuid = None
if student_username:
anonymous_user_id = self.get_anonymous_user_id(student_username, self.course_id)
......@@ -326,6 +337,8 @@ class StaffAreaMixin(object):
# If there is a submission available for the requested student, present
# it. If not, there will be no other information to collect.
submissions = submission_api.get_submissions(student_item, 1)
if submissions:
submission = submissions[0]
submission_uuid = submission['uuid']
......
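Read together, the hunks above amount to a check-out-then-render flow for the new handler. The following is a minimal sketch of that flow for reference only, assuming the staff_api, submission_api, and render helpers already imported by StaffAreaMixin; exception handling is abbreviated, and the way the learner identifier is pulled from the checked-out submission is an assumption, since those lines are not visible in the hunks above.

@XBlock.handler
@require_course_staff("STUDENT_GRADE")
def render_staff_grade_form(self, data, suffix=''):  # sketch only, not the committed code
    """Render the form for staff-grading the next available learner submission."""
    student_item = self.get_student_item_dict()
    course_id = student_item['course_id']
    item_id = student_item.get('item_id')
    staff_id = student_item['student_id']

    # Checks out a submission for grading by this staff member;
    # returns None when nothing is available to grade.
    submission_to_assess = staff_api.get_submission_to_assess(course_id, item_id, staff_id)
    if submission_to_assess is None:
        return self.render_error(
            self._(u"No other learner responses are available for grading at this time.")
        )

    submission = submission_api.get_submission_and_student(submission_to_assess['uuid'])
    if not submission:
        return self.render_error(self._(u"Error loading the checked out learner response."))

    # Assumption: the learner identifier comes from the checked-out submission's
    # student_item; the exact lookup is outside the displayed hunks.
    learner_id = submission['student_item']['student_id']
    context = self.get_student_submission_context(learner_id, submission)
    path = 'openassessmentblock/staff_area/oa_staff_grade_learners_assessment.html'
    return self.render_assessment(path, context)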
Two source diffs could not be displayed because they are too large.
......@@ -689,6 +689,8 @@
"template": "openassessmentblock/staff_area/oa_staff_area.html",
"context": {
"staff_assessment_required": true,
"staff_assessment_ungraded": 10,
"staff_assessment_in_progress": 2,
"status_counts": {
"self": 1,
"peer": 2,
......@@ -718,6 +720,40 @@
"output": "oa_staff_area_full_grading.html"
},
{
"template": "openassessmentblock/staff_area/oa_staff_area.html",
"context": {
"staff_assessment_required": true,
"staff_assessment_ungraded": 9,
"staff_assessment_in_progress": 0,
"status_counts": {
"self": 1,
"peer": 2,
"waiting": 3,
"done": 4
},
"num_submissions": 10,
"item_id": "test_item",
"step_dates": [
{
"step": "submission",
"start": "2014-01-01",
"due": "N/A"
},
{
"step": "peer",
"start": "2014-02-02",
"due": "N/A"
},
{
"step": "self",
"start": "2014-03-03",
"due": "2015-04-05"
}
]
},
"output": "oa_staff_area_full_grading_2.html"
},
{
"template": "openassessmentblock/staff_area/oa_student_info.html",
"context": {
"rubric_criteria": [
......
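The two new context values above (staff_assessment_ungraded and staff_assessment_in_progress) feed the counts banner that the JavaScript tests later assert against ("10 Available and 2 Checked Out"). The oa_staff_area.html changes are not among the hunks shown, so the fragment below is only a hypothetical sketch of how the template could turn those values into that string; the .staff__grade__value class name is taken from the test selectors.

{% load i18n %}
<span class="staff__grade__value">
{% blocktrans with ungraded=staff_assessment_ungraded in_progress=staff_assessment_in_progress %}{{ ungraded }} Available and {{ in_progress }} Checked Out{% endblocktrans %}
</span>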
......@@ -364,9 +364,12 @@ describe('OpenAssessment.StaffAreaView', function() {
});
};
var getAssessment = function(element) {
return $('.openassessment__staff-tools .wrapper--staff-assessment', element);
};
var submitAssessment = function(staffArea) {
var $assessment = $('.wrapper--staff-assessment', staffArea.element),
$submitButton = $('.action--submit', $assessment);
var $submitButton = $('.action--submit', getAssessment(staffArea.element));
$submitButton.click();
};
......@@ -374,7 +377,7 @@ describe('OpenAssessment.StaffAreaView', function() {
var staffArea = createStaffArea(),
$assessment, $submitButton;
chooseStudent(staffArea, 'testStudent');
$assessment = $('.wrapper--staff-assessment', staffArea.element);
$assessment = getAssessment(staffArea.element);
$submitButton = $('.action--submit', $assessment);
expect($submitButton).toHaveClass('is--disabled');
fillAssessment($assessment);
......@@ -394,7 +397,7 @@ describe('OpenAssessment.StaffAreaView', function() {
);
// Fill in and submit the assessment
$assessment = $('.wrapper--staff-assessment', staffArea.element);
$assessment = getAssessment(staffArea.element);
fillAssessment($assessment);
server.studentTemplate = 'oa_staff_graded_submission.html';
submitAssessment(staffArea);
......@@ -412,7 +415,7 @@ describe('OpenAssessment.StaffAreaView', function() {
serverErrorMessage = 'Mock server error',
$assessment;
chooseStudent(staffArea, 'testStudent');
$assessment = $('.wrapper--staff-assessment', staffArea.element);
$assessment = getAssessment(staffArea.element);
fillAssessment($assessment);
// Submit the assessment but return a server error message
......@@ -445,7 +448,7 @@ describe('OpenAssessment.StaffAreaView', function() {
describe('Grade Available Responses', function() {
var showInstructorAssessmentForm = function(staffArea) {
$('.staff__grade__control', staffArea.element).click();
$('.staff__grade__show-form', staffArea.element).click();
};
var fillAssessment = function($assessment) {
......@@ -455,9 +458,12 @@ describe('OpenAssessment.StaffAreaView', function() {
});
};
var getAssessment = function(element) {
return $('.openassessment__staff-grading .wrapper--staff-assessment', element);
};
var submitAssessment = function(staffArea) {
var $assessment = $('.wrapper--staff-assessment', staffArea.element),
$submitButton = $('.action--submit', $assessment);
var $submitButton = $('.action--submit', getAssessment(staffArea.element));
$submitButton.click();
};
......@@ -469,7 +475,7 @@ describe('OpenAssessment.StaffAreaView', function() {
var staffArea = createStaffArea({}, 'oa_staff_area_full_grading.html'),
$assessment, $submitButtons;
showInstructorAssessmentForm(staffArea);
$assessment = $('.wrapper--staff-assessment', staffArea.element);
$assessment = getAssessment(staffArea.element);
$submitButtons = $('.action--submit', $assessment);
expect($submitButtons.length).toBe(2);
expect($submitButtons).toHaveClass('is--disabled');
......@@ -481,7 +487,7 @@ describe('OpenAssessment.StaffAreaView', function() {
var staffArea = createStaffArea({}, 'oa_staff_area_full_grading.html'),
$assessment, $gradeSection;
showInstructorAssessmentForm(staffArea);
$assessment = $('.wrapper--staff-assessment', staffArea.element);
$assessment = getAssessment(staffArea.element);
// Verify that the submission is shown for the first user
expect($('.staff-assessment__display__title', $assessment).text().trim()).toBe(
......@@ -508,7 +514,7 @@ describe('OpenAssessment.StaffAreaView', function() {
);
// Fill in and click the button to submit and request another submission
$assessment = $('.wrapper--staff-assessment', staffArea.element);
$assessment = getAssessment(staffArea.element);
fillAssessment($assessment);
server.staffGradeFormTemplate = 'oa_staff_grade_learners_assessment_2.html';
$('.continue_grading--action', $assessment).click();
......@@ -534,7 +540,7 @@ describe('OpenAssessment.StaffAreaView', function() {
serverErrorMessage = 'Mock server error',
$assessment;
showInstructorAssessmentForm(staffArea);
$assessment = $('.wrapper--staff-assessment', staffArea.element);
$assessment = getAssessment(staffArea.element);
fillAssessment($assessment);
// Submit the assessment but return a server error message
......@@ -544,5 +550,25 @@ describe('OpenAssessment.StaffAreaView', function() {
// Verify that the error message is shown
expect($('.staff-grade-error', staffArea.element).first().text().trim()).toBe(serverErrorMessage);
});
it('shows the number of ungraded and checked out submissions', function() {
var staffArea = createStaffArea({}, 'oa_staff_area_full_grading.html'),
$assessment;
expect($('.staff__grade__value').text().trim()).toBe("10 Available and 2 Checked Out");
showInstructorAssessmentForm(staffArea);
// Render a different staff area template the next time around so the counts can update.
server.staffAreaTemplate = 'oa_staff_area_full_grading_2.html';
// Fill in the assessment and make sure the counts re-render.
$assessment = getAssessment(staffArea.element);
fillAssessment($assessment);
server.staffGradeFormTemplate = 'oa_staff_grade_learners_assessment_2.html';
submitAssessment(staffArea);
expect($('.staff__grade__value').text().trim()).toBe("9 Available and 0 Checked Out");
});
});
});
......@@ -131,7 +131,7 @@ if (typeof OpenAssessment.Server === "undefined" || !OpenAssessment.Server) {
}).done(function(data) {
defer.resolveWith(this, [data]);
}).fail(function() {
defer.rejectWith(this, [gettext('The staff grade form could not be loaded.')]);
defer.rejectWith(this, [gettext('The staff assessment form could not be loaded.')]);
});
}).promise();
},
......
......@@ -157,15 +157,13 @@
}
// staff grade header
// Styling for staff grade tab ("Grade Available Responses").
.ui-staff {
.staff__grade__control {
padding: 0 (3*$baseline-h/4);
border-top: ($baseline-v/4) solid $color-decorative-tertiary;
background: $bg-content;
// step title
h3.staff__grade__title {
@include text-align(left);
@include float(none);
......@@ -174,14 +172,13 @@
display: block;
width: 100%;
.staff__grade__label {
.staff__grade__show-form {
@extend %t-superheading;
text-transform: none;
letter-spacing: normal;
}
}
// staff grade status
.staff__grade__status {
display: inline-block;
margin-top: ($baseline-v/4);
......@@ -241,8 +238,8 @@
}
}
// Override the default color for h3 (for elements that can be toggled).
.ui-toggle-visibility .ui-toggle-visibility__control .staff__grade__title{
// Override the default color for h3 (for elements that can be toggled).
.ui-toggle-visibility .ui-toggle-visibility__control .staff__grade__title {
color: $action-primary-color;
}
}
......
<openassessment>
<title>Open Assessment Test</title>
<prompts>
<prompt>
<description>Given the state of the world today, what do you think should be done to combat poverty? Please answer in a short essay of 200-300 words.</description>
</prompt>
<prompt>
<description>Given the state of the world today, what do you think should be done to combat pollution?</description>
</prompt>
</prompts>
<rubric>
<criterion>
<name>Concise</name>
<prompt>How concise is it?</prompt>
<option points="0">
<name>Neal Stephenson (late)</name>
<explanation>Neal Stephenson explanation</explanation>
</option>
<option points="1">
<name>HP Lovecraft</name>
<explanation>HP Lovecraft explanation</explanation>
</option>
<option points="3">
<name>Robert Heinlein</name>
<explanation>Robert Heinlein explanation</explanation>
</option>
<option points="4">
<name>Neal Stephenson (early)</name>
<explanation>Neal Stephenson (early) explanation</explanation>
</option>
<option points="5">
<name>Earnest Hemingway</name>
<explanation>Earnest Hemingway explanation</explanation>
</option>
</criterion>
<criterion>
<name>Clear-headed</name>
<prompt>How clear is the thinking?</prompt>
<option points="0">
<name>Yogi Berra</name>
<explanation>Yogi Berra explanation</explanation>
</option>
<option points="1">
<name>Hunter S. Thompson</name>
<explanation>Hunter S. Thompson explanation</explanation>
</option>
<option points="2">
<name>Robert Heinlein</name>
<explanation>Robert Heinlein explanation</explanation>
</option>
<option points="3">
<name>Isaac Asimov</name>
<explanation>Isaac Asimov explanation</explanation>
</option>
<option points="10">
<name>Spock</name>
<explanation>Spock explanation</explanation>
</option>
</criterion>
<criterion>
<name>Form</name>
<prompt>Lastly, how is its form? Punctuation, grammar, and spelling all count.</prompt>
<option points="0">
<name>lolcats</name>
<explanation>lolcats explanation</explanation>
</option>
<option points="1">
<name>Facebook</name>
<explanation>Facebook explanation</explanation>
</option>
<option points="2">
<name>Reddit</name>
<explanation>Reddit explanation</explanation>
</option>
<option points="3">
<name>metafilter</name>
<explanation>metafilter explanation</explanation>
</option>
<option points="4">
<name>Usenet, 1996</name>
<explanation>Usenet, 1996 explanation</explanation>
</option>
<option points="5">
<name>The Elements of Style</name>
<explanation>The Elements of Style explanation</explanation>
</option>
</criterion>
</rubric>
<assessments>
<assessment name="staff-assessment" />
</assessments>
</openassessment>
......@@ -608,6 +608,88 @@ class TestCourseStaff(XBlockHandlerTestCase):
self.assertIn("The learner submission has been removed from peer", resp['msg'])
self.assertEqual(True, resp['success'])
@scenario('data/staff_grade_scenario.xml', user_id='Bob')
def test_staff_assessment_counts(self, xblock):
"""
Verify the staff assessment counts (ungraded and checked out)
as shown in the staff grading tool when staff assessment is required.
"""
_, context = xblock.get_staff_path_and_context()
self._verify_staff_assessment_context(context, True, 0, 0)
# Simulate that we are course staff
xblock.xmodule_runtime = self._create_mock_runtime(
xblock.scope_ids.usage_id, True, False, "Bob"
)
bob_item = STUDENT_ITEM.copy()
bob_item["item_id"] = xblock.scope_ids.usage_id
# Create a submission for Bob, and corresponding workflow.
self._create_submission(bob_item, {'text': "Bob Answer"}, [])
# Verify the count as shown in the staff grading tool.
_, context = xblock.get_staff_path_and_context()
self._verify_staff_assessment_context(context, True, 1, 0)
# Check out the assessment for grading and ensure that the count changes.
self.request(xblock, 'render_staff_grade_form', json.dumps({}))
_, context = xblock.get_staff_path_and_context()
self._verify_staff_assessment_context(context, True, 0, 1)
@scenario('data/example_based_assessment.xml', user_id='Bob')
def test_staff_assessment_counts_not_required(self, xblock):
"""
Verify the staff assessment counts (ungraded and checked out) are
not present in the context when staff assessment is not required.
"""
xblock.xmodule_runtime = self._create_mock_runtime(
xblock.scope_ids.usage_id, True, True, "Bob"
)
_, context = xblock.get_staff_path_and_context()
self._verify_staff_assessment_context(context, False)
@scenario('data/staff_grade_scenario.xml', user_id='Bob')
def test_staff_assessment_form(self, xblock):
"""
Smoke test that the staff assessment form renders when staff assessment
is required.
"""
permission_denied = "You do not have permission to access ORA staff grading."
no_submissions_available = "No other learner responses are available for grading at this time."
submission_text = "Grade me, please!"
resp = self.request(xblock, 'render_staff_grade_form', json.dumps({})).decode('utf-8')
self.assertIn(permission_denied, resp)
self.assertNotIn(no_submissions_available, resp)
# Simulate that we are course staff
xblock.xmodule_runtime = self._create_mock_runtime(
xblock.scope_ids.usage_id, True, False, "Bob"
)
resp = self.request(xblock, 'render_staff_grade_form', json.dumps({})).decode('utf-8')
self.assertNotIn(permission_denied, resp)
self.assertIn(no_submissions_available, resp)
self.assertNotIn(submission_text, resp)
bob_item = STUDENT_ITEM.copy()
bob_item["item_id"] = xblock.scope_ids.usage_id
# Create a submission for Bob, and corresponding workflow.
self._create_submission(bob_item, {'text': submission_text}, [])
resp = self.request(xblock, 'render_staff_grade_form', json.dumps({})).decode('utf-8')
self.assertNotIn(no_submissions_available, resp)
self.assertIn(submission_text, resp)
def _verify_staff_assessment_context(self, context, required, ungraded=None, in_progress=None):
self.assertEqual(required, context['staff_assessment_required'])
if not required:
self.assertNotIn('staff_assessment_ungraded', context)
self.assertNotIn('staff_assessment_in_progress', context)
else:
self.assertEqual(ungraded, context['staff_assessment_ungraded'])
self.assertEqual(in_progress, context['staff_assessment_in_progress'])
def _create_mock_runtime(
self,
item_id,
......