Commit 1ae9872d by muzaffaryousaf

Add functionality to remove a submission from the peer grading pool.

TNL-900
parent d252121e
@@ -12,12 +12,12 @@ from dogapi import dog_stats_api
 from openassessment.assessment.models import (
     Assessment, AssessmentFeedback, AssessmentPart,
     InvalidRubricSelection, PeerWorkflow, PeerWorkflowItem,
-)
+    PeerWorkflowCancellation)
 from openassessment.assessment.serializers import (
     AssessmentFeedbackSerializer, RubricSerializer,
     full_assessment_dict, rubric_from_dict, serialize_assessments,
-    InvalidRubric
-)
+    InvalidRubric,
+    PeerWorkflowCancellationSerializer)
 from openassessment.assessment.errors import (
     PeerAssessmentRequestError, PeerAssessmentWorkflowError, PeerAssessmentInternalError
 )
@@ -945,3 +945,54 @@ def _log_workflow(submission_uuid, workflow):
         tags.append(u"overgrading")
     dog_stats_api.increment('openassessment.assessment.peer_workflow.count', tags=tags)
+
+
+def cancel_submission_peer_workflow(submission_uuid, comments, cancelled_by_id):
+    """
+    Add an entry in the PeerWorkflowCancellation table for a PeerWorkflow.
+    A PeerWorkflow that has been cancelled is no longer included in the
+    peer grading pool.
+
+    Args:
+        submission_uuid (str): The UUID of the peer workflow.
+        comments (unicode): The reason for cancellation.
+        cancelled_by_id (unicode): The ID of the user who cancelled the peer workflow.
+
+    Returns:
+        PeerWorkflowCancellation
+    """
+    try:
+        workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
+        return PeerWorkflowCancellation.create(workflow=workflow, comments=comments, cancelled_by_id=cancelled_by_id)
+    except (
+        PeerWorkflow.DoesNotExist,
+        PeerWorkflow.MultipleObjectsReturned,
+    ):
+        error_message = u"Error finding workflow for submission UUID {}.".format(submission_uuid)
+        logger.exception(error_message)
+        raise PeerAssessmentWorkflowError(error_message)
+    except DatabaseError:
+        error_message = u"Error creating peer workflow cancellation for submission UUID {}.".format(submission_uuid)
+        logger.exception(error_message)
+        raise PeerAssessmentInternalError(error_message)
+
+
+def get_submission_cancellation(submission_uuid):
+    """
+    Get cancellation information for a submission's peer workflow.
+
+    Args:
+        submission_uuid (str): The UUID of the peer workflow.
+
+    Returns:
+        dict: Serialized PeerWorkflowCancellation for the submission,
+            or None if the submission has not been cancelled.
+    """
+    try:
+        workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
+        workflow_cancellation = workflow.cancellation.get()
+        return PeerWorkflowCancellationSerializer(workflow_cancellation).data
+    except (
+        PeerWorkflow.DoesNotExist,
+        PeerWorkflowCancellation.DoesNotExist,
+        PeerWorkflowCancellation.MultipleObjectsReturned
+    ):
+        return None
+    except DatabaseError:
+        error_message = u"Error finding peer workflow cancellation for submission UUID {}.".format(submission_uuid)
+        logger.exception(error_message)
+        raise PeerAssessmentInternalError(error_message)
\ No newline at end of file
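
For orientation, a minimal sketch of how the two new API functions above might be called from other server-side code. The `submission_uuid` and `staff_id` values are illustrative placeholders, not part of this commit.

    from openassessment.assessment.api import peer as peer_api
    from openassessment.assessment.errors import PeerAssessmentWorkflowError

    # Hypothetical values, for illustration only.
    submission_uuid = "some-submission-uuid"
    staff_id = "staff-anonymous-id"

    try:
        # Remove the submission from the peer grading pool.
        peer_api.cancel_submission_peer_workflow(
            submission_uuid=submission_uuid,
            comments=u"Submission contained inappropriate content.",
            cancelled_by_id=staff_id,
        )
    except PeerAssessmentWorkflowError:
        # No peer workflow exists for this submission UUID.
        pass

    # Later, look up the cancellation record; returns None if the
    # submission was never cancelled.
    cancellation = peer_api.get_submission_cancellation(submission_uuid)
    if cancellation is not None:
        removed_by = cancellation['cancelled_by_id']
        reason = cancellation['comments']
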
@@ -206,7 +206,8 @@ class PeerWorkflow(models.Model):
         Before retrieving a new submission for a peer assessor, check to see if that
         assessor already has a submission out for assessment. If an unfinished
-        assessment is found that has not expired, return the associated submission.
+        assessment is found that has not expired or has not been cancelled,
+        return the associated submission.

         TODO: If a user begins an assessment, then resubmits, this will never find
         the unfinished assessment. Is this OK?
@@ -221,13 +222,12 @@
         """
         oldest_acceptable = now() - self.TIME_LIMIT
-        items = list(self.graded.all().order_by("-started_at", "-id"))
+        items = list(self.graded.all().select_related('author').order_by("-started_at", "-id"))
         valid_open_items = []
         completed_sub_uuids = []
         # First, remove all completed items.
         for item in items:
-            if item.assessment is not None:
+            if item.assessment is not None or item.author.cancellation.exists():
                 completed_sub_uuids.append(item.submission_uuid)
             else:
                 valid_open_items.append(item)
@@ -266,6 +266,7 @@
             # 3) Is not something you have already scored.
             # 4) Does not have a combination of completed assessments or open
             #    assessments equal to or more than the requirement.
+            # 5) Has not been cancelled.
             try:
                 peer_workflows = list(PeerWorkflow.objects.raw(
                     "select pw.id, pw.submission_uuid "
@@ -274,6 +275,11 @@
                     "and pw.course_id=%s "
                     "and pw.student_id<>%s "
                     "and pw.grading_completed_at is NULL "
+                    "and ("
+                    "   select count(pwc.id)"
+                    "   from assessment_peerworkflowcancellation pwc"
+                    "   where pwc.workflow_id=pw.id"
+                    ") = 0 "
                     "and pw.id not in ("
                     "   select pwi.author_id "
                     "   from assessment_peerworkflowitem pwi "
@@ -318,6 +324,7 @@
         # that:
         # 1) Does not belong to you
         # 2) Is not something you have already scored
+        # 3) Has not been cancelled.
         try:
             query = list(PeerWorkflow.objects.raw(
                 "select pw.id, pw.submission_uuid "
@@ -325,6 +332,11 @@
                 "where course_id=%s "
                 "and item_id=%s "
                 "and student_id<>%s "
+                "and ("
+                "   select count(pwc.id)"
+                "   from assessment_peerworkflowcancellation pwc"
+                "   where pwc.workflow_id=pw.id"
+                ") = 0 "
                 "and pw.id not in ( "
                 "select pwi.author_id "
                 "from assessment_peerworkflowitem pwi "
@@ -461,3 +473,51 @@ class PeerWorkflowItem(models.Model):
     def __unicode__(self):
         return repr(self)
+
+
+class PeerWorkflowCancellation(models.Model):
+    """Model for tracking cancellations of peer workflows.
+
+    It is created when a staff member requests removal of a submission
+    from the peer grading pool.
+    """
+    workflow = models.ForeignKey(PeerWorkflow, related_name='cancellation')
+    comments = models.TextField(max_length=10000)
+    cancelled_by_id = models.CharField(max_length=40, db_index=True)
+    created_at = models.DateTimeField(default=now, db_index=True)
+
+    class Meta:
+        ordering = ["created_at", "id"]
+        app_label = "assessment"
+
+    def __repr__(self):
+        return (
+            "PeerWorkflowCancellation(workflow={0.workflow}, "
+            "comments={0.comments}, cancelled_by_id={0.cancelled_by_id}, "
+            "created_at={0.created_at})"
+        ).format(self)
+
+    def __unicode__(self):
+        return repr(self)
+
+    @classmethod
+    def create(cls, workflow, comments, cancelled_by_id):
+        """
+        Create a new PeerWorkflowCancellation object.
+
+        Args:
+            workflow (PeerWorkflow): The cancelled peer workflow.
+            comments (unicode): The reason for cancellation.
+            cancelled_by_id (unicode): The ID of the user who cancelled the peer workflow.
+
+        Returns:
+            PeerWorkflowCancellation
+        """
+        workflow_params = {
+            'workflow': workflow,
+            'comments': comments,
+            'cancelled_by_id': cancelled_by_id,
+        }
+        return cls.objects.create(**workflow_params)
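
A quick sketch of how the new model ties into PeerWorkflow. The `related_name='cancellation'` reverse relation is what `find_active_assessments()` and the raw SQL above rely on to exclude cancelled workflows; the workflow lookup below is illustrative and assumes a workflow already exists for the submission.

    from openassessment.assessment.models import PeerWorkflow, PeerWorkflowCancellation

    # Hypothetical lookup, for illustration only.
    workflow = PeerWorkflow.objects.get(submission_uuid="some-submission-uuid")

    # Record the cancellation.
    PeerWorkflowCancellation.create(
        workflow=workflow,
        comments=u"Submission contained inappropriate content.",
        cancelled_by_id=u"staff-anonymous-id",
    )

    # The reverse relation used above to keep the submission out of the pool.
    assert workflow.cancellation.exists()
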
@@ -5,8 +5,8 @@ from rest_framework import serializers
 from .base import AssessmentSerializer
 from openassessment.assessment.models import (
     AssessmentFeedback, AssessmentFeedbackOption,
-    PeerWorkflow, PeerWorkflowItem
-)
+    PeerWorkflow, PeerWorkflowItem,
+    PeerWorkflowCancellation)


 class AssessmentFeedbackOptionSerializer(serializers.ModelSerializer):

@@ -71,3 +71,17 @@ class PeerWorkflowItemSerializer(serializers.ModelSerializer):
             'assessment',
             'scored'
         )
+
+
+class PeerWorkflowCancellationSerializer(serializers.ModelSerializer):
+    """
+    Serialize a `PeerWorkflowCancellation` model.
+    """
+
+    class Meta:
+        model = PeerWorkflowCancellation
+        fields = (
+            'comments',
+            'cancelled_by_id',
+            'created_at',
+        )
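
The new serializer is what `get_submission_cancellation()` in the peer API returns data from; a short sketch of its output shape, with illustrative field values.

    from openassessment.assessment.serializers import PeerWorkflowCancellationSerializer

    # `workflow_cancellation` is a PeerWorkflowCancellation instance (illustrative).
    data = PeerWorkflowCancellationSerializer(workflow_cancellation).data
    # data == {
    #     'comments': u'Submission contained inappropriate content.',
    #     'cancelled_by_id': u'staff-anonymous-id',
    #     'created_at': datetime(...),
    # }
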
@@ -13,7 +13,7 @@ from openassessment.test_utils import CacheResetTest
 from openassessment.assessment.api import peer as peer_api
 from openassessment.assessment.models import (
     Assessment, AssessmentPart, AssessmentFeedback, AssessmentFeedbackOption,
-    PeerWorkflow, PeerWorkflowItem
+    PeerWorkflow, PeerWorkflowCancellation, PeerWorkflowItem
 )
 from openassessment.workflow import api as workflow_api
 from submissions import api as sub_api
@@ -151,7 +151,7 @@ class TestPeerApi(CacheResetTest):
     Tests for the peer assessment API functions.
     """
-    CREATE_ASSESSMENT_NUM_QUERIES = 58
+    CREATE_ASSESSMENT_NUM_QUERIES = 59

     def test_create_assessment_points(self):
         self._create_student_and_submission("Tim", "Tim's answer")
@@ -882,6 +882,15 @@
         item = buffy_workflow.find_active_assessments()
         self.assertEqual(xander_answer["uuid"], item.submission_uuid)

+        # Cancel Xander's submission.
+        xander_workflow = PeerWorkflow.get_by_submission_uuid(xander_answer['uuid'])
+        PeerWorkflowCancellation.create(workflow=xander_workflow, comments='test comments', cancelled_by_id=_['student_id'])
+
+        # Buffy is no longer actively reviewing Xander's submission;
+        # she cannot get the cancelled submission to assess.
+        item = buffy_workflow.find_active_assessments()
+        self.assertIsNone(item)
+
     def test_get_workflow_by_uuid(self):
         buffy_answer, _ = self._create_student_and_submission("Buffy", "Buffy's answer")
         self._create_student_and_submission("Xander", "Xander's answer")
@@ -903,6 +912,16 @@
         submission_uuid = buffy_workflow.get_submission_for_review(3)
         self.assertEqual(xander_answer["uuid"], submission_uuid)

+        # Cancel Xander's submission.
+        xander_workflow = PeerWorkflow.get_by_submission_uuid(xander_answer['uuid'])
+        PeerWorkflowCancellation.create(workflow=xander_workflow, comments='test comments',
+                                        cancelled_by_id=_['student_id'])
+
+        # Buffy should no longer receive Xander's submission for review;
+        # the cancelled submission UUID is not returned.
+        submission_uuid = buffy_workflow.get_submission_for_review(3)
+        self.assertNotEqual(xander_answer["uuid"], submission_uuid)
+
     def test_get_submission_for_over_grading(self):
         buffy_answer, _ = self._create_student_and_submission("Buffy", "Buffy's answer")
         xander_answer, _ = self._create_student_and_submission("Xander", "Xander's answer")
...
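
A hedged sketch of a companion test for `get_submission_cancellation`, written in the style of the tests above; it follows the existing helper conventions but is not part of this commit.

    def test_get_submission_cancellation(self):
        # Create a submission and cancel its peer workflow.
        buffy_answer, buffy = self._create_student_and_submission("Buffy", "Buffy's answer")
        buffy_workflow = PeerWorkflow.get_by_submission_uuid(buffy_answer['uuid'])
        PeerWorkflowCancellation.create(
            workflow=buffy_workflow, comments='test comments', cancelled_by_id=buffy['student_id']
        )

        # The API returns the serialized cancellation record.
        cancellation = peer_api.get_submission_cancellation(buffy_answer['uuid'])
        self.assertIsNotNone(cancellation)
        self.assertEqual('test comments', cancellation['comments'])

        # A submission that was never cancelled returns None.
        xander_answer, _ = self._create_student_and_submission("Xander", "Xander's answer")
        self.assertIsNone(peer_api.get_submission_cancellation(xander_answer['uuid']))
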
@@ -11,7 +11,17 @@
 <div class="step__content">
     <h3 class="title">{% trans "Student Response" %}</h3>
     <div class="student__answer__display__content">
-        {{ submission.answer.text|linebreaks }}
+        {% if submission_cancellation %}
+            {% blocktrans with removed_by_username=submission_cancellation.cancelled_by removed_datetime=submission_cancellation.created_at|utc|date:"N j, Y H:i e" %}
+                Student submission removed by {{ removed_by_username }} on {{ removed_datetime }}
+            {% endblocktrans %}
+            <br>
+            {% blocktrans with comments=submission_cancellation.comments %}
+                Comments: {{ comments }}
+            {% endblocktrans %}
+        {% else %}
+            {{ submission.answer.text|linebreaks }}
+        {% endif %}
     </div>

     {% if submission.image_url %}
@@ -24,6 +34,60 @@
     </div>
 </div>

+{% if not submission_cancellation %}
+<div id="openassessment__staff-info__cancel__submission"
+     class="openassessment__staff-info__cancel__submission wrapper--staff-info wrapper--ui-staff wrapper--ui--collapse">
+    <div class="ui-staff ui-toggle-visibility is--collapsed">
+        <h2 class="staff-info__title ui-staff__title ui-toggle-visibility__control">
+            <i class="ico icon-caret-right"></i>
+            <span class="staff-info__collapse__title">{% trans "Remove submission from peer grading" %}</span>
+        </h2>
+        <div class="staff-info__cancel-submission__content ui-toggle-visibility__content">
+            <div class="ui-staff__content__section">
+                <div class="wrapper--input">
+                    <form id="openassessment_staff_cancel_submission_form"
+                          data-submission-uuid="{{ submission.uuid }}">
+                        <ul class="list list--actions">
+                            <li>
+                                <div class="has--warnings">
+                                    <div class="warning">
+                                        {% trans "Caution: Removing a student's submission is irreversible. It should only be used in cases where the student's submission was inappropriate." %}
+                                    </div>
+                                </div>
+                            </li>
+                            <li>
+                                <label for="staff-info__cancel-submission__comments"
+                                       class="label">{% trans "Comments:" %}</label>
+                            </li>
+                            <li>
+                                <textarea
+                                    id="staff-info__cancel-submission__comments"
+                                    class="cancel_submission_comments"
+                                    value=""
+                                    maxlength="10000"></textarea>
+                            </li>
+                        </ul>
+                        <ul class="list list--actions">
+                            <li class="list--actions__item">
+                                <a data-submission-uuid="{{ submission.uuid }}" aria-role="button" href=""
+                                   id="submit_cancel_submission" class="action--submit is--disabled">
+                                    <span class="copy">{% trans "Remove submission" %}</span>
+                                </a>
+                                <div class="cancel-submission-error"></div>
+                            </li>
+                        </ul>
+                    </form>
+                </div>
+            </div>
+        </div>
+    </div>
+</div>
+{% endif %}
 {% if peer_assessments %}
 <div class="staff-info__status ui-staff__content__section">
     <h3 class="title">{% trans "Peer Assessments for This Student" %}</h3>

@@ -64,7 +128,7 @@
         {% endwith %}
     {% endfor %}
     </div>
 {% endif %}

 {% if submitted_assessments %}
 <div class="staff-info__status ui-staff__content__section">
...
@@ -211,7 +211,6 @@ class OpenAssessmentBlock(
         """
         item_id = self._serialize_opaque_key(self.scope_ids.usage_id)
-
         # This is not the real way course_ids should work, but this is a
         # temporary expediency for LMS integration
         if hasattr(self, "xmodule_runtime"):
@@ -746,3 +745,7 @@ class OpenAssessmentBlock(
             return key.to_deprecated_string()
         else:
             return unicode(key)
+
+    def get_username(self, anonymous_user_id):
+        """Return the username for the given anonymous user ID, if the LMS runtime is available."""
+        if hasattr(self, "xmodule_runtime"):
+            return self.xmodule_runtime.get_real_user(anonymous_user_id).username
@@ -7,6 +7,9 @@ from functools import wraps
 import logging

 from xblock.core import XBlock
+from openassessment.assessment.errors import (
+    PeerAssessmentInternalError, PeerAssessmentWorkflowError,
+)
 from openassessment.assessment.errors.ai import AIError
 from openassessment.xblock.resolve_dates import DISTANT_PAST, DISTANT_FUTURE
 from openassessment.xblock.data_conversion import (
@@ -35,7 +38,7 @@ def require_global_admin(error_key):
         Decorated function
     """
     def _decorator(func):  # pylint: disable=C0111
         @wraps(func)
         def _wrapped(xblock, *args, **kwargs):  # pylint: disable=C0111
             permission_errors = {
@@ -86,7 +89,7 @@ class StaffInfoMixin(object):
     @XBlock.handler
     @require_course_staff("STAFF_INFO")
     def render_staff_info(self, data, suffix=''):  # pylint: disable=W0613
         """
         Template context dictionary for course staff debug panel.
@XBlock.json_handler @XBlock.json_handler
@require_global_admin("SCHEDULE_TRAINING") @require_global_admin("SCHEDULE_TRAINING")
def schedule_training(self, data, suffix=''): # pylint: disable=W0613 def schedule_training(self, data, suffix=''): # pylint: disable=W0613
""" """
Schedule a new training task for example-based grading. Schedule a new training task for example-based grading.
""" """
...@@ -194,7 +197,7 @@ class StaffInfoMixin(object): ...@@ -194,7 +197,7 @@ class StaffInfoMixin(object):
@XBlock.handler @XBlock.handler
@require_course_staff("STUDENT_INFO") @require_course_staff("STUDENT_INFO")
def render_student_info(self, data, suffix=''): # pylint: disable=W0613 def render_student_info(self, data, suffix=''): # pylint: disable=W0613
""" """
Renders all relative information for a specific student's workflow. Renders all relative information for a specific student's workflow.
...@@ -204,9 +207,13 @@ class StaffInfoMixin(object): ...@@ -204,9 +207,13 @@ class StaffInfoMixin(object):
Must be course staff to render this view. Must be course staff to render this view.
""" """
student_id = data.params.get('student_id', '') try:
path, context = self.get_student_info_path_and_context(student_id) student_id = data.params.get('student_id', '')
return self.render_assessment(path, context) path, context = self.get_student_info_path_and_context(student_id)
return self.render_assessment(path, context)
except PeerAssessmentInternalError as ex:
return self.render_error(self._(ex.message))
def get_student_info_path_and_context(self, student_id): def get_student_info_path_and_context(self, student_id):
""" """
@@ -262,8 +269,13 @@
         if "example-based-assessment" in assessment_steps:
             example_based_assessment = ai_api.get_latest_assessment(submission_uuid)

+        submission_cancellation = peer_api.get_submission_cancellation(submission_uuid)
+        if submission_cancellation:
+            submission_cancellation['cancelled_by'] = self.get_username(submission_cancellation['cancelled_by_id'])
+
         context = {
             'submission': submission,
+            'submission_cancellation': submission_cancellation,
             'peer_assessments': peer_assessments,
             'submitted_assessments': submitted_assessments,
             'self_assessment': self_assessment,
@@ -317,3 +329,45 @@
                 'success': False,
                 'msg': self._(u"An error occurred while rescheduling tasks: {}".format(ex))
             }
+
+    @XBlock.json_handler
+    @require_course_staff("STUDENT_INFO")
+    def cancel_submission(self, data, suffix=''):
+        """
+        Cancel the peer workflow for a particular submission, removing it
+        from the peer grading pool.
+
+        Args:
+            data (dict): Must contain two attributes: `submission_uuid`, the ID
+                of the submission to be removed from the grading pool, and
+                `comments`, the reason given by the user.
+            suffix (not used)
+
+        Returns:
+            A JSON-serializable dict with the following elements:
+                'success': (bool) Whether the workflow was cancelled successfully.
+                'msg': The response message (an error or success message).
+        """
+        submission_uuid = data.get('submission_uuid')
+        comments = data.get('comments')
+
+        if not comments:
+            return {"success": False, "msg": self._(u'Please enter a valid reason to remove the submission.')}
+
+        student_item_dict = self.get_student_item_dict()
+        try:
+            peer_api.cancel_submission_peer_workflow(
+                submission_uuid=submission_uuid, comments=comments, cancelled_by_id=student_item_dict['student_id']
+            )
+            return {"success": True, 'msg': self._(u"Student submission was removed from the peer grading pool."
+                                                   u" If you'd like to allow the student to submit a new response,"
+                                                   u" please also reset the student state of the problem from"
+                                                   u" the Instructor Dashboard.")}
+        except (
+            PeerAssessmentWorkflowError,
+            PeerAssessmentInternalError
+        ) as ex:
+            msg = ex.message
+            logger.exception(msg)
+            return {"success": False, 'msg': msg}
This source diff could not be displayed because it is too large.
@@ -656,6 +656,19 @@
         "output": "oa_staff_info.html"
     },
     {
+        "template": "openassessmentblock/staff_debug/student_info.html",
+        "context": {
+            "submission": {
+                "image_url": "/test-url",
+                "answer": {
+                    "text": "testing response text"
+                }
+            },
+            "submission_cancellation": 0
+        },
+        "output": "oa_student_info.html"
+    },
+    {
         "template": "openassessmentblock/peer/oa_peer_assessment.html",
         "context": {
             "rubric_criteria": [
...
@@ -34,6 +34,14 @@ describe("OpenAssessment.StaffInfoView", function() {
            }).promise();
        };

+        var successPromise = $.Deferred(
+            function(defer) { defer.resolve(); }
+        ).promise();
+
+        this.cancelSubmission = function(submissionUUID) {
+            return successPromise;
+        };
+
        this.data = {};
    };
@@ -153,4 +161,49 @@ describe("OpenAssessment.StaffInfoView", function() {
        expect(server.rescheduleUnfinishedTasks).toHaveBeenCalled();
    });

+    it("updates the submission cancellation button when the comment changes", function() {
+        // Prevent the server's response from resolving,
+        // so we can see what happens before the view gets re-rendered.
+        spyOn(server, 'cancelSubmission').andCallFake(function() {
+            return $.Deferred(function(defer) {}).promise();
+        });
+
+        // Load the fixture
+        loadFixtures('oa_student_info.html');
+
+        var el = $("#openassessment-base").get(0);
+        var view = new OpenAssessment.StaffInfoView(el, server, baseView);
+
+        // Comment is blank --> cancel submission button disabled
+        view.comment('');
+        view.handleCommentChanged();
+        expect(view.cancelSubmissionEnabled()).toBe(false);
+
+        // Comment is whitespace --> cancel submission button disabled
+        view.comment(' \n \n ');
+        view.handleCommentChanged();
+        expect(view.cancelSubmissionEnabled()).toBe(false);
+
+        // Comment is not blank --> cancel submission button enabled
+        view.comment('Test comments');
+        view.handleCommentChanged();
+        expect(view.cancelSubmissionEnabled()).toBe(true);
+    });
+
+    it("submits the cancel submission comments to the server", function() {
+        spyOn(server, 'cancelSubmission').andCallThrough();
+
+        // Load the fixture
+        loadFixtures('oa_student_info.html');
+        var el = $("#openassessment-base").get(0);
+        var view = new OpenAssessment.StaffInfoView(el, server, baseView);
+
+        view.comment('Test comments');
+        view.cancelSubmission('Bob');
+
+        expect(server.cancelSubmission).toHaveBeenCalledWith('Bob', 'Test comments');
+    });
});
@@ -130,6 +130,26 @@ describe("OpenAssessment.Server", function() {
        });
    });

+    it("sends a submission cancellation request to the XBlock", function() {
+        stubAjax(true, {success: true, msg: 'test message'});
+
+        var submissionUUID = 'Bob';
+        var comments = 'Test comments';
+        var success = false;
+        server.cancelSubmission(submissionUUID, comments).done(
+            function() {
+                success = true;
+            }
+        );
+
+        expect(success).toBe(true);
+        expect($.ajax).toHaveBeenCalledWith({
+            url: '/cancel_submission',
+            type: "POST",
+            data: JSON.stringify({submission_uuid: submissionUUID, comments: comments})
+        });
+    });
+
    it("saves a response submission", function() {
        stubAjax(true, {'success': true, 'msg': ''});
        var success = false;
...
@@ -48,9 +48,8 @@ OpenAssessment.BaseView.prototype = {
    Args:
        parentSel (JQuery selector): CSS selector for the container element.
    **/
-    setUpCollapseExpand: function(parentSel) {
-        parentSel.find('.ui-toggle-visibility__control').click(
-            function(eventData) {
+    setUpCollapseExpand: function (parentSel) {
+        parentSel.on('click', '.ui-toggle-visibility__control', function (eventData) {
            var sel = $(eventData.target).closest('.ui-toggle-visibility');
            sel.toggleClass('is--collapsed');
        }
...
@@ -53,10 +53,23 @@ OpenAssessment.StaffInfoView.prototype = {
            function(html) {
                // Load the HTML and install event handlers
                $('#openassessment__student-info', view.element).replaceWith(html);
+
+                // Install a click handler for the cancel submission ("Remove submission") button.
+                var selCancelSub = $('#openassessment__staff-info__cancel__submission', this.element);
+                selCancelSub.on('click', '#submit_cancel_submission', function(eventObject) {
+                    eventObject.preventDefault();
+                    view.cancelSubmission($(this).data('submission-uuid'));
+                });
+
+                // Install a change handler for the comments textarea
+                // (to enable the cancel submission button).
+                var handleChange = function(eventData) { view.handleCommentChanged(); };
+                selCancelSub.find('#staff-info__cancel-submission__comments').on('change keyup drop paste', handleChange);
            }
        ).fail(function(errMsg) {
            view.showLoadError('student_info');
        });
    },
    /**
@@ -64,6 +77,7 @@ OpenAssessment.StaffInfoView.prototype = {
    **/
    installHandlers: function() {
        var sel = $('#openassessment__staff-info', this.element);
+        var selStudentInfo = $('#openassessment__student-info', this.element);
        var view = this;

        if (sel.length <= 0) {
@@ -71,6 +85,7 @@
        }

        this.baseView.setUpCollapseExpand(sel, function() {});
+        this.baseView.setUpCollapseExpand(selStudentInfo, function() {});

        // Install key handler for student id field
        sel.find('#openassessment_student_info_form').submit(

@@ -111,7 +126,6 @@ OpenAssessment.StaffInfoView.prototype = {
    **/
    scheduleTraining: function() {
-        var view = this;
        this.server.scheduleTraining().done(
            function(msg) {
                $('#schedule_training_message', this.element).text(msg)
@@ -136,5 +150,78 @@
        ).fail(function(errMsg) {
            $('#reschedule_unfinished_tasks_message', this.element).text(errMsg)
        });
+    },
+
+    /**
+    Remove the submission from the peer grading pool.
+    **/
+    cancelSubmission: function(submissionUUID) {
+        // Immediately disable the submit button to prevent multiple submissions.
+        this.cancelSubmissionEnabled(false);
+        var view = this;
+        var sel = $('#openassessment__student-info', this.element);
+        var comments = sel.find('#staff-info__cancel-submission__comments').val();
+        this.server.cancelSubmission(submissionUUID, comments).done(
+            function(msg) {
+                $('.cancel-submission-error').html('');
+                $('#openassessment__staff-info__cancel__submission', view.element).html(msg);
+            }
+        ).fail(function(errMsg) {
+            $('.cancel-submission-error').html(errMsg);
+        });
+    },
+
+    /**
+    Enable/disable the cancel submission button, or check whether it is enabled.
+
+    Args:
+        enabled (bool): If specified, set the state of the button.
+
+    Returns:
+        bool: Whether the button is enabled.
+
+    Examples:
+        >> view.cancelSubmissionEnabled(true);  // enable the button
+        >> view.cancelSubmissionEnabled();      // check whether the button is enabled
+        >> true
+    **/
+    cancelSubmissionEnabled: function(enabled) {
+        var sel = $('#submit_cancel_submission', this.element);
+        if (typeof enabled === 'undefined') {
+            return !sel.hasClass('is--disabled');
+        } else {
+            sel.toggleClass('is--disabled', !enabled);
+        }
+    },
+
+    /**
+    Set or retrieve the comment text (the reason for removing the submission).
+
+    Args:
+        text (string): If specified, the comment text to set.
+
+    Returns:
+        string: The current comment text (when called with no arguments).
+    **/
+    comment: function(text) {
+        var sel = $('#staff-info__cancel-submission__comments', this.element);
+        if (typeof text === 'undefined') {
+            return sel.val();
+        } else {
+            sel.val(text);
+        }
+    },
+
+    /**
+    Enable/disable the cancel submission button based on whether
+    the user has entered a comment.
+    **/
+    handleCommentChanged: function() {
+        // Enable the cancel submission button only for non-blank comments.
+        var hasComment = ($.trim(this.comment()) !== '');
+        this.cancelSubmissionEnabled(hasComment);
    }
};
@@ -549,6 +549,34 @@ if (typeof OpenAssessment.Server == "undefined" || !OpenAssessment.Server) {
                    defer.rejectWith(this, [gettext('Could not retrieve download url.')]);
                });
            }).promise();
+        },
+
+        /**
+        Cancel (remove) a submission from the peer grading pool.
+
+        Args:
+            submissionUUID: The UUID of the submission to be removed from the pool.
+            comments: The reason for removing the submission.
+        **/
+        cancelSubmission: function(submissionUUID, comments) {
+            var url = this.url('cancel_submission');
+            var payload = JSON.stringify({
+                submission_uuid: submissionUUID,
+                comments: comments
+            });
+            return $.Deferred(function(defer) {
+                $.ajax({ type: "POST", url: url, data: payload }).done(
+                    function(data) {
+                        if (data.success) {
+                            defer.resolveWith(this, [data.msg]);
+                        }
+                        else {
+                            defer.rejectWith(this, [data.msg]);
+                        }
+                    }
+                ).fail(function(data) {
+                    defer.rejectWith(this, [gettext('The submission could not be removed from the grading pool.')]);
+                });
+            }).promise();
        }
    };
}
@@ -133,4 +133,19 @@
            }
        }

+        // UI cancel submission.
+        .staff-info__cancel-submission__content {
+            textarea {
+                width: 100%;
+                height: 100px;
+                text-align: left;
+            }
+
+            ul.list--actions {
+                .action--submit {
+                    margin: 10px 0px;
+                }
+            }
+        }
    }
}
@@ -224,6 +224,31 @@ class TestCourseStaff(XBlockHandlerTestCase):
         self.assertEquals([], context['peer_assessments'])
         self.assertEquals("openassessmentblock/staff_debug/student_info.html", path)

+    @scenario('data/basic_scenario.xml', user_id='Bob')
+    def test_staff_debug_student_info_with_cancelled_submission(self, xblock):
+        # Simulate that we are course staff
+        xblock.xmodule_runtime = self._create_mock_runtime(
+            xblock.scope_ids.usage_id, True, False, "Bob"
+        )
+
+        bob_item = STUDENT_ITEM.copy()
+        bob_item["item_id"] = xblock.scope_ids.usage_id
+
+        # Create a submission for Bob, and corresponding workflow.
+        submission = sub_api.create_submission(bob_item, {'text': "Bob Answer"})
+        peer_api.on_start(submission["uuid"])
+        workflow_api.create_workflow(submission["uuid"], ['self'])
+
+        peer_api.cancel_submission_peer_workflow(
+            submission_uuid=submission["uuid"],
+            comments="vulgar language",
+            cancelled_by_id=bob_item['student_id']
+        )
+
+        path, context = xblock.get_student_info_path_and_context("Bob")
+        self.assertEquals("Bob Answer", context['submission']['answer']['text'])
+        self.assertIsNotNone(context['submission_cancellation'])
+        self.assertEquals("openassessmentblock/staff_debug/student_info.html", path)
+
     @scenario('data/self_only_scenario.xml', user_id='Bob')
     def test_staff_debug_student_info_image_submission(self, xblock):
         # Simulate that we are course staff
@@ -491,6 +516,50 @@ class TestCourseStaff(XBlockHandlerTestCase):
         __, context = xblock.get_staff_path_and_context()
         self.assertNotIn('classifierset', context)

+    @scenario('data/basic_scenario.xml', user_id='Bob')
+    def test_cancel_submission_without_reason(self, xblock):
+        # If we're not course staff, we shouldn't be allowed to cancel a submission.
+        xblock.xmodule_runtime = self._create_mock_runtime(
+            xblock.scope_ids.usage_id, False, False, "Bob"
+        )
+        resp = self.request(xblock, 'cancel_submission', json.dumps({}))
+        self.assertIn("you do not have permission", resp.decode('utf-8').lower())
+
+        # If we ARE course staff, the handler is reachable, but cancelling
+        # without a reason should return a validation error.
+        xblock.xmodule_runtime.user_is_staff = True
+        resp = self.request(xblock, 'cancel_submission', json.dumps({}), response_format='json')
+        self.assertIn("Please enter a valid reason", resp['msg'])
+        self.assertEqual(False, resp['success'])
+
+    @scenario('data/basic_scenario.xml', user_id='Bob')
+    def test_cancel_submission_full_flow(self, xblock):
+        # Simulate that we are course staff
+        xblock.xmodule_runtime = self._create_mock_runtime(
+            xblock.scope_ids.usage_id, True, False, "Bob"
+        )
+
+        bob_item = STUDENT_ITEM.copy()
+        bob_item["item_id"] = xblock.scope_ids.usage_id
+
+        # Create a submission for Bob, and corresponding workflow.
+        submission = sub_api.create_submission(bob_item, {'text': "Bob Answer"})
+        peer_api.on_start(submission["uuid"])
+        workflow_api.create_workflow(submission["uuid"], ['peer'])
+
+        # An unknown submission UUID should produce a workflow-not-found error.
+        incorrect_submission_uuid = 'abc'
+        params = {"submission_uuid": incorrect_submission_uuid, "comments": "vulgar language."}
+        resp = self.request(xblock, 'cancel_submission', json.dumps(params), response_format='json')
+        self.assertIn("Error finding workflow", resp['msg'])
+        self.assertEqual(False, resp['success'])
+
+        # Verify that the submission can be removed successfully.
+        params = {"submission_uuid": submission["uuid"], "comments": "vulgar language."}
+        resp = self.request(xblock, 'cancel_submission', json.dumps(params), response_format='json')
+        self.assertIn("Student submission was removed from the ", resp['msg'])
+        self.assertEqual(True, resp['success'])
+
     def _create_mock_runtime(self, item_id, is_staff, is_admin, anonymous_user_id):
         mock_runtime = Mock(
             course_id='test_course',
...