Commit aa4897f2 by muzaffaryousaf

Refactor peer workflow cancellation: rename the PeerWorkflowCancellation related_name to 'cancellations', return the latest cancellation for a submission, and tidy related imports, tests, styles, and UI strings.

TNL-900
parent 1ae9872d
......@@ -11,13 +11,13 @@ from dogapi import dog_stats_api
from openassessment.assessment.models import (
Assessment, AssessmentFeedback, AssessmentPart,
InvalidRubricSelection, PeerWorkflow, PeerWorkflowItem,
PeerWorkflowCancellation)
InvalidRubricSelection, PeerWorkflow, PeerWorkflowItem, PeerWorkflowCancellation
)
from openassessment.assessment.serializers import (
AssessmentFeedbackSerializer, RubricSerializer,
full_assessment_dict, rubric_from_dict, serialize_assessments,
InvalidRubric,
PeerWorkflowCancellationSerializer)
InvalidRubric, PeerWorkflowCancellationSerializer
)
from openassessment.assessment.errors import (
PeerAssessmentRequestError, PeerAssessmentWorkflowError, PeerAssessmentInternalError
)
......@@ -951,11 +951,11 @@ def cancel_submission_peer_workflow(submission_uuid, comments, cancelled_by_id):
"""
Add an entry in PeerWorkflowCancellation table for a PeerWorkflow.
PeerWorkflow which has been cancelled are no longer included in the
A PeerWorkflow that has been cancelled is no longer included in the
peer grading pool.
Args:
submission_uuid (str): The UUID of the peer workflow.
submission_uuid (str): The UUID of the peer workflow's submission.
comments: The reason for cancellation.
cancelled_by_id: The ID of the user who cancelled the peer workflow.
"""
......@@ -980,19 +980,18 @@ def get_submission_cancellation(submission_uuid):
Get cancellation information for a submission's peer workflow.
Args:
submission_uuid (str): The UUID of the peer workflow.
submission_uuid (str): The UUID of the peer workflow's submission.
"""
try:
workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
workflow_cancellation = workflow.cancellation.get()
workflow_cancellation = workflow.cancellations.latest('created_at')
return PeerWorkflowCancellationSerializer(workflow_cancellation).data
except (
PeerWorkflow.DoesNotExist,
PeerWorkflowCancellation.DoesNotExist,
PeerWorkflowCancellation.MultipleObjectsReturned
):
return None
except DatabaseError:
error_message = u"Error finding peer workflow cancellation for submission UUID {}.".format(submission_uuid)
logger.exception(error_message)
raise PeerAssessmentInternalError(error_message)
\ No newline at end of file
raise PeerAssessmentInternalError(error_message)
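The behavioural point of this hunk is the switch from `workflow.cancellation.get()` to `workflow.cancellations.latest('created_at')`: `get()` raises `MultipleObjectsReturned` as soon as a workflow has a second cancellation row, whereas `latest()` returns the newest one. A stripped-down sketch of the new lookup, using only names that appear in the diff:

```python
# Sketch of the lookup performed by get_submission_cancellation() after this
# change; model and field names come from the diff, error handling is trimmed.
from openassessment.assessment.models import PeerWorkflow, PeerWorkflowCancellation


def latest_cancellation(submission_uuid):
    """Return the most recent cancellation for a submission, or None."""
    try:
        workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
        # related_name='cancellations' (renamed later in this commit);
        # latest() orders by created_at and returns a single row.
        return workflow.cancellations.latest('created_at')
    except (PeerWorkflow.DoesNotExist, PeerWorkflowCancellation.DoesNotExist):
        return None
```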
......@@ -227,7 +227,7 @@ class PeerWorkflow(models.Model):
completed_sub_uuids = []
# First, remove all completed items.
for item in items:
if item.assessment is not None or item.author.cancellation.exists():
if item.assessment is not None or item.author.cancellations.exists():
completed_sub_uuids.append(item.submission_uuid)
else:
valid_open_items.append(item)
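The rename also touches the open-item filter above: an item counts as closed once it has an assessment or once its author's workflow has any cancellation on record. The predicate restated on its own, assuming `item` is a `PeerWorkflowItem` as in the loop:

```python
def is_closed(item):
    """Assumes item is a PeerWorkflowItem, as in the loop above."""
    # Closed if already assessed, or if the author's workflow has been
    # cancelled (reverse accessor renamed to 'cancellations' in this commit).
    return item.assessment is not None or item.author.cancellations.exists()
```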
......@@ -481,7 +481,7 @@ class PeerWorkflowCancellation(models.Model):
It is created when a staff member requests removal of a submission
from the peer grading pool.
"""
workflow = models.ForeignKey(PeerWorkflow, related_name='cancellation')
workflow = models.ForeignKey(PeerWorkflow, related_name='cancellations')
comments = models.TextField(max_length=10000)
cancelled_by_id = models.CharField(max_length=40, db_index=True)
......@@ -515,9 +515,9 @@ class PeerWorkflowCancellation(models.Model):
PeerWorkflowCancellation
"""
workflow_params = {
cancellation_params = {
'workflow': workflow,
'comments': comments,
'cancelled_by_id': cancelled_by_id,
}
return cls.objects.create(**workflow_params)
return cls.objects.create(**cancellation_params)
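Since `related_name` changes from `cancellation` to `cancellations`, callers now reach the cancellation rows through the plural accessor. A short illustrative sketch of the two access patterns this commit relies on; the `workflow` argument is assumed to be an existing `PeerWorkflow` instance:

```python
# Illustrative only: demonstrates the renamed reverse accessor and the
# create() classmethod shown above. All argument values are placeholders.
from openassessment.assessment.models import PeerWorkflowCancellation


def cancel_and_confirm(workflow, reason, staff_id):
    """Record a cancellation and confirm it is visible via the new accessor."""
    PeerWorkflowCancellation.create(
        workflow=workflow, comments=reason, cancelled_by_id=staff_id
    )
    # Was `workflow.cancellation` before this commit.
    return workflow.cancellations.exists()
```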
......@@ -5,8 +5,8 @@ from rest_framework import serializers
from .base import AssessmentSerializer
from openassessment.assessment.models import (
AssessmentFeedback, AssessmentFeedbackOption,
PeerWorkflow, PeerWorkflowItem,
PeerWorkflowCancellation)
PeerWorkflow, PeerWorkflowCancellation, PeerWorkflowItem
)
class AssessmentFeedbackOptionSerializer(serializers.ModelSerializer):
......
......@@ -884,7 +884,9 @@ class TestPeerApi(CacheResetTest):
# Cancel Xander's submission.
xander_workflow = PeerWorkflow.get_by_submission_uuid(xander_answer['uuid'])
PeerWorkflowCancellation.create(workflow=xander_workflow, comments='test comments', cancelled_by_id=_['student_id'])
PeerWorkflowCancellation.create(
workflow=xander_workflow, comments='Cancellation reason', cancelled_by_id=_['student_id']
)
# Check to see if Buffy is actively reviewing Xander's submission.
# She isn't able to get the submission to assess.
......@@ -914,8 +916,9 @@ class TestPeerApi(CacheResetTest):
# Cancel Xander's submission.
xander_workflow = PeerWorkflow.get_by_submission_uuid(xander_answer['uuid'])
PeerWorkflowCancellation.create(workflow=xander_workflow, comments='test comments',
cancelled_by_id=_['student_id'])
PeerWorkflowCancellation.create(
workflow=xander_workflow, comments='Cancellation reason', cancelled_by_id=_['student_id']
)
# Check to see if Buffy is actively reviewing Xander's submission.
# She isn't able to get the submission uuid to assess.
......
......@@ -16,6 +16,7 @@
Student submission removed by {{ removed_by_username }} on {{ removed_datetime }}
{% endblocktrans %}
<br>
<!-- Comments: Reason for Cancellation -->
{% blocktrans with comments=submission_cancellation.comments %}
Comments: {{ comments }}
{% endblocktrans %}
......@@ -36,7 +37,7 @@
{% if not submission_cancellation %}
<div id="openassessment__staff-info__cancel__submission"
class="openassessment__staff-info__cancel__submission wrapper--staff-info wrapper--ui-staff wrapper--ui--collapse">
class="openassessment__staff-info__cancel__submission wrapper--ui-staff wrapper--ui--collapse">
<div class="ui-staff ui-toggle-visibility is--collapsed">
<h2 class="staff-info__title ui-staff__title ui-toggle-visibility__control">
<i class="ico icon-caret-right"></i>
......
......@@ -339,7 +339,7 @@ class StaffInfoMixin(object):
Args:
data (dict): Data contains two attributes: submission_uuid and
comments. submission_uuid is the id of the submission which is to be
removed from the grading pool.comments is the reason given
removed from the grading pool; comments is the reason given
by the user.
suffix (not used)
......
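As the docstring says, the handler reads exactly two keys from the request body. A hedged example of the JSON payload, modelled on the tests at the end of this commit; both values are placeholders:

```python
import json

# Example request body for the cancel_submission handler described above;
# the key names come from the docstring and tests, the values are invented.
params = {
    "submission_uuid": "abc",                # UUID of the submission to cancel
    "comments": "Inappropriate language.",   # reason shown in the staff debug UI
}
payload = json.dumps(params)
```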
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -185,7 +185,7 @@ describe("OpenAssessment.StaffInfoView", function() {
expect(view.cancelSubmissionEnabled()).toBe(false);
// Response is not blank --> cancel submission button enabled
view.comment('Test comments');
view.comment('Cancellation reason.');
view.handleCommentChanged();
expect(view.cancelSubmissionEnabled()).toBe(true);
});
......@@ -199,10 +199,10 @@ describe("OpenAssessment.StaffInfoView", function() {
var el = $("#openassessment-base").get(0);
var view = new OpenAssessment.StaffInfoView(el, server, baseView);
view.comment('Test comments');
view.comment('Cancellation reason.');
view.cancelSubmission('Bob');
expect(server.cancelSubmission).toHaveBeenCalledWith('Bob', 'Test comments');
expect(server.cancelSubmission).toHaveBeenCalledWith('Bob', 'Cancellation reason.');
});
......
......@@ -134,7 +134,7 @@ describe("OpenAssessment.Server", function() {
stubAjax(true, {success:true, msg:'test message'});
var submissionUUID = 'Bob';
var comments = 'Test comments';
var comments = 'Cancellation reason.';
var success = false;
server.cancelSubmission(submissionUUID, comments).done(
function() {
......
......@@ -126,6 +126,7 @@ OpenAssessment.StaffInfoView.prototype = {
**/
scheduleTraining: function() {
var view = this;
this.server.scheduleTraining().done(
function(msg) {
$('#schedule_training_message', this.element).text(msg)
......@@ -156,7 +157,7 @@ OpenAssessment.StaffInfoView.prototype = {
Upon request, cancel the submission from the grading pool.
**/
cancelSubmission: function(submissionUUID) {
// Immediately disable the submit button to prevent multiple submission
// Immediately disable the button to prevent multiple requests.
this.cancelSubmissionEnabled(false);
var view = this;
var sel = $('#openassessment__student-info', this.element);
......
......@@ -574,7 +574,7 @@ if (typeof OpenAssessment.Server == "undefined" || !OpenAssessment.Server) {
}
}
).fail(function(data) {
defer.rejectWith(this, [gettext('The submission could not be removed from grading pool.')]);
defer.rejectWith(this, [gettext('The submission could not be removed from the grading pool.')]);
});
}).promise();
}
......
......@@ -133,19 +133,26 @@
}
}
// UI cancel submission.
}
// UI cancel submission.
.openassessment__staff-info__cancel__submission {
margin-bottom: ($baseline-v) !important;
.staff-info__cancel-submission__content {
textarea {
width: 100%;
height: 100px;
height: ($baseline-v*5);
text-align: left;
}
ul.list--actions {
.action--submit {
margin: 10px 0px;
margin: ($baseline-v/2) 0px;
}
}
}
}
......@@ -198,7 +198,7 @@ class TestCourseStaff(XBlockHandlerTestCase):
@scenario('data/self_only_scenario.xml', user_id='Bob')
def test_staff_debug_student_info_self_only(self, xblock):
# Simulate that we are course staff
xblock.xmodule_runtime = self._create_mock_runtime(
xblock.xmodule_runtime = self._create_mock_runtime(
xblock.scope_ids.usage_id, True, False, "Bob"
)
......@@ -240,7 +240,7 @@ class TestCourseStaff(XBlockHandlerTestCase):
peer_api.cancel_submission_peer_workflow(
submission_uuid=submission["uuid"],
comments="vulgar language",
comments="Inappropriate language",
cancelled_by_id=bob_item['student_id']
)
......@@ -252,7 +252,7 @@ class TestCourseStaff(XBlockHandlerTestCase):
@scenario('data/self_only_scenario.xml', user_id='Bob')
def test_staff_debug_student_info_image_submission(self, xblock):
# Simulate that we are course staff
xblock.xmodule_runtime = self._create_mock_runtime(
xblock.xmodule_runtime = self._create_mock_runtime(
xblock.scope_ids.usage_id, True, False, "Bob"
)
......@@ -548,14 +548,14 @@ class TestCourseStaff(XBlockHandlerTestCase):
workflow_api.create_workflow(submission["uuid"], ['peer'])
incorrect_submission_uuid = 'abc'
params = {"submission_uuid": incorrect_submission_uuid, "comments": "vulgar language."}
params = {"submission_uuid": incorrect_submission_uuid, "comments": "Inappropriate language."}
# Raise flow not found exception.
resp = self.request(xblock, 'cancel_submission', json.dumps(params), response_format='json')
self.assertIn("Error finding workflow", resp['msg'])
self.assertEqual(False, resp['success'])
# Verify that we can render without error
params = {"submission_uuid": submission["uuid"], "comments": "vulgar language."}
params = {"submission_uuid": submission["uuid"], "comments": "Inappropriate language."}
resp = self.request(xblock, 'cancel_submission', json.dumps(params), response_format='json')
self.assertIn("Student submission was removed from the ", resp['msg'])
self.assertEqual(True, resp['success'])
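Per the assertions above, the handler responds with a `success` flag and a human-readable `msg`. A minimal sketch of how a caller might branch on that response; only the two key names are taken from the tests:

```python
def describe_cancel_response(resp):
    """Summarise a cancel_submission JSON response (illustrative only)."""
    if resp.get('success'):
        # e.g. a message starting with "Student submission was removed from the ..."
        return u"Cancelled: {}".format(resp.get('msg', ''))
    # e.g. "Error finding workflow" when the submission UUID is unknown.
    return u"Cancellation failed: {}".format(resp.get('msg', ''))
```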
......