Commit dfd2a367 by muhammad-ammar

Handle cancelled peer-assessment submissions: force a zero score, render the waiting template, and skip cancelled workflows when selecting submissions to assess

parent e8c4e31b
...@@ -426,9 +426,19 @@ def get_assessment_median_scores(submission_uuid): ...@@ -426,9 +426,19 @@ def get_assessment_median_scores(submission_uuid):
try: try:
workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid) workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
items = workflow.graded_by.filter(scored=True) items = workflow.graded_by.filter(scored=True)
assessments = [item.assessment for item in items] assessments = [item.assessment for item in items]
scores = Assessment.scores_by_criterion(assessments) scores = Assessment.scores_by_criterion(assessments)
return Assessment.get_median_score_dict(scores) score_dict = Assessment.get_median_score_dict(scores)
# NOTE(review): Is it acceptable to give a zero score to a cancelled submission?
# An alternative would be to skip the calculation above and return a fake score
# dict, but that might have side effects — confirm before changing.
if workflow.is_cancelled:
for key in score_dict:
score_dict[key] = 0
return score_dict
except DatabaseError: except DatabaseError:
error_message = ( error_message = (
u"Error getting assessment median scores for submission {uuid}" u"Error getting assessment median scores for submission {uuid}"
...@@ -661,13 +671,16 @@ def get_submission_to_assess(submission_uuid, graded_by): ...@@ -661,13 +671,16 @@ def get_submission_to_assess(submission_uuid, graded_by):
""" """
workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid) workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
if workflow.is_cancelled:
return None
if not workflow: if not workflow:
raise PeerAssessmentWorkflowError( raise PeerAssessmentWorkflowError(
u"A Peer Assessment Workflow does not exist for the student " u"A Peer Assessment Workflow does not exist for the student "
u"with submission UUID {}".format(submission_uuid) u"with submission UUID {}".format(submission_uuid)
) )
if workflow.is_cancelled:
return None
open_item = workflow.find_active_assessments() open_item = workflow.find_active_assessments()
peer_submission_uuid = open_item.submission_uuid if open_item else None peer_submission_uuid = open_item.submission_uuid if open_item else None
# If there is an active assessment for this user, get that submission, # If there is an active assessment for this user, get that submission,
...@@ -964,15 +977,15 @@ def cancel_submission_peer_workflow(submission_uuid, comments, cancelled_by_id): ...@@ -964,15 +977,15 @@ def cancel_submission_peer_workflow(submission_uuid, comments, cancelled_by_id):
try: try:
workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid) workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
assessment = workflow.graded_by.filter( items = workflow.graded_by.filter(
assessment__submission_uuid=submission_uuid, assessment__score_type=PEER_TYPE assessment__submission_uuid=submission_uuid, assessment__score_type=PEER_TYPE
).order_by('-assessment') ).order_by('-assessment')
if assessment: if items:
sub_api.set_score( sub_api.set_score(
submission_uuid, submission_uuid,
0, 0,
assessment.points_possible items[0].assessment.points_possible
) )
return PeerWorkflowCancellation.create(workflow=workflow, comments=comments, cancelled_by_id=cancelled_by_id) return PeerWorkflowCancellation.create(workflow=workflow, comments=comments, cancelled_by_id=cancelled_by_id)
...@@ -1003,3 +1016,22 @@ def get_submission_cancellation(submission_uuid): ...@@ -1003,3 +1016,22 @@ def get_submission_cancellation(submission_uuid):
error_message = u"Error finding peer workflow cancellation for submission UUID {}.".format(submission_uuid) error_message = u"Error finding peer workflow cancellation for submission UUID {}.".format(submission_uuid)
logger.exception(error_message) logger.exception(error_message)
raise PeerAssessmentInternalError(error_message) raise PeerAssessmentInternalError(error_message)
def is_peer_workflow_submission_cancelled(submission_uuid):
    """
    Tell whether the peer workflow for a submission has been cancelled.

    Args:
        submission_uuid (str): The UUID of the peer workflow's submission.

    Returns:
        False when there is no submission or no matching workflow;
        otherwise the workflow's cancellation status.
    """
    # Users must submit a response before they can peer-assess,
    # so a missing UUID can never correspond to a cancelled workflow.
    if submission_uuid is None:
        return False
    workflow = PeerWorkflow.get_by_submission_uuid(submission_uuid)
    # Without a workflow there is nothing that could have been cancelled.
    return workflow.is_cancelled if workflow else False
...@@ -934,6 +934,62 @@ class TestPeerApi(CacheResetTest): ...@@ -934,6 +934,62 @@ class TestPeerApi(CacheResetTest):
REQUIRED_GRADED_BY, REQUIRED_GRADED_BY,
) )
def test_cancelled_submission_peerworkflow_status(self):
    """
    Cancelling a submission's peer workflow marks that workflow cancelled.
    """
    submission, student = self._create_student_and_submission("Buffy", "Buffy's answer")

    # The submission must have an associated peer workflow to cancel.
    workflow = PeerWorkflow.get_by_submission_uuid(submission['uuid'])
    self.assertIsNotNone(workflow)

    # Cancel the submission's workflow.
    PeerWorkflowCancellation.create(
        workflow=workflow, comments='Cancellation reason', cancelled_by_id=student['student_id']
    )

    # Re-fetch the workflow and confirm it now reports itself as cancelled.
    refreshed = PeerWorkflow.get_by_submission_uuid(submission["uuid"])
    self.assertTrue(refreshed.is_cancelled)
def test_cancelled_submission_peerworkflow_score(self):
    """
    A cancelled submission receives a score of zero points earned,
    even though it was fully peer-assessed beforehand.
    """
    tim_sub, tim = self._create_student_and_submission("Tim", "Tim's answer")
    bob_sub, bob = self._create_student_and_submission("Bob", "Bob's answer")

    # Each student must pull a submission to assess before creating an
    # assessment; the returned submission itself is not needed here, so the
    # previous unused `sub` binding has been dropped.
    peer_api.get_submission_to_assess(tim_sub['uuid'], 1)
    peer_api.create_assessment(
        tim_sub["uuid"], tim["student_id"],
        ASSESSMENT_DICT['options_selected'],
        ASSESSMENT_DICT['criterion_feedback'],
        ASSESSMENT_DICT['overall_feedback'],
        RUBRIC_DICT,
        1,
    )
    peer_api.get_submission_to_assess(bob_sub['uuid'], 1)
    peer_api.create_assessment(
        bob_sub["uuid"], bob["student_id"],
        ASSESSMENT_DICT['options_selected'],
        ASSESSMENT_DICT['criterion_feedback'],
        ASSESSMENT_DICT['overall_feedback'],
        RUBRIC_DICT,
        1,
    )
    requirements = {
        'must_grade': 1,
        'must_be_graded_by': 1
    }

    # Cancelling Bob's submission should force its earned score to zero.
    peer_api.cancel_submission_peer_workflow(
        submission_uuid=bob_sub["uuid"],
        comments="Inappropriate language",
        cancelled_by_id=bob['student_id']
    )
    score = peer_api.get_score(bob_sub["uuid"], requirements)
    self.assertEqual(score['points_earned'], 0)
def test_get_workflow_by_uuid(self): def test_get_workflow_by_uuid(self):
buffy_answer, _ = self._create_student_and_submission("Buffy", "Buffy's answer") buffy_answer, _ = self._create_student_and_submission("Buffy", "Buffy's answer")
self._create_student_and_submission("Xander", "Xander's answer") self._create_student_and_submission("Xander", "Xander's answer")
......
...@@ -214,9 +214,14 @@ class PeerAssessmentMixin(object): ...@@ -214,9 +214,14 @@ class PeerAssessmentMixin(object):
"Submit your assessment & move to response #{response_number}" "Submit your assessment & move to response #{response_number}"
).format(response_number=(count + 2)) ).format(response_number=(count + 2))
if peer_api.is_peer_workflow_submission_cancelled(self.submission_uuid):
path = 'openassessmentblock/peer/oa_peer_waiting.html'
# Sets the XBlock boolean to signal to Message that it WAS able to grab a submission
self.no_peers = True
# Once a student has completed a problem, it stays complete, # Once a student has completed a problem, it stays complete,
# so this condition needs to be first. # so this condition needs to be first.
if (workflow.get('status') == 'done' or finished) and not continue_grading: elif (workflow.get('status') == 'done' or finished) and not continue_grading:
path = "openassessmentblock/peer/oa_peer_complete.html" path = "openassessmentblock/peer/oa_peer_complete.html"
# Allow continued grading even if the problem due date has passed # Allow continued grading even if the problem due date has passed
......
...@@ -249,6 +249,33 @@ class TestCourseStaff(XBlockHandlerTestCase): ...@@ -249,6 +249,33 @@ class TestCourseStaff(XBlockHandlerTestCase):
self.assertIsNotNone(context['submission_cancellation']) self.assertIsNotNone(context['submission_cancellation'])
self.assertEquals("openassessmentblock/staff_debug/student_info.html", path) self.assertEquals("openassessmentblock/staff_debug/student_info.html", path)
@scenario('data/basic_scenario.xml', user_id='Bob')
def test_cancelled_submission_peer_aseseement_render_path(self, xblock):
    """
    A cancelled submission should render the peer "waiting" template
    (openassessmentblock/peer/oa_peer_waiting.html).
    """
    # Simulate that we are course staff.
    xblock.xmodule_runtime = self._create_mock_runtime(
        xblock.scope_ids.usage_id, True, False, "Bob"
    )
    student_item = STUDENT_ITEM.copy()
    student_item["item_id"] = xblock.scope_ids.usage_id

    # Create a submission for Bob along with its corresponding workflow.
    submission = sub_api.create_submission(student_item, {'text': "Bob Answer"})
    peer_api.on_start(submission["uuid"])
    workflow_api.create_workflow(submission["uuid"], ['self'])

    # Cancel the submission's peer workflow.
    peer_api.cancel_submission_peer_workflow(
        submission_uuid=submission["uuid"],
        comments="Inappropriate language",
        cancelled_by_id=student_item['student_id']
    )

    # Rendering the peer step for the cancelled submission should pick the
    # waiting template; the context is not needed for this assertion.
    xblock.submission_uuid = submission["uuid"]
    path, _ = xblock.peer_path_and_context(False)
    self.assertEquals("openassessmentblock/peer/oa_peer_waiting.html", path)
@scenario('data/self_only_scenario.xml', user_id='Bob') @scenario('data/self_only_scenario.xml', user_id='Bob')
def test_staff_debug_student_info_image_submission(self, xblock): def test_staff_debug_student_info_image_submission(self, xblock):
# Simulate that we are course staff # Simulate that we are course staff
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment