Commit 77666852 by Stephen Sanchez

Merge pull request #108 from edx/sanchez/fix-over-grading

Adding over-grading logic to kick in after the submission deadline
parents 801a1caa 835a0f85
......@@ -7,8 +7,8 @@
<span class="step__counter"></span>
<span class="wrapper--copy">
<span class="step__label">Your Response</span>
{% if formatted_due_datetime %}
<span class="step__deadline">due <span class="date">{{ formatted_due_datetime }}</span></span>
{% if submission_due %}
<span class="step__deadline">due <span class="date">{{ submission_due }}</span></span>
{% endif %}
</span>
</h2>
......
......@@ -145,10 +145,14 @@ class PeerAssessmentMixin(object):
return self.render_assessment(path, context_dict)
def get_peer_submission(self, student_item_dict, assessment):
submissions_open, __ = self.is_open(step="submission")
over_grading = not submissions_open
peer_submission = False
try:
peer_submission = peer_api.get_submission_to_assess(
student_item_dict, assessment["must_be_graded_by"]
student_item_dict,
assessment["must_be_graded_by"],
over_grading
)
except PeerAssessmentWorkflowError as err:
logger.exception(err)
......
<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<openassessment>
<openassessment submission_due="2014-03-11T18:20">
<title>
Global Poverty
</title>
......@@ -105,10 +105,10 @@
</rubric>
<assessments>
<assessment name="peer-assessment"
start="2014-12-20T19:00-7:00"
start="2014-03-11T10:00-18:10"
due="2014-12-21T22:22-7:00"
must_grade="1"
must_be_graded_by="1" />
must_grade="3"
must_be_graded_by="3" />
<assessment name="self-assessment" />
</assessments>
</openassessment>
import copy
import logging
import dateutil
from xblock.core import XBlock
from django.utils.translation import ugettext as _
from submissions import api
......@@ -186,9 +187,14 @@ class SubmissionMixin(object):
"""
workflow = self.get_workflow_info()
problem_open, date = self.is_open()
sub_due = None
if self.submission_due is not None:
submission_deadline = dateutil.parser.parse(self.submission_due)
sub_due = submission_deadline.strftime("%A, %B %d, %Y %X")
context = {
"saved_response": self.saved_response,
"save_status": self.save_status
"save_status": self.save_status,
"submission_due": sub_due,
}
if not workflow and not problem_open:
......
<openassessment submission_due="2014-03-05">
<title>Open Assessment Test</title>
<prompt>
Given the state of the world today, what do you think should be done to
combat poverty? Please answer in a short essay of 200-300 words.
</prompt>
<rubric>
<prompt>Read for conciseness, clarity of thought, and form.</prompt>
<criterion>
<name>𝓒𝓸𝓷𝓬𝓲𝓼𝓮</name>
<prompt>How concise is it?</prompt>
<option points="3">
<name>ﻉซƈﻉɭɭﻉกՇ</name>
<explanation>Extremely concise</explanation>
</option>
<option points="2">
<name>Ġööḋ</name>
<explanation>Concise</explanation>
</option>
<option points="1">
<name>ק๏๏г</name>
<explanation>Wordy</explanation>
</option>
</criterion>
<criterion>
<name>Form</name>
<prompt>How well-formed is it?</prompt>
<option points="3">
<name>Good</name>
<explanation>Good</explanation>
</option>
<option points="2">
<name>Fair</name>
<explanation>Fair</explanation>
</option>
<option points="1">
<name>Poor</name>
<explanation>Poor</explanation>
</option>
</criterion>
</rubric>
<assessments>
<assessment name="peer-assessment" must_grade="1" must_be_graded_by="1" start="2014-03-02" due="2014-03-10"/>
<assessment name="self-assessment" start="2014-03-08" due="2014-03-10"/>
</assessments>
</openassessment>
......@@ -46,6 +46,23 @@ class TestOpenAssessment(XBlockHandlerTestCase):
self.assertIsNotNone(grade_response)
self.assertTrue(grade_response.body.find("openassessment__grade"))
@scenario('data/dates_scenario.xml')
def test_load_student_view_with_dates(self, xblock):
    """OA XBlock returns some HTML to the user.

    View basic test for verifying we're returned some HTML about the
    Open Assessment XBlock, including the formatted submission due date.
    We don't want to match too heavily against the contents.
    """
    xblock_fragment = self.runtime.render(xblock, "student_view")
    # BUG FIX: str.find() returns -1 when the substring is absent, and -1
    # is truthy, so assertTrue(body.find(...)) could never fail. Use
    # assertIn so these checks actually verify the rendered content.
    self.assertIn("Openassessmentblock", xblock_fragment.body_html())

    # Validate Submission Rendering.
    submission_response = xblock.render_submission({})
    self.assertIsNotNone(submission_response)
    self.assertIn("openassessment__response", submission_response.body)
    # The scenario's submission_due date should render as a long-form
    # date (e.g. "Monday, April 01, 2014") in the response step header.
    self.assertIn("April", submission_response.body)
@scenario('data/basic_scenario.xml', user_id='Bob')
def test_default_fields(self, xblock):
......
......@@ -20,6 +20,58 @@ class TestPeerAssessment(XBlockHandlerTestCase):
SUBMISSION = u'ՇﻉรՇ รપ๒๓ٱรรٱѻก'
@scenario('data/over_grade_scenario.xml', user_id='Bob')
def test_load_peer_student_view_with_dates(self, xblock):
    """Verify over-grading after the submission deadline.

    Two students (Sally and Hal) submit and fully assess each other,
    satisfying must_be_graded_by for both submissions. Bob then submits
    after the scenario's submission deadline; because submissions are
    closed, the peer step should fall back to over-grading and serve Bob
    an already-fully-graded peer response (Sally's) rather than nothing.
    """
    student_item = xblock.get_student_item_dict()

    # Sally submits a response.
    sally_student_item = copy.deepcopy(student_item)
    sally_student_item['student_id'] = "Sally"
    sally_submission = xblock.create_submission(sally_student_item, u"Sally's answer")
    # Advance Sally's workflow so her submission enters the peer queue.
    xblock.get_workflow_info()

    # Hal comes and submits a response.
    hal_student_item = copy.deepcopy(student_item)
    hal_student_item['student_id'] = "Hal"
    hal_submission = xblock.create_submission(hal_student_item, u"Hal's answer")
    xblock.get_workflow_info()

    # Now Hal will assess Sally.
    assessment = copy.deepcopy(self.ASSESSMENT)
    sub = peer_api.get_submission_to_assess(hal_student_item, 1)
    assessment['submission_uuid'] = sub['uuid']
    peer_api.create_assessment(
        sub['uuid'],
        hal_student_item['student_id'],
        assessment,
        {'criteria': xblock.rubric_criteria}
    )

    # Now Sally will assess Hal.
    assessment = copy.deepcopy(self.ASSESSMENT)
    sub = peer_api.get_submission_to_assess(sally_student_item, 1)
    assessment['submission_uuid'] = sub['uuid']
    peer_api.create_assessment(
        sub['uuid'],
        sally_student_item['student_id'],
        assessment,
        {'criteria': xblock.rubric_criteria}
    )

    # If Over Grading is on, this should now return Sally's response to Bob.
    submission = xblock.create_submission(student_item, u"Bob's answer")
    workflow_info = xblock.get_workflow_info()
    # Bob is past the submission deadline but still in the peer step.
    self.assertEqual(workflow_info["status"], u'peer')

    # Validate Submission Rendering.
    peer_response = xblock.render_peer_assessment({})
    self.assertIsNotNone(peer_response)
    # Bob must never be shown his own answer to assess.
    self.assertNotIn(submission["answer"].encode('utf-8'), peer_response.body)

    # Validate Peer Rendering: over-grading served Sally's response.
    self.assertIn("Sally".encode('utf-8'), peer_response.body)
@scenario('data/peer_assessment_scenario.xml', user_id='Bob')
def test_assess_handler(self, xblock):
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment