Commit 8cfc38ec by Eric Fischer

Merge pull request #837 from edx/efischer/response_submitted

Fixing up oa_response
parents f77b6670 5f40fffd
@@ -22,17 +22,18 @@
<div class="step__message message message--complete">
<h3 class="message__title">{% trans "Your Response Has Been Submitted" %}</h3>
<div class="message__content">
{% if has_peer and has_self %}
{% trans "You will receive your grade after all steps are complete and your response is fully assessed." %}
{% if peer_incomplete and self_incomplete %}
{% blocktrans with peer_start_tag='<a data-behavior="ui-scroll" href="#openassessment__peer-assessment">'|safe self_start_tag='<a data-behavior="ui-scroll" href="#openassessment__self-assessment">'|safe end_tag='</a>'|safe %}
You'll receive your grade after some of your peers have assessed your response and you complete the {{ peer_start_tag }}peer assessment{{ end_tag }} and {{ self_start_tag }}self assessment{{ end_tag }} steps.
You still need to complete the {{ peer_start_tag }}peer assessment{{ end_tag }} and {{ self_start_tag }}self assessment{{ end_tag }} steps.
{% endblocktrans %}
{% elif has_peer %}
{% elif peer_incomplete %}
{% blocktrans with start_tag='<a data-behavior="ui-scroll" href="#openassessment__peer-assessment">'|safe end_tag='</a>'|safe %}
You'll receive your grade after some of your peers have assessed your response and you complete the {{ start_tag }}peer assessment{{ end_tag }} step.
You still need to complete the {{ start_tag }}peer assessment{{ end_tag }} step.
{% endblocktrans %}
{% elif has_self %}
{% elif self_incomplete %}
{% blocktrans with start_tag='<a data-behavior="ui-scroll" href="#openassessment__self-assessment">'|safe end_tag='</a>'|safe %}
You'll receive your grade after you complete the {{ start_tag }}self assessment{{ end_tag }} step.
You still need to complete the {{ start_tag }}self assessment{{ end_tag }} step.
{% endblocktrans %}
{% endif %}
</div>
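For readers skimming the template hunk above: the branching now keys off which steps remain incomplete rather than which steps the problem contains. A minimal Python sketch of that selection logic follows (illustration only, not code in this PR; the hypothetical remaining_steps_message helper omits the scroll-link markup):

def remaining_steps_message(peer_incomplete, self_incomplete):
    # Mirrors the {% if %}/{% elif %} chain in oa_response_submitted.html above.
    if peer_incomplete and self_incomplete:
        return "You still need to complete the peer assessment and self assessment steps."
    elif peer_incomplete:
        return "You still need to complete the peer assessment step."
    elif self_incomplete:
        return "You still need to complete the self assessment step."
    # Both steps complete (or absent): only the generic grade-timing message is shown.
    return ""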
@@ -388,8 +388,6 @@ class SubmissionMixin(object):
context['file_upload_type'] = self.file_upload_type
context['allow_latex'] = self.allow_latex
context['has_peer'] = 'peer-assessment' in self.assessment_steps
context['has_self'] = 'self-assessment' in self.assessment_steps
if self.file_upload_type:
context['file_url'] = self._get_download_url()
@@ -437,6 +435,8 @@ class SubmissionMixin(object):
student_submission = self.get_user_submission(
workflow["submission_uuid"]
)
context["peer_incomplete"] = "peer" in workflow["status_details"] and not workflow["status_details"]["peer"]["complete"]
context["self_incomplete"] = "self" in workflow["status_details"] and not workflow["status_details"]["self"]["complete"]
context["student_submission"] = create_submission_dict(student_submission, self.prompts)
path = 'openassessmentblock/response/oa_response_submitted.html'
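For context on the two long lines added above: they assume the workflow's status_details payload exposes a per-step "complete" flag. A minimal sketch of the derivation, using a hypothetical workflow value inferred from those expressions:

# Hypothetical example payload; the shape is inferred from the expressions in the diff.
workflow = {
    "submission_uuid": "abcd-1234",
    "status_details": {
        "peer": {"complete": False},   # peer step not yet finished
        "self": {"complete": True},    # self step already finished
    },
}

context = {}
context["peer_incomplete"] = (
    "peer" in workflow["status_details"]
    and not workflow["status_details"]["peer"]["complete"]
)
context["self_incomplete"] = (
    "self" in workflow["status_details"]
    and not workflow["status_details"]["self"]["complete"]
)
# With this sample: peer_incomplete is True and self_incomplete is False, so the
# submitted-response template would link only to the peer assessment step.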
@@ -322,26 +322,39 @@ class TestGrade(XBlockHandlerTestCase, SubmitAssessmentsMixin):
@scenario('data/grade_incomplete_scenario.xml', user_id='Bunk')
def test_grade_incomplete_missing_self(self, xblock):
# Graded peers, but haven't completed self assessment
self.create_submission_and_assessments(
xblock, self.SUBMISSION, [self.PEERS[0]], [PEER_ASSESSMENTS[0]], None
)
resp = self.request(xblock, 'render_grade', json.dumps(dict()))
resp = self._test_incomplete_helper(xblock, [self.PEERS[0]], None)
self.assertNotIn('peer assessment', resp)
self.assertIn('self assessment', resp)
# Verify that we're on the right template
self.assertIn(u'not completed', resp.decode('utf-8').lower())
@scenario('data/grade_incomplete_scenario.xml', user_id='Daniels')
@scenario('data/grade_incomplete_scenario.xml', user_id='Bunk')
def test_grade_incomplete_missing_peer(self, xblock):
# Have not yet completed peer assessment
resp = self._test_incomplete_helper(xblock, [], SELF_ASSESSMENT)
self.assertNotIn('self assessment', resp)
self.assertIn('peer assessment', resp)
@scenario('data/grade_incomplete_scenario.xml', user_id='Bunk')
def test_grade_incomplete_missing_both(self, xblock):
resp = self._test_incomplete_helper(xblock, [], None)
self.assertIn('self assessment', resp)
self.assertIn('peer assessment', resp)
def _test_incomplete_helper(self, xblock, peers, self_assessment):
self.create_submission_and_assessments(
xblock, self.SUBMISSION, [], [], None
xblock, self.SUBMISSION, peers, [PEER_ASSESSMENTS[0]] if peers else [], self_assessment
)
resp = self.request(xblock, 'render_grade', json.dumps(dict()))
# Verify that we're on the right template
# Verify grading page is rendered properly
resp = self.request(xblock, 'render_grade', json.dumps(dict()))
self.assertIn(u'not completed', resp.decode('utf-8').lower())
# Verify that the response_submitted page is rendered properly. This isn't super tightly connected
# to grade rendering, but it seems a shame to do the same setup in 2 different places.
submitted_resp = self.request(xblock, 'render_submission', json.dumps(dict()))
decoded_response = submitted_resp.decode('utf-8').lower()
self.assertIn(u'steps are complete and your response is fully assessed', decoded_response)
self.assertIn(u'you still need to complete', decoded_response)
return decoded_response
@scenario('data/grade_scenario.xml', user_id='Greggs')
def test_submit_feedback(self, xblock):
# Create submissions and assessments
@@ -180,8 +180,6 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
{
'file_upload_type': None,
'submission_start': dt.datetime(4999, 4, 1).replace(tzinfo=pytz.utc),
'has_peer': True,
'has_self': True,
'allow_latex': False,
}
)
@@ -202,8 +200,8 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
{
'student_submission': create_submission_dict(submission, xblock.prompts),
'file_upload_type': None,
'has_peer': True,
'has_self': True,
'peer_incomplete': True,
'self_incomplete': True,
'allow_latex': False,
}
)
@@ -222,8 +220,6 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
'save_status': 'This response has not been saved.',
'submit_enabled': False,
'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
'has_peer': True,
'has_self': True,
'allow_latex': False,
}
)
@@ -241,8 +237,6 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
}, xblock.prompts),
'save_status': 'This response has not been saved.',
'submit_enabled': False,
'has_peer': True,
'has_self': False,
'allow_latex': False,
}
)
@@ -266,8 +260,6 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
'save_status': 'This response has been saved but not submitted.',
'submit_enabled': True,
'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
'has_peer': True,
'has_self': True,
'allow_latex': False,
}
)
@@ -291,8 +283,6 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
'save_status': 'This response has been saved but not submitted.',
'submit_enabled': True,
'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
'has_peer': True,
'has_self': True,
'allow_latex': False,
}
)
@@ -309,8 +299,8 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
'student_submission': create_submission_dict(submission, xblock.prompts),
'file_upload_type': None,
'has_peer': True,
'has_self': True,
'peer_incomplete': True,
'self_incomplete': True,
'allow_latex': False,
}
)
@@ -335,8 +325,6 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
{
'file_upload_type': None,
'allow_latex': False,
'has_peer': True,
'has_self': True,
'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
'student_submission': submission,
'workflow_cancellation': {
@@ -367,8 +355,8 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
{"prompt": {'description': 'One prompt.'}, "text": "An old format response."}
]}},
'file_upload_type': None,
'has_peer': True,
'has_self': True,
'peer_incomplete': True,
'self_incomplete': True,
'allow_latex': False,
}
)
@@ -380,8 +368,6 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
{
'file_upload_type': None,
'submission_due': dt.datetime(2014, 4, 5).replace(tzinfo=pytz.utc),
'has_peer': False,
'has_self': True,
'allow_latex': False,
}
)
@@ -398,8 +384,8 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
'submission_due': dt.datetime(2014, 4, 5).replace(tzinfo=pytz.utc),
'student_submission': create_submission_dict(submission, xblock.prompts),
'file_upload_type': None,
'has_peer': False,
'has_self': True,
'peer_incomplete': False,
'self_incomplete': True,
'allow_latex': False,
}
)
@@ -424,8 +410,6 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
'submission_due': dt.datetime(2999, 5, 6).replace(tzinfo=pytz.utc),
'student_submission': create_submission_dict(submission, xblock.prompts),
'file_upload_type': None,
'has_peer': True,
'has_self': True,
'allow_latex': False,
}
)
@@ -450,8 +434,6 @@ class SubmissionRenderTest(XBlockHandlerTestCase):
'submission_due': dt.datetime(2014, 4, 5).replace(tzinfo=pytz.utc),
'student_submission': create_submission_dict(submission, xblock.prompts),
'file_upload_type': None,
'has_peer': False,
'has_self': True,
'allow_latex': False,
}
)