Commit 0ba747ef by Joe Blaylock

Merge pull request #30 from edx/jrbl/xblock_submit_return_type

Jrbl/xblock submit return type
parents 21daed03 9861f268
@@ -7,7 +7,7 @@ from mako.template import Template
from submissions import api
from xblock.core import XBlock
from xblock.fields import List, Scope, String
from xblock.fragment import Fragment
@@ -15,21 +15,23 @@ mako_default_filters = ['unicode', 'h', 'trim']
class OpenAssessmentBlock(XBlock):
    """Displays a question and gives an area where students can compose a response."""

    start_datetime = String(default=None, scope=Scope.content, help="ISO-8601 formatted string representing the start date of this assignment.")
    due_datetime = String(default=None, scope=Scope.content, help="ISO-8601 formatted string representing the end date of this assignment.")
    prompt = String(default=None, scope=Scope.content, help="A prompt to display to a student (plain text).")
    rubric = List(default=None, scope=Scope.content, help="Instructions and criteria for students giving feedback.")
    rubric_instructions = String(default=None, scope=Scope.content, help="Instructions for self and peer assessment.")
    rubric_criteria = List(default=None, scope=Scope.content, help="The different parts of grading for students giving feedback.")
    rubric_evals = List(default=None, scope=Scope.content, help="The requested set of evaluations and the order in which to apply them.")
    course_id = String(default=u"TestCourse", scope=Scope.content, help="The course_id associated with this prompt (until we can get it from runtime).")

    submit_errors = {     # Reported to user sometimes, and useful in tests
        'ENOSUB':   'API submission is unrequested',
        'ENODATA':  'API returned an empty response',
        'EBADFORM': 'API Submission Request Error',
        'EUNKNOWN': 'API returned unclassified exception',
    }
    def _get_xblock_trace(self):
        """Uniquely identify this xblock by context.
@@ -43,64 +45,209 @@ class OpenAssessmentBlock(XBlock):
        Useful for logging, debugging, and uniqueification."""
        return (self.scope_ids.usage_id, self.scope_ids.user_id)
    def _get_student_item_dict(self):
        """Create a student_item_dict from our surrounding context.

        See also: submissions.api for details.
        """
        item_id, student_id = self._get_xblock_trace()
        student_item_dict = dict(
            student_id=student_id,
            item_id=item_id,
            course_id=self.course_id,
            item_type='openassessment'      # XXX: Is this the tag we want? Why?
        )
        return student_item_dict

    def student_view(self, context=None):
        """The main view of OpenAssessmentBlock, displayed when viewing courses."""
        def load(path):
            """Handy helper for getting resources from our kit."""
            data = pkg_resources.resource_string(__name__, path)
            return data.decode("utf8")

        trace = self._get_xblock_trace()
        student_item_dict = self._get_student_item_dict()
        previous_submissions = api.get_submissions(student_item_dict)
        if previous_submissions:    # XXX: until workflow better, move on w/ prev submit
            html = Template(load("static/html/oa_rubric.html"),
                            default_filters=mako_default_filters,
                            input_encoding='utf-8',
                            )
            frag = Fragment(html.render_unicode(xblock_trace=trace,
                                                rubric_instructions=self.rubric_instructions,
                                                rubric_criteria=self.rubric_criteria,
                                                ))
            frag.add_css(load("static/css/openassessment.css"))
            frag.add_javascript(load("static/js/src/oa_assessment.js"))
            frag.initialize_js('OpenAssessmentBlock')
        else:    # XXX: until workflow better, submit until submitted
            html = Template(load("static/html/oa_submission.html"),
                            default_filters=mako_default_filters,
                            input_encoding='utf-8',
                            )
            frag = Fragment(html.render_unicode(xblock_trace=trace, question=self.prompt))
            frag.add_css(load("static/css/openassessment.css"))
            frag.add_javascript(load("static/js/src/oa_submission.js"))
            frag.initialize_js('OpenAssessmentBlock')
        return frag
    @XBlock.json_handler
    def assess(self, data, suffix=''):
        """Place an assessment into Openassessment system"""
        return (False, "Assessment handler is not implemented yet.")

    @XBlock.json_handler
    def submit(self, data, suffix=''):
        """
        Place the submission text into Openassessment system
        """
        status = False
        status_tag = 'ENOSUB'
        status_text = None
        student_sub = data['submission']
        student_item_dict = self._get_student_item_dict()
        try:
            status_tag = 'ENODATA'
            response = api.create_submission(student_item_dict, student_sub)
            if response:
                status = True
                status_tag = response.get('student_item')
                status_text = response.get('attempt_number')
        except api.SubmissionRequestError, e:
            status_tag = 'EBADFORM'
            status_text = unicode(e.field_errors)
        except api.SubmissionError:
            status_tag = 'EUNKNOWN'

        # relies on success being orthogonal to errors
        status_text = status_text if status_text else self.submit_errors[status_tag]
        return (status, status_tag, status_text)
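
    # A sketch of the JSON array the browser receives back from this handler;
    # values are illustrative, and the success case simply echoes whatever the
    # submissions API reports for 'student_item' and 'attempt_number':
    #
    #   [true,  <student_item>, <attempt_number>]                     # accepted
    #   [false, "EBADFORM", "<field errors from the API>"]            # bad request
    #   [false, "EUNKNOWN", "API returned unclassified exception"]    # unclassified failure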

    @classmethod
    def parse_xml(cls, node, runtime, keys, id_generator):
        """Instantiate xblock object from runtime XML definition."""
        block = runtime.construct_xblock_from_class(cls, keys)
        for child in node:
            if child.tag == 'prompt':
                block.prompt = child.text.strip()
            elif child.tag == 'rubric':
                block.rubric_instructions = child.text.strip()
                block.rubric_criteria = []
                for criterion in child:
                    crit = {'name': criterion.attrib.get('name', ''),
                            'instructions': criterion.text.strip(),
                            }
                    for option in criterion:
                        crit[option.attrib['val']] = option.text.strip()
                    block.rubric_criteria.append(crit)
            elif child.tag == 'evals':
                block.rubric_evals = []
                for evaluation in child:
                    e = {'type': evaluation.tag,
                         'name': evaluation.attrib.get('name', ''),
                         'start_datetime': evaluation.attrib.get('start', None),
                         'due_datetime': evaluation.attrib.get('due', None),
                         # These attrs are accepted for self, ai evals, but ignored:
                         'must_grade': evaluation.attrib.get('must_grade', 1),
                         'must_be_graded_by': evaluation.attrib.get('must_be_graded_by', 0),
                         }
                    block.rubric_evals.append(e)
            else:
                # XXX: jrbl thinks this lets you embed other blocks inside this (?)
                block.runtime.add_node_as_child(block, child, id_generator)
        return block
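
    # For XML shaped like the workbench scenarios below, the fields parsed above
    # come out roughly like this (abridged, illustrative values):
    #
    #   block.rubric_instructions == "Read for conciseness, clarity of thought, and form."
    #   block.rubric_criteria[0]  == {'name': 'concise',
    #                                 'instructions': 'How concise is it?',
    #                                 '0': 'Neal Stephenson (late)', ...}
    #   block.rubric_evals[0]     == {'type': 'peereval', 'name': '',
    #                                 'start_datetime': '2014-12-20T19:00-7:00',
    #                                 'due_datetime': '2014-12-21T22:22-7:00',
    #                                 'must_grade': '5', 'must_be_graded_by': '3'}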

    # Arbitrary attributes can be defined on the
    @staticmethod
    def workbench_scenarios():
        """A canned scenario for display in the workbench."""
        return [
            ("OpenAssessmentBlock Poverty Rubric",
             """
<vertical_demo>
<openassessment start="2014-12-19T23:00-7:00" due="2014-12-21T23:00-7:00">
<prompt>
Given the state of the world today, what do you think should be done to
combat poverty? Please answer in a short essay of 200-300 words.
</prompt>
<rubric>
Read for conciseness, clarity of thought, and form.
<criterion name="concise">
How concise is it?
<option val="0">Neal Stephenson (late)</option>
<option val="1">HP Lovecraft</option>
<option val="3">Robert Heinlein</option>
<option val="4">Neal Stephenson (early)</option>
<option val="5">Earnest Hemingway</option>
</criterion>
<criterion name="clearheaded">
How clear is the thinking?
<option val="0">The Unabomber</option>
<option val="1">Hunter S. Thompson</option>
<option val="2">Robert Heinlein</option>
<option val="3">Isaac Asimov</option>
<option val="55">Spock</option>
</criterion>
<criterion name="form">
Lastly, how is its form? Punctuation, grammar, and spelling all count.
<option val="0">lolcats</option>
<option val="1">Facebook</option>
<option val="2">Reddit</option>
<option val="3">metafilter</option>
<option val="4">Usenet, 1996</option>
<option val="99">The Elements of Style</option>
</criterion>
</rubric>
<evals>
<peereval start="2014-12-20T19:00-7:00"
due="2014-12-21T22:22-7:00"
must_grade="5"
must_be_graded_by="3" />
<selfeval/>
</evals>
</openassessment>
</vertical_demo>
"""),
("OpenAssessmentBlock Censorship Rubric",
"""
<vertical_demo>
<openassessment start="2013-12-19T23:00-7:00" due="2014-12-21T23:00-7:00">
<prompt>
What do you think about censorship in libraries? I think it's pretty great.
</prompt>
<rubric>
Read for conciseness, clarity of thought, and form.
<criterion name="concise">
How concise is it?
<option val="0">The Bible</option>
<option val="1">Earnest Hemingway</option>
<option val="3">Matsuo Basho</option>
</criterion>
<criterion name="clearheaded">
How clear is the thinking?
<option val="0">Eric</option>
<option val="1">John</option>
<option val="2">Ian</option>
</criterion>
<criterion name="form">
Lastly, how is its form? Punctuation, grammar, and spelling all count.
<option val="0">IRC</option>
<option val="1">Real Email</option>
<option val="2">Old-timey letters</option>
</criterion>
</rubric>
<evals>
<selfeval/>
<peereval start="2014-12-20T19:00-7:00"
due="2014-12-21T22:22-7:00"
must_grade="5"
must_be_graded_by="3" />
</evals>
</openassessment>
</vertical_demo>
"""), """),
] ]
# <h3>Censorship in the Libraries</h3>
#
# <p>'All of us can think of a book that we hope none of our children or any other children have taken off the shelf. But if I have the right to remove that book from the shelf -- that work I abhor -- then you also have exactly the same right and so does everyone else. And then we have no books left on the shelf for any of us.' --Katherine Paterson, Author
# </p>
#
# <p>
# Write a persuasive essay to a newspaper reflecting your views on censorship in libraries. Do you believe that certain materials, such as books, music, movies, magazines, etc., should be removed from the shelves if they are found offensive? Support your position with convincing arguments from your own experience, observations, and/or reading.
# </p>
/* START CSS for OpenAssessmentBlock */
.openassessment_block .openassessment_prompt {
    font-weight: bold;
}
@@ -26,17 +26,17 @@
.openassessment_response_status_block .clickhere {
    font-size: small;
    color: black;
    font-weight: normal;
    text-align: center;
}
.openassessment_response_status_block .success {
    color: green;
}
.openassessment_response_status_block .failure {
    color: red;
}
.clickhere_span {
...
<!-- START OpenAssessmentBlock HTML -->
<div class="openassessment_block" id="openassessment_block_${xblock_trace[0]}">
    <p class="openassessment_prompt"
       id="openassessment_rubric_instructions_${xblock_trace[0]}">${rubric_instructions}</p>
    % for criterion in rubric_criteria:
    <div>
        <p class="openassessment_prompt">${criterion["instructions"]}</p>
        % for value in sorted([k for k in criterion.keys() if k != 'name' and k != 'instructions']):
        <input type="radio" value="${value}">${criterion[value]}</input>
        % endfor
    </div>
    % endfor
    <input type="button"
           class="openassessment_submit" id="openassessment_submit_${xblock_trace[0]}" value="Submit" />
</div>
<div class="openassessment_response_status_block" id="openassessment_response_status_block_${xblock_trace[0]}">
    This message should be invisible; please upgrade your browser.
</div>
<!-- END OpenAssessmentBlock HTML -->

<!-- START OpenAssessmentBlock HTML -->
<div class="openassessment_block" id="openassessment_block_${xblock_trace[0]}">
    <p class="openassessment_prompt" id="openassessment_question_${xblock_trace[0]}">${question}</p>
    <textarea class="openassessment_submission" id="openassessment_submission_${xblock_trace[0]}">Compose your response here</textarea>
    <input type="button" class="openassessment_submit" id="openassessment_submit_${xblock_trace[0]}" value="Submit" />
</div>
<div class="openassessment_response_status_block" id="openassessment_response_status_block_${xblock_trace[0]}">
    This message should be invisible; please upgrade your browser.
</div>
<!-- END OpenAssessmentBlock HTML -->

/* START Javascript for OpenassessmentComposeXBlock. */
function OpenAssessmentBlock(runtime, element) {
    var handlerUrl = runtime.handlerUrl(element, 'assess');
    var success_msg = '<p class="success">Thanks for your feedback!</p>';
    var failure_msg = '<p class="failure">An error occurred with your feedback</p>';
    var click_msg = '<p class="clickhere">(click here to dismiss this message)</p>';
    /* Sample Debug Console: http://localhost:8000/submissions/Joe_Bloggs/TestCourse/u_3 */

    function displayStatus(result) {
        var status = result[0];
        var error_msg = result[1];
        if (status) {
            $('.openassessment_response_status_block', element).html(success_msg.concat(click_msg));
        } else {
            $('.openassessment_response_status_block', element).html(failure_msg.concat(error_msg).concat(click_msg));
        }
        $('.openassessment_response_status_block', element).css('display', 'block');
    }

    $('.openassessment_response_status_block', element).click(function(eventObject) {
        $('.openassessment_response_status_block', element).css('display', 'none');
    });

    $('.openassessment_submit', element).click(function(eventObject) {
        $.ajax({
            type: "POST",
            url: handlerUrl,
            /* data: JSON.stringify({"submission": $('.openassessment_submission', element).val()}), */
            data: JSON.stringify({"assessment": "I'm not sure how to stringify a form"}),
            success: displayStatus
        });
    });

    $(function ($) {
        /* Here's where you'd do things on page load. */
        $(element).css('background-color', 'LightBlue');
    });
}
/* END Javascript for OpenassessmentComposeXBlock. */

/* START Javascript for OpenassessmentComposeXBlock. */
function OpenAssessmentBlock(runtime, element) {
    var handlerUrl = runtime.handlerUrl(element, 'submit');
    var success_msg = '<p class="success">Your submission has been received, thank you!</p>';
    var failure_msg = '<p class="failure">An error occurred with your submission</p>';
    var click_msg = '<p class="clickhere">(click here to dismiss this message)</p>';
    /* Sample Debug Console: http://localhost:8000/submissions/Joe_Bloggs/TestCourse/u_3 */

    function displayStatus(result) {
        var status = result[0];
        var error_msg = result[2];
        if (status) {
            $('.openassessment_response_status_block', element).html(success_msg.concat(click_msg));
        } else {
            $('.openassessment_response_status_block', element).html(failure_msg.concat(error_msg).concat(click_msg));
        }
        $('.openassessment_response_status_block', element).css('display', 'block');
    }
@@ -18,13 +21,12 @@ function OpenAssessmentBlock(runtime, element) {
        $('.openassessment_response_status_block', element).css('display', 'none');
    });

    $('.openassessment_submit', element).click(function(eventObject) {
        $.ajax({
            type: "POST",
            url: handlerUrl,
            data: JSON.stringify({"submission": $('.openassessment_submission', element).val()}),
            success: displayStatus
        });
    });
...

@@ -10,7 +10,7 @@ from mock import patch
from workbench.runtime import WorkbenchRuntime
from submissions import api
from submissions.api import SubmissionRequestError, SubmissionInternalError


class TestOpenAssessment(TestCase):
@@ -28,6 +28,7 @@ class TestOpenAssessment(TestCase):
            />
        """, self.runtime.id_generator)
        self.assessment = self.runtime.get_block(assessment_id)
        self.default_json_submission = json.dumps({"submission": "This is my answer to this test question!"})

    def make_request(self, body):
        """Mock request method."""
@@ -35,49 +36,44 @@ class TestOpenAssessment(TestCase):
        request.body = body
        return request

    def test_submit_submission(self):
        """XBlock accepts response, returns true on success."""
        resp = self.runtime.handle(
            self.assessment, 'submit',
            self.make_request(self.default_json_submission)
        )
        result = json.loads(resp.body)
        self.assertTrue(result[0])

    @patch.object(api, 'create_submission')
    def test_submission_general_failure(self, mock_submit):
        """Internal errors return some code for submission failure."""
        mock_submit.side_effect = SubmissionInternalError("Cat on fire.")
        resp = self.runtime.handle(
            self.assessment, 'submit',
            self.make_request(self.default_json_submission)
        )
        result = json.loads(resp.body)
        self.assertFalse(result[0])
        self.assertEqual(result[1], "EUNKNOWN")
        self.assertEqual(result[2], self.assessment.submit_errors["EUNKNOWN"])

    @patch.object(api, 'create_submission')
    def test_submission_API_failure(self, mock_submit):
        """API usage errors return code and meaningful message."""
        mock_submit.side_effect = SubmissionRequestError("Cat on fire.")
        resp = self.runtime.handle(
            self.assessment, 'submit',
            self.make_request(self.default_json_submission)
        )
        result = json.loads(resp.body)
        self.assertFalse(result[0])
        self.assertEqual(result[1], "EBADFORM")
        self.assertEqual(result[2], "Cat on fire.")
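
    # A minimal sketch of the one error path not exercised above, assuming
    # create_submission can return an empty/falsy response (test name illustrative):
    @patch.object(api, 'create_submission')
    def test_submission_empty_response(self, mock_submit):
        """A falsy API response should come back as ENODATA."""
        mock_submit.return_value = None
        resp = self.runtime.handle(
            self.assessment, 'submit',
            self.make_request(self.default_json_submission)
        )
        result = json.loads(resp.body)
        self.assertFalse(result[0])
        self.assertEqual(result[1], "ENODATA")
        self.assertEqual(result[2], self.assessment.submit_errors["ENODATA"])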

    def test_load_student_view(self):
        """OA XBlock returns some HTML to the user.

        View basic test for verifying we're returned some HTML about the
        Open Assessment XBlock. We don't want to match too heavily against the
        contents.
...

@@ -8,3 +8,4 @@ django-extensions==1.3.3
djangorestframework==2.3.5
Mako==0.9.1
pytz==2013.9
django-pdb==0.3.2

@@ -107,6 +107,7 @@ MIDDLEWARE_CLASSES = (
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django_pdb.middleware.PdbMiddleware',  # Needed to enable shell-on-crash behavior
)

ROOT_URLCONF = 'urls'
@@ -130,6 +131,7 @@ INSTALLED_APPS = (
    # Third party
    'django_extensions',
    'django_pdb',   # Allows post-mortem debugging on exceptions

    # XBlock
    'workbench',
...