Commit 18ed8a38 by Will Daly

Prevent authors from submitting essays in Studio Preview mode

parent 30b77ac9
......@@ -242,7 +242,10 @@ class OpenAssessmentBlock(
Useful for logging, debugging, and uniqueification.
"""
return unicode(self.scope_ids.usage_id), unicode(self.scope_ids.user_id)
return (
unicode(self.scope_ids.usage_id),
unicode(self.scope_ids.user_id) if self.scope_ids.user_id is not None else None,
)
def get_student_item_dict(self):
"""Create a student_item_dict from our surrounding context.
......
......@@ -35,7 +35,7 @@ describe("OpenAssessment.BaseUI", function() {
defer.resolveWith(this, [server.fragments[component]]);
}).promise();
};
}
};
// Stub runtime
var runtime = {};
......@@ -56,7 +56,7 @@ describe("OpenAssessment.BaseUI", function() {
waitsFor(function() {
var subviewHasHtml = $("#openassessment-base").children().map(
function(index, el) { return el.innerHTML != ''; }
function(index, el) { return el.innerHTML !== ''; }
);
return Array(subviewHasHtml).every(function(hasHtml) { return hasHtml; });
});
......@@ -64,11 +64,11 @@ describe("OpenAssessment.BaseUI", function() {
runs(function() {
return callback();
});
}
};
beforeEach(function() {
// Load the DOM fixture
jasmine.getFixtures().fixturesPath = 'base/fixtures'
jasmine.getFixtures().fixturesPath = 'base/fixtures';
loadFixtures('oa_base.html');
// Create a new stub server
......@@ -91,7 +91,7 @@ describe("OpenAssessment.BaseUI", function() {
loadSubviews(function() {
spyOn(server, 'peerAssess').andCallThrough();
ui.peerAssess();
expect(server.peerAssess).toHaveBeenCalled()
expect(server.peerAssess).toHaveBeenCalled();
});
});
......
......@@ -197,9 +197,9 @@ OpenAssessment.BaseUI.prototype = {
ui.renderSubmissionStep();
ui.renderPeerAssessmentStep(true);
}
).fail(function(errMsg) {
// TODO: display to the user
console.log(errMsg);
).fail(function(errCode, errMsg) {
// TODO: display to the user in a classier way
alert(errMsg);
});
},
......
......@@ -25,10 +25,11 @@ class SubmissionMixin(object):
submit_errors = {
# Reported to user sometimes, and useful in tests
'ENODATA': 'API returned an empty response',
'EBADFORM': 'API Submission Request Error',
'EUNKNOWN': 'API returned unclassified exception',
'ENOMULTI': 'Multiple submissions are not allowed for this item',
'ENODATA': _(u'API returned an empty response.'),
'EBADFORM': _(u'API Submission Request Error.'),
'EUNKNOWN': _(u'API returned unclassified exception.'),
'ENOMULTI': _(u'Multiple submissions are not allowed.'),
'ENOPREVIEW': _(u'You cannot make a submission while in Preview mode.'),
}
@XBlock.json_handler
......@@ -54,6 +55,12 @@ class SubmissionMixin(object):
status_text = None
student_sub = data['submission']
student_item_dict = self.get_student_item_dict()
# Short-circuit if no user is defined (as in Studio Preview mode)
# Since students can't submit, they will never be able to progress in the workflow
if student_item_dict['student_id'] is None:
return False, 'ENOPREVIEW', self.submit_errors['ENOPREVIEW']
workflow = self.get_workflow_info()
status_tag = 'ENOMULTI' # It is an error to submit multiple times for the same item
......
"""
Tests the Open Assessment XBlock functionality.
"""
import json
import datetime as dt
import pytz
......@@ -9,55 +8,13 @@ from mock import Mock, patch
from openassessment.xblock import openassessmentblock
from openassessment.xblock.submission_mixin import SubmissionMixin
from submissions import api as sub_api
from submissions.api import SubmissionRequestError, SubmissionInternalError
from .base import XBlockHandlerTestCase, scenario
class TestOpenAssessment(XBlockHandlerTestCase):
SUBMISSION = json.dumps({"submission": "This is my answer to this test question!"})
@scenario('data/basic_scenario.xml', user_id='Bob')
def test_submit_submission(self, xblock):
    """XBlock accepts response, returns true on success"""
    # resp is the JSON-decoded (success, status_tag, status_text) tuple
    # returned by the 'submit' handler; resp[0] is the success flag.
    resp = self.request(xblock, 'submit', self.SUBMISSION, response_format='json')
    self.assertTrue(resp[0])
@scenario('data/basic_scenario.xml', user_id='Bob')
def test_submission_multisubmit_failure(self, xblock):
    """XBlock returns true on first, false on second submission"""
    # We don't care about return value of first one
    self.request(xblock, 'submit', self.SUBMISSION, response_format='json')

    # This one should fail because we're not allowed to submit multiple times
    # resp is (success, status_tag, status_text); ENOMULTI is the
    # multiple-submissions error tag defined in SubmissionMixin.submit_errors.
    resp = self.request(xblock, 'submit', self.SUBMISSION, response_format='json')
    self.assertFalse(resp[0])
    self.assertEqual(resp[1], "ENOMULTI")
    self.assertEqual(resp[2], xblock.submit_errors["ENOMULTI"])
# Pin a real user ID: with user_id unset the runtime can report a None
# student ID (as in Studio Preview), and 'submit' now short-circuits with
# ENOPREVIEW before ever reaching the mocked submissions API -- which would
# make this test assert the wrong error code.
@scenario('data/basic_scenario.xml', user_id='Bob')
@patch.object(sub_api, 'create_submission')
def test_submission_general_failure(self, xblock, mock_submit):
    """Internal errors return some code for submission failure."""
    # Force the submissions API to raise an unclassified internal error.
    mock_submit.side_effect = SubmissionInternalError("Cat on fire.")
    resp = self.request(xblock, 'submit', self.SUBMISSION, response_format='json')
    # resp is (success, status_tag, status_text); EUNKNOWN is the
    # catch-all tag for unclassified API exceptions.
    self.assertFalse(resp[0])
    self.assertEqual(resp[1], "EUNKNOWN")
    self.assertEqual(resp[2], SubmissionMixin().submit_errors["EUNKNOWN"])
# Pin a real user ID so the ENOPREVIEW short-circuit (triggered when the
# student ID is None, as in Studio Preview) does not fire before the
# mocked submissions API is reached.
@scenario('data/basic_scenario.xml', user_id='Bob')
@patch.object(sub_api, 'create_submission')
def test_submission_API_failure(self, xblock, mock_submit):
    """API usage errors return code and meaningful message."""
    # Simulate a malformed request to the submissions API.
    mock_submit.side_effect = SubmissionRequestError("Cat on fire.")
    resp = self.request(xblock, 'submit', self.SUBMISSION, response_format='json')
    # resp is (success, status_tag, status_text); for request errors the
    # handler passes the API's own message through as status_text.
    self.assertFalse(resp[0])
    self.assertEqual(resp[1], "EBADFORM")
    self.assertEqual(resp[2], "Cat on fire.")
@scenario('data/basic_scenario.xml')
def test_load_student_view(self, xblock):
"""OA XBlock returns some HTML to the user.
......
"""
Test submission to the OpenAssessment XBlock.
"""
import json
from mock import patch
from submissions import api as sub_api
from submissions.api import SubmissionRequestError, SubmissionInternalError
from openassessment.xblock.submission_mixin import SubmissionMixin
from .base import XBlockHandlerTestCase, scenario
class SubmissionTest(XBlockHandlerTestCase):
    """
    Tests for the OpenAssessment XBlock 'submit' JSON handler.

    Each handler response is a (success, status_tag, status_text) tuple.
    """

    # JSON payload for a well-formed essay submission
    SUBMISSION = json.dumps({"submission": "This is my answer to this test question!"})

    @scenario('data/basic_scenario.xml', user_id='Bob')
    def test_submit_submission(self, xblock):
        """A valid submission from a known user is accepted."""
        resp = self.request(xblock, 'submit', self.SUBMISSION, response_format='json')
        self.assertTrue(resp[0])

    @scenario('data/basic_scenario.xml', user_id='Bob')
    def test_submission_multisubmit_failure(self, xblock):
        """A second submission for the same item fails with ENOMULTI."""
        # We don't care about return value of first one
        self.request(xblock, 'submit', self.SUBMISSION, response_format='json')

        # This one should fail because we're not allowed to submit multiple times
        resp = self.request(xblock, 'submit', self.SUBMISSION, response_format='json')
        self.assertFalse(resp[0])
        self.assertEqual(resp[1], "ENOMULTI")
        self.assertEqual(resp[2], xblock.submit_errors["ENOMULTI"])

    @scenario('data/basic_scenario.xml', user_id='Bob')
    @patch.object(sub_api, 'create_submission')
    def test_submission_general_failure(self, xblock, mock_submit):
        """Unclassified internal errors from the submissions API map to EUNKNOWN."""
        mock_submit.side_effect = SubmissionInternalError("Cat on fire.")
        resp = self.request(xblock, 'submit', self.SUBMISSION, response_format='json')
        self.assertFalse(resp[0])
        self.assertEqual(resp[1], "EUNKNOWN")
        self.assertEqual(resp[2], SubmissionMixin().submit_errors["EUNKNOWN"])

    @scenario('data/basic_scenario.xml', user_id='Bob')
    @patch.object(sub_api, 'create_submission')
    def test_submission_API_failure(self, xblock, mock_submit):
        """Malformed requests to the submissions API map to EBADFORM."""
        mock_submit.side_effect = SubmissionRequestError("Cat on fire.")
        resp = self.request(xblock, 'submit', self.SUBMISSION, response_format='json')
        self.assertFalse(resp[0])
        self.assertEqual(resp[1], "EBADFORM")

    # In Studio preview mode, the runtime sets the user ID to None
    @scenario('data/basic_scenario.xml', user_id=None)
    def test_cannot_submit_in_preview_mode(self, xblock):
        """With no user ID (Studio Preview), submission is rejected with ENOPREVIEW."""
        resp = self.request(xblock, 'submit', self.SUBMISSION, response_format='json')
        self.assertFalse(resp[0])
        self.assertEqual(resp[1], "ENOPREVIEW")
        self.assertEqual(resp[2], "You cannot make a submission while in Preview mode.")
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment