Commit c3c120d2 by Joe Blaylock

Merge pull request #211 from edx/jrbl/tim-346-check-input-sizes

TIM-346: Server-and-client-side size validation
parents e59ec0b5 4023d756
@@ -237,6 +237,8 @@ class Assessment(models.Model):
objects that map to each :class:`Criterion` in the :class:`Rubric` we're
assessing against.
"""
MAXSIZE = 1024 * 100 # 100KB
submission_uuid = models.CharField(max_length=128, db_index=True)
rubric = models.ForeignKey(Rubric)
@@ -429,6 +431,8 @@ class AssessmentFeedback(models.Model):
as well as zero or more feedback options
("Please select the statements below that reflect what you think of this peer grading experience")
"""
MAXSIZE = 1024*100 # 100KB
submission_uuid = models.CharField(max_length=128, unique=True, db_index=True)
assessments = models.ManyToManyField(Assessment, related_name='assessment_feedback', default=None)
feedback_text = models.TextField(max_length=10000, default="")
......
@@ -1000,6 +1000,10 @@ def set_assessment_feedback(feedback_dict):
feedback_text = feedback_dict.get('feedback_text')
selected_options = feedback_dict.get('options', list())
if feedback_text and len(feedback_text) > AssessmentFeedback.MAXSIZE:
error_message = u"Assessment feedback too large."
raise PeerAssessmentRequestError(error_message)
try:
# Get or create the assessment model for this submission
# If we receive an integrity error, assume that someone else is trying to create
......
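Not part of the commit: a minimal sketch of how a caller could exercise set_assessment_feedback and surface the new size error. Only set_assessment_feedback, the feedback_dict keys, AssessmentFeedback.MAXSIZE, and PeerAssessmentRequestError come from this diff; the import paths and helper name are assumptions for illustration.

# Hedged sketch; the import paths below are assumptions, not part of this commit.
from openassessment.assessment import peer_api
from openassessment.assessment.models import AssessmentFeedback


def submit_feedback(submission_uuid, feedback_text, options):
    """Illustrative helper returning a {'success': ..., 'msg': ...} dict."""
    if feedback_text and len(feedback_text) > AssessmentFeedback.MAXSIZE:
        # Same guard the API applies, checked up front so the caller can
        # return a friendly message instead of propagating the exception.
        return {'success': False, 'msg': u"Assessment feedback too large."}
    try:
        peer_api.set_assessment_feedback({
            'submission_uuid': submission_uuid,
            'feedback_text': feedback_text,
            'options': options,
        })
        return {'success': True, 'msg': u""}
    except peer_api.PeerAssessmentRequestError as ex:
        return {'success': False, 'msg': unicode(ex)}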
@@ -160,6 +160,13 @@ class AssessmentPartSerializer(serializers.ModelSerializer):
class AssessmentSerializer(serializers.ModelSerializer):
"""Simplified serializer for :class:`Assessment` that's lighter on the DB."""
def validate_feedback(self, attrs, source):
"""Check that the feedback is within an acceptable size range."""
value = attrs[source]
if len(value) > Assessment.MAXSIZE:
raise serializers.ValidationError("Maximum feedback size exceeded.")
return attrs
class Meta:
model = Assessment
fields = (
......
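Aside (not in the commit): validate_feedback follows Django REST Framework 2.x's per-field validate_<fieldname>(self, attrs, source) hook, so the size check runs whenever the serializer validates incoming data. A minimal sketch of how the error surfaces; the import paths are assumptions, and the other required Assessment fields are elided.

# Hedged sketch assuming DRF 2.x semantics as used in this commit.
from openassessment.assessment.models import Assessment
from openassessment.assessment.serializers import AssessmentSerializer

oversized = {
    "feedback": u"x" * (Assessment.MAXSIZE + 1),
    # ... plus the other fields the serializer requires ...
}
serializer = AssessmentSerializer(data=oversized)
if not serializer.is_valid():
    # serializer.errors maps field names to lists of messages, e.g.
    # {"feedback": [u"Maximum feedback size exceeded."]}
    print(serializer.errors)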
@@ -100,6 +100,17 @@ ASSESSMENT_DICT_PASS = dict(
}
)
# Answers are against RUBRIC_DICT -- this is worth 12 points
ASSESSMENT_DICT_PASS_HUGE = dict(
feedback=u"这是中国" * Assessment.MAXSIZE,
options_selected={
"secret": "yes",
u"ⓢⓐⓕⓔ": "yes",
"giveup": "eager",
"singing": "no",
}
)
REQUIRED_GRADED = 5
REQUIRED_GRADED_BY = 3
@@ -125,6 +136,18 @@ class TestPeerApi(TestCase):
self.assertEqual(assessment["points_possible"], 14)
self.assertEqual(assessment["feedback"], ASSESSMENT_DICT["feedback"])
def test_create_huge_assessment_fails(self):
self._create_student_and_submission("Tim", "Tim's answer")
bob_sub, bob = self._create_student_and_submission("Bob", "Bob's answer")
sub = peer_api.get_submission_to_assess(bob, 1)
with self.assertRaises(peer_api.PeerAssessmentRequestError):
peer_api.create_assessment(
sub["uuid"],
bob["student_id"],
ASSESSMENT_DICT_PASS_HUGE,
RUBRIC_DICT,
)
@file_data('valid_assessments.json')
def test_get_assessments(self, assessment_dict):
self._create_student_and_submission("Tim", "Tim's answer")
@@ -587,6 +610,17 @@ class TestPeerApi(TestCase):
}
)
@patch.object(AssessmentFeedback, 'save')
@raises(peer_api.PeerAssessmentRequestError)
def test_set_assessment_feedback_error_on_huge_save(self, mock_filter):
tim_answer, tim = self._create_student_and_submission("Tim", "Tim's answer", MONDAY)
peer_api.set_assessment_feedback(
{
'submission_uuid': tim_answer['uuid'],
'feedback_text': 'Boo'*AssessmentFeedback.MAXSIZE,
}
)
@patch.object(PeerWorkflow.objects, 'filter')
@raises(peer_api.PeerAssessmentWorkflowError)
def test_failure_to_get_latest_workflow(self, mock_filter):
......
@@ -190,7 +190,13 @@
</li>
</ol>
</div>
<div class="submission__feeedback__actions">
<div class="submission__feedback__actions">
<div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not submit your feedback</h3>
<div class="message__content"></div>
</div>
<ul class="list list--actions submission__feeedback__actions"> <ul class="list list--actions submission__feeedback__actions">
<li class="list--actions__item"> <li class="list--actions__item">
<button type="submit" id="feedback__submit" class="action action--submit feedback__submit">Submit Feedback On Peer Evaluations</button> <button type="submit" id="feedback__submit" class="action action--submit feedback__submit">Submit Feedback On Peer Evaluations</button>
......
@@ -117,6 +117,7 @@
<div class="step__actions">
<div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not submit your assessment</h3>
<div class="message__content"></div>
</div>
<ul class="list list--actions">
......
@@ -44,6 +44,7 @@
<div class="response__submission__actions">
<div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not save your progress</h3>
<div class="message__content"></div>
</div>
<ul class="list list--actions">
@@ -65,6 +66,7 @@
<div class="step__actions">
<div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not submit your response</h3>
<div class="message__content"></div>
</div>
<ul class="list list--actions">
......
@@ -89,6 +89,7 @@
<div class="step__actions">
<div class="message message--inline message--error message--error-server">
<h3 class="message__title">We could not submit your assessment</h3>
<div class="message__content"></div>
</div>
<ul class="list list--actions">
......
@@ -30,6 +30,19 @@ describe("OpenAssessment.Server", function() {
);
};
var getHugeTestString = function() {
// Build a string one character longer than the client-side limit.
var testStringSize = server.maxInputSize + 1;
var testString = '';
for (var i = 0; i < testStringSize; i++) { testString += 'x'; }
return testString;
};
var getHugeStringError = function() {
// Return a substring of the full error message, suitable for .toContain():
// "Response text is too large. Please reduce the size of your response and try to submit again."
return "text is too large";
};
beforeEach(function() {
// Create the server
// Since the runtime is a stub implementation that ignores the element passed to it,
@@ -191,6 +204,20 @@ describe("OpenAssessment.Server", function() {
expect(receivedErrorMsg).toEqual("Could not contact server.");
});
it("confirms that very long submissions fail with an error without ajax", function() {
var receivedErrorCode = "";
var receivedErrorMsg = "";
var testString = getHugeTestString();
server.submit(testString).fail(
function(errorCode, errorMsg) {
receivedErrorCode = errorCode;
receivedErrorMsg = errorMsg;
}
);
expect(receivedErrorCode).toEqual("submit");
expect(receivedErrorMsg).toContain(getHugeStringError());
});
it("informs the caller of an server error when sending a submission", function() { it("informs the caller of an server error when sending a submission", function() {
stubAjax(true, [false, "ENODATA", "Error occurred!"]); stubAjax(true, [false, "ENODATA", "Error occurred!"]);
...@@ -207,6 +234,15 @@ describe("OpenAssessment.Server", function() { ...@@ -207,6 +234,15 @@ describe("OpenAssessment.Server", function() {
expect(receivedErrorMsg).toEqual("Error occurred!"); expect(receivedErrorMsg).toEqual("Error occurred!");
}); });
it("confirms that very long saves fail with an error without ajax", function() {
var receivedErrorMsg = "";
var testString = getHugeTestString();
server.save(testString).fail(
function(errorMsg) { receivedErrorMsg = errorMsg; }
);
expect(receivedErrorMsg).toContain(getHugeStringError());
});
it("informs the caller of an AJAX error when sending a submission", function() { it("informs the caller of an AJAX error when sending a submission", function() {
stubAjax(false, null); stubAjax(false, null);
var receivedMsg = null; var receivedMsg = null;
...@@ -265,6 +301,18 @@ describe("OpenAssessment.Server", function() { ...@@ -265,6 +301,18 @@ describe("OpenAssessment.Server", function() {
expect(receivedMsg).toEqual("Test error"); expect(receivedMsg).toEqual("Test error");
}); });
it("confirms that very long peer assessments fail with an error without ajax", function() {
var options = {clarity: "Very clear", precision: "Somewhat precise"};
var receivedErrorMsg = "";
var testString = getHugeTestString();
server.peerAssess("abc1234", options, testString).fail(
function(errorMsg) {
receivedErrorMsg = errorMsg;
}
);
expect(receivedErrorMsg).toContain(getHugeStringError());
});
it("informs the caller of a server error when sending a peer assessment", function() { it("informs the caller of a server error when sending a peer assessment", function() {
stubAjax(true, {success:false, msg:'Test error!'}); stubAjax(true, {success:false, msg:'Test error!'});
...@@ -312,6 +360,18 @@ describe("OpenAssessment.Server", function() { ...@@ -312,6 +360,18 @@ describe("OpenAssessment.Server", function() {
expect(receivedMsg).toEqual("Test error"); expect(receivedMsg).toEqual("Test error");
}); });
it("confirms that very long assessment feedback fails with an error without ajax", function() {
var options = ["Option 1", "Option 2"];
var receivedErrorMsg = "";
var testString = getHugeTestString();
server.submitFeedbackOnAssessment(testString, options).fail(
function(errorMsg) {
receivedErrorMsg = errorMsg;
}
);
expect(receivedErrorMsg).toContain(getHugeStringError());
});
it("informs the caller of an AJAX error when sending feedback on submission", function() { it("informs the caller of an AJAX error when sending feedback on submission", function() {
stubAjax(false, null); stubAjax(false, null);
......
@@ -342,11 +342,10 @@ OpenAssessment.BaseUI.prototype = {
function(element, index) { return $(element).val(); }
);
ui.server.submitFeedbackOnAssessment(text, options).done(function() {
// When we have successfully sent the submission, textarea no longer editable
// TODO
console.log("Feedback to the assessments submitted, thanks!");
// When we have successfully sent the submission, textarea no longer editable
// console.log("Feedback to the assessments submitted, thanks!");
}).fail(function(errMsg) {
// TODO: display to the user
ui.toggleActionError('feedback_assess', errMsg);
});
},
@@ -447,8 +446,9 @@ OpenAssessment.BaseUI.prototype = {
var container = null;
if (type == 'save') { container = '.response__submission__actions'; }
else if (type == 'submit') { container = '.step__actions'; }
else if (type == 'peer') { container = '.peer-assessment__actions'; }
else if (type == 'peer') { container = '.step__actions'; }
else if (type == 'self') { container = '.self-assessment__actions'; }
else if (type == 'feedback_assess') { container = '.submission__feedback__actions'; }
// If we don't have anywhere to put the message, just log it to the console
if (container === null) {
@@ -458,7 +458,7 @@ OpenAssessment.BaseUI.prototype = {
else {
// Insert the error message
var msgHtml = (msg === null) ? "" : msg;
$(container + " .message__content").html('<p>' + msg + '</p>');
$(container + " .message__content").html('<p>' + msgHtml + '</p>');
// Toggle the error class
$(container).toggleClass('has--error', msg !== null);
......
@@ -37,6 +37,11 @@ OpenAssessment.Server.prototype = {
return this.runtime.handlerUrl(this.element, handler);
},
/*
* Get maximum size of input
*/
maxInputSize: 1024 * 64, /* 64KB should be enough for anybody, right? ;^P */
/**
Render the XBlock.
@@ -112,6 +117,11 @@ OpenAssessment.Server.prototype = {
**/
submit: function(submission) {
var url = this.url('submit');
if (submission.length > this.maxInputSize) {
return $.Deferred(function(defer) {
defer.rejectWith(this, ["submit", "Response text is too large. Please reduce the size of your response and try to submit again."]);
}).promise();
}
return $.Deferred(function(defer) {
$.ajax({
type: "POST",
@@ -147,6 +157,11 @@ OpenAssessment.Server.prototype = {
**/
save: function(submission) {
var url = this.url('save_submission');
if (submission.length > this.maxInputSize) {
return $.Deferred(function(defer) {
defer.rejectWith(this, ["Response text is too large. Please reduce the size of your response and try to submit again."]);
}).promise();
}
return $.Deferred(function(defer) {
$.ajax({
type: "POST",
@@ -182,6 +197,11 @@ OpenAssessment.Server.prototype = {
*/
submitFeedbackOnAssessment: function(text, options) {
var url = this.url('submit_feedback');
if (text.length > this.maxInputSize) {
return $.Deferred(function(defer) {
defer.rejectWith(this, ["Response text is too large. Please reduce the size of your response and try to submit again."]);
}).promise();
}
var payload = JSON.stringify({
'feedback_text': text,
'feedback_options': options
@@ -221,6 +241,11 @@ OpenAssessment.Server.prototype = {
**/
peerAssess: function(submissionId, optionsSelected, feedback) {
var url = this.url('peer_assess');
if (feedback.length > this.maxInputSize) {
return $.Deferred(function(defer) {
defer.rejectWith(this, ["Response text is too large. Please reduce the size of your response and try to submit again."]);
}).promise();
}
var payload = JSON.stringify({
submission_uuid: submissionId,
options_selected: optionsSelected,
......
@@ -119,19 +119,30 @@
border: none;
padding-bottom: 0;
color: $white-t;
text-align: center;
}
&.message--error {
background: tint($color-error, 15%);
.message__content {
color: tint($color-error, 90%);
}
}
&.message--warning {
background: tint($color-warning, 15%);
.message__content {
color: tint($color-warning, 90%);
}
}
&.message--confirm {
background: tint($color-warning, 15%);
background: tint($color-confirm, 15%);
.message__content {
color: tint($color-confirm, 90%);
}
}
}
......
@@ -14,6 +14,7 @@ from submissions.serializers import (
)
from submissions.models import Submission, StudentItem, Score, ScoreSummary
logger = logging.getLogger(__name__)
......
@@ -70,6 +70,8 @@ class Submission(models.Model):
because it makes caching trivial.
"""
MAXSIZE = 1024*100 # 100KB
uuid = UUIDField(version=1, db_index=True)
student_item = models.ForeignKey(StudentItem)
......
@@ -65,6 +65,13 @@ class SubmissionSerializer(serializers.ModelSerializer):
answer = JsonField(source='raw_answer')
def validate_answer(self, attrs, source):
"""Check that the answer is within an acceptable size range."""
value = attrs[source]
if len(value) > Submission.MAXSIZE:
raise serializers.ValidationError("Maximum answer size exceeded.")
return attrs
class Meta:
model = Submission
fields = (
......
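For context (not part of the commit): the validate_answer hook above is what makes oversized submissions fail at the API boundary, which the test_create_huge_submission_fails test below exercises. A hedged sketch of the same call outside the test harness; the import paths are assumptions, while api.create_submission, SubmissionRequestError, Submission.MAXSIZE, and the student-item fields all appear in this diff.

# Hedged sketch; the imports are assumptions for illustration.
from submissions import api
from submissions.models import Submission

student_item = dict(
    student_id="Tim",
    course_id="Demo_Course",
    item_id="item_one",
    item_type="Peer_Submission",
)
too_long_answer = u"c" * (Submission.MAXSIZE + 1)

try:
    api.create_submission(student_item, too_long_answer)
except api.SubmissionRequestError:
    # The serializer rejects the oversized answer, so no Submission row is saved.
    pass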
@@ -20,7 +20,7 @@ STUDENT_ITEM = dict(
)
SECOND_STUDENT_ITEM = dict(
student_id="Bob",
student_id="Alice",
course_id="Demo_Course",
item_id="item_one",
item_type="Peer_Submission",
@@ -28,6 +28,7 @@ SECOND_STUDENT_ITEM = dict(
ANSWER_ONE = u"this is my answer!"
ANSWER_TWO = u"this is my other answer!"
ANSWER_THREE = u'' + 'c' * (Submission.MAXSIZE + 1)
@ddt @ddt
@@ -41,6 +42,9 @@ class TestSubmissionsApi(TestCase):
student_item = self._get_student_item(STUDENT_ITEM)
self._assert_submission(submission, ANSWER_ONE, student_item.pk, 1)
def test_create_huge_submission_fails(self):
with self.assertRaises(api.SubmissionRequestError):
api.create_submission(STUDENT_ITEM, ANSWER_THREE)
def test_get_submission_and_student(self):
submission = api.create_submission(STUDENT_ITEM, ANSWER_ONE)
......