Commit 91160976 by Usman Khalid

Submission

TNL-708
parent 19eeaa08
......@@ -15,18 +15,6 @@
{% endblock %}
</div>
<div class="wrapper--openassessment__prompt">
{% if question %}
<article class="openassessment__prompt ui-toggle-visibility">
<h2 class="openassessment__prompt__title">{% trans "The prompt for this assignment" %}</h2>
<div class="openassessment__prompt__copy ui-toggle-visibility__content">
{{ question|linebreaks }}
</div>
</article>
{% endif %}
</div>
<ol class="openassessment__steps" id="openassessment__steps">
{% for assessment in rubric_assessments %}
<li id="{{ assessment.class_id }}" class="openassessment__steps__step is--loading">
......
{% spaceless %}
{% load i18n %}
<ol class="submission__answer__display__content">
{% for part in answer.parts %}
<li class="submission__answer__part">
<article class="submission__answer__part__prompt">
{{ part.prompt.description|linebreaks }}
</article>
<div class="submission__answer__part__text">
{{ part.text|linebreaks }}
</div>
</li>
{% endfor %}
</ol>
{% endspaceless %}
......@@ -52,15 +52,26 @@
<div class="step__content">
<form id="response__submission" class="response__submission">
<ol class="list list--fields response__submission__content">
<li class="field field--textarea submission__answer" id="submission__answer">
<label class="sr" for="submission__answer__value">{% trans "Enter your response to the question." %}</label>
{% for part in saved_response.answer.parts %}
<li class="submission__answer__part">
<article class="openassessment__prompt ui-toggle-visibility">
<h2 class="openassessment__prompt__title">{% trans "The prompt for this assignment" %}</h2>
<div class="openassessment__prompt__copy ui-toggle-visibility__content">
{{ part.prompt.description|linebreaks }}
</div>
</article>
<div class="field field--textarea submission__answer__part__text">
<label class="sr" for="submission__answer__part__text__{{ forloop.counter }}">{% trans "Enter your response to the question." %}</label>
<textarea
id="submission__answer__value"
id="submission__answer__part__text__{{ forloop.counter }}"
class="submission__answer__part__text__value"
placeholder=""
maxlength="100000"
>{{ saved_response }}</textarea>
<span class="tip">{% trans "You may continue to work on your response until you submit it." %}</span>
>{{ part.text }}</textarea>
</div>
</li>
{% endfor %}
{% if allow_file_upload %}
<li class="field">
<div id="upload__error">
......@@ -86,6 +97,8 @@
{% endif %}
</ol>
<span class="tip">{% trans "You may continue to work on your response until you submit it." %}</span>
<div class="response__submission__actions">
<div class="message message--inline message--error message--error-server">
<h3 class="message__title">{% trans "We could not save your progress" %}</h3>
......
......@@ -42,9 +42,7 @@
<article class="submission__answer__display">
<h3 class="submission__answer__display__title">{% trans "Your Response" %}</h3>
<div class="submission__answer__display__content">
{{ student_submission.answer.text|linebreaks }}
</div>
{% include "openassessmentblock/oa_submission_answer.html" with answer=student_submission.answer %}
{% if allow_file_upload and file_url %}
<h3 class="submission__answer__display__title">{% trans "Your Image" %}</h3>
......
......@@ -126,6 +126,46 @@ def clean_criterion_feedback(rubric_criteria, criterion_feedback):
}
def prepare_submission_for_serialization(submission_data):
"""
Convert a list of answers into a dict in the format expected for serialization.
Args:
submission_data (list of unicode): The answers.
Returns:
dict
"""
return {
'parts': [{'text': text} for text in submission_data],
}
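For illustration, with example answer strings only, the helper above produces:

    prepare_submission_for_serialization([u'First answer', u'Second answer'])
    # {'parts': [{'text': u'First answer'}, {'text': u'Second answer'}]}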
def create_submission_dict(submission, prompts):
"""
1. Convert the answer from the legacy format, if necessary.
2. Add prompts to submission['answer']['parts'] to simplify iteration in the template.
Args:
submission (dict): Submission dictionary.
prompts (list of dict): The prompts from the problem definition.
Returns:
dict
"""
parts = [{ 'prompt': prompt, 'text': ''} for prompt in prompts]
if 'text' in submission['answer']:
parts[0]['text'] = submission['answer'].pop('text')
else:
for index, part in enumerate(submission['answer'].pop('parts')):
parts[index]['text'] = part['text']
submission['answer']['parts'] = parts
return submission
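As a sketch of both conversion paths, assuming a prompt defined only by its 'description' key, a legacy answer and a parts-based answer come out the same way:

    prompts = [{'description': u'Summarize the article.'}]
    create_submission_dict({'answer': {'text': u'My essay'}}, prompts)
    # {'answer': {'parts': [{'prompt': {'description': u'Summarize the article.'},
    #                        'text': u'My essay'}]}}
    create_submission_dict({'answer': {'parts': [{'text': u'My essay'}]}}, prompts)
    # Same result: each part is paired with its prompt.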
def make_django_template_key(key):
"""
Django templates access dictionary items using dot notation,
......
"""An XBlock where students can read a question and compose their response"""
import copy
import datetime as dt
import json
import logging
import pkg_resources
import copy
import pytz
......@@ -284,7 +284,7 @@ class OpenAssessmentBlock(
# All data we intend to pass to the front end.
context_dict = {
"title": self.title,
"question": self.prompt,
"prompts": self.prompts,
"rubric_assessments": ui_models,
"show_staff_debug_info": self.is_course_staff and not self.in_studio_preview,
}
......
......@@ -14,7 +14,7 @@ OpenAssessment.ResponseView = function(element, server, fileUploader, baseView)
this.server = server;
this.fileUploader = fileUploader;
this.baseView = baseView;
this.savedResponse = "";
this.savedResponse = [];
this.files = null;
this.imageType = null;
this.lastChangeTime = Date.now();
......@@ -66,7 +66,7 @@ OpenAssessment.ResponseView.prototype = {
// Install change handler for textarea (to enable submission button)
this.savedResponse = this.response();
var handleChange = function(eventData) { view.handleResponseChanged(); };
sel.find('#submission__answer__value').on('change keyup drop paste', handleChange);
sel.find('.submission__answer__part__text__value').on('change keyup drop paste', handleChange);
var handlePrepareUpload = function(eventData) { view.prepareUpload(eventData.target.files); };
sel.find('input[type=file]').on('change', handlePrepareUpload);
......@@ -258,33 +258,39 @@ OpenAssessment.ResponseView.prototype = {
},
/**
Set the response text.
Retrieve the response text.
Set the response texts.
Retrieve the response texts.
Args:
text (string): If specified, the text to set for the response.
texts (array of strings): If specified, the texts to set for the response.
Returns:
string: The current response text.
array of strings: The current response texts.
**/
response: function(text) {
var sel = $('#submission__answer__value', this.element);
response: function(texts) {
var sel = $('.submission__answer__part__text__value', this.element);
if (typeof texts === 'undefined') {
return sel.val();
return sel.map(function() {
return $.trim($(this).val());
}).get();
} else {
sel.val(text);
sel.map(function(index, element) {
$(this).val(texts[index]);
});
}
},
/**
Check whether the response text has changed since the last save.
Check whether the response texts have changed since the last save.
Returns: boolean
**/
responseChanged: function() {
var currentResponse = $.trim(this.response());
var savedResponse = $.trim(this.savedResponse);
return savedResponse !== currentResponse;
var savedResponse = this.savedResponse;
return this.response().some(function(element, index, array) {
return element !== savedResponse[index];
});
},
/**
......@@ -355,8 +361,15 @@ OpenAssessment.ResponseView.prototype = {
// ... but update the UI based on what the user may have entered
// since hitting the save button.
var currentResponse = view.response();
view.submitEnabled(currentResponse !== '');
if (currentResponse == savedResponse) {
var currentResponseIsEmpty = currentResponse.every(function(element, index, array) {
return element == '';
});
view.submitEnabled(!currentResponseIsEmpty);
var currentResponseEqualsSaved = currentResponse.every(function(element, index, array) {
return element === savedResponse[index];
});
if (currentResponseEqualsSaved) {
view.saveEnabled(false);
view.saveStatus(gettext("This response has been saved but not submitted."));
}
......@@ -388,7 +401,7 @@ OpenAssessment.ResponseView.prototype = {
// NOTE: in JQuery >=1.8, `pipe()` is deprecated in favor of `then()`,
// but we're using JQuery 1.7 in the LMS, so for now we're stuck with `pipe()`.
.pipe(function() {
var submission = $('#submission__answer__value', view.element).val();
var submission = view.response();
baseView.toggleActionError('response', null);
// Send the submission to the server, returning the promise.
......
import json
import logging
from xblock.core import XBlock
......@@ -9,6 +10,8 @@ from openassessment.workflow import api as workflow_api
from openassessment.workflow.errors import AssessmentWorkflowError
from .resolve_dates import DISTANT_FUTURE
from data_conversion import create_submission_dict, prepare_submission_for_serialization
from validation import validate_submission
logger = logging.getLogger(__name__)
......@@ -54,7 +57,15 @@ class SubmissionMixin(object):
)
status = False
student_sub = data['submission']
student_sub_data = data['submission']
success, msg = validate_submission(student_sub_data, self.prompts, self._)
if not success:
return (
False,
'EBADARGS',
msg
)
student_item_dict = self.get_student_item_dict()
# Short-circuit if no user is defined (as in Studio Preview mode)
......@@ -74,7 +85,7 @@ class SubmissionMixin(object):
try:
submission = self.create_submission(
student_item_dict,
student_sub
student_sub_data
)
except api.SubmissionRequestError as err:
......@@ -132,8 +143,14 @@ class SubmissionMixin(object):
dict: Contains a bool 'success' and unicode string 'msg'.
"""
if 'submission' in data:
student_sub_data = data['submission']
success, msg = validate_submission(student_sub_data, self.prompts, self._)
if not success:
return {'success': False, 'msg': msg}
try:
self.saved_response = unicode(data['submission'])
self.saved_response = json.dumps(
prepare_submission_for_serialization(student_sub_data)
)
self.has_saved = True
# Emit analytics event...
......@@ -149,11 +166,11 @@ class SubmissionMixin(object):
else:
return {'success': False, 'msg': self._(u"This response was not submitted.")}
def create_submission(self, student_item_dict, student_sub):
def create_submission(self, student_item_dict, student_sub_data):
# Store the student's response text in a JSON-encodable dict
# so that later we can add additional response fields.
student_sub_dict = {'text': student_sub}
student_sub_dict = prepare_submission_for_serialization(student_sub_data)
if self.allow_file_upload:
student_sub_dict['file_key'] = self._get_student_item_key()
......@@ -352,7 +369,21 @@ class SubmissionMixin(object):
context['submission_start'] = start_date
path = 'openassessmentblock/response/oa_response_unavailable.html'
elif not workflow:
context['saved_response'] = self.saved_response
# For backwards compatibility. Initially, problems had only one prompt
# and a string answer. We convert it to the appropriate dict.
try:
json.loads(self.saved_response)
saved_response = {
'answer': json.loads(self.saved_response),
}
except ValueError:
saved_response = {
'answer': {
'text': self.saved_response,
},
}
context['saved_response'] = create_submission_dict(saved_response, self.prompts)
context['save_status'] = self.save_status
context['submit_enabled'] = self.saved_response != ''
path = "openassessmentblock/response/oa_response.html"
......@@ -372,12 +403,13 @@ class SubmissionMixin(object):
student_submission = self.get_user_submission(
workflow["submission_uuid"]
)
context["student_submission"] = student_submission
context["student_submission"] = create_submission_dict(student_submission, self.prompts)
path = 'openassessmentblock/response/oa_response_graded.html'
else:
context["student_submission"] = self.get_user_submission(
student_submission = self.get_user_submission(
workflow["submission_uuid"]
)
context["student_submission"] = create_submission_dict(student_submission, self.prompts)
path = 'openassessmentblock/response/oa_response_submitted.html'
return path, context
......@@ -357,3 +357,33 @@ def validator(oa_block, _, strict_post_release=True):
return (True, u'')
return _inner
def validate_submission(submission, prompts, _):
"""
Validate that a submission is a list of unicode responses matching the problem's prompts.
Args:
submission (list of unicode): Responses for the prompts.
prompts (list of dict): The prompts from the problem definition.
_ (function): The service function used to get the appropriate i18n text.
Returns:
tuple (is_valid, msg) where
is_valid is a boolean indicating whether the submission is semantically valid
and msg describes any validation errors found.
"""
message = _(u"The submission format is invalid.")
if type(submission) != list:
return (False, message)
if len(submission) != len(prompts):
return (False, message)
for submission_part in submission:
if type(submission_part) != unicode:
return (False, message)
return (True, u'')
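A quick sanity check of the rules above, assuming two prompts and an identity translation function in place of the i18n service:

    _ = lambda text: text
    prompts = [{'description': u'Prompt one'}, {'description': u'Prompt two'}]
    validate_submission([u'Answer one', u'Answer two'], prompts, _)  # (True, u'')
    validate_submission(u'Answer one', prompts, _)                   # (False, ...) not a list
    validate_submission([u'Only one answer'], prompts, _)            # (False, ...) wrong number of parts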