Commit 25873d32 by Kelketek

Merge pull request #12 from open-craft/additional-feedback

Additional feedback
parents 87a929fb 3dab407c
@@ -5,3 +5,4 @@
 /workbench.*
 /dist
 /templates
+*.iml
\ No newline at end of file
doc/img/mrq-3.png (binary image changed: 45.6 KB → 112 KB)
@@ -179,6 +179,15 @@ class AnswerBlock(AnswerMixin, StepMixin, StudioEditableXBlockMixin, XBlock):
         """ Normal view of this XBlock, identical to mentoring_view """
         return self.mentoring_view(context)

+    def get_results(self, previous_response=None):
+        # Previous result is actually stored in database table-- ignore.
+        return {
+            'student_input': self.student_input,
+            'status': self.status,
+            'weight': self.weight,
+            'score': 1 if self.status == 'correct' else 0,
+        }
+
     def submit(self, submission):
         """
         The parent block is handling a student submission, including a new answer for this
@@ -187,12 +196,7 @@ class AnswerBlock(AnswerMixin, StepMixin, StudioEditableXBlockMixin, XBlock):
         self.student_input = submission[0]['value'].strip()
         self.save()
         log.info(u'Answer submitted for`{}`: "{}"'.format(self.name, self.student_input))
-        return {
-            'student_input': self.student_input,
-            'status': self.status,
-            'weight': self.weight,
-            'score': 1 if self.status == 'correct' else 0,
-        }
+        return self.get_results()

     @property
     def status(self):
...
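The change above extracts the result payload from `submit()` into `get_results()`, so the new review flow can fetch a student's feedback without re-submitting. A minimal sketch of the pattern outside the XBlock machinery (class and variable names here are illustrative, not from the commit):

```python
class FreeTextQuestion(object):
    """Toy stand-in for AnswerBlock: stores the latest input and grades it."""

    def __init__(self, expected):
        self.expected = expected
        self.student_input = ''

    @property
    def status(self):
        return 'correct' if self.student_input == self.expected else 'incorrect'

    def get_results(self, previous_response=None):
        # Single source of truth for the result payload; the review path
        # calls this without mutating any state.
        return {
            'student_input': self.student_input,
            'status': self.status,
            'score': 1 if self.status == 'correct' else 0,
        }

    def submit(self, submission):
        # Mutate state first, then delegate, so submit and review
        # always return an identically shaped payload.
        self.student_input = submission.strip()
        return self.get_results()


q = FreeTextQuestion(expected='42')
print(q.submit('  42 '))  # {'student_input': '42', 'status': 'correct', 'score': 1}
print(q.get_results())    # same payload, no state change
```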
@@ -74,15 +74,15 @@ class MCQBlock(SubmittingXBlockMixin, QuestionnaireAbstractBlock):
             return self._(u"Wrong")
         return self._(u"Not Acceptable")

-    def submit(self, submission):
-        log.debug(u'Received MCQ submission: "%s"', submission)
+    def calculate_results(self, submission):
         correct = submission in self.correct_choices

         tips_html = []
         for tip in self.get_tips():
             if submission in tip.values:
                 tips_html.append(tip.render('mentoring_view').content)

+        formatted_tips = None
         if tips_html:
             formatted_tips = loader.render_template('templates/html/tip_choice_group.html', {
                 'tips_html': tips_html,
@@ -94,13 +94,21 @@ class MCQBlock(SubmittingXBlockMixin, QuestionnaireAbstractBlock):
         # Also send to the submissions API:
         sub_api.create_submission(self.student_item_key, submission)

-        result = {
+        return {
             'submission': submission,
             'status': 'correct' if correct else 'incorrect',
-            'tips': formatted_tips if tips_html else None,
+            'tips': formatted_tips,
             'weight': self.weight,
             'score': 1 if correct else 0,
         }

+    def get_results(self, previous_result):
+        return self.calculate_results(previous_result['submission'])
+
+    def submit(self, submission):
+        log.debug(u'Received MCQ submission: "%s"', submission)
+        result = self.calculate_results(submission)
+        self.student_choice = submission
         log.debug(u'MCQ submission result: %s', result)
         return result
...
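MCQBlock goes a step further and makes grading a pure function: `calculate_results()` has no side effects, `submit()` persists the choice, and `get_results()` re-grades a stored submission. A toy illustration of why the split matters (the quiz data is made up for the example):

```python
def calculate_results(correct_choices, submission):
    # Pure: safe to call from both the submit and the review path.
    correct = submission in correct_choices
    return {
        'submission': submission,
        'status': 'correct' if correct else 'incorrect',
        'score': 1 if correct else 0,
    }


class ToyMCQ(object):
    correct_choices = frozenset(['yes'])

    def __init__(self):
        self.student_choice = None

    def submit(self, submission):
        result = calculate_results(self.correct_choices, submission)
        self.student_choice = submission  # persisted only on submit
        return result

    def get_results(self, previous_result):
        # Review path: re-grade the stored submission, write nothing.
        return calculate_results(self.correct_choices, previous_result['submission'])


mcq = ToyMCQ()
print(mcq.submit('maybenot')['status'])                  # incorrect
print(mcq.get_results({'submission': 'yes'})['status'])  # correct, state untouched
print(mcq.student_choice)                                # still 'maybenot'
```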
@@ -56,6 +56,7 @@ class MentoringMessageBlock(XBlock, StudioEditableXBlockMixin):
             {"display_name": "Completed", "value": "completed"},
             {"display_name": "Incompleted", "value": "incomplete"},
             {"display_name": "Reached max. # of attemps", "value": "max_attempts_reached"},
+            {"display_name": "Review with attempts left", "value": "on-assessment-review"}
         ),
     )
     editable_fields = ("content", )
@@ -84,6 +85,8 @@ class MentoringMessageBlock(XBlock, StudioEditableXBlockMixin):
             return self._(u"Message shown when complete")
         if self.type == 'incomplete':
             return self._(u"Message shown when incomplete")
+        if self.type == 'on-assessment-review':
+            return self._(u"Message shown during review when attempts remain")
         return u"INVALID MESSAGE"

     @classmethod
...
@@ -81,11 +81,25 @@ class MRQBlock(QuestionnaireAbstractBlock):
             return self._(u"Ignored")
         return self._(u"Not Acceptable")

+    def get_results(self, previous_result):
+        """
+        Get the results a student has already submitted.
+        """
+        result = self.calculate_results(previous_result['submissions'])
+        result['completed'] = True
+        return result
+
     def submit(self, submissions):
         log.debug(u'Received MRQ submissions: "%s"', submissions)

-        score = 0
+        result = self.calculate_results(submissions)
+        self.student_choices = submissions
+        log.debug(u'MRQ submissions result: %s', result)
+        return result
+
+    def calculate_results(self, submissions):
+        score = 0
         results = []
         for choice in self.custom_choices:
             choice_completed = True
@@ -106,22 +120,20 @@ class MRQBlock(QuestionnaireAbstractBlock):
             choice_result = {
                 'value': choice.value,
                 'selected': choice_selected,
             }
             # Only include tips/results in returned response if we want to display them
             if not self.hide_results:
                 loader = ResourceLoader(__name__)
                 choice_result['completed'] = choice_completed
                 choice_result['tips'] = loader.render_template('templates/html/tip_choice_group.html', {
                     'tips_html': choice_tips_html,
                 })
             results.append(choice_result)

-        self.student_choices = submissions
         status = 'incorrect' if score <= 0 else 'correct' if score >= len(results) else 'partial'

-        result = {
+        return {
             'submissions': submissions,
             'status': status,
             'choices': results,
@@ -130,9 +142,6 @@ class MRQBlock(QuestionnaireAbstractBlock):
             'score': (float(score) / len(results)) if results else 0,
         }

-        log.debug(u'MRQ submissions result: %s', result)
-        return result
-
     def validate_field_data(self, validation, data):
         """
         Validate this block's field data.
...
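The MRQ grader condenses three outcomes into one expression and reports the score as the fraction of correctly graded choices. A quick check of that exact logic with assumed per-choice results:

```python
def mrq_summary(per_choice_correct):
    # per_choice_correct: one boolean per choice, True if graded correct.
    score = sum(per_choice_correct)
    results = per_choice_correct
    status = 'incorrect' if score <= 0 else 'correct' if score >= len(results) else 'partial'
    return status, (float(score) / len(results)) if results else 0


print(mrq_summary([True, True, True, True]))      # ('correct', 1.0)
print(mrq_summary([True, False, True, True]))     # ('partial', 0.75)
print(mrq_summary([False, False, False, False]))  # ('incorrect', 0.0)
```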
@@ -2,14 +2,16 @@
     margin: 1em 0em;
 }

-.mentoring .messages {
+.mentoring .messages,
+.mentoring .assessment-messages {
     display: none;
     margin-top: 10px;
     border-top: 2px solid #eaeaea;
     padding: 12px 0px 20px;
 }

-.mentoring .messages .title1 {
+.mentoring .messages .title1,
+.mentoring .assessment-messages .title1 {
     color: #333333;
     text-transform: uppercase;
     font-weight: bold;
@@ -134,3 +136,28 @@
 .mentoring input[type="radio"] {
     margin: 0;
 }
+
+.mentoring .review-list {
+    list-style: none;
+    padding-left: 0 !important;
+    margin-left: 0;
+}
+
+.mentoring .review-list li {
+    display: inline;
+}
+
+.mentoring .review-list li a {
+    font-weight: bold;
+}
+
+.mentoring .results-section {
+    float: left;
+}
+
+.mentoring .clear {
+    display: block;
+    clear: both;
+}
+
+.mentoring .review-link {
+    float: right;
+}
\ No newline at end of file
@@ -17,15 +17,22 @@ function AnswerBlock(runtime, element) {
             return $(':input', element).serializeArray();
         },

+        handleReview: function(result) {
+            $('textarea', element).prop('disabled', true);
+        },
+
         handleSubmit: function(result) {
-            if (this.mode === 'assessment')
-                return;
-
             var checkmark = $('.answer-checkmark', element);
             $(element).find('.message').text((result || {}).error || '');

             this.clearResult();

+            if (this.mode === 'assessment') {
+                // Display of checkmark would be redundant.
+                return;
+            }
+
             if (result.status === "correct") {
                 checkmark.addClass('checkmark-correct icon-ok fa-check');
             }
...
@@ -60,7 +60,7 @@ function MentoringBlock(runtime, element) {
         if (typeof obj !== 'undefined' && typeof obj[fn] == 'function') {
             return obj[fn].apply(obj, Array.prototype.slice.call(arguments, 2));
         } else {
-            return undefined;
+            return null;
         }
     }
...
@@ -4,26 +4,26 @@ function MentoringStandardView(runtime, element, mentoring) {
     var callIfExists = mentoring.callIfExists;

-    function handleSubmitResults(results) {
+    function handleSubmitResults(response) {
         messagesDOM.empty().hide();

-        $.each(results.submitResults || [], function(index, submitResult) {
-            var input = submitResult[0];
-            var result = submitResult[1];
+        $.each(response.results || [], function(index, result_spec) {
+            var input = result_spec[0];
+            var result = result_spec[1];
             var child = mentoring.getChildByName(input);
             var options = {
-                max_attempts: results.max_attempts,
-                num_attempts: results.num_attempts
+                max_attempts: response.max_attempts,
+                num_attempts: response.num_attempts
             };
             callIfExists(child, 'handleSubmit', result, options);
         });

-        $('.attempts', element).data('max_attempts', results.max_attempts);
-        $('.attempts', element).data('num_attempts', results.num_attempts);
+        $('.attempts', element).data('max_attempts', response.max_attempts);
+        $('.attempts', element).data('num_attempts', response.num_attempts);
         mentoring.renderAttempts();

         // Messages should only be displayed upon hitting 'submit', not on page reload
-        mentoring.setContent(messagesDOM, results.message);
+        mentoring.setContent(messagesDOM, response.message);
         if (messagesDOM.html().trim()) {
             messagesDOM.prepend('<div class="title1">' + gettext('Feedback') + '</div>');
             messagesDOM.show();
@@ -32,23 +32,30 @@ function MentoringStandardView(runtime, element, mentoring) {
         submitDOM.attr('disabled', 'disabled');
     }

-    function submit() {
-        var success = true;
+    function calculate_results(handler_name) {
         var data = {};
         var children = mentoring.children;
         for (var i = 0; i < children.length; i++) {
             var child = children[i];
-            if (child && child.name !== undefined && typeof(child.submit) !== "undefined") {
-                data[child.name] = child.submit();
+            if (child && child.name !== undefined && typeof(child[handler_name]) !== "undefined") {
+                data[child.name] = child[handler_name]();
             }
         }
-        var handlerUrl = runtime.handlerUrl(element, 'submit');
+        var handlerUrl = runtime.handlerUrl(element, handler_name);
         if (submitXHR) {
             submitXHR.abort();
         }
         submitXHR = $.post(handlerUrl, JSON.stringify(data)).success(handleSubmitResults);
     }

+    function get_results() {
+        calculate_results('get_results');
+    }
+
+    function submit() {
+        calculate_results('submit');
+    }
+
     function clearResults() {
         messagesDOM.empty().hide();
@@ -68,6 +75,8 @@ function MentoringStandardView(runtime, element, mentoring) {
         submitDOM = $(element).find('.submit .input-main');
         submitDOM.bind('click', submit);
         submitDOM.show();
+        // Not used in standard mode.
+        $(element).find('.review-link').hide();

         var options = {
             onChange: onChange
...
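With `calculate_results(handler_name)` the client becomes a generic dispatcher: it gathers `child[handler_name]()` from every named child and POSTs the map to the XBlock handler of the same name, so `submit` and `get_results` share one code path. The matching server half is not in this hunk; below is a hedged sketch of what the parent block's paired handlers plausibly look like. The `@XBlock.json_handler` decorator is the real XBlock API, but `child_by_name()` and the response details are placeholders:

```python
from xblock.core import XBlock


class MentoringLikeBlock(XBlock):
    """Sketch only: the real parent block also tracks attempts, messages, etc."""

    @XBlock.json_handler
    def submit(self, submissions, suffix=''):
        # `submissions` maps child name -> serialized answer, exactly what
        # the client's calculate_results('submit') collected.
        results = [[name, self.child_by_name(name).submit(value)]  # child_by_name is hypothetical
                   for name, value in submissions.items()]
        return {'results': results}

    @XBlock.json_handler
    def get_results(self, queries, suffix=''):
        # Mirrors submit(): same {'results': [[name, result], ...]} shape,
        # so handleSubmitResults() on the client renders either response.
        results = [[name, self.child_by_name(name).get_results(query)]
                   for name, query in queries.items()]
        return {'results': results}
```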
@@ -97,23 +97,24 @@ function MCQBlock(runtime, element) {
             }
         },

-        handleSubmit: function(result) {
-            if (this.mode === 'assessment')
-                return;
+        handleReview: function(result){
+            $('.choice input[value="' + result.submission + '"]', element).prop('checked', true);
+            $('.choice input', element).prop('disabled', true);
+        },

+        handleSubmit: function(result) {
             mentoring = this.mentoring;

             var messageView = MessageView(element, mentoring);
             messageView.clearResult();

-            var choiceInputs = $('.choice input', element);
+            var choiceInputs = $('.choice-selector input', element);
             $.each(choiceInputs, function(index, choiceInput) {
                 var choiceInputDOM = $(choiceInput);
                 var choiceDOM = choiceInputDOM.closest('.choice');
                 var choiceResultDOM = $('.choice-result', choiceDOM);
                 var choiceTipsDOM = $('.choice-tips', choiceDOM);
-                var choiceTipsCloseDOM;

                 if (result.status === "correct" && choiceInputDOM.val() === result.submission) {
                     choiceDOM.addClass('correct');
@@ -129,7 +130,6 @@ function MCQBlock(runtime, element) {
                     messageView.showMessage(choiceTipsDOM);
                 }

-                choiceTipsCloseDOM = $('.close', choiceTipsDOM);
                 choiceResultDOM.off('click').on('click', function() {
                     if (choiceTipsDOM.html() !== '') {
                         messageView.showMessage(choiceTipsDOM);
@@ -178,9 +178,14 @@ function MRQBlock(runtime, element) {
             return checkedValues;
         },

+        handleReview: function(result) {
+            $.each(result.submissions, function (index, value) {
+                $('input[type="checkbox"][value="' + value + '"]').prop('checked', true)
+            });
+            $('input', element).prop('disabled', true);
+        },
+
         handleSubmit: function(result, options) {
-            if (this.mode === 'assessment')
-                return;

             mentoring = this.mentoring;
@@ -193,14 +198,13 @@ function MRQBlock(runtime, element) {
             var questionnaireDOM = $('fieldset.questionnaire', element);
             var data = questionnaireDOM.data();
-            var hide_results = (data.hide_results === 'True') ? true : false;
+            var hide_results = (data.hide_results === 'True');
             $.each(result.choices, function(index, choice) {
                 var choiceInputDOM = $('.choice input[value='+choice.value+']', element);
                 var choiceDOM = choiceInputDOM.closest('.choice');
                 var choiceResultDOM = $('.choice-result', choiceDOM);
                 var choiceTipsDOM = $('.choice-tips', choiceDOM);
-                var choiceTipsCloseDOM;

                 /* show hint if checked or max_attempts is disabled */
                 if (!hide_results &&
@@ -215,7 +219,6 @@ function MRQBlock(runtime, element) {
                     mentoring.setContent(choiceTipsDOM, choice.tips);

-                    choiceTipsCloseDOM = $('.close', choiceTipsDOM);
                     choiceResultDOM.off('click').on('click', function() {
                         messageView.showMessage(choiceTipsDOM);
                     });
...
@@ -21,10 +21,10 @@
 # Imports ###########################################################

 from django.utils.safestring import mark_safe
-import logging
-from lxml import etree
+from lazy import lazy
+import uuid

 from xblock.core import XBlock
-from xblock.fields import Scope, String, Float, List, UNIQUE_ID
+from xblock.fields import Scope, String, Float, UNIQUE_ID
 from xblock.fragment import Fragment
 from xblock.validation import ValidationMessage
 from xblockutils.helpers import child_isinstance
@@ -90,14 +90,14 @@ class QuestionnaireAbstractBlock(StudioEditableXBlockMixin, StudioContainerXBloc
         """ translate text """
         return self.runtime.service(self, "i18n").ugettext(text)

-    @property
+    @lazy
     def html_id(self):
         """
         A short, simple ID string used to uniquely identify this question.

         This is only used by templates for matching <input> and <label> elements.
         """
-        return unicode(id(self))  # Unique as long as all choices are loaded at once
+        return uuid.uuid4().hex[:20]

     def student_view(self, context=None):
         name = getattr(self, "unmixed_class", self.__class__).__name__
...
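Two fixes ride together in this hunk: `id(self)` is only unique for the lifetime of one object (and differs between processes), whereas `uuid.uuid4().hex[:20]` gives a collision-resistant random id; and since a plain `@property` would mint a fresh uuid on every access, the `lazy` decorator (from the `lazy` package on PyPI, which caches the computed value on the instance after the first access) keeps the id stable for the life of the block. A quick demonstration of the difference:

```python
import uuid

from lazy import lazy  # pip install lazy


class WithProperty(object):
    @property
    def html_id(self):
        return uuid.uuid4().hex[:20]


class WithLazy(object):
    @lazy
    def html_id(self):
        return uuid.uuid4().hex[:20]


p, w = WithProperty(), WithLazy()
print(p.html_id == p.html_id)  # False: the property re-runs on each access
print(w.html_id == w.html_id)  # True: lazy caches the first value on the instance
```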
@@ -21,13 +21,20 @@
 {% if self.display_submit %}
     <div class="grade" data-score="{{ self.score.1 }}"
-         data-correct_answer="{{ self.score.2 }}"
-         data-incorrect_answer="{{ self.score.3 }}"
-         data-partially_correct_answer="{{ self.score.4 }}"
+         data-correct_answer="{{ self.score.2|length }}"
+         data-incorrect_answer="{{ self.score.3|length }}"
+         data-partially_correct_answer="{{ self.score.4|length }}"
          data-max_attempts="{{ self.max_attempts }}"
-         data-num_attempts="{{ self.num_attempts }}">
+         data-num_attempts="{{ self.num_attempts }}"
+         data-extended_feedback="{%if self.extended_feedback %}True{% endif %}"
+         data-assessment_message="{{ self.assessment_message }}"
+         data-correct="{{ self.correct_json }}"
+         data-incorrect="{{ self.incorrect_json }}"
+         data-partial="{{ self.partial_json }}">
     </div>
+    <div class="assessment-messages"></div>
     <div class="submit">
         {% if self.mode == 'assessment' %}
             <span class="assessment-checkmark fa icon-2x"></span>
@@ -46,4 +53,5 @@
         {% endif %}
         <div class="messages"></div>
     </div>
+    <div class="review-link"><a href="#">Review final grade</a></div>
 </div>
@@ -14,6 +14,7 @@
         <li><a href="#" class="single-template add-xblock-component-button" data-category="pb-message" data-boilerplate="completed">{% trans "Message (Complete)" %}</a></li>
         <li><a href="#" class="single-template add-xblock-component-button" data-category="pb-message" data-boilerplate="incomplete">{% trans "Message (Incomplete)" %}</a></li>
         <li><a href="#" class="single-template add-xblock-component-button" data-category="pb-message" data-boilerplate="max_attempts_reached">{% trans "Message (Max # Attempts)" %}</a></li>
+        <li><a href="#" class="single-template add-xblock-component-button" data-category="pb-message" data-boilerplate="on-assessment-review">{% trans "Message (Assessment Review)" %}</a></li>
     </ul>
 </div>
 </div>
@@ -10,33 +10,45 @@
     <hr/>
     <span class="assessment-checkmark icon-2x checkmark-correct icon-ok fa fa-check"></span>
-    <p>
-        <%= _.template(
-            ngettext(
-                "You answered 1 question correctly.",
-                "You answered {number_correct} questions correctly.",
-                correct_answer
-            ), {number_correct: correct_answer}, {interpolate: /\{(.+?)\}/g})
-        %>
-    </p>
+    <div class="results-section">
+        <p>
+            <%= _.template(
+                ngettext(
+                    "You answered 1 question correctly.",
+                    "You answered {number_correct} questions correctly.",
+                    correct_answer
+                ), {number_correct: correct_answer}, {interpolate: /\{(.+?)\}/g})
+            %>
+        </p>
+        <%= runDetails('correct') %>
+    </div>
+    <div class="clear"></div>

     <span class="assessment-checkmark icon-2x checkmark-partially-correct icon-ok fa fa-check"></span>
-    <p>
-        <%= _.template(
-            ngettext(
-                "You answered 1 question partially correctly.",
-                "You answered {number_partially_correct} questions partially correctly.",
-                partially_correct_answer
-            ), {number_partially_correct: partially_correct_answer}, {interpolate: /\{(.+?)\}/g})
-        %>
-    </p>
+    <div class="results-section">
+        <p>
+            <%= _.template(
+                ngettext(
+                    "You answered 1 question partially correctly.",
+                    "You answered {number_partially_correct} questions partially correctly.",
+                    partially_correct_answer
+                ), {number_partially_correct: partially_correct_answer}, {interpolate: /\{(.+?)\}/g})
+            %>
+        </p>
+        <%= runDetails('partial') %>
+    </div>
+    <div class="clear"></div>

     <span class="assessment-checkmark icon-2x checkmark-incorrect icon-exclamation fa fa-exclamation"></span>
-    <p>
-        <%= _.template(
-            ngettext(
-                "You answered 1 question incorrectly.",
-                "You answered {number_incorrect} questions incorrectly.",
-                incorrect_answer
-            ), {number_incorrect: incorrect_answer}, {interpolate: /\{(.+?)\}/g})
-        %>
-    </p>
+    <div class="results-section">
+        <p>
+            <%= _.template(
+                ngettext(
+                    "You answered 1 question incorrectly.",
+                    "You answered {number_incorrect} questions incorrectly.",
+                    incorrect_answer
+                ), {number_incorrect: incorrect_answer}, {interpolate: /\{(.+?)\}/g})
+            %>
+        </p>
+        <%= runDetails('incorrect') %>
+    </div>
+    <div class="clear"></div>
 </script>
+
+<script type="text/template" id="xblock-review-questions-template">
+    <% var q, last_question; %>
+    <ul class="review-list <%= label %>-list">
+    <% for (var question in questions) { q = questions[question]; last_question = question == questions.length - 1; %>
+        <li><a href="#" class="question-link" data-step="<%= q.number %>"><%= _.template(gettext("Question {number}"), {number: q.number}, {interpolate: /\{(.+?)\}/g}) %></a></li>
+    <% } %>
+    </ul>
+</script>
<problem-builder url_name="mentoring-assessment-1" display_name="A Simple Assessment" weight="1" mode="assessment" max_attempts="2" extended_feedback="true">
<html_demo>
<p>This paragraph is shared between <strong>all</strong> questions.</p>
<p>Please answer the questions below.</p>
</html_demo>
<html_demo>
We need an XBlock with JavaScript here to test that it doesn't interfere
with the assessment, since it will show up in runtime(element).children,
but it is not a "step" element:
</html_demo>
<acid/>
<pb-answer name="goal" question="What is your goal?" />
<pb-mcq name="mcq_1_1" question="Do you like this MCQ?" correct_choices='["yes"]'>
<pb-choice value="yes">Yes</pb-choice>
<pb-choice value="maybenot">Maybe not</pb-choice>
<pb-choice value="understand">I don't understand</pb-choice>
<pb-tip values='["yes"]'>Great!</pb-tip>
<pb-tip values='["maybenot"]'>Ah, damn.</pb-tip>
<pb-tip values='["understand"]'><div id="test-custom-html">Really?</div></pb-tip>
</pb-mcq>
<pb-rating name="mcq_1_2" low="Not good at all" high="Extremely good" question="How much do you rate this MCQ?" correct_choices='["4","5"]'>
<pb-choice value="notwant">I don't want to rate it</pb-choice>
<pb-tip values='["4","5"]'>I love good grades.</pb-tip>
<pb-tip values='["1","2", "3"]'>Will do better next time...</pb-tip>
<pb-tip values='["notwant"]'>Your loss!</pb-tip>
</pb-rating>
<pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" required_choices='["gracefulness","elegance","beauty"]' message="Thank you for answering!">
<pb-choice value="elegance">Its elegance</pb-choice>
<pb-choice value="beauty">Its beauty</pb-choice>
<pb-choice value="gracefulness">Its gracefulness</pb-choice>
<pb-choice value="bugs">Its bugs</pb-choice>
<pb-tip values='["gracefulness"]'>This MRQ is indeed very graceful</pb-tip>
<pb-tip values='["elegance","beauty"]'>This is something everyone has to like about this MRQ</pb-tip>
<pb-tip values='["bugs"]'>Nah, there aren't any!</pb-tip>
</pb-mrq>
<pb-message type="on-assessment-review">
<html>Assessment additional feedback message text</html>
</pb-message>
</problem-builder>
-<problem-builder display_name="Mentoring Assessment Example" weight="1" mode="assessment" max_attempts="10">
+<problem-builder display_name="Mentoring Assessment Example" weight="1" mode="assessment" max_attempts="2" extended_feedback="true">
     <html_demo>
         <p>This paragraph is shared between <strong>all</strong> questions.</p>
         <p>Please answer the questions below.</p>
     </html_demo>

-    <pb-answer name="goal" question="What is your goal?">
-    </pb-answer>
+    <pb-answer name="goal" question="What is your goal?" />

-    <pb-mcq name="mcq_1_1" question="Do you like this MCQ?" correct_choices="yes">
+    <pb-mcq name="mcq_1_1" question="Do you like this MCQ?" correct_choices='["yes"]'>
         <pb-choice value="yes">Yes</pb-choice>
         <pb-choice value="maybenot">Maybe not</pb-choice>
         <pb-choice value="understand">I don't understand</pb-choice>
-        <pb-tip values="yes">Great!</pb-tip>
-        <pb-tip values="maybenot">Ah, damn.</pb-tip>
-        <pb-tip values="understand"><div id="test-custom-html">Really?</div></pb-tip>
+        <pb-tip values='["yes"]'>Great!</pb-tip>
+        <pb-tip values='["maybenot"]'>Ah, damn.</pb-tip>
+        <pb-tip values='["understand"]'><div id="test-custom-html">Really?</div></pb-tip>
     </pb-mcq>

-    <pb-rating name="mcq_1_2" low="Not good at all" high="Extremely good" question="How much do you rate this MCQ?" correct_choices="4,5">
+    <pb-rating name="mcq_1_2" low="Not good at all" high="Extremely good" question="How much do you rate this MCQ?" correct_choices='["4","5"]'>
         <pb-choice value="notwant">I don't want to rate it</pb-choice>
-        <pb-tip values="4,5">I love good grades.</pb-tip>
-        <pb-tip values="1,2,3">Will do better next time...</pb-tip>
-        <pb-tip values="notwant">Your loss!</pb-tip>
+        <pb-tip values='["4","5"]'>I love good grades.</pb-tip>
+        <pb-tip values='["1","2", "3"]'>Will do better next time...</pb-tip>
+        <pb-tip values='["notwant"]'>Your loss!</pb-tip>
     </pb-rating>

-    <pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" required_choices="gracefulness,elegance,beauty">
+    <pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" required_choices='["gracefulness","elegance","beauty"]'>
         <pb-choice value="elegance">Its elegance</pb-choice>
         <pb-choice value="beauty">Its beauty</pb-choice>
         <pb-choice value="gracefulness">Its gracefulness</pb-choice>
         <pb-choice value="bugs">Its bugs</pb-choice>
-        <pb-tip values="gracefulness">This MRQ is indeed very graceful</pb-tip>
-        <pb-tip values="elegance,beauty">This is something everyone has to like about this MRQ</pb-tip>
-        <pb-tip values="bugs">Nah, there isn't any!</pb-tip>
+        <pb-tip values='["gracefulness"]'>This MRQ is indeed very graceful</pb-tip>
+        <pb-tip values='["elegance","beauty"]'>This is something everyone has to like about this MRQ</pb-tip>
+        <pb-tip values='["bugs"]'>Nah, there aren't any!</pb-tip>
     </pb-mrq>
+
+    <pb-message type="on-assessment-review">
+        <html>Assessment additional feedback message text</html>
+    </pb-message>
 </problem-builder>
@@ -6,32 +6,32 @@
     <pb-answer name="goal" question="What is your goal?">
     </pb-answer>

-    <pb-mcq name="mcq_1_1" question="Do you like this MCQ?" correct_choices="yes">
+    <pb-mcq name="mcq_1_1" question="Do you like this MCQ?" correct_choices='["yes"]'>
         <pb-choice value="yes">Yes</pb-choice>
         <pb-choice value="maybenot">Maybe not</pb-choice>
         <pb-choice value="understand">I don't understand</pb-choice>
-        <pb-tip values="yes">Great!</pb-tip>
-        <pb-tip values="maybenot">Ah, damn.</pb-tip>
-        <pb-tip values="understand"><div id="test-custom-html">Really?</div></pb-tip>
+        <pb-tip values='["yes"]'>Great!</pb-tip>
+        <pb-tip values='["maybenot"]'>Ah, damn.</pb-tip>
+        <pb-tip values='["understand"]'><div id="test-custom-html">Really?</div></pb-tip>
     </pb-mcq>

-    <pb-rating name="mcq_1_2" low="Not good at all" high="Extremely good" question="How much do you rate this MCQ?" correct_choices="4,5">
+    <pb-rating name="mcq_1_2" low="Not good at all" high="Extremely good" question="How much do you rate this MCQ?" correct_choices='["4","5"]'>
         <pb-choice value="notwant">I don't want to rate it</pb-choice>
-        <pb-tip values="4,5">I love good grades.</pb-tip>
-        <pb-tip values="1,2,3">Will do better next time...</pb-tip>
-        <pb-tip values="notwant">Your loss!</pb-tip>
+        <pb-tip values='["4","5"]'>I love good grades.</pb-tip>
+        <pb-tip values='["1","2","3"]'>Will do better next time...</pb-tip>
+        <pb-tip values='["notwant"]'>Your loss!</pb-tip>
     </pb-rating>

-    <pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" required_choices="gracefulness,elegance,beauty" message="Thank you for answering!">
+    <pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" required_choices='["gracefulness","elegance","beauty"]' message="Thank you for answering!">
         <pb-choice value="elegance">Its elegance</pb-choice>
         <pb-choice value="beauty">Its beauty</pb-choice>
         <pb-choice value="gracefulness">Its gracefulness</pb-choice>
         <pb-choice value="bugs">Its bugs</pb-choice>
-        <pb-tip values="gracefulness">This MRQ is indeed very graceful</pb-tip>
-        <pb-tip values="elegance,beauty">This is something everyone has to like about this MRQ</pb-tip>
-        <pb-tip values="bugs">Nah, there aren't any!</pb-tip>
+        <pb-tip values='["gracefulness"]'>This MRQ is indeed very graceful</pb-tip>
+        <pb-tip values='["elegance,beauty"]'>This is something everyone has to like about this MRQ</pb-tip>
+        <pb-tip values='["bugs"]'>Nah, there aren't any!</pb-tip>
     </pb-mrq>

     <pb-message type="completed">
...
@@ -32,6 +32,30 @@ class MentoringBaseTest(SeleniumBaseTest):
     module_name = __name__
     default_css_selector = 'div.mentoring'

+    def popup_check(self, mentoring, item_feedbacks, prefix='', do_submit=True):
+        submit = mentoring.find_element_by_css_selector('.submit input.input-main')
+
+        for index, expected_feedback in enumerate(item_feedbacks):
+            choice_wrapper = mentoring.find_elements_by_css_selector(prefix + " .choice")[index]
+            if do_submit:
+                # clicking on actual radio button
+                choice_wrapper.find_element_by_css_selector(".choice-selector input").click()
+                submit.click()
+                self.wait_until_disabled(submit)
+
+            item_feedback_icon = choice_wrapper.find_element_by_css_selector(".choice-result")
+            choice_wrapper.click()
+            item_feedback_icon.click()  # clicking on item feedback icon
+
+            item_feedback_popup = choice_wrapper.find_element_by_css_selector(".choice-tips")
+            self.assertTrue(item_feedback_popup.is_displayed())
+            self.assertEqual(item_feedback_popup.text, expected_feedback)
+
+            item_feedback_popup.click()
+            self.assertTrue(item_feedback_popup.is_displayed())
+
+            mentoring.click()
+            self.assertFalse(item_feedback_popup.is_displayed())
+

 class MentoringAssessmentBaseTest(MentoringBaseTest):
     @staticmethod
@@ -54,6 +78,7 @@ class MentoringAssessmentBaseTest(MentoringBaseTest):
         controls.next_question = mentoring.find_element_by_css_selector("input.input-next")
         controls.review = mentoring.find_element_by_css_selector("input.input-review")
         controls.try_again = mentoring.find_element_by_css_selector("input.input-try-again")
+        controls.review_link = mentoring.find_element_by_css_selector(".review-link a")
         return mentoring, controls
...
@@ -17,11 +17,13 @@
 # along with this program in a file in the toplevel directory called
 # "AGPLv3". If not, see <http://www.gnu.org/licenses/>.
 #
+from ddt import ddt, unpack, data
 from .base_test import MentoringAssessmentBaseTest, GetChoices

 CORRECT, INCORRECT, PARTIAL = "correct", "incorrect", "partially-correct"


+@ddt
 class MentoringAssessmentTest(MentoringAssessmentBaseTest):
     def _selenium_bug_workaround_scroll_to(self, mentoring, question):
         """Workaround for selenium bug:
@@ -190,19 +192,29 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):
         self._assert_checkmark(mentoring, result)
         self.do_post(controls, last)

-    def peek_at_multiple_choice_question(self, number, mentoring, controls, last=False):
+    def peek_at_multiple_response_question(
+            self, number, mentoring, controls, last=False, extended_feedback=False, alternative_review=False
+    ):
         question = self.expect_question_visible(number, mentoring)
         self.assert_persistent_elements_present(mentoring)
         self._selenium_bug_workaround_scroll_to(mentoring, question)
         self.assertIn("What do you like in this MRQ?", mentoring.text)

-        self.assert_disabled(controls.submit)
-        self.ending_controls(controls, last)
+        if extended_feedback:
+            self.assert_disabled(controls.submit)
+            if alternative_review:
+                self.assert_clickable(controls.review_link)
+                self.assert_hidden(controls.try_again)
+            else:
+                self.assert_clickable(controls.review)
+        else:
+            self.assert_disabled(controls.submit)
+            self.ending_controls(controls, last)

         return question

-    def multiple_choice_question(self, number, mentoring, controls, choice_names, result, last=False):
-        question = self.peek_at_multiple_choice_question(number, mentoring, controls, last=last)
+    def multiple_response_question(self, number, mentoring, controls, choice_names, result, last=False):
+        question = self.peek_at_multiple_response_question(number, mentoring, controls, last=last)

         choices = GetChoices(question)
         expected_choices = {
@@ -227,11 +239,17 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):
         self._assert_checkmark(mentoring, result)
         controls.review.click()

-    def peek_at_review(self, mentoring, controls, expected):
+    def peek_at_review(self, mentoring, controls, expected, extended_feedback=False):
         self.wait_until_text_in("You scored {percentage}% on this assessment.".format(**expected), mentoring)
         self.assert_persistent_elements_present(mentoring)
         if expected["num_attempts"] < expected["max_attempts"]:
             self.assertIn("Note: if you retake this assessment, only your final score counts.", mentoring.text)
+            self.assertFalse(mentoring.find_elements_by_css_selector('.review-list'))
+        elif extended_feedback:
+            for q_type in ['correct', 'incorrect', 'partial']:
+                self.assertEqual(len(mentoring.find_elements_by_css_selector('.%s-list li' % q_type)), expected[q_type])
+        else:
+            self.assertFalse(mentoring.find_elements_by_css_selector('.review-list'))
         if expected["correct"] == 1:
             self.assertIn("You answered 1 questions correctly.".format(**expected), mentoring.text)
         else:
@@ -255,27 +273,70 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):
         self.assert_hidden(controls.submit)
         self.assert_hidden(controls.next_question)
         self.assert_hidden(controls.review)
+        self.assert_hidden(controls.review_link)

-    def test_assessment(self):
-        mentoring, controls = self.go_to_assessment("Assessment 1")
+    def assert_messages_text(self, mentoring, text):
+        messages = mentoring.find_element_by_css_selector('.assessment-messages')
+        self.assertEqual(messages.text, text)
+        self.assertTrue(messages.is_displayed())
+
+    def assert_messages_empty(self, mentoring):
+        messages = mentoring.find_element_by_css_selector('.assessment-messages')
+        self.assertEqual(messages.text, '')
+        self.assertFalse(messages.find_elements_by_xpath('./*'))
+        self.assertFalse(messages.is_displayed())
+
+    def extended_feedback_checks(self, mentoring, controls, expected_results):
+        # Multiple choice is third correctly answered question
+        self.assert_hidden(controls.review_link)
+        mentoring.find_elements_by_css_selector('.correct-list li a')[2].click()
+        self.peek_at_multiple_response_question(4, mentoring, controls, extended_feedback=True, alternative_review=True)
+
+        # Four correct items, plus the overall correct indicator.
+        correct_marks = mentoring.find_elements_by_css_selector('.checkmark-correct')
+        incorrect_marks = mentoring.find_elements_by_css_selector('.checkmark-incorrect')
+        self.assertEqual(len(correct_marks), 5)
+        self.assertEqual(len(incorrect_marks), 0)
+
+        item_feedbacks = [
+            "This is something everyone has to like about this MRQ",
+            "This is something everyone has to like about this MRQ",
+            "This MRQ is indeed very graceful",
+            "Nah, there aren't any!"
+        ]
+        self.popup_check(mentoring, item_feedbacks, prefix='div[data-name="mrq_1_1"]', do_submit=False)
+        self.assert_hidden(controls.review)
+        self.assert_disabled(controls.submit)
+
+        controls.review_link.click()
+        self.peek_at_review(mentoring, controls, expected_results, extended_feedback=True)
+
+        # Rating question, right before MRQ.
+        mentoring.find_elements_by_css_selector('.incorrect-list li a')[0].click()
+        # Should be possible to visit the MRQ from there.
+        self.wait_until_clickable(controls.next_question)
+        controls.next_question.click()
+        self.peek_at_multiple_response_question(4, mentoring, controls, extended_feedback=True, alternative_review=True)
+
+    @data((1, False), ('Extended Feedback', True))
+    @unpack
+    def test_assessment(self, assessment, extended_feedback):
+        mentoring, controls = self.go_to_assessment("Assessment %s" % assessment)

         self.freeform_answer(1, mentoring, controls, 'This is the answer', CORRECT)
         self.single_choice_question(2, mentoring, controls, 'Maybe not', INCORRECT)
         self.rating_question(3, mentoring, controls, "5 - Extremely good", CORRECT)
-        self.peek_at_multiple_choice_question(4, mentoring, controls, last=True)
+        self.peek_at_multiple_response_question(4, mentoring, controls, last=True)

         # see if assessment remembers the current step
         self.go_to_workbench_main_page()
-        mentoring, controls = self.go_to_assessment("Assessment 1")
+        mentoring, controls = self.go_to_assessment("Assessment %s" % assessment)

-        self.multiple_choice_question(4, mentoring, controls, ("Its beauty",), PARTIAL, last=True)
+        self.multiple_response_question(4, mentoring, controls, ("Its beauty",), PARTIAL, last=True)

         expected_results = {
             "correct": 2, "partial": 1, "incorrect": 1, "percentage": 63,
             "num_attempts": 1, "max_attempts": 2
         }
-        self.peek_at_review(mentoring, controls, expected_results)
+        self.peek_at_review(mentoring, controls, expected_results, extended_feedback=extended_feedback)
+        self.assert_messages_text(mentoring, "Assessment additional feedback message text")

         self.assert_clickable(controls.try_again)
         controls.try_again.click()
@@ -286,14 +347,17 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):
         self.rating_question(3, mentoring, controls, "1 - Not good at all", INCORRECT)

         user_selection = ("Its elegance", "Its beauty", "Its gracefulness")
-        self.multiple_choice_question(4, mentoring, controls, user_selection, CORRECT, last=True)
+        self.multiple_response_question(4, mentoring, controls, user_selection, CORRECT, last=True)

         expected_results = {
             "correct": 3, "partial": 0, "incorrect": 1, "percentage": 75,
             "num_attempts": 2, "max_attempts": 2
         }
-        self.peek_at_review(mentoring, controls, expected_results)
+        self.peek_at_review(mentoring, controls, expected_results, extended_feedback=extended_feedback)
         self.assert_disabled(controls.try_again)
+        self.assert_messages_empty(mentoring)
+
+        if extended_feedback:
+            self.extended_feedback_checks(mentoring, controls, expected_results)

     def test_single_question_assessment(self):
         """
@@ -308,6 +372,7 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):
         }
         self.peek_at_review(mentoring, controls, expected_results)
+        self.assert_messages_empty(mentoring)

         controls.try_again.click()
         # this is a wait and assertion all together - it waits until expected text is in mentoring block
...
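The `ddt` decorators added above multiply one test body across datasets: `@data((1, False), ('Extended Feedback', True))` feeds each tuple to the method, and `@unpack` splats the tuple into separate arguments, so `test_assessment` runs once against the plain assessment fixture and once against the extended-feedback one. A self-contained example of the same pattern (the test subject here is made up):

```python
import unittest

from ddt import data, ddt, unpack


@ddt
class ScoreFormattingTest(unittest.TestCase):

    @data((0.63, 63), (0.75, 75))
    @unpack
    def test_percentage(self, fraction, expected_percent):
        # ddt generates one uniquely named test method per @data tuple;
        # @unpack splits each tuple across the method's arguments.
        self.assertEqual(int(round(fraction * 100)), expected_percent)


if __name__ == '__main__':
    unittest.main()
```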
@@ -175,6 +175,7 @@ class TestDashboardBlock(SeleniumXBlockTest):
             choices = mcq.find_elements_by_css_selector('.choices .choice label')
             choices[idx].click()
         submit = pb.find_element_by_css_selector('.submit input.input-main')
+        self.assertTrue(submit.is_enabled())
         submit.click()
         self.wait_until_disabled(submit)
...
@@ -31,7 +31,7 @@ from .base_test import MentoringBaseTest


 @ddt.ddt
-class MCQBlockTest(MentoringBaseTest):
+class QuestionnaireBlockTest(MentoringBaseTest):

     def _selenium_bug_workaround_scroll_to(self, mcq_legend):
         """Workaround for selenium bug:
@@ -159,8 +159,8 @@ class MCQBlockTest(MentoringBaseTest):
         self.assertFalse(mcq1_tips.is_displayed())
         self.assertFalse(mcq2_tips.is_displayed())

-    def test_mcq_with_comments(self):
-        mentoring = self.go_to_page('Mcq With Comments 1')
+    def test_mrq_with_comments(self):
+        mentoring = self.go_to_page('Mrq With Comments 1')
         mcq = mentoring.find_element_by_css_selector('fieldset.choices')
         messages = mentoring.find_element_by_css_selector('.messages')
         submit = mentoring.find_element_by_css_selector('.submit input.input-main')
@@ -186,35 +186,16 @@ class MCQBlockTest(MentoringBaseTest):
         self.assertEqual(mcq_choices_input[2].get_attribute('value'), 'gracefulness')
         self.assertEqual(mcq_choices_input[3].get_attribute('value'), 'bugs')

-    def test_mcq_feedback_popups(self):
-        mentoring = self.go_to_page('Mcq With Comments 1')
-        choices_list = mentoring.find_element_by_css_selector(".choices-list")
+    def test_mrq_feedback_popups(self):
+        mentoring = self.go_to_page('Mrq With Comments 1')
         item_feedbacks = [
             "This is something everyone has to like about this MRQ",
             "This is something everyone has to like about beauty",
             "This MRQ is indeed very graceful",
-            "Nah, there isn\\'t any!"
+            "Nah, there aren\\'t any!"
         ]
-        submit = mentoring.find_element_by_css_selector('.submit input.input-main')
-
-        for index, expected_feedback in enumerate(item_feedbacks):
-            choice_wrapper = choices_list.find_elements_by_css_selector(".choice")[index]
-            choice_wrapper.find_element_by_css_selector(".choice-selector input").click()  # click actual radio button
-            submit.click()
-            self.wait_until_disabled(submit)
-
-            item_feedback_icon = choice_wrapper.find_element_by_css_selector(".choice-result")
-            choice_wrapper.click()
-            item_feedback_icon.click()  # clicking on item feedback icon
-
-            item_feedback_popup = choice_wrapper.find_element_by_css_selector(".choice-tips")
-            self.assertTrue(item_feedback_popup.is_displayed())
-            self.assertEqual(item_feedback_popup.text, expected_feedback)
-
-            item_feedback_popup.click()
-            self.assertTrue(item_feedback_popup.is_displayed())
-
-            mentoring.click()
-            self.assertFalse(item_feedback_popup.is_displayed())
+        self.popup_check(mentoring, item_feedbacks, prefix='div[data-name="mrq_1_1_7"]')

     def _get_questionnaire_options(self, questionnaire):
         result = []
@@ -299,7 +280,7 @@ class MCQBlockTest(MentoringBaseTest):
 @patch.object(MentoringBlock, 'get_theme', Mock(return_value={'package': 'problem_builder',
                                                               'locations': ['public/themes/lms.css']}))
-class MCQBlockAprosThemeTest(MCQBlockTest):
+class QuestionnaireBlockAprosThemeTest(QuestionnaireBlockTest):
     """
     Test MRQ/MCQ questions without the LMS theme which is on by default.
     """
...
@@ -40,6 +40,10 @@
         <pb-tip values='["gracefulness"]'>This MRQ is indeed very graceful</pb-tip>
         <pb-tip values='["elegance","beauty"]'>This is something everyone has to like about this MRQ</pb-tip>
-        <pb-tip values='["bugs"]'>Nah, there isn't any!</pb-tip>
+        <pb-tip values='["bugs"]'>Nah, there aren't any!</pb-tip>
     </pb-mrq>
+
+    <pb-message type="on-assessment-review">
+        <html>Assessment additional feedback message text</html>
+    </pb-message>
 </problem-builder>
@@ -9,7 +9,7 @@
         <pb-tip values='["gracefulness"]' width ="200" height = "200">This MCQ is indeed very graceful</pb-tip>
         <pb-tip values='["elegance"]' width ="600" height = "800">This is something everyone has to like about this MCQ</pb-tip>
         <pb-tip values='["beauty"]' width ="400" height = "600">This is something everyone has to like about beauty</pb-tip>
-        <pb-tip values='["bugs"]' width = "100" height = "200">Nah, there isn\'t any!</pb-tip>
+        <pb-tip values='["bugs"]' width = "100" height = "200">Nah, there aren\'t any!</pb-tip>
     </pb-mcq>

     <pb-message type="completed">
...
 <vertical_demo>
-    <problem-builder url_name="mcq_with_comments" display_name="MRQ With Resizable popups" weight="1" enforce_dependency="false">
+    <problem-builder url_name="mrq_with_comments" display_name="MRQ With Resizable popups" weight="1" enforce_dependency="false">
         <pb-mrq name="mrq_1_1_7" question="What do you like in this MRQ?" required_choices='["elegance","gracefulness","beauty"]'>
             <pb-choice value="elegance">Its elegance</pb-choice>
             <pb-choice value="beauty">Its beauty</pb-choice>
@@ -9,7 +9,7 @@
             <pb-tip values='["gracefulness"]' width ="200" height = "200">This MRQ is indeed very graceful</pb-tip>
             <pb-tip values='["elegance"]' width ="600" height = "800">This is something everyone has to like about this MRQ</pb-tip>
             <pb-tip values='["beauty"]' width ="400" height = "600">This is something everyone has to like about beauty</pb-tip>
-            <pb-tip values='["bugs"]' width = "100" height = "200">Nah, there isn\'t any!</pb-tip>
+            <pb-tip values='["bugs"]' width = "100" height = "200">Nah, there aren\'t any!</pb-tip>
             <!--<pb-message type="on-submit">This is deliberately commented out to test parsing of XML comments</pb-message> -->
         </pb-mrq>
...
@@ -29,7 +29,7 @@
         <pb-choice value="bugs">Its bugs</pb-choice>
         <pb-tip values='["gracefulness"]'>This MRQ is indeed very graceful</pb-tip>
         <pb-tip values='["elegance","beauty"]'>This is something everyone has to like about this MRQ</pb-tip>
-        <pb-tip values='["bugs"]'>Nah, there isn't any!</pb-tip>
+        <pb-tip values='["bugs"]'>Nah, there aren't any!</pb-tip>
     </pb-mrq>

     <pb-message type="completed">
         <p>Congratulations!</p>
...
@@ -51,7 +51,7 @@ Changes from the original:
         <tip require="gracefulness">This MRQ is indeed very graceful</tip>
         <tip require="elegance,beauty">This is something everyone has to like about this MRQ</tip>
-        <tip reject="bugs">Nah, there isn't any!</tip>
+        <tip reject="bugs">Nah, there aren't any!</tip>
         <message type="on-submit">Thank you for answering!</message>
     </mrq>
...