Commit ac902d7f by Tim Krones

Problem Builder: Introduce instance-wide setting that specifies if
feedback should be shown when a student returns to a block.

The new setting is called "pb_hide_feedback_if_attempts_remain".
When set to True, Problem Builder blocks will not show global feedback
messages (as long as one or more attempts remain), and will hide
results (correct/incorrect icons) for Long Answer blocks.
parent 91f77e23
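Operationally, the option lives in the same instance-wide XBlock settings bucket that Problem Builder already reads for "pb_mcq_hide_previous_answer". Below is a minimal sketch of enabling it in the LMS Django settings; the bucket name ("problem-builder") and the "OPTIONS" key are assumptions based on how the block resolves its options via the settings service, not something confirmed by this diff.

    # Sketch only: exact bucket/key names are assumptions.
    XBLOCK_SETTINGS = {
        "problem-builder": {
            "OPTIONS": {
                "pb_mcq_hide_previous_answer": True,
                # New option introduced by this commit:
                "pb_hide_feedback_if_attempts_remain": True,
            },
        },
    }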
......@@ -60,7 +60,8 @@ log = logging.getLogger(__name__)
loader = ResourceLoader(__name__)
_default_options_config = {
'pb_mcq_hide_previous_answer': False
'pb_mcq_hide_previous_answer': False,
'pb_hide_feedback_if_attempts_remain': False,
}
......@@ -174,11 +175,11 @@ class BaseMentoringBlock(
return xblock_settings[self.options_key]
return _default_options_config
def get_option(self, option):
def get_option(self, option, default=False):
"""
Get value of a specific instance-wide `option`.
"""
return self.get_options()[option]
return self.get_options().get(option, default)
@XBlock.json_handler
def view(self, data, suffix=''):
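The new `default` argument means `get_option` no longer raises KeyError for options that are absent from the settings bucket, which matters for instances that never configured the new key. A standalone sketch of the fallback behavior, using a hypothetical options dict:

    # Illustrative only; mirrors get_options().get(option, default) above.
    options = {'pb_mcq_hide_previous_answer': True}

    def get_option(option, default=False):
        return options.get(option, default)

    assert get_option('pb_mcq_hide_previous_answer') is True
    # A missing key now falls back to the default instead of raising KeyError.
    assert get_option('pb_hide_feedback_if_attempts_remain') is False
    assert get_option('pb_hide_feedback_if_attempts_remain', default=True) is True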
......@@ -563,9 +564,10 @@ class MentoringBlock(BaseMentoringBlock, StudioContainerXBlockMixin, StepParentM
"""
results = []
completed = True
show_message = bool(self.student_results)
hide_feedback = self.get_option("pb_hide_feedback_if_attempts_remain") and not self.max_attempts_reached
show_message = (not hide_feedback) and bool(self.student_results)
# In standard mode, all children is visible simultaneously, so need collecting responses from all of them
# In standard mode, all children are visible simultaneously, so need to collect results for all of them
for child in self.steps:
child_result = child.get_last_result()
results.append([child.name, child_result])
......
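Taken together, the feedback message is suppressed only when the new option is enabled and the learner still has attempts left; once the attempt limit is reached, or if the option is off, behavior is unchanged. A standalone sketch of that decision with hypothetical inputs:

    def should_show_message(student_results, hide_if_attempts_remain, max_attempts_reached):
        # Mirrors the show_message computation above (illustrative only).
        hide_feedback = hide_if_attempts_remain and not max_attempts_reached
        return (not hide_feedback) and bool(student_results)

    # Option enabled, attempts remain: feedback hidden.
    assert should_show_message(['r1'], True, False) is False
    # Option enabled, last attempt used: feedback shown.
    assert should_show_message(['r1'], True, True) is True
    # Option disabled: feedback shown as before.
    assert should_show_message(['r1'], False, False) is True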
......@@ -2,15 +2,10 @@ function AnswerBlock(runtime, element) {
return {
mode: null,
init: function(options) {
// register the child validator
// Clear results and validate block when answer changes
$(':input', element).on('keyup', options.onChange);
this.mode = options.mode;
var checkmark = $('.answer-checkmark', element);
var completed = $('.xblock-answer', element).data('completed');
if (completed === 'True' && this.mode === 'standard') {
checkmark.addClass('checkmark-correct icon-ok fa-check');
}
// In the LMS, the HTML of multiple units can be loaded at once,
// and the user can flip among them. If that happens, the answer in
......@@ -26,15 +21,15 @@ function AnswerBlock(runtime, element) {
$('textarea', element).prop('disabled', true);
},
handleSubmit: function(result) {
handleSubmit: function(result, options) {
var checkmark = $('.answer-checkmark', element);
this.clearResult();
if (this.mode === 'assessment') {
// Display of checkmark would be redundant.
return
if (options.hide_results || this.mode === 'assessment') {
// In assessment mode, display of checkmark would be redundant.
return;
}
if (result.status) {
if (result.status === "correct") {
......@@ -66,7 +61,7 @@ function AnswerBlock(runtime, element) {
var answer_length = input_value.length;
var data = input.data();
// an answer cannot be empty event if min_characters is 0
// An answer cannot be empty even if min_characters is 0
if (_.isNumber(data.min_characters)) {
var min_characters = _.max([data.min_characters, 1]);
if (answer_length < min_characters) {
......
......@@ -70,10 +70,6 @@ function MentoringBlock(runtime, element) {
function setContent(dom, content) {
dom.html('');
dom.append(content);
var template = $('#light-child-template', dom).html();
if (template) {
dom.append(template);
}
}
function renderAttempts() {
......
......@@ -7,6 +7,8 @@ function MentoringStandardView(runtime, element, mentoring) {
function handleSubmitResults(response, disable_submit) {
messagesDOM.empty().hide();
var hide_results = response.message === undefined;
var all_have_results = response.results.length > 0;
$.each(response.results || [], function(index, result_spec) {
var input = result_spec[0];
......@@ -14,7 +16,8 @@ function MentoringStandardView(runtime, element, mentoring) {
var child = mentoring.getChildByName(input);
var options = {
max_attempts: response.max_attempts,
num_attempts: response.num_attempts
num_attempts: response.num_attempts,
hide_results: hide_results,
};
callIfExists(child, 'handleSubmit', result, options);
all_have_results = all_have_results && !$.isEmptyObject(result);
......@@ -24,12 +27,13 @@ function MentoringStandardView(runtime, element, mentoring) {
$('.attempts', element).data('num_attempts', response.num_attempts);
mentoring.renderAttempts();
// Messages should only be displayed upon hitting 'submit', not on page reload
if (!hide_results) {
mentoring.setContent(messagesDOM, response.message);
if (messagesDOM.html().trim()) {
messagesDOM.prepend('<div class="title1">' + mentoring.data.feedback_label + '</div>');
messagesDOM.show();
}
}
// Disable the submit button if we have just submitted new answers,
// or if we have just [re]loaded the page and are showing a complete set
......
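On the client side, the standard view infers whether to hide feedback from the shape of the submit response: if the server omits the global `message`, results and feedback stay hidden. A rough sketch of the two response shapes the JavaScript above distinguishes; the field values and the exact per-child result payload are assumptions for illustration only.

    # Feedback visible: the handler includes the global feedback message.
    response_with_feedback = {
        'results': [['answer_1', {'status': 'correct'}]],
        'num_attempts': 3,
        'max_attempts': 3,
        'message': '<p>Feedback for the whole block</p>',
    }

    # Feedback hidden (option enabled, attempts remain): 'message' is omitted,
    # so response.message === undefined and hide_results is true in the JS.
    response_without_feedback = {
        'results': [['answer_1', {'status': 'correct'}]],
        'num_attempts': 1,
        'max_attempts': 3,
    }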
......@@ -312,10 +312,6 @@ function MentoringWithStepsBlock(runtime, element) {
function setContent(dom, content) {
dom.html('');
dom.append(content);
var template = $('#light-child-template', dom).html();
if (template) {
dom.append(template);
}
}
function publishEvent(data) {
......
......@@ -12,7 +12,7 @@
<pb-tip values='["understand"]'><div id="test-custom-html">Really?</div></pb-tip>
</pb-mcq>
<pb-mrq name="feedback_mrq_3" question="What do you like in this MRQ?" required_choices='["elegance","gracefulness","beauty"]'>
<pb-mrq name="feedback_mrq_3" question="What do you like in this MRQ?" required_choices='["elegance","beauty","gracefulness"]'>
<pb-choice value="elegance">Its elegance</pb-choice>
<pb-choice value="beauty">Its beauty</pb-choice>
<pb-choice value="gracefulness">Its gracefulness</pb-choice>
......
import ddt
import unittest
from mock import MagicMock, Mock, patch
from mock import MagicMock, Mock, PropertyMock, patch
from random import random
from xblock.field_data import DictFieldData
......@@ -66,6 +66,27 @@ class TestMentoringBlock(unittest.TestCase):
self.assertIn('Unable to load child component', fragment.content)
@ddt.data(
(True, True, True),
(True, False, False),
(False, True, True),
(False, False, True),
)
@ddt.unpack
def test_correctly_decides_to_show_or_hide_feedback_message(
self, pb_hide_feedback_if_attempts_remain, max_attempts_reached, expected_show_message
):
block = MentoringBlock(Mock(), DictFieldData({
'student_results': ['must', 'be', 'non-empty'],
}), Mock())
block.get_option = Mock(return_value=pb_hide_feedback_if_attempts_remain)
with patch(
'problem_builder.mentoring.MentoringBlock.max_attempts_reached', new_callable=PropertyMock
) as patched_max_attempts_reached:
patched_max_attempts_reached.return_value = max_attempts_reached
_, _, show_message = block._get_standard_results()
self.assertEqual(show_message, expected_show_message)
@ddt.ddt
class TestMentoringBlockTheming(unittest.TestCase):
......@@ -140,13 +161,13 @@ class TestMentoringBlockTheming(unittest.TestCase):
self.assertEqual(patched_load_unicode.call_count, len(locations))
def test_student_view_calls_include_theme_files(self):
self.service_mock.get_settings_bucket = Mock(return_value={})
self.block.get_xblock_settings = Mock(return_value={})
with patch.object(self.block, 'include_theme_files') as patched_include_theme_files:
fragment = self.block.student_view({})
patched_include_theme_files.assert_called_with(fragment)
def test_author_preview_view_calls_include_theme_files(self):
self.service_mock.get_settings_bucket = Mock(return_value={})
self.block.get_xblock_settings = Mock(return_value={})
with patch.object(self.block, 'include_theme_files') as patched_include_theme_files:
fragment = self.block.author_preview_view({})
patched_include_theme_files.assert_called_with(fragment)
......@@ -190,17 +211,29 @@ class TestMentoringBlockOptions(unittest.TestCase):
def test_get_option(self):
random_key, random_value = random(), random()
with patch.object(self.block, 'get_options') as patched_get_options:
# Happy path: Customizations contain expected key
patched_get_options.return_value = {random_key: random_value}
option = self.block.get_option(random_key)
patched_get_options.assert_called_once_with()
self.assertEqual(option, random_value)
with patch.object(self.block, 'get_options') as patched_get_options:
# Sad path: Customizations do not contain expected key
patched_get_options.return_value = {}
option = self.block.get_option(random_key)
patched_get_options.assert_called_once_with()
self.assertEqual(option, False)
def test_student_view_calls_get_option(self):
self.service_mock.get_settings_bucket = Mock(return_value={})
self.block.get_xblock_settings = Mock(return_value={})
with patch.object(self.block, 'get_option') as patched_get_option:
self.block.student_view({})
patched_get_option.assert_called_with('pb_mcq_hide_previous_answer')
def test_get_standard_results_calls_get_option(self):
with patch.object(self.block, 'get_option') as patched_get_option:
self.block._get_standard_results()
patched_get_option.assert_called_with('pb_hide_feedback_if_attempts_remain')
class TestMentoringBlockJumpToIds(unittest.TestCase):
def setUp(self):
......