Commit 9f967274 by Braden MacDonald

Merge pull request #85 from open-craft/qa-bug-fixes-oc-998

Fix various bugs found in QA
parents 127c50db b4ef8686
......@@ -99,7 +99,7 @@ class MCQBlock(SubmittingXBlockMixin, QuestionnaireAbstractBlock):
return {
'submission': submission,
'message': self.message,
'message': self.message_formatted,
'status': 'correct' if correct else 'incorrect',
'tips': formatted_tips,
'weight': self.weight,
......
......@@ -99,6 +99,13 @@ class BaseMentoringBlock(
scope=Scope.content,
enforce_type=True
)
weight = Float(
display_name=_("Weight"),
help=_("Defines the maximum total grade of the block."),
default=1,
scope=Scope.settings,
enforce_type=True
)
# User state
num_attempts = Integer(
......@@ -109,6 +116,7 @@ class BaseMentoringBlock(
)
has_children = True
has_score = True # The Problem/Step Builder XBlocks produce scores. (Their children do not send scores to the LMS.)
icon_class = 'problem'
block_settings_key = 'mentoring'
......@@ -197,8 +205,11 @@ class BaseMentoringBlock(
Publish data for analytics purposes
"""
event_type = data.pop('event_type')
self.runtime.publish(self, event_type, data)
if (event_type == 'grade'):
# This handler can be called from the browser. Don't allow the browser to submit arbitrary grades ;-)
raise JsonHandlerError(403, "Posting grade events from the browser is forbidden.")
self.runtime.publish(self, event_type, data)
return {'result': 'ok'}
def author_preview_view(self, context):
......@@ -214,6 +225,10 @@ class BaseMentoringBlock(
self.include_theme_files(fragment)
return fragment
def max_score(self):
    """
    Return the maximum possible score for this block.

    All scores this block reports are normalized to the 0.0-1.0 range,
    so the maximum is the constant 1.0.
    """
    normalized_maximum = 1.0
    return normalized_maximum
class MentoringBlock(BaseMentoringBlock, StudioContainerXBlockMixin, StepParentMixin):
"""
......@@ -262,13 +277,6 @@ class MentoringBlock(BaseMentoringBlock, StudioContainerXBlockMixin, StepParentM
)
# Settings
weight = Float(
display_name=_("Weight"),
help=_("Defines the maximum total grade of the block."),
default=1,
scope=Scope.settings,
enforce_type=True
)
display_name = String(
display_name=_("Title (Display name)"),
help=_("Title to display"),
......@@ -323,8 +331,6 @@ class MentoringBlock(BaseMentoringBlock, StudioContainerXBlockMixin, StepParentM
'display_submit', 'feedback_label', 'weight', 'extended_feedback'
)
has_score = True
@property
def is_assessment(self):
""" Checks if mentoring XBlock is in assessment mode """
......@@ -377,10 +383,6 @@ class MentoringBlock(BaseMentoringBlock, StudioContainerXBlockMixin, StepParentM
return Score(score, int(round(score * 100)), correct, incorrect, partially_correct)
def max_score(self):
""" Maximum score. We scale all scores to a maximum of 1.0 so this is always 1.0 """
return 1.0
def student_view(self, context):
# Migrate stored data if necessary
self.migrate_fields()
......@@ -645,7 +647,7 @@ class MentoringBlock(BaseMentoringBlock, StudioContainerXBlockMixin, StepParentM
# Save the user's latest score
self.runtime.publish(self, 'grade', {
'value': self.score.raw,
'max_value': 1,
'max_value': self.max_score(),
})
# Mark this as having used an attempt:
......@@ -712,7 +714,7 @@ class MentoringBlock(BaseMentoringBlock, StudioContainerXBlockMixin, StepParentM
log.info(u'Last assessment step submitted: {}'.format(submissions))
self.runtime.publish(self, 'grade', {
'value': score.raw,
'max_value': 1,
'max_value': self.max_score(),
'score_type': 'proficiency',
})
event_data['final_grade'] = score.raw
......@@ -848,7 +850,7 @@ class MentoringWithExplicitStepsBlock(BaseMentoringBlock, StudioContainerWithNes
enforce_type=True
)
editable_fields = ('display_name', 'max_attempts', 'extended_feedback')
editable_fields = ('display_name', 'max_attempts', 'extended_feedback', 'weight')
@lazy
def question_ids(self):
......@@ -864,6 +866,27 @@ class MentoringWithExplicitStepsBlock(BaseMentoringBlock, StudioContainerWithNes
"""
return [self.runtime.get_block(question_id) for question_id in self.question_ids]
@property
def active_step_safe(self):
    """
    Get self.active_step and double-check that it is a valid value.

    The stored value could be invalid if this block has been edited and
    new steps were added/deleted since the value was saved.

    Returns:
        int: a valid step index; -1 for the review step (when one
        exists); or 0 as a safe fallback for any stale/invalid value.
    """
    active_step = self.active_step
    # Idiomatic chained comparison replaces the two-part range check.
    if 0 <= active_step < len(self.step_ids):
        return active_step
    if active_step == -1 and self.has_review_step:
        return active_step  # -1 indicates the review step
    # Stored value is out of range; fall back to the first step.
    return 0
def get_active_step(self):
    """ Get the active step as an instantiated XBlock """
    # Resolve the usage ID once instead of indexing step_ids twice.
    step_usage_id = self.step_ids[self.active_step_safe]
    step_block = self.runtime.get_block(step_usage_id)
    if step_block is None:
        log.error("Unable to load step builder step child %s", step_usage_id)
    return step_block
@lazy
def step_ids(self):
"""
......@@ -956,6 +979,8 @@ class MentoringWithExplicitStepsBlock(BaseMentoringBlock, StudioContainerWithNes
fragment = Fragment()
children_contents = []
context = context or {}
context['hide_prev_answer'] = True # For Step Builder, we don't show the users' old answers when they try again
for child_id in self.children:
child = self.runtime.get_block(child_id)
if child is None: # child should not be None but it can happen due to bugs or permission issues
......@@ -1003,36 +1028,45 @@ class MentoringWithExplicitStepsBlock(BaseMentoringBlock, StudioContainerWithNes
]
@XBlock.json_handler
def update_active_step(self, new_value, suffix=''):
def submit(self, data, suffix=None):
"""
Called when the user has submitted the answer[s] for the current step.
"""
# First verify that active_step is correct:
if data.get("active_step") != self.active_step_safe:
raise JsonHandlerError(400, "Invalid Step. Refresh the page and try again.")
# The step child will process the data:
step_block = self.get_active_step()
if not step_block:
raise JsonHandlerError(500, "Unable to load the current step block.")
response_data = step_block.submit(data)
# Update the active step:
new_value = self.active_step_safe + 1
if new_value < len(self.step_ids):
self.active_step = new_value
elif new_value == len(self.step_ids):
# The user just completed the final step.
if self.has_review_step:
self.active_step = -1
return {
'active_step': self.active_step
}
@XBlock.json_handler
def update_num_attempts(self, data, suffix=''):
if self.num_attempts < self.max_attempts:
self.num_attempts += 1
return {
'num_attempts': self.num_attempts
}
# Update the number of attempts, if necessary:
if self.num_attempts < self.max_attempts:
self.num_attempts += 1
response_data['num_attempts'] = self.num_attempts
# And publish the score:
score = self.score
grade_data = {
'value': score.raw,
'max_value': self.max_score(),
}
self.runtime.publish(self, 'grade', grade_data)
response_data['grade_data'] = self.get_grade()
@XBlock.json_handler
def publish_attempt(self, data, suffix):
score = self.score
grade_data = {
'value': score.raw,
'max_value': 1,
}
self.runtime.publish(self, 'grade', grade_data)
return {}
response_data['active_step'] = self.active_step
return response_data
@XBlock.json_handler
def get_grade(self, data, suffix):
def get_grade(self, data=None, suffix=None):
score = self.score
return {
'score': score.percentage,
......
......@@ -148,7 +148,7 @@ class MRQBlock(QuestionnaireAbstractBlock):
'submissions': submissions,
'status': status,
'choices': results,
'message': self.message,
'message': self.message_formatted,
'weight': self.weight,
'score': (float(score) / len(results)) if results else 0,
}
......
......@@ -55,60 +55,22 @@ function MentoringWithStepsBlock(runtime, element) {
} else {
checkmark.addClass('checkmark-incorrect icon-exclamation fa-exclamation');
}
}
function postUpdateStep(response) {
activeStep = response.active_step;
if (activeStep === -1) {
updateNumAttempts();
} else {
updateControls();
var step = steps[activeStep];
if (typeof step.showFeedback == 'function') {
step.showFeedback(response);
}
}
function handleResults(response) {
showFeedback(response);
// Update active step:
// If we end up at the review step, proceed with updating the number of attempts used.
// Otherwise, get UI ready for showing next step.
var handlerUrl = runtime.handlerUrl(element, 'update_active_step');
$.post(handlerUrl, JSON.stringify(activeStep+1))
.success(postUpdateStep);
}
function updateNumAttempts() {
var handlerUrl = runtime.handlerUrl(element, 'update_num_attempts');
$.post(handlerUrl, JSON.stringify({}))
.success(function(response) {
attemptsDOM.data('num_attempts', response.num_attempts);
publishAttempt();
});
}
function publishAttempt() {
var handlerUrl = runtime.handlerUrl(element, 'publish_attempt');
$.post(handlerUrl, JSON.stringify({}))
.success(function(response) {
// Now that relevant info is up-to-date and attempt has been published, get the latest grade
updateGrade();
});
}
function updateGrade() {
var handlerUrl = runtime.handlerUrl(element, 'get_grade');
$.post(handlerUrl, JSON.stringify({}))
.success(function(response) {
gradeDOM.data('score', response.score);
gradeDOM.data('correct_answer', response.correct_answers);
gradeDOM.data('incorrect_answer', response.incorrect_answers);
gradeDOM.data('partially_correct_answer', response.partially_correct_answers);
gradeDOM.data('correct', response.correct);
gradeDOM.data('incorrect', response.incorrect);
gradeDOM.data('partial', response.partial);
gradeDOM.data('assessment_review_tips', response.assessment_review_tips);
updateReviewStep(response);
});
function updateGrade(grade_data) {
    // Copy the latest grade details onto the grade DOM node's data
    // attributes, then refresh the review step with the same payload.
    // Each pair is [data key on the DOM node, key in grade_data].
    var fieldPairs = [
        ['score', 'score'],
        ['correct_answer', 'correct_answers'],
        ['incorrect_answer', 'incorrect_answers'],
        ['partially_correct_answer', 'partially_correct_answers'],
        ['correct', 'correct'],
        ['incorrect', 'incorrect'],
        ['partial', 'partial'],
        ['assessment_review_tips', 'assessment_review_tips']
    ];
    for (var i = 0; i < fieldPairs.length; i++) {
        gradeDOM.data(fieldPairs[i][0], grade_data[fieldPairs[i][1]]);
    }
    updateReviewStep(grade_data);
}
function updateReviewStep(response) {
......@@ -136,16 +98,27 @@ function MentoringWithStepsBlock(runtime, element) {
}
function submit() {
// We do not handle submissions at this level, so just forward to "submit" method of active step
var step = steps[activeStep];
step.submit(handleResults);
}
function markRead() {
var handlerUrl = runtime.handlerUrl(element, 'update_active_step');
$.post(handlerUrl, JSON.stringify(activeStep+1)).success(function (response) {
postUpdateStep(response);
updateDisplay();
submitDOM.attr('disabled', 'disabled'); // Disable the button until the results load.
var submitUrl = runtime.handlerUrl(element, 'submit');
var hasQuestion = steps[activeStep].hasQuestion();
var data = steps[activeStep].getSubmitData();
data["active_step"] = activeStep;
$.post(submitUrl, JSON.stringify(data)).success(function(response) {
showFeedback(response);
activeStep = response.active_step;
if (activeStep === -1) {
// We are now showing the review step / end
// Update the number of attempts.
attemptsDOM.data('num_attempts', response.num_attempts);
updateGrade(response.grade_data);
} else if (!hasQuestion) {
// This was a step with no questions, so proceed to the next step / review:
updateDisplay();
} else {
// Enable the Next button so users can proceed.
updateControls();
}
});
}
......@@ -332,11 +305,11 @@ function MentoringWithStepsBlock(runtime, element) {
if (isLastStep() && step.hasQuestion()) {
nextDOM.hide();
} else if (isLastStep()) {
reviewDOM.one('click', markRead);
reviewDOM.one('click', submit);
reviewDOM.removeAttr('disabled');
nextDOM.hide()
} else if (!step.hasQuestion()) {
nextDOM.one('click', markRead);
nextDOM.one('click', submit);
}
if (step.hasQuestion()) {
submitDOM.show();
......
......@@ -122,7 +122,12 @@ function MCQBlock(runtime, element) {
var mentoring = this.mentoring;
var messageView = MessageView(element, mentoring);
messageView.clearResult();
if (result.message) {
var msg = '<div class="message-content">' + result.message + '</div>' +
'<div class="close icon-remove-sign fa-times-circle"></div>';
messageView.showMessage(msg);
} else { messageView.clearResult(); }
display_message(result.message, messageView, options.checkmark);
......
......@@ -44,29 +44,25 @@ function MentoringStepBlock(runtime, element) {
return is_valid;
},
submit: function(resultHandler) {
var handler_name = 'submit';
getSubmitData: function() {
var data = {};
for (var i = 0; i < children.length; i++) {
var child = children[i];
if (child && child.name !== undefined) {
data[child.name.toString()] = callIfExists(child, handler_name);
data[child.name.toString()] = callIfExists(child, "submit");
}
}
var handlerUrl = runtime.handlerUrl(element, handler_name);
if (submitXHR) {
submitXHR.abort();
}
submitXHR = $.post(handlerUrl, JSON.stringify(data))
.success(function(response) {
resultHandler(response);
if (message.length) {
message.fadeIn();
$(document).click(function() {
message.fadeOut();
});
}
return data;
},
showFeedback: function(response) {
// Called when user has just submitted an answer or is reviewing their answer during extended feedback.
if (message.length) {
message.fadeIn();
$(document).click(function() {
message.fadeOut();
});
}
},
getResults: function(resultHandler) {
......
......@@ -221,3 +221,14 @@ class QuestionnaireAbstractBlock(
child = self.runtime.get_block(child_id)
if child.type == "on-assessment-review-question":
return child.content
@property
def message_formatted(self):
    """ Get the feedback message HTML, if any, formatted by the runtime """
    if not self.message:
        return ""
    # For any HTML that we aren't 'rendering' through an XBlock view such as
    # student_view the runtime may need to rewrite URLs,
    # e.g. converting '/static/x.png' to '/c4x/.../x.png'.
    # Fall back to a no-op when the runtime offers no replace_urls.
    replace_urls = getattr(self.runtime, 'replace_urls', lambda html: html)
    return replace_urls(self.message)
......@@ -160,8 +160,8 @@ class MentoringStepBlock(
def has_question(self):
return any(getattr(child, 'answerable', False) for child in self.steps)
@XBlock.json_handler
def submit(self, submissions, suffix=''):
def submit(self, submissions):
""" Handle a student submission. This is called by the parent XBlock. """
log.info(u'Received submissions: {}'.format(submissions))
# Submit child blocks (questions) and gather results
......@@ -177,6 +177,7 @@ class MentoringStepBlock(
self.reset()
for result in submit_results:
self.student_results.append(result)
self.save()
return {
'message': 'Success!',
......
......@@ -10,7 +10,7 @@
<div class="choice-selector">
<input id="choice-{{ self.html_id }}-{{ forloop.counter }}" type="radio"
name="{{ self.name }}" value="{{ choice.value }}"
{% if self.student_choice == choice.value %} checked{% endif %}
{% if self.student_choice == choice.value and not hide_prev_answer %} checked{% endif %}
/>
</div>
<label class="choice-label" for="choice-{{ self.html_id }}-{{ forloop.counter }}">
......
{% load i18n %}
<div class="mentoring themed-xblock" data-active-step="{{ self.active_step }}">
<div class="mentoring themed-xblock" data-active-step="{{ self.active_step_safe }}">
{% if show_title and title %}
<div class="title">
......
......@@ -10,7 +10,7 @@
<div class="choice-selector">
<input id="choice-{{ self.html_id }}-{{ forloop.counter }}" type="checkbox"
name="{{ self.name }}" value="{{ choice.value }}"
{% if choice.value in self.student_choices %} checked{% endif %}
{% if choice.value in self.student_choices and not hide_prev_answer %} checked{% endif %}
/>
</div>
<label class="choice-label" for="choice-{{ self.html_id }}-{{ forloop.counter }}">
......
......@@ -10,7 +10,7 @@
<div class="choice-selector">
<input id="choice-{{ self.html_id }}-{{i}}" type="radio"
name="{{ self.name }}" value="{{i}}"
{% if self.student_choice == i %} checked{%else%} data-student-choice='{{self.student_choice}}'{% endif %}
{% if self.student_choice == i and not hide_prev_answer %} checked{%else%} data-student-choice='{{self.student_choice}}'{% endif %}
/>
</div>
<label class="choice-label" for="choice-{{ self.html_id }}-{{i}}">
......@@ -30,7 +30,7 @@
<div class="choice-selector">
<input id="choice-{{ self.html_id }}-custom{{ forloop.counter }}" type="radio"
name="{{ self.name }}" value="{{ choice.value }}"
{% if self.student_choice == choice.value %} checked{%else%} data-student-choice='{{self.student_choice}}'{% endif %}
{% if self.student_choice == choice.value and not hide_prev_answer %} checked{%else%} data-student-choice='{{self.student_choice}}'{% endif %}
/>
</div>
<label class="choice-label" for="choice-{{ self.html_id }}-custom{{ forloop.counter }}">
......
......@@ -22,7 +22,7 @@
<p>And here is an example of a Multiple Choice Question (MCQ):</p>
</html_demo>
<pb-mcq name="plain_mcq" question="Which book includes the line &quot;But I have one want which I have never yet been able to satisfy, and the absence of the object of which I now feel as a most severe evil. I have no friend.&quot;?" correct_choices="frankenstein">
<pb-mcq name="plain_mcq" question="Which book includes the line &quot;But I have one want which I have never yet been able to satisfy, and the absence of the object of which I now feel as a most severe evil. I have no friend.&quot;?" correct_choices='["frankenstein"]'>
<pb-choice value="frankenstein"><em>Frankenstein</em></pb-choice>
<pb-choice value="moby"><em>Moby-Dick; or, The Whale</em></pb-choice>
<pb-choice value="twist"><em>Oliver Twist</em></pb-choice>
......@@ -40,19 +40,19 @@
<p>How did you like <em>To Kill a Mockingbird</em>?</p>
</html_demo>
<pb-rating name="rate_mockingbird" low="I really disliked it" high="I loved it" correct_choices="1,2,3,4,5" />
<pb-rating name="rate_mockingbird" low="I really disliked it" high="I loved it" correct_choices='["1","2","3","4","5"]' />
</problem-builder>
<problem-builder display_name="Multiple Response Question (and introducing tips)" weight="1" mode="standard">
<html_demo><p>A fourth type of question is the Multiple Response Question (MRQ), which allows student to select multiple answers. Each answer can be marked as required, not required, or acceptable either way. The following question contains examples of each possibility. Try getting it wrong and see how it provides feedback to guide you to a correct answer.</p></html_demo>
<pb-mrq name="future_movies" question="Which of these films are set in the future?" required_choices="apes" ignored_choices="2001">
<pb-mrq name="future_movies" question="Which of these films are set in the future?" required_choices='["apes"]' ignored_choices='["2001"]'>
<pb-choice value="starwars">Star Wars Episode V: The Empire Strikes Back</pb-choice>
<pb-choice value="2001">2001: A Space Odyssey</pb-choice>
<pb-choice value="apes">Planet of the Apes (1968)</pb-choice>
<pb-tip values="starwars">Star Wars takes place "A long time ago in a galaxy far, far away"</pb-tip>
<pb-tip values="2001">You could argue this one either way. When the film was released in 1968, it was set in the future. However, 2001 is now in the past.</pb-tip>
<pb-tip values="apes">The majority of the film takes place in the year 3978.</pb-tip>
<pb-tip values='["starwars"]'>Star Wars takes place "A long time ago in a galaxy far, far away"</pb-tip>
<pb-tip values='["2001"]'>You could argue this one either way. When the film was released in 1968, it was set in the future. However, 2001 is now in the past.</pb-tip>
<pb-tip values='["apes"]'>The majority of the film takes place in the year 3978.</pb-tip>
</pb-mrq>
<pb-message type="incomplete">
......@@ -69,14 +69,14 @@
<p>Like the MRQ above, multiple choice and rating questions can also provide feedback based on which answer was selected and whether the answer is correct or not. Here's an example:</p>
</html_demo>
<pb-mcq name="subjunctive" question="Which sentence correctly uses the subjunctive case?" correct_choices="d">
<pb-mcq name="subjunctive" question="Which sentence correctly uses the subjunctive case?" correct_choices='["d"]'>
<pb-choice value="a">To buy or not to buy, that is the question.</pb-choice>
<pb-choice value="b">Renting gives you more flexibility.</pb-choice>
<pb-choice value="c">If I was you, I'd buy the house.</pb-choice>
<pb-choice value="d">If I were you, I'd rent the house.</pb-choice>
<pb-tip values="a,b">This sentence is not discussing hypotheticals or impossibilities.</pb-tip>
<pb-tip values="c">This sentence should be using the subjunctive case since it's dicsussing a hypothetical, but "was" is the wrong case.</pb-tip>
<pb-tip values="d">Correct. "was" has become "were" to indicate the subjunctive case.</pb-tip>
<pb-tip values='["a","b"]'>This sentence is not discussing hypotheticals or impossibilities.</pb-tip>
<pb-tip values='["c"]'>This sentence should be using the subjunctive case since it's dicsussing a hypothetical, but "was" is the wrong case.</pb-tip>
<pb-tip values='["d"]'>Correct. "was" has become "were" to indicate the subjunctive case.</pb-tip>
</pb-mcq>
<pb-message type="incomplete">
......@@ -111,20 +111,20 @@
</problem-builder>
<problem-builder display_name="Assessment Example" mode="assessment">
<html_demo><p>Mentoring questions can also be asked in "Assessment" mode, where each question is asked one at a time, and a final score is shown at the end. The author can set how many attempts each student is given to go through the assessment.</p></html_demo>
<pb-mcq name="aq1" question="What is 7+3?" correct_choices="ten">
<pb-mcq name="aq1" question="What is 7+3?" correct_choices='["ten"]'>
<pb-choice value="ten">10</pb-choice>
<pb-choice value="prime">104,297</pb-choice>
<pb-choice value="infinity">Infinity</pb-choice>
<pb-choice value="undefined">Undefined</pb-choice>
</pb-mcq>
<pb-mcq name="aq2" question="What is the square root of nine?" correct_choices="three">
<pb-mcq name="aq2" question="What is the square root of nine?" correct_choices='["three"]'>
<pb-choice value="zero">1</pb-choice>
<pb-choice value="three">3</pb-choice>
<pb-choice value="nine">9</pb-choice>
<pb-choice value="infinity">Infinity</pb-choice>
<pb-choice value="undefined">Undefined</pb-choice>
</pb-mcq>
<pb-mcq name="aq3" question="What is 10&#247;0?" correct_choices="infinity,undefined">
<pb-mcq name="aq3" question="What is 10&#247;0?" correct_choices='["infinity","undefined"]'>
<pb-choice value="zero">0</pb-choice>
<pb-choice value="ten">10</pb-choice>
<pb-choice value="negten">-10</pb-choice>
......
......@@ -3,15 +3,15 @@
<p>Please answer the questions below.</p>
</html_demo>
<pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" message="Thank you for answering!" required_choices="gracefulness,elegance,beauty">
<pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" message="Thank you for answering!" required_choices='["gracefulness","elegance","beauty"]'>
<pb-choice value="elegance">Its elegance</pb-choice>
<pb-choice value="beauty">Its beauty</pb-choice>
<pb-choice value="gracefulness">Its gracefulness</pb-choice>
<pb-choice value="bugs">Its bugs</pb-choice>
<pb-tip values="gracefulness">This MRQ is indeed very graceful</pb-tip>
<pb-tip values="elegance,beauty">This is something everyone has to like about this MRQ</pb-tip>
<pb-tip values="bugs">Nah, there aren't any!</pb-tip>
<pb-tip values='["gracefulness"]'>This MRQ is indeed very graceful</pb-tip>
<pb-tip values='["elegance","beauty"]'>This is something everyone has to like about this MRQ</pb-tip>
<pb-tip values='["bugs"]'>Nah, there aren't any!</pb-tip>
</pb-mrq>
<pb-message type="completed">
......
......@@ -100,6 +100,16 @@ class StepBuilderTest(MentoringAssessmentBaseTest, MultipleSliderBlocksTestMixin
runtime_patcher.start()
self.addCleanup(runtime_patcher.stop)
# Mock replace_urls so that we can check that message HTML gets processed with any
# transforms that the runtime needs.
runtime_patcher2 = patch(
'workbench.runtime.WorkbenchRuntime.replace_urls',
lambda _runtime, html: html.replace('REPLACE-ME', ''),
create=True
)
runtime_patcher2.start()
self.addCleanup(runtime_patcher2.stop)
def freeform_answer(
self, number, step_builder, controls, text_input, result, saved_value="", hold=False, last=False
):
......@@ -425,6 +435,9 @@ class StepBuilderTest(MentoringAssessmentBaseTest, MultipleSliderBlocksTestMixin
None, step_builder, controls, 'This is a different answer', CORRECT, saved_value='This is the answer'
)
# Step 2
# Reload the page, which should have no effect
self.browser.execute_script("$(document).html(' ');")
step_builder, controls = self.go_to_assessment()
# Submit MCQ, go to next step
self.single_choice_question(None, step_builder, controls, 'Yes', CORRECT)
# Step 3
......
......@@ -41,7 +41,7 @@
</sb-step>
<sb-step display_name="Last step">
<pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" required_choices='["gracefulness","elegance","beauty"]' message="Question Feedback Message">
<pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" required_choices='["gracefulness","elegance","beauty"]' message="Question Feedback Message REPLACE-ME">
<pb-choice value="elegance">Its elegance</pb-choice>
<pb-choice value="beauty">Its beauty</pb-choice>
<pb-choice value="gracefulness">Its gracefulness</pb-choice>
......
"""
Tests common to Problem Builder and Step Builder
"""
import ddt
import unittest
from problem_builder.mentoring import MentoringBlock, MentoringWithExplicitStepsBlock
from xblock.core import XBlock
from .utils import ScoresTestMixin, instantiate_block
@ddt.ddt
class TestBuilderBlocks(ScoresTestMixin, unittest.TestCase):
    """ Unit tests for Problem Builder and Step Builder """

    @ddt.data(MentoringBlock, MentoringWithExplicitStepsBlock)
    def test_interface(self, block_cls):
        """
        Basic tests of the block's public interface.
        """
        # The class itself must be a real XBlock that declares children.
        self.assertTrue(issubclass(block_cls, XBlock))
        self.assertTrue(block_cls.has_children)
        # An instance must expose the same flag and be able to report scores.
        instance = instantiate_block(block_cls)
        self.assertTrue(instance.has_children)
        self.assert_produces_scores(instance)
"""
Helper methods for testing Problem Builder / Step Builder blocks
"""
from mock import MagicMock, Mock
from xblock.field_data import DictFieldData
class ScoresTestMixin(object):
    """
    Mixin for tests that involve scores (grades).
    """

    def assert_produces_scores(self, block):
        """
        Assert that ``block`` meets the requirements for reporting scores
        to the edX LMS and having them appear on the progress page.
        """
        # has_score must be truthy on the instance as well as the class,
        # since either may be inspected.
        self.assertTrue(block.has_score)
        self.assertTrue(type(block).has_score)
        # Default weight should be 1
        self.assertEqual(block.weight, 1.0)
        # max_score() must yield a numeric value.
        self.assertIsInstance(block.max_score(), (int, float))
def instantiate_block(cls, fields=None):
    """
    Instantiate the given XBlock class in a mock runtime.

    Args:
        cls: the XBlock subclass to instantiate.
        fields: optional dict of field values. A 'children' key, if present,
            is treated as a mapping of child usage IDs to child blocks and is
            served through the mocked runtime rather than stored as a field.

    Returns:
        An instance of ``cls`` backed by mock runtime/scope objects.
    """
    # Copy so popping 'children' below does not mutate the caller's dict.
    fields = dict(fields or {})
    children = fields.pop('children', {})
    field_data = DictFieldData(fields)
    block = cls(
        runtime=Mock(),
        field_data=field_data,
        scope_ids=MagicMock()
    )
    block.children = children
    # Serve the provided children through the mocked runtime.
    block.runtime.get_block = lambda child_id: children[child_id]
    return block
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment