Commit d2100b60 by Braden MacDonald

Parametrize assessment tests

parent c1e91c87
@@ -19,7 +19,8 @@
 #
 from xblock.fields import String
-from xblockutils.base_test import SeleniumBaseTest
+from xblockutils.base_test import SeleniumBaseTest, SeleniumXBlockTest
+from xblockutils.resources import ResourceLoader

 # Studio adds a url_name property to each XBlock but Workbench doesn't.
 # Since we rely on it, we need to mock url_name support so it can be set via XML and
@@ -27,11 +28,13 @@ from xblockutils.base_test import SeleniumBaseTest
 from problem_builder import MentoringBlock

 MentoringBlock.url_name = String()
+loader = ResourceLoader(__name__)

-class MentoringBaseTest(SeleniumBaseTest):
-    module_name = __name__
-    default_css_selector = 'div.mentoring'
+class PopupCheckMixin(object):
+    """
+    Code used by MentoringBaseTest and MentoringAssessmentBaseTest
+    """
     def popup_check(self, mentoring, item_feedbacks, prefix='', do_submit=True):
         submit = mentoring.find_element_by_css_selector('.submit input.input-main')
@@ -57,7 +60,18 @@ class MentoringBaseTest(SeleniumBaseTest):
         self.assertFalse(item_feedback_popup.is_displayed())

-class MentoringAssessmentBaseTest(MentoringBaseTest):
+class MentoringBaseTest(SeleniumBaseTest, PopupCheckMixin):
+    module_name = __name__
+    default_css_selector = 'div.mentoring'
+
+
+class MentoringAssessmentBaseTest(SeleniumXBlockTest, PopupCheckMixin):
+    """
+    Base class for tests of assessment mode
+    """
+    module_name = __name__
+    default_css_selector = 'div.mentoring'
+
     @staticmethod
     def question_text(number):
         if number:
@@ -65,9 +79,16 @@ class MentoringAssessmentBaseTest(MentoringBaseTest):
         else:
             return "Question"

-    def go_to_assessment(self, page):
+    def load_assessment_scenario(self, xml_file, params=None):
+        """ Loads an assessment scenario from an XML template """
+        params = params or {}
+        scenario = loader.render_template("xml_templates/{}".format(xml_file), params)
+        self.set_scenario_xml(scenario)
+        return self.go_to_assessment()
+
+    def go_to_assessment(self):
         """ Navigates to assessment page """
-        mentoring = self.go_to_page(page)
+        mentoring = self.go_to_view("student_view")
         class Namespace(object):
             pass
...
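For orientation: load_assessment_scenario() renders one of the xml_templates/ files with ResourceLoader, installs the result via set_scenario_xml(), and navigates to the block. A minimal sketch of how a test built on MentoringAssessmentBaseTest would call it (hypothetical test method, not part of this commit):

    def test_unlimited_attempts(self):
        # Render xml_templates/assessment.xml with these params; per the
        # test data in this commit, max_attempts=0 means unlimited attempts.
        mentoring, controls = self.load_assessment_scenario(
            "assessment.xml", {"max_attempts": 0, "extended_feedback": False}
        )
        # `mentoring` is the block's root element; `controls` is the
        # Namespace of navigation buttons returned by go_to_assessment().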
@@ -317,13 +317,15 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):
         self.peek_at_multiple_response_question(4, mentoring, controls, extended_feedback=True, alternative_review=True)

     @data(
-        (1, False, 4),
-        (3, False, 0),
-        ('Extended Feedback', True, 2)
+        {"max_attempts": 0, "extended_feedback": False},  # Note '0' means unlimited attempts
+        {"max_attempts": 1, "extended_feedback": False},
+        {"max_attempts": 4, "extended_feedback": False},
+        {"max_attempts": 2, "extended_feedback": True},
     )
-    @unpack
-    def test_assessment(self, assessment, extended_feedback, max_attempts):
-        mentoring, controls = self.go_to_assessment("Assessment %s" % assessment)
+    def test_assessment(self, params):
+        mentoring, controls = self.load_assessment_scenario("assessment.xml", params)
+        max_attempts = params['max_attempts']
+        extended_feedback = params['extended_feedback']

         self.freeform_answer(1, mentoring, controls, 'This is the answer', CORRECT)
         self.single_choice_question(2, mentoring, controls, 'Maybe not', INCORRECT)
@@ -332,7 +334,7 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):

         # see if assessment remembers the current step
         self.go_to_workbench_main_page()
-        mentoring, controls = self.go_to_assessment("Assessment %s" % assessment)
+        mentoring, controls = self.go_to_assessment()

         self.multiple_response_question(4, mentoring, controls, ("Its beauty",), PARTIAL, last=True)
@@ -342,6 +344,12 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):
         }
         self.peek_at_review(mentoring, controls, expected_results, extended_feedback=extended_feedback)
+        if max_attempts == 1:
+            self.assert_messages_empty(mentoring)
+            self.assert_disabled(controls.try_again)
+            return
+
+        # The on-assessment-review message is shown if attempts remain:
         self.assert_messages_text(mentoring, "Assessment additional feedback message text")
         self.assert_clickable(controls.try_again)
         controls.try_again.click()
@@ -365,7 +373,7 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):
         else:
             self.assert_clickable(controls.try_again)
         if 1 <= max_attempts <= 2:
-            self.assert_messages_empty(mentoring)  # The on-assessment-review message should not be shown if no attempts remain
+            self.assert_messages_empty(mentoring)  # The on-assessment-review message is not shown if no attempts remain
         else:
             self.assert_messages_text(mentoring, "Assessment additional feedback message text")
         if extended_feedback:
@@ -375,7 +383,7 @@ class MentoringAssessmentTest(MentoringAssessmentBaseTest):
         """
         No 'Next Question' button on single question assessment.
         """
-        mentoring, controls = self.go_to_assessment("Assessment 2")
+        mentoring, controls = self.load_assessment_scenario("assessment_single.xml", {"max_attempts": 2})
         self.single_choice_question(0, mentoring, controls, 'Maybe not', INCORRECT, last=True)
         expected_results = {
...
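Side note on the parametrization above: ddt passes each @data value to the test as a single positional argument unless @unpack is applied, which is why dropping @unpack lets each dict arrive intact as params (with @unpack, a dict would instead be spread into keyword arguments). A self-contained illustration of this behavior (hypothetical example, not from this repo):

import unittest
from ddt import ddt, data

@ddt
class DataDictExample(unittest.TestCase):
    @data({"max_attempts": 1}, {"max_attempts": 2})
    def test_receives_whole_dict(self, params):
        # Without @unpack, the whole dict is the single argument.
        self.assertIn("max_attempts", params)

if __name__ == '__main__':
    unittest.main()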
@@ -45,7 +45,7 @@ class MentoringThemeTest(MentoringAssessmentBaseTest):
         return '#%02x%02x%02x' % (r, g, b)

     def assert_status_icon_color(self, color):
-        mentoring, controls = self.go_to_assessment('Theme 1')
+        mentoring, controls = self.load_assessment_scenario('assessment_single.xml', {"max_attempts": 2})
         question = self.expect_question_visible(0, mentoring)
         choice_name = "Maybe not"
...
<problem-builder url_name="mentoring-assessment-2" display_name="A Simple Assessment" weight="1" mode="assessment" max_attempts="2">
<html_demo>
<p>This paragraph is shared between <strong>all</strong> questions.</p>
<p>Please answer the questions below.</p>
</html_demo>
<pb-mcq name="mcq_1_1" question="Do you like this MCQ?" correct_choices='["yes"]'>
<pb-choice value='["yes"]'>Yes</pb-choice>
<pb-choice value='["maybenot"]'>Maybe not</pb-choice>
<pb-choice value='["understand"]'>I don't understand</pb-choice>
<pb-tip values='["yes"]'>Great!</pb-tip>
<pb-tip values='["maybenot"]'>Ah, damn.</pb-tip>
<pb-tip values='["understand"]'><div id="test-custom-html">Really?</div></pb-tip>
</pb-mcq>
</problem-builder>
<problem-builder url_name="mentoring-assessment-1" display_name="A Simple Assessment" weight="1" mode="assessment" max_attempts="0">
<html_demo>
<p>This paragraph is shared between <strong>all</strong> questions.</p>
<p>Please answer the questions below.</p>
</html_demo>
<html_demo>
We need an XBlock with JavaScript here to test that it doesn't interfere
with the assessment, since it will show up in runtime(element).children,
but it is not a "step" element:
</html_demo>
<acid/>
<pb-answer name="goal" question="What is your goal?" />
<pb-mcq name="mcq_1_1" question="Do you like this MCQ?" correct_choices='["yes"]'>
<pb-choice value="yes">Yes</pb-choice>
<pb-choice value="maybenot">Maybe not</pb-choice>
<pb-choice value="understand">I don't understand</pb-choice>
<pb-tip values='["yes"]'>Great!</pb-tip>
<pb-tip values='["maybenot"]'>Ah, damn.</pb-tip>
<pb-tip values='["understand"]'><div id="test-custom-html">Really?</div></pb-tip>
</pb-mcq>
<pb-rating name="mcq_1_2" low="Not good at all" high="Extremely good" question="How much do you rate this MCQ?" correct_choices='["4","5"]'>
<pb-choice value="notwant">I don't want to rate it</pb-choice>
<pb-tip values='["4","5"]'>I love good grades.</pb-tip>
<pb-tip values='["1","2", "3"]'>Will do better next time...</pb-tip>
<pb-tip values='["notwant"]'>Your loss!</pb-tip>
</pb-rating>
<pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" required_choices='["gracefulness","elegance","beauty"]'>
<pb-choice value="elegance">Its elegance</pb-choice>
<pb-choice value="beauty">Its beauty</pb-choice>
<pb-choice value="gracefulness">Its gracefulness</pb-choice>
<pb-choice value="bugs">Its bugs</pb-choice>
<pb-tip values='["gracefulness"]'>This MRQ is indeed very graceful</pb-tip>
<pb-tip values='["elegance","beauty"]'>This is something everyone has to like about this MRQ</pb-tip>
<pb-tip values='["bugs"]'>Nah, there aren't any!</pb-tip>
</pb-mrq>
<pb-message type="on-assessment-review">
<html>Assessment additional feedback message text</html>
</pb-message>
</problem-builder>
<problem-builder url_name="mentoring-assessment-1" display_name="A Simple Assessment" weight="1" mode="assessment" max_attempts="2" extended_feedback="true">
<html_demo>
<p>This paragraph is shared between <strong>all</strong> questions.</p>
<p>Please answer the questions below.</p>
</html_demo>
<html_demo>
We need an XBlock with JavaScript here to test that it doesn't interfere
with the assessment, since it will show up in runtime(element).children,
but it is not a "step" element:
</html_demo>
<acid/>
<pb-answer name="goal" question="What is your goal?" />
<pb-mcq name="mcq_1_1" question="Do you like this MCQ?" correct_choices='["yes"]'>
<pb-choice value="yes">Yes</pb-choice>
<pb-choice value="maybenot">Maybe not</pb-choice>
<pb-choice value="understand">I don't understand</pb-choice>
<pb-tip values='["yes"]'>Great!</pb-tip>
<pb-tip values='["maybenot"]'>Ah, damn.</pb-tip>
<pb-tip values='["understand"]'><div id="test-custom-html">Really?</div></pb-tip>
</pb-mcq>
<pb-rating name="mcq_1_2" low="Not good at all" high="Extremely good" question="How much do you rate this MCQ?" correct_choices='["4","5"]'>
<pb-choice value="notwant">I don't want to rate it</pb-choice>
<pb-tip values='["4","5"]'>I love good grades.</pb-tip>
<pb-tip values='["1","2", "3"]'>Will do better next time...</pb-tip>
<pb-tip values='["notwant"]'>Your loss!</pb-tip>
</pb-rating>
<pb-mrq name="mrq_1_1" question="What do you like in this MRQ?" required_choices='["gracefulness","elegance","beauty"]' message="Thank you for answering!">
<pb-choice value="elegance">Its elegance</pb-choice>
<pb-choice value="beauty">Its beauty</pb-choice>
<pb-choice value="gracefulness">Its gracefulness</pb-choice>
<pb-choice value="bugs">Its bugs</pb-choice>
<pb-tip values='["gracefulness"]'>This MRQ is indeed very graceful</pb-tip>
<pb-tip values='["elegance","beauty"]'>This is something everyone has to like about this MRQ</pb-tip>
<pb-tip values='["bugs"]'>Nah, there aren't any!</pb-tip>
</pb-mrq>
<pb-message type="on-assessment-review">
<html>Assessment additional feedback message text</html>
</pb-message>
</problem-builder>
<problem-builder url_name="mentoring-assessment-1" display_name="A Simple Assessment" weight="1" mode="assessment" max_attempts="4"> <problem-builder url_name="mentoring-assessment-1" display_name="A Simple Assessment" weight="1" mode="assessment" max_attempts="{{max_attempts}}" extended_feedback="{{extended_feedback}}">
<html_demo> <html_demo>
<p>This paragraph is shared between <strong>all</strong> questions.</p> <p>This paragraph is shared between <strong>all</strong> questions.</p>
......
<problem-builder url_name="mentoring-assessment-2" display_name="A Simple Assessment" weight="1" mode="assessment" max_attempts="2"> <problem-builder url_name="mentoring-assessment-2" display_name="A Simple Assessment" weight="1" mode="assessment" max_attempts="{{max_attempts}}">
<html_demo> <html_demo>
<p>This paragraph is shared between <strong>all</strong> questions.</p> <p>This paragraph is shared between <strong>all</strong> questions.</p>
<p>Please answer the questions below.</p> <p>Please answer the questions below.</p>
......
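The {{max_attempts}} and {{extended_feedback}} placeholders above are plain template variables that ResourceLoader.render_template() fills in from the params dict passed to load_assessment_scenario(). A standalone sketch of the substitution (assuming the Django template engine, which the xblock-utils ResourceLoader uses under the hood; this snippet is illustrative, not part of the commit):

import django
from django.conf import settings

if not settings.configured:
    # Standalone use only; a real test runner configures Django itself.
    settings.configure(TEMPLATES=[
        {"BACKEND": "django.template.backends.django.DjangoTemplates"},
    ])
    django.setup()

from django.template import Context, Template

snippet = '<problem-builder mode="assessment" max_attempts="{{max_attempts}}">'
print(Template(snippet).render(Context({"max_attempts": 0})))
# prints: <problem-builder mode="assessment" max_attempts="0">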