Commit bfd829e9 by E. Kolpakov

Added tests for initial state (no feedback should be shown) and for perfect-score-in-the-past scenario
parent 40ff34ff
@@ -92,6 +92,13 @@ class ProblemBuilderBaseTest(SeleniumXBlockTest, PopupCheckMixin):
                label.click()
                break

    def click_choice(self, container, choice_text):
        """ Click on the choice label with the specified text """
        for label in container.find_elements_by_css_selector('.choice label'):
            if choice_text in label.text:
                label.click()
                break


class MentoringBaseTest(SeleniumBaseTest, PopupCheckMixin):
    module_name = __name__
...
@@ -21,7 +21,7 @@ import re
import mock
import ddt
from selenium.common.exceptions import NoSuchElementException
-from .base_test import MentoringBaseTest, MentoringAssessmentBaseTest, GetChoices
+from .base_test import MentoringBaseTest, MentoringAssessmentBaseTest, GetChoices, ProblemBuilderBaseTest


class MentoringTest(MentoringBaseTest):
@@ -72,3 +72,157 @@ class MentoringThemeTest(MentoringAssessmentBaseTest):
        with mock.patch("problem_builder.MentoringBlock.get_theme") as patched_theme:
            patched_theme.return_value = _get_mentoring_theme_settings(theme)
            self.assert_status_icon_color(expected_color)


@ddt.ddt
class ProblemBuilderQuestionnaireBlockTest(ProblemBuilderBaseTest):
    def _get_xblock(self, mentoring, name):
        return mentoring.find_element_by_css_selector(".xblock-v1[data-name='{}']".format(name))

    def _get_choice(self, questionnaire, choice_index):
        return questionnaire.find_elements_by_css_selector(".choices-list .choice")[choice_index]

    def _get_messages_element(self, mentoring):
        return mentoring.find_element_by_css_selector('.messages')

    def _get_controls(self, mentoring):
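        """ Return the answer field and the MCQ, MRQ and rating questionnaire blocks """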
        answer = self._get_xblock(mentoring, "feedback_answer_1").find_element_by_css_selector('.answer')
        mcq = self._get_xblock(mentoring, "feedback_mcq_2")
        mrq = self._get_xblock(mentoring, "feedback_mrq_3")
        rating = self._get_xblock(mentoring, "feedback_rating_4")
        return answer, mcq, mrq, rating

    def _assert_feedback_hidden(self, questionnaire, choice_index):
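        """ Assert that the given choice shows no feedback popup and is not marked correct or incorrect """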
        choice = self._get_choice(questionnaire, choice_index)
        choice_result = choice.find_element_by_css_selector('.choice-result')
        feedback_popup = choice.find_element_by_css_selector(".choice-tips")
        choice_result_classes = choice_result.get_attribute('class').split()
        self.assertTrue(choice_result.is_displayed())
        self.assertFalse(feedback_popup.is_displayed())
        self.assertNotIn('checkmark-correct', choice_result_classes)
        self.assertNotIn('checkmark-incorrect', choice_result_classes)

    def _assert_feedback_visible(self, questionnaire, choice_index, expected_text,
                                 click_choice_result=False, success=True):
        """
        Assert that the feedback for the given choice is visible and contains the expected text.

        If `click_choice_result` is True, the `choice-result` icon is clicked before checking
        feedback visibility, since MRQ feedback is not shown right away.
        """
        choice = self._get_choice(questionnaire, choice_index)
        choice_result = choice.find_element_by_css_selector('.choice-result')
        if click_choice_result:
            choice_result.click()
        feedback_popup = choice.find_element_by_css_selector(".choice-tips")
        self.assertTrue(choice_result.is_displayed())
        self.assertTrue(feedback_popup.is_displayed())
        self.assertEqual(feedback_popup.text, expected_text)
        choice_result_classes = choice_result.get_attribute('class').split()
        self.assertIn('checkmark-correct' if success else 'checkmark-incorrect', choice_result_classes)

    def _standard_filling(self, answer, mcq, mrq, rating):
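        """ Fill in the answer and choose options so the MCQ and rating are correct but the MRQ has two mistakes """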
        answer.send_keys('This is the answer')
        self.click_choice(mcq, "Yes")
        # Select the 1st, 3rd and 4th MRQ options; the first three are correct, so this makes
        # two mistakes: the 2nd (required but not selected) and the 4th (selected but not required)
        self.click_choice(mrq, "Its elegance")
        self.click_choice(mrq, "Its gracefulness")
        self.click_choice(mrq, "Its bugs")
        self.click_choice(rating, "4")

    # mcq and rating can't be reset easily, but that is not required; they are listed here
    # to keep the method signature consistent with the other helpers
    def _clear_filling(self, answer, mcq, mrq, rating):  # pylint: disable=unused-argument
        answer.clear()
        for checkbox in mrq.find_elements_by_css_selector('.choice input'):
            if checkbox.is_selected():
                checkbox.click()

    def _standard_checks(self, answer, mcq, mrq, rating, messages):
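        """ Check the answer value, feedback popups and messages expected after _standard_filling is submitted """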
        self.assertEqual(answer.get_attribute('value'), 'This is the answer')
        self._assert_feedback_visible(mcq, 0, "Great!")
        self._assert_feedback_visible(
            mrq, 0, "This is something everyone has to like about this MRQ",
            click_choice_result=True
        )
        self._assert_feedback_visible(
            mrq, 1, "This is something everyone has to like about beauty",
            click_choice_result=True, success=False
        )
        self._assert_feedback_visible(mrq, 2, "This MRQ is indeed very graceful", click_choice_result=True)
        self._assert_feedback_visible(mrq, 3, "Nah, there aren't any!", click_choice_result=True, success=False)
        self._assert_feedback_visible(rating, 3, "I love good grades.", click_choice_result=True)
        self.assertTrue(messages.is_displayed())
        self.assertEqual(messages.text, "FEEDBACK\nNot done yet")

    def test_feedbacks_and_messages_is_not_shown_on_first_load(self):
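        """ No feedback popups and no messages should be shown before the first submission """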
        mentoring = self.load_scenario("feedback_persistence.xml")
        answer, mcq, mrq, rating = self._get_controls(mentoring)
        messages = self._get_messages_element(mentoring)
        for i in range(3):
            self._assert_feedback_hidden(mcq, i)
        for i in range(4):
            self._assert_feedback_hidden(mrq, i)
        for i in range(5):
            self._assert_feedback_hidden(rating, i)
        self.assertFalse(messages.is_displayed())

    def test_persists_feedback_on_page_reload(self):
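        """ Feedback and messages should still be shown after the page is reloaded """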
        mentoring = self.load_scenario("feedback_persistence.xml")
        answer, mcq, mrq, rating = self._get_controls(mentoring)
        messages = self._get_messages_element(mentoring)
        self._standard_filling(answer, mcq, mrq, rating)
        self.click_submit(mentoring)
        self._standard_checks(answer, mcq, mrq, rating, messages)

        # now, reload the page and do the same checks again
        mentoring = self.go_to_view("student_view")
        answer, mcq, mrq, rating = self._get_controls(mentoring)
        messages = self._get_messages_element(mentoring)
        self._standard_checks(answer, mcq, mrq, rating, messages)

    def test_given_perfect_score_in_past_loads_current_result(self):
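        """ A perfect score in the past should not mask later submissions: the latest result is shown on reload """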
        mentoring = self.load_scenario("feedback_persistence.xml")
        answer, mcq, mrq, rating = self._get_controls(mentoring)
        messages = self._get_messages_element(mentoring)

        answer.send_keys('This is the answer')
        self.click_choice(mcq, "Yes")
        # Select the 1st, 2nd and 3rd MRQ options - all the correct ones - to get a perfect score
        self.click_choice(mrq, "Its elegance")
        self.click_choice(mrq, "Its gracefulness")
        self.click_choice(mrq, "Its beauty")
        self.click_choice(rating, "4")
        self.click_submit(mentoring)

        # precondition - verify that a 100% score was achieved
        self.assertEqual(answer.get_attribute('value'), 'This is the answer')
        self._assert_feedback_visible(mcq, 0, "Great!")
        self._assert_feedback_visible(
            mrq, 0, "This is something everyone has to like about this MRQ",
            click_choice_result=True
        )
        self._assert_feedback_visible(
            mrq, 1, "This is something everyone has to like about beauty",
            click_choice_result=True
        )
        self._assert_feedback_visible(mrq, 2, "This MRQ is indeed very graceful", click_choice_result=True)
        self._assert_feedback_visible(mrq, 3, "Nah, there aren't any!", click_choice_result=True)
        self._assert_feedback_visible(rating, 3, "I love good grades.", click_choice_result=True)
        self.assertTrue(messages.is_displayed())
        self.assertEqual(messages.text, "FEEDBACK\nAll Good")

        self._clear_filling(answer, mcq, mrq, rating)
        self._standard_filling(answer, mcq, mrq, rating)
        self.click_submit(mentoring)
        self._standard_checks(answer, mcq, mrq, rating, messages)

        # now, reload the page and make sure the LATEST submission is loaded and its feedback is shown
        mentoring = self.go_to_view("student_view")
        answer, mcq, mrq, rating = self._get_controls(mentoring)
        messages = self._get_messages_element(mentoring)
        self._standard_checks(answer, mcq, mrq, rating, messages)
\ No newline at end of file
@@ -24,7 +24,7 @@ import ddt
from mock import patch, Mock
from problem_builder import MentoringBlock
-from .base_test import MentoringBaseTest, ProblemBuilderBaseTest
+from .base_test import MentoringBaseTest


# Classes ###########################################################
@@ -292,35 +292,3 @@ class QuestionnaireBlockAprosThemeTest(QuestionnaireBlockTest):
    Test MRQ/MCQ questions without the LMS theme which is on by default.
    """
    pass
-
-
-@ddt.ddt
-class ProblemBuilderQuestionnaireBlockTest(ProblemBuilderBaseTest):
-    def _get_choice_feedback_popup(self, mentoring, choice_index):
-        choices = mentoring.find_elements_by_css_selector(".choices-list .choice")
-        target_choice = choices[choice_index]
-        return target_choice.find_element_by_css_selector(".choice-tips")
-
-    def _get_messages_element(self, mentoring):
-        return mentoring.find_element_by_css_selector('.messages')
-
-    @ddt.data(("One", 0), ("Two", 1))
-    @ddt.unpack
-    def test_persists_feedback_on_page_reload(self, choice_value, choice_index):
-        mentoring = self.load_scenario("messages.xml", {"max_attempts": 1})
-        self.click_choice(mentoring, choice_value)
-        self.click_submit(mentoring)
-        feedback_popup = self._get_choice_feedback_popup(mentoring, choice_index)
-        messages = self._get_messages_element(mentoring)
-        self.assertTrue(feedback_popup.is_displayed())
-        self.assertTrue(messages.is_displayed())
-
-        # now, reload the page
-        mentoring = self.go_to_view("student_view")
-        feedback_popup = self._get_choice_feedback_popup(mentoring, choice_index)
-        messages = self._get_messages_element(mentoring)
-        self.assertTrue(feedback_popup.is_displayed())
-        self.assertTrue(messages.is_displayed())
<vertical_demo>
    <problem-builder url_name="feedback" enforce_dependency="false">
        <pb-answer name="feedback_answer_1" />
        <pb-mcq name="feedback_mcq_2" question="Do you like this MCQ?" correct_choices='["yes"]'>
            <pb-choice value="yes">Yes</pb-choice>
            <pb-choice value="maybenot">Maybe not</pb-choice>
            <pb-choice value="understand">I don't understand</pb-choice>
            <pb-tip values='["yes"]'>Great!</pb-tip>
            <pb-tip values='["maybenot"]'>Ah, damn.</pb-tip>
            <pb-tip values='["understand"]'><div id="test-custom-html">Really?</div></pb-tip>
        </pb-mcq>
        <pb-mrq name="feedback_mrq_3" question="What do you like in this MRQ?" required_choices='["elegance","gracefulness","beauty"]'>
            <pb-choice value="elegance">Its elegance</pb-choice>
            <pb-choice value="beauty">Its beauty</pb-choice>
            <pb-choice value="gracefulness">Its gracefulness</pb-choice>
            <pb-choice value="bugs">Its bugs</pb-choice>
            <pb-tip values='["elegance"]'>This is something everyone has to like about this MRQ</pb-tip>
            <pb-tip values='["beauty"]'>This is something everyone has to like about beauty</pb-tip>
            <pb-tip values='["gracefulness"]'>This MRQ is indeed very graceful</pb-tip>
            <pb-tip values='["bugs"]'>Nah, there aren't any!</pb-tip>
        </pb-mrq>
        <pb-rating name="feedback_rating_4" low="Not good at all" high="Extremely good" question="How do you rate this MCQ?" correct_choices='["4","5"]'>
            <pb-choice value="notwant">I don't want to rate it</pb-choice>
            <pb-tip values='["4","5"]'>I love good grades.</pb-tip>
            <pb-tip values='["1","2","3"]'>Will do better next time...</pb-tip>
            <pb-tip values='["notwant"]'>Your loss!</pb-tip>
        </pb-rating>
        <pb-message type="completed">All Good</pb-message>
        <pb-message type="incomplete">Not done yet</pb-message>
    </problem-builder>
</vertical_demo>