Commit 4861c7f9 by Xavier Antoviaque

Merge pull request #89 from open-craft/fix-checkmark-on-second-assessment-run

Display correct checkmarks after retrying assessment
Parents: 496917c1 298123df
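The one-line JavaScript change below is the heart of this fix: `cleanAll()` previously cleared only the correct and incorrect checkmark classes, so a `checkmark-partially-correct` class left over from a first attempt could survive into a retry and display a stale icon. A minimal sketch of the repaired reset, assuming `checkmark` is the jQuery-wrapped element used by `MentoringAssessmentView` (the standalone helper name here is illustrative, not part of the patch):

    // Sketch only: mirrors the one-line fix in the diff below.
    // `checkmark` is assumed to be a jQuery selection, as in MentoringAssessmentView.
    function resetCheckmark(checkmark) {
        // All three result states must be cleared; before this fix the
        // partially-correct class was never removed, so a retried
        // assessment inherited the previous attempt's icon.
        checkmark.removeClass('checkmark-correct icon-ok fa-check');
        checkmark.removeClass('checkmark-partially-correct icon-ok fa-check');
        checkmark.removeClass('checkmark-incorrect icon-exclamation fa-exclamation');
    }

Clearing every state class before re-rendering keeps a retried assessment from inheriting the previous attempt's result display.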
@@ -10,6 +10,7 @@ function MentoringAssessmentView(runtime, element, mentoring) {
     function cleanAll() {
         // clean checkmark state
         checkmark.removeClass('checkmark-correct icon-ok fa-check');
+        checkmark.removeClass('checkmark-partially-correct icon-ok fa-check');
         checkmark.removeClass('checkmark-incorrect icon-exclamation fa-exclamation');
         /* hide all children */
......
<mentoring url_name="{{ url_name }}" display_name="Nav tooltip title" weight="1" mode="assessment">
<mentoring url_name="{{ url_name }}" display_name="Nav tooltip title" weight="1" mode="assessment" max_attempts="10">
<title>Default Title</title>
<shared-header>
<p>This paragraph is shared between <strong>all</strong> questions.</p>
......
 from .base_test import MentoringBaseTest

+CORRECT, INCORRECT, PARTIAL = "correct", "incorrect", "partially-correct"
+
 class MentoringAssessmentTest(MentoringBaseTest):
     def _selenium_bug_workaround_scroll_to(self, mentoring):
@@ -65,169 +66,214 @@ class MentoringAssessmentTest(MentoringBaseTest):
                 return
         raise AssertionError("Expected selectable item present: {}".format(text))

-    def test_assessment(self):
-        # step 1 -- freeform answer
+    def _assert_checkmark(self, mentoring, result):
+        """Assert that only the desired checkmark is present."""
+        states = {CORRECT: 0, INCORRECT: 0, PARTIAL: 0}
+        states[result] += 1
+
+        for name, count in states.items():
+            self.assertEqual(len(mentoring.find_elements_by_css_selector(".checkmark-{}".format(name))), count)
+
+    def go_to_workbench_main_page(self):
+        self.browser.get(self.live_server_url)
+
+    def go_to_assessment_1(self):
+        mentoring = self.go_to_page('Assessment 1')
+
+        class Namespace(object):
+            pass
+
+        controls = Namespace()
+        controls.submit = mentoring.find_element_by_css_selector("input.input-main")
+        controls.next_question = mentoring.find_element_by_css_selector("input.input-next")
+        controls.review = mentoring.find_element_by_css_selector("input.input-review")
+        controls.try_again = mentoring.find_element_by_css_selector("input.input-try-again")
+        return mentoring, controls
+
+    def at_step_1_freeform_answer(self, mentoring, controls, text_input, result, saved_value=""):
self.wait_until_text_in("QUESTION 1", mentoring)
self.assert_persistent_elements_present(mentoring)
self._selenium_bug_workaround_scroll_to(mentoring)
submit = mentoring.find_element_by_css_selector("input.input-main")
next_question = mentoring.find_element_by_css_selector("input.input-next")
review = mentoring.find_element_by_css_selector("input.input-review")
try_again = mentoring.find_element_by_css_selector("input.input-try-again")
answer = mentoring.find_element_by_css_selector("textarea.answer.editable")
self.assertIn("Please answer the questions below.", mentoring.text)
self.assertIn("QUESTION 1", mentoring.text)
self.assertIn("What is your goal?", mentoring.text)
self.assertEquals("", answer.get_attribute("value"))
self.assert_disabled(submit)
self.assert_disabled(next_question)
self.assertEquals(saved_value, answer.get_attribute("value"))
if not saved_value:
self.assert_disabled(controls.submit)
self.assert_disabled(controls.next_question)
answer.send_keys('This is the answer')
self.assertEquals('This is the answer', answer.get_attribute("value"))
answer.clear()
answer.send_keys(text_input)
self.assertEquals(text_input, answer.get_attribute("value"))
self.assert_clickable(submit)
self.assert_disabled(next_question)
self.assert_hidden(review)
self.assert_hidden(try_again)
self.assert_clickable(controls.submit)
self.assert_disabled(controls.next_question)
self.assert_hidden(controls.review)
self.assert_hidden(controls.try_again)
submit.click()
controls.submit.click()
self.wait_until_clickable(next_question)
next_question.click()
self.wait_until_clickable(controls.next_question)
self._assert_checkmark(mentoring, result)
controls.next_question.click()
-        # step 2 -- single choice question
+    def at_step_2_single_choice_question(self, mentoring, controls, choice_name, result):
         self.wait_until_text_in("QUESTION 2", mentoring)
         self.assert_persistent_elements_present(mentoring)
         self._selenium_bug_workaround_scroll_to(mentoring)

         self.assertIn("Do you like this MCQ?", mentoring.text)

-        self.assert_disabled(submit)
-        self.assert_disabled(next_question)
-        self.assert_hidden(review)
-        self.assert_hidden(try_again)
+        self.assert_disabled(controls.submit)
+        self.assert_disabled(controls.next_question)
+        self.assert_hidden(controls.review)
+        self.assert_hidden(controls.try_again)

         choices = self._GetChoices(mentoring)
-        self.assertEquals(choices.state, {"Yes": False, "Maybe not": False, "I don't understand": False})
+        expected_state = {"Yes": False, "Maybe not": False, "I don't understand": False}
+        self.assertEquals(choices.state, expected_state)
+
+        choices.select(choice_name)
+        expected_state[choice_name] = True
+        self.assertEquals(choices.state, expected_state)

-        choices.select("Yes")
-        self.assertEquals(choices.state, {"Yes": True, "Maybe not": False, "I don't understand": False})
-
-        self.assert_clickable(submit)
-        self.assert_disabled(next_question)
-        self.assert_hidden(review)
-        self.assert_hidden(try_again)
+        self.assert_clickable(controls.submit)
+        self.assert_disabled(controls.next_question)
+        self.assert_hidden(controls.review)
+        self.assert_hidden(controls.try_again)

-        submit.click()
+        controls.submit.click()

-        self.wait_until_clickable(next_question)
-        next_question.click()
+        self.wait_until_clickable(controls.next_question)
+        self._assert_checkmark(mentoring, result)
+        controls.next_question.click()
-        # step 3 -- rating question
+    def at_step_3_rating_question(self, mentoring, controls, choice_name, result):
         self.wait_until_text_in("QUESTION 3", mentoring)
         self.assert_persistent_elements_present(mentoring)
         self._selenium_bug_workaround_scroll_to(mentoring)

         self.assertIn("How much do you rate this MCQ?", mentoring.text)

-        self.assert_disabled(submit)
-        self.assert_disabled(next_question)
-        self.assert_hidden(review)
-        self.assert_hidden(try_again)
+        self.assert_disabled(controls.submit)
+        self.assert_disabled(controls.next_question)
+        self.assert_hidden(controls.review)
+        self.assert_hidden(controls.try_again)

         choices = self._GetChoices(mentoring, ".rating")
-        self.assertEquals(choices.state, {
+        expected_choices = {
             "1 - Not good at all": False,
             "2": False, "3": False, "4": False,
             "5 - Extremely good": False,
             "I don't want to rate it": False,
-        })
-        choices.select("5 - Extremely good")
-        self.assertEquals(choices.state, {
-            "1 - Not good at all": False,
-            "2": False, "3": False, "4": False,
-            "5 - Extremely good": True,
-            "I don't want to rate it": False,
-        })
-
-        self.assert_clickable(submit)
-        self.assert_disabled(next_question)
-        self.assert_hidden(review)
-        self.assert_hidden(try_again)
-
-        submit.click()
+        }
+        self.assertEquals(choices.state, expected_choices)
+
+        choices.select(choice_name)
+        expected_choices[choice_name] = True
+        self.assertEquals(choices.state, expected_choices)

-        self.wait_until_clickable(next_question)
-        next_question.click()
-        # step 4 -- multiple choice question
-        self.wait_until_text_in("QUESTION 4", mentoring)
-        self.assert_persistent_elements_present(mentoring)
-        self._selenium_bug_workaround_scroll_to(mentoring)
-        self.assertIn("What do you like in this MRQ?", mentoring.text)
+        self.assert_clickable(controls.submit)
+        self.assert_disabled(controls.next_question)
+        self.assert_hidden(controls.review)
+        self.assert_hidden(controls.try_again)

-        self.assert_disabled(submit)
-        self.assert_hidden(next_question)
-        self.assert_disabled(review)
-        self.assert_hidden(try_again)
+        controls.submit.click()

-        # see if assessment remembers the current step
-        self.browser.get(self.live_server_url)
-
-        # step 4 -- a second time
-        mentoring = self.go_to_page("Assessment 1")
+        self.wait_until_clickable(controls.next_question)
+        self._assert_checkmark(mentoring, result)
+        controls.next_question.click()
+
+    def peek_at_step_4_multiple_choice_question(self, mentoring, controls):
         self.wait_until_text_in("QUESTION 4", mentoring)
         self.assert_persistent_elements_present(mentoring)
         self._selenium_bug_workaround_scroll_to(mentoring)

         self.assertIn("What do you like in this MRQ?", mentoring.text)

-        submit = mentoring.find_element_by_css_selector("input.input-main")
-        next_question = mentoring.find_element_by_css_selector("input.input-next")
-        review = mentoring.find_element_by_css_selector("input.input-review")
-        try_again = mentoring.find_element_by_css_selector("input.input-try-again")
+        self.assert_disabled(controls.submit)
+        self.assert_hidden(controls.next_question)
+        self.assert_disabled(controls.review)
+        self.assert_hidden(controls.try_again)

-        self.assert_disabled(submit)
-        self.assert_hidden(next_question)
-        self.assert_disabled(review)
-        self.assert_hidden(try_again)
+    def at_step_4_multiple_choice_question(self, mentoring, controls, choice_names, result):
+        self.peek_at_step_4_multiple_choice_question(mentoring, controls)
         choices = self._GetChoices(mentoring)
-        self.assertEquals(choices.state, {
+        expected_choices = {
             "Its elegance": False,
             "Its beauty": False,
             "Its gracefulness": False,
             "Its bugs": False,
-        })
-        choices.select("Its elegance")
-        choices.select("Its beauty")
-        choices.select("Its gracefulness")
-        self.assertEquals(choices.state, {
-            "Its elegance": True,
-            "Its beauty": True,
-            "Its gracefulness": True,
-            "Its bugs": False,
-        })
+        }
+        self.assertEquals(choices.state, expected_choices)
+
+        for name in choice_names:
+            choices.select(name)
+            expected_choices[name] = True
+
+        self.assertEquals(choices.state, expected_choices)

-        self.assert_clickable(submit)
-        self.assert_hidden(next_question)
-        self.assert_disabled(review)
-        self.assert_hidden(try_again)
+        self.assert_clickable(controls.submit)
+        self.assert_hidden(controls.next_question)
+        self.assert_disabled(controls.review)
+        self.assert_hidden(controls.try_again)

-        submit.click()
+        controls.submit.click()

-        self.wait_until_clickable(review)
-        review.click()
+        self.wait_until_clickable(controls.review)
+        self._assert_checkmark(mentoring, result)
+        controls.review.click()
-        # step 5 -- review
-        self.wait_until_text_in("You scored 100% on this assessment.", mentoring)
+    def peek_at_step_5_review(self, mentoring, controls, expected):
+        self.wait_until_text_in("You scored {percentage}% on this assessment.".format(**expected), mentoring)
         self.assert_persistent_elements_present(mentoring)
-        self.assertIn("Note: if you retake this assessment, only your final score counts.", mentoring.text)
-        self.assertIn("You answered 4 questions correctly.", mentoring.text)
-        self.assertIn("You answered 0 questions partially correct.", mentoring.text)
-        self.assertIn("You answered 0 questions incorrectly.", mentoring.text)
-
-        self.assert_hidden(submit)
-        self.assert_hidden(next_question)
-        self.assert_hidden(review)
-        self.assert_disabled(try_again)
+
+        if expected["num_attempts"] < expected["max_attempts"]:
+            self.assertIn("Note: if you retake this assessment, only your final score counts.", mentoring.text)
+
+        self.assertIn("You answered {correct} questions correctly.".format(**expected), mentoring.text)
+        self.assertIn("You answered {partial} questions partially correct.".format(**expected), mentoring.text)
+        self.assertIn("You answered {incorrect} questions incorrectly.".format(**expected), mentoring.text)
+        self.assertIn("You have used {num_attempts} of {max_attempts} submissions.".format(**expected), mentoring.text)
+
+        self.assert_hidden(controls.submit)
+        self.assert_hidden(controls.next_question)
+        self.assert_hidden(controls.review)
+    def test_assessment(self):
+        mentoring, controls = self.go_to_assessment_1()
+
+        self.at_step_1_freeform_answer(mentoring, controls, 'This is the answer', CORRECT)
+        self.at_step_2_single_choice_question(mentoring, controls, 'Maybe not', INCORRECT)
+        self.at_step_3_rating_question(mentoring, controls, "5 - Extremely good", CORRECT)
+        self.peek_at_step_4_multiple_choice_question(mentoring, controls)
+
+        # see if assessment remembers the current step
+        self.go_to_workbench_main_page()
+        mentoring, controls = self.go_to_assessment_1()
+
+        self.at_step_4_multiple_choice_question(mentoring, controls, ("Its beauty",), PARTIAL)
+
+        expected_results = {
+            "correct": 2, "partial": 1, "incorrect": 1, "percentage": 63,
+            "num_attempts": 1, "max_attempts": 2}
+        self.peek_at_step_5_review(mentoring, controls, expected_results)
+
+        self.assert_clickable(controls.try_again)
+        controls.try_again.click()
+
+        self.at_step_1_freeform_answer(mentoring, controls, 'This is a different answer', CORRECT,
+                                       saved_value='This is the answer')
+        self.at_step_2_single_choice_question(mentoring, controls, 'Yes', CORRECT)
+        self.at_step_3_rating_question(mentoring, controls, "1 - Not good at all", INCORRECT)
+
+        user_selection = ("Its elegance", "Its beauty", "Its gracefulness")
+        self.at_step_4_multiple_choice_question(mentoring, controls, user_selection, CORRECT)
+
+        expected_results = {
+            "correct": 3, "partial": 0, "incorrect": 1, "percentage": 75,
+            "num_attempts": 2, "max_attempts": 2}
+        self.peek_at_step_5_review(mentoring, controls, expected_results)
+
+        self.assert_disabled(controls.try_again)
<mentoring url_name="mentoring-assessment" display_name="Nav tooltip title" weight="1" mode="assessment">
<mentoring url_name="mentoring-assessment" display_name="Nav tooltip title" weight="1" mode="assessment" max_attempts="2">
<title>A Simple Assessment</title>
<shared-header>
<p>This paragraph is shared between <strong>all</strong> questions.</p>
@@ -17,11 +17,19 @@
         <choice value="yes">Yes</choice>
         <choice value="maybenot">Maybe not</choice>
         <choice value="understand">I don't understand</choice>
+        <tip display="yes">Great!</tip>
+        <tip reject="maybenot">Ah, damn.</tip>
+        <tip reject="understand"><html><div id="test-custom-html">Really?</div></html></tip>
     </mcq>

     <mcq name="mcq_1_2" type="rating" low="Not good at all" high="Extremely good">
         <question>How much do you rate this MCQ?</question>
         <choice value="notwant">I don't want to rate it</choice>
+        <tip display="4,5">I love good grades.</tip>
+        <tip reject="1,2,3">Will do better next time...</tip>
+        <tip reject="notwant">Your loss!</tip>
     </mcq>

     <mrq name="mrq_1_1" type="choices">
@@ -30,5 +38,9 @@
         <choice value="beauty">Its beauty</choice>
         <choice value="gracefulness">Its gracefulness</choice>
         <choice value="bugs">Its bugs</choice>
+        <tip require="gracefulness">This MRQ is indeed very graceful</tip>
+        <tip require="elegance,beauty">This is something everyone has to like about this MRQ</tip>
+        <tip reject="bugs">Nah, there isn't any!</tip>
     </mrq>
 </mentoring>