Commit 85d12cce by E. Kolpakov

Merge remote-tracking branch 'remotes/origin/edx-release' into merge-candidate

Conflicts:
	problem_builder/mentoring.py
	problem_builder/questionnaire.py
	problem_builder/tests/integration/test_dashboard.py
	problem_builder/tests/integration/xml/assessment_2.xml
	problem_builder/tests/integration/xml/mcq_with_html_choices.xml
	problem_builder/tests/integration/xml/mrq_with_comments_1.xml
	problem_builder/tests/integration/xml/mrq_with_html_choices.xml
	problem_builder/tests/integration/xml_templates/assessment.xml
	problem_builder/v1/tests/xml/v1_upgrade_a_new.xml
	problem_builder/v1/tests/xml/v1_upgrade_b_new.xml
	run_tests.py
	test_requirements.txt
parents 5e00f92d 2b6e5705
@@ -30,13 +30,14 @@ import ast
import json
import logging
import operator as op
from django.template.defaultfilters import floatformat
from .dashboard_visual import DashboardVisualData
from .mcq import MCQBlock
from .sub_api import sub_api
from lazy import lazy
from xblock.core import XBlock
from xblock.fields import Scope, List, String, Boolean, Dict
from xblock.fragment import Fragment
from xblock.validation import ValidationMessage
from xblockutils.helpers import child_isinstance
@@ -172,6 +173,20 @@ class DashboardBlock(StudioEditableXBlockMixin, XBlock):
).format(example_here='["2754b8afc03a439693b9887b6f1d9e36", "215028f7df3d4c68b14fb5fea4da7053"]'),
scope=Scope.settings,
)
exclude_questions = Dict(
display_name=_("Questions to be hidden"),
help=_(
"Optional rules to exclude specific questions both from displaying in dashboard and from the calculated "
"average. Rules must start with the url_name of a mentoring block, followed by list of question numbers "
"to exclude. Rule set must be in JSON format. Question numbers are one-based (the first question being "
"number 1). Must be in JSON format. Examples: {examples_here}"
).format(
examples_here='{"2754b8afc03a439693b9887b6f1d9e36":[1,2], "215028f7df3d4c68b14fb5fea4da7053":[1,5]}'
),
scope=Scope.content,
multiline_editor=True,
resettable_editor=False,
)
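A minimal sketch of how a rule set like the example above is interpreted (the dictionary, helper name, and question count below are made up for illustration; the real lookup is the url_name-keyed .get() in student_view further down):

# Illustrative only: exclude_questions maps a mentoring block's url_name to the
# one-based question numbers that the dashboard should skip.
exclude_questions = {
    "2754b8afc03a439693b9887b6f1d9e36": [1, 2],
    "215028f7df3d4c68b14fb5fea4da7053": [1, 5],
}

def visible_questions(url_name, question_count):
    """Hypothetical helper: 1-based question numbers that stay on the dashboard."""
    hidden = exclude_questions.get(url_name, [])
    return [n for n in range(1, question_count + 1) if n not in hidden]

print(visible_questions("2754b8afc03a439693b9887b6f1d9e36", 4))  # -> [3, 4]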
color_rules = String(
display_name=_("Color Coding Rules"),
help=_(
@@ -207,8 +222,27 @@ class DashboardBlock(StudioEditableXBlockMixin, XBlock):
),
scope=Scope.content,
)
average_labels = Dict(
display_name=_("Label for average value"),
help=_(
"This settings allows overriding label for the calculated average per mentoring block. Must be in JSON "
"format. Examples: {examples_here}."
).format(
examples_here='{"2754b8afc03a439693b9887b6f1d9e36": "Avg.", "215028f7df3d4c68b14fb5fea4da7053": "Mean"}'
),
scope=Scope.content,
)
show_numbers = Boolean(
display_name=_("Display values"),
default=True,
help=_("Toggles if numeric values are displayed"),
scope=Scope.content
)
editable_fields = (
'display_name', 'mentoring_ids', 'exclude_questions', 'average_labels', 'show_numbers',
'color_rules', 'visual_rules', 'visual_title', 'visual_desc'
)
css_path = 'public/css/dashboard.css'
js_path = 'public/js/dashboard.js'
@@ -321,6 +355,12 @@ class DashboardBlock(StudioEditableXBlockMixin, XBlock):
except Exception:
return ""
def _get_problem_questions(self, mentoring_block):
""" Generator returning only children of specified block that are MCQs """
for child_id in mentoring_block.children:
if child_isinstance(mentoring_block, child_id, MCQBlock):
yield child_id
def student_view(self, context=None): # pylint: disable=unused-argument
"""
Standard view of this XBlock.
@@ -336,8 +376,22 @@ class DashboardBlock(StudioEditableXBlockMixin, XBlock):
'display_name': mentoring_block.display_name,
'mcqs': []
}
try:
hide_questions = self.exclude_questions.get(mentoring_block.url_name, [])
except Exception: # pylint: disable=broad-except
log.exception("Cannot parse exclude_questions setting - probably malformed: %s", self.exclude_questions)
hide_questions = []
for question_number, child_id in enumerate(self._get_problem_questions(mentoring_block), 1):
try:
if question_number in hide_questions:
continue
except TypeError:
log.exception(
"Cannot check question number - expected list of ints got: %s",
hide_questions
)
# Get the student's submitted answer to this MCQ from the submissions API:
mcq_block = self.runtime.get_block(child_id)
mcq_submission_key = self._get_submission_key(child_id)
@@ -345,9 +399,11 @@ class DashboardBlock(StudioEditableXBlockMixin, XBlock):
value = sub_api.get_submissions(mcq_submission_key, limit=1)[0]["answer"]
except IndexError:
value = None
block['mcqs'].append({
"display_name": mcq_block.display_name_with_default,
"value": value,
"accessible_value": _("Score: {score}").format(score=value) if value else _("No value yet"),
"color": self.color_for_value(value) if value is not None else None, "color": self.color_for_value(value) if value is not None else None,
}) })
# If the values are numeric, display an average: # If the values are numeric, display an average:
...@@ -358,6 +414,11 @@ class DashboardBlock(StudioEditableXBlockMixin, XBlock): ...@@ -358,6 +414,11 @@ class DashboardBlock(StudioEditableXBlockMixin, XBlock):
if numeric_values: if numeric_values:
average_value = sum(numeric_values) / len(numeric_values) average_value = sum(numeric_values) / len(numeric_values)
block['average'] = average_value block['average'] = average_value
# The average row is shown only if an average value exists, so accessible text for missing data is not needed
block['accessible_average'] = _("Score: {score}").format(
score=floatformat(average_value)
)
block['average_label'] = self.average_labels.get(mentoring_block.url_name, _("Average"))
block['has_average'] = True
block['average_color'] = self.color_for_value(average_value)
blocks.append(block)
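For reference, a minimal sketch of the averaging step above with made-up answer values; numeric_values is built outside this hunk, so the filtering shown here is an assumption:

# Illustrative only; mirrors the average calculation above.
values = ["1", "3", "4"]  # numeric MCQ answers submitted for one mentoring block
numeric_values = [float(v) for v in values]
if numeric_values:
    average_value = sum(numeric_values) / len(numeric_values)  # 8.0 / 3 = 2.666...
    # Django's floatformat (imported at the top of this module and used in the
    # template) renders this as "2.7"; color_for_value() then picks its border colour.
    print(average_value)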
@@ -384,6 +445,7 @@ class DashboardBlock(StudioEditableXBlockMixin, XBlock):
'blocks': blocks,
'display_name': self.display_name,
'visual_repr': visual_repr,
'show_numbers': self.show_numbers,
})
fragment = Fragment(html)
@@ -406,6 +468,37 @@ class DashboardBlock(StudioEditableXBlockMixin, XBlock):
except InvalidUrlName as e:
add_error(_(u'Invalid block url_name given: "{bad_url_name}"').format(bad_url_name=unicode(e)))
if data.exclude_questions:
for key, value in data.exclude_questions.iteritems():
if not isinstance(value, list):
add_error(
_(u"'Questions to be hidden' is malformed: value for key {key} is {value}, "
u"expected list of integers")
.format(key=key, value=value)
)
if key not in data.mentoring_ids:
add_error(
_(u"'Questions to be hidden' is malformed: mentoring url_name {url_name} "
u"is not added to Dashboard")
.format(url_name=key)
)
if data.average_labels:
for key, value in data.average_labels.iteritems():
if not isinstance(value, basestring):
add_error(
_(u"'Label for average value' is malformed: value for key {key} is {value}, expected string")
.format(key=key, value=value)
)
if key not in data.mentoring_ids:
add_error(
_(u"'Label for average value' is malformed: mentoring url_name {url_name} "
u"is not added to Dashboard")
.format(url_name=key)
)
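For reference, a minimal standalone sketch of what the two exclude_questions checks accept and reject; the helper function and url_names are hypothetical, but the rules mirror the isinstance and membership tests above:

# Illustrative only: which exclude_questions settings would pass or fail validation.
mentoring_ids = ["step1", "step2"]

def check_exclude_questions(exclude_questions):
    errors = []
    for key, value in exclude_questions.items():
        if not isinstance(value, list):
            errors.append("value for key %s is %r, expected list of integers" % (key, value))
        if key not in mentoring_ids:
            errors.append("mentoring url_name %s is not added to Dashboard" % key)
    return errors

print(check_exclude_questions({"step1": [1, 2]}))   # [] - valid
print(check_exclude_questions({"step1": "1234"}))   # rejected: string instead of a list
print(check_exclude_questions({"unknown": [1]}))    # rejected: url_name not in mentoring_ids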
if data.color_rules:
try:
self.parse_color_rules_str(data.color_rules, ignore_errors=False)
...
@@ -142,6 +142,12 @@ class MentoringBlock(XBlock, StepParentMixin, StudioEditableXBlockMixin, StudioC
default=_("Mentoring Questions"),
scope=Scope.settings
)
feedback_label = String(
display_name=_("Feedback Header"),
help=_("Header for feedback messages"),
default=_("Feedback"),
scope=Scope.content
)
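A minimal sketch of how feedback_label is intended to reach the front end, based on the template and JS hunks later in this diff (the data-feedback_label attribute, read back as mentoring.data.feedback_label); the stand-in class below is hypothetical:

# Illustrative only: the header stays "Feedback" unless an author overrides the
# field in Studio; the JS hunk below prepends it to the feedback messages element.
class FakeMentoringBlock(object):       # hypothetical stand-in for MentoringBlock
    feedback_label = "Feedback"         # default from the field definition above

block = FakeMentoringBlock()
block.feedback_label = "Coach comments"     # value set by a course author
print('<div class="title1">%s</div>' % block.feedback_label)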
# User state
attempted = Boolean(
@@ -187,7 +193,7 @@ class MentoringBlock(XBlock, StepParentMixin, StudioEditableXBlockMixin, StudioC
editable_fields = (
'display_name', 'mode', 'followed_by', 'max_attempts', 'enforce_dependency',
'display_submit', 'feedback_label', 'weight', 'extended_feedback'
)
icon_class = 'problem'
has_score = True
...
.pb-dashboard table {
max-width: 800px;
width: 700px;
table-layout: auto;
border-collapse: collapse;
margin-left: auto;
margin-right: auto;
margin-bottom: 15px;
}
@@ -9,6 +13,10 @@
font-weight: bold;
}
.pb-dashboard .avg-row .desc {
font-weight: 600;
}
.pb-dashboard table td, .pb-dashboard table tbody th {
border-top: 1px solid #ddd;
border-bottom: 1px solid #ddd;
@@ -24,9 +32,13 @@
min-width: 4em;
text-align: right;
padding-right: 5px;
border-right: 2em solid transparent;
}
.pb-dashboard table .avg-row td.desc {
font-style: italic;
}
.pb-dashboard-visual {
text-align: center;
}
@@ -22,7 +22,8 @@ function MentoringBlock(runtime, element) {
hideAllSteps: hideAllSteps,
step: step,
steps: steps,
publish_event: publish_event,
data: data
};
function publish_event(data) {
...
@@ -25,7 +25,7 @@ function MentoringStandardView(runtime, element, mentoring) {
// Messages should only be displayed upon hitting 'submit', not on page reload
mentoring.setContent(messagesDOM, response.message);
if (messagesDOM.html().trim()) {
messagesDOM.prepend('<div class="title1">' + mentoring.data.feedback_label + '</div>');
messagesDOM.show();
}
}
...
@@ -42,16 +42,22 @@
{% for mcq in block.mcqs %}
<tr>
<th class="desc">{{ mcq.display_name }}</th>
<td class="value" {% if mcq.color %} style="border-right-color: {{mcq.color}};"{% endif %}>
{% if mcq.value and show_numbers %}
<span aria-hidden="true">{{ mcq.value }}</span>
{% endif %}
<span class="sr">{{ mcq.accessible_value }}</span>
</td>
</tr>
{% endfor %}
{% if block.has_average %}
<tr class="avg-row">
<th class="desc">{{ block.average_label }}</th>
<td class="value" {% if block.average_color %} style="border-right-color: {{block.average_color}};"{% endif %}>
{% if show_numbers %}
<span aria-hidden="true">{{ block.average|floatformat }}</span>
{% endif %}
<span class="sr">{{ block.accessible_average }}</span>
</td>
</tr>
{% endif %}
...
@@ -9,6 +9,20 @@
body {
font-family: 'Open Sans', 'Helvetica Neue', Helvetica, Arial, sans-serif;
}
.pb-dashboard table {
text-align: left;
}
/* screen reader class from edx-platform */
.sr {
border: 0;
clip: rect(1px 1px 1px 1px);
height: 1px;
margin: -1px;
overflow: hidden;
padding: 0;
position: absolute;
width: 1px;
}
{{css}}
</style>
</head>
...
{% load i18n %}
<div class="mentoring themed-xblock" data-mode="{{ self.mode }}" data-step="{{ self.step }}" data-feedback_label="{{ self.feedback_label}}">
<div class="missing-dependency warning" data-missing="{{ self.has_missing_dependency }}">
{% with url=missing_dependency_url|safe %}
{% blocktrans with link_start="<a href='"|add:url|add:"'>" link_end="</a>" %}
...
@@ -17,8 +17,11 @@
# along with this program in a file in the toplevel directory called
# "AGPLv3". If not, see <http://www.gnu.org/licenses/>.
#
from textwrap import dedent
from mock import Mock, patch
from .base_test import ProblemBuilderBaseTest
from selenium.common.exceptions import NoSuchElementException
from xblockutils.resources import ResourceLoader
class MockSubmissionsAPI(object):
@@ -54,10 +57,25 @@ class TestDashboardBlock(ProblemBuilderBaseTest):
"""
Test the Student View of a dashboard XBlock linked to some problem builder blocks
"""
SIMPLE_DASHBOARD = """<pb-dashboard mentoring_ids='["dummy-value"]'/>"""
ALTERNATIVE_DASHBOARD = dedent("""
<pb-dashboard mentoring_ids='["dummy-value"]' show_numbers="false"
average_labels='{"Step 1": "Avg.", "Step 2":"Mean", "Step 3":"Second Quartile"}'
/>
""")
HIDE_QUESTIONS_DASHBOARD = dedent("""
<pb-dashboard mentoring_ids='["dummy-value"]'
exclude_questions='{"Step 1": [2, 3], "Step 2":[3], "Step 3":[2]}'
/>
""")
MALFORMED_HIDE_QUESTIONS_DASHBOARD = dedent("""
<pb-dashboard mentoring_ids='["dummy-value"]'
exclude_questions='{"Step 1": "1234", "Step 2":[3], "Step 3":[2]}'
/>
""")
def setUp(self):
super(TestDashboardBlock, self).setUp()
# Set up our scenario:
self.load_scenario('dashboard.xml')
# Apply a whole bunch of patches that are needed in lieu of the LMS/CMS runtime and edx-submissions:
@@ -78,21 +96,43 @@ class TestDashboardBlock(ProblemBuilderBaseTest):
),
("problem_builder.dashboard.DashboardBlock.get_mentoring_blocks", get_mentoring_blocks),
("problem_builder.dashboard.sub_api", mock_submisisons_api),
("problem_builder.mcq.sub_api", mock_submisisons_api),
(
"problem_builder.mentoring.MentoringBlock.url_name",
property(lambda block: block.display_name)
)
)
for p in patches:
patcher = patch(*p)
patcher.start()
self.addCleanup(patcher.stop)
# All the patches are installed; now we can proceed with using the XBlocks for tests:
self.go_to_view("student_view") def _install_fixture(self, dashboard_xml):
self.load_scenario("dashboard.xml", {'dashboard': dashboard_xml}, load_immediately=True)
self.vertical = self.load_root_xblock() self.vertical = self.load_root_xblock()
def _get_cell_contents(self, cell):
try:
visible_text = cell.find_element_by_css_selector('span:not(.sr)').text
except NoSuchElementException:
visible_text = ""
screen_reader_text = cell.find_element_by_css_selector('span.sr')
return visible_text, screen_reader_text.text
def _assert_cell_contents(self, cell, expected_visible_text, expected_screen_reader_text):
visible_text, screen_reader_text = self._get_cell_contents(cell)
self.assertEqual(visible_text, expected_visible_text)
self.assertEqual(screen_reader_text, expected_screen_reader_text)
def _format_sr_text(self, visible_text):
return "Score: {value}".format(value=visible_text)
def test_empty_dashboard(self):
"""
Test that when the student has not submitted any question answers, we still see
the dashboard, and it lists all the MCQ questions in the way we expect.
"""
self._install_fixture(self.SIMPLE_DASHBOARD)
dashboard = self.browser.find_element_by_css_selector('.pb-dashboard')
step_headers = dashboard.find_elements_by_css_selector('thead')
self.assertEqual(len(step_headers), 3)
@@ -104,13 +144,10 @@ class TestDashboardBlock(ProblemBuilderBaseTest):
mcq_rows = step.find_elements_by_css_selector('tr')
self.assertTrue(2 <= len(mcq_rows) <= 3)
for mcq in mcq_rows:
cell = mcq.find_element_by_css_selector('td:last-child')
self._assert_cell_contents(cell, '', 'No value yet')
def _set_mentoring_values(self):
"""
Submit an answer to each MCQ, then check that the dashboard reflects those answers.
"""
pbs = self.browser.find_elements_by_css_selector('.mentoring')
for pb in pbs:
mcqs = pb.find_elements_by_css_selector('fieldset.choices')
@@ -119,22 +156,136 @@ class TestDashboardBlock(ProblemBuilderBaseTest):
choices[idx].click()
self.click_submit(pb)
def test_dashboard(self):
"""
Submit an answer to each MCQ, then check that the dashboard reflects those answers.
"""
self._install_fixture(self.SIMPLE_DASHBOARD)
self._set_mentoring_values()
# Reload the page:
self.go_to_view("student_view")
dashboard = self.browser.find_element_by_css_selector('.pb-dashboard')
steps = dashboard.find_elements_by_css_selector('tbody')
self.assertEqual(len(steps), 3)
expected_values = ('1', '2', '3', '4', 'B')
for step_num, step in enumerate(steps):
mcq_rows = step.find_elements_by_css_selector('tr:not(.avg-row)')
self.assertTrue(2 <= len(mcq_rows) <= 3)
for mcq in mcq_rows:
cell = mcq.find_element_by_css_selector('td.value')
visible_text, screen_reader_text = self._get_cell_contents(cell)
self.assertIn(visible_text, expected_values)
self.assertIn(screen_reader_text, map(self._format_sr_text, expected_values))
# Check the average:
avg_row = step.find_element_by_css_selector('tr.avg-row')
left_col = avg_row.find_element_by_css_selector('.desc')
self.assertEqual(left_col.text, "Average")
right_col = avg_row.find_element_by_css_selector('.value')
expected_average = {0: "2", 1: "3", 2: "1"}[step_num]
self._assert_cell_contents(right_col, expected_average, self._format_sr_text(expected_average))
def test_dashboard_alternative(self):
"""
Submit an answer to each MCQ, then check that the dashboard reflects those answers with alternative
configuration:
* Average label is "Avg." instead of default "Average"
* Numerical values are not shown
"""
self._install_fixture(self.ALTERNATIVE_DASHBOARD)
self._set_mentoring_values()
# Reload the page:
self.go_to_view("student_view")
dashboard = self.browser.find_element_by_css_selector('.pb-dashboard')
steps = dashboard.find_elements_by_css_selector('tbody')
self.assertEqual(len(steps), 3)
average_labels = ["Avg.", "Mean", "Second Quartile"]
expected_values = ('1', '2', '3', '4', 'B')
for step_num, step in enumerate(steps):
mcq_rows = step.find_elements_by_css_selector('tr:not(.avg-row)')
self.assertTrue(2 <= len(mcq_rows) <= 3)
for mcq in mcq_rows:
cell = mcq.find_element_by_css_selector('td.value')
visible_text, screen_reader_text = self._get_cell_contents(cell)
# this dashboard is configured not to show numbers
self.assertEqual(visible_text, '')
# but screen reader content is still added
self.assertIn(screen_reader_text, map(self._format_sr_text, expected_values))
# Check the average:
avg_row = step.find_element_by_css_selector('tr.avg-row')
left_col = avg_row.find_element_by_css_selector('.desc')
self.assertEqual(left_col.text, average_labels[step_num])
right_col = avg_row.find_element_by_css_selector('.value')
expected_average = {0: "2", 1: "3", 2: "1"}[step_num]
self._assert_cell_contents(right_col, '', self._format_sr_text(expected_average))
def test_dashboard_exclude_questions(self):
"""
Submit an answer to each MCQ, then check that the dashboard ignores questions it is configured to ignore
"""
self._install_fixture(self.HIDE_QUESTIONS_DASHBOARD)
self._set_mentoring_values()
# Reload the page:
self.go_to_view("student_view")
dashboard = self.browser.find_element_by_css_selector('.pb-dashboard')
steps = dashboard.find_elements_by_css_selector('tbody')
self.assertEqual(len(steps), 3)
expected_values = ('1', '2', '3', '4')
lengths = [1, 2, 1]
for step_num, step in enumerate(steps):
mcq_rows = step.find_elements_by_css_selector('tr:not(.avg-row)')
self.assertEqual(len(mcq_rows), lengths[step_num])
for mcq in mcq_rows:
cell = mcq.find_element_by_css_selector('td.value')
visible_text, screen_reader_text = self._get_cell_contents(cell)
self.assertIn(visible_text, expected_values)
self.assertIn(screen_reader_text, map(self._format_sr_text, expected_values))
# Check the average:
avg_row = step.find_element_by_css_selector('tr.avg-row')
left_col = avg_row.find_element_by_css_selector('.desc')
self.assertEqual(left_col.text, "Average")
right_col = avg_row.find_element_by_css_selector('.value')
expected_average = {0: "1", 1: "3", 2: "1"}[step_num]
self._assert_cell_contents(right_col, expected_average, self._format_sr_text(expected_average))
def test_dashboard_malformed_exclude_questions(self):
"""
Submit an answer to each MCQ, then check that the dashboard handles a malformed exclude_questions rule gracefully: the malformed entry is ignored while valid entries still hide their questions
"""
self._install_fixture(self.MALFORMED_HIDE_QUESTIONS_DASHBOARD)
self._set_mentoring_values()
# Reload the page:
self.go_to_view("student_view")
dashboard = self.browser.find_element_by_css_selector('.pb-dashboard')
steps = dashboard.find_elements_by_css_selector('tbody')
self.assertEqual(len(steps), 3)
expected_values = ('1', '2', '3', '4')
lengths = [3, 2, 1]
for step_num, step in enumerate(steps):
mcq_rows = step.find_elements_by_css_selector('tr:not(.avg-row)')
self.assertEqual(len(mcq_rows), lengths[step_num])
for mcq in mcq_rows:
cell = mcq.find_element_by_css_selector('td.value')
visible_text, screen_reader_text = self._get_cell_contents(cell)
self.assertIn(visible_text, expected_values)
self.assertIn(screen_reader_text, map(self._format_sr_text, expected_values))
# Check the average:
avg_row = step.find_element_by_css_selector('tr.avg-row')
left_col = avg_row.find_element_by_css_selector('.desc')
self.assertEqual(left_col.text, "Average")
right_col = avg_row.find_element_by_css_selector('.value')
expected_average = {0: "2", 1: "3", 2: "1"}[step_num]
self._assert_cell_contents(right_col, expected_average, self._format_sr_text(expected_average))
@@ -53,6 +53,5 @@
<pb-choice value="C">Option C</pb-choice>
</pb-mcq>
</problem-builder>
{{ dashboard|safe }}
</vertical_demo>