Commit 7240b625 by muhammad-ammar, committed by muzaffaryousaf

Associate descriptions with question text

TNL-5014
parent 3556f2a3
......@@ -13,6 +13,7 @@ Main module which shows problems (of "capa" type).
This is used by capa_module.
"""
from collections import OrderedDict
from copy import deepcopy
from datetime import datetime
import logging
......@@ -35,6 +36,16 @@ from capa.safe_exec import safe_exec
# extra things displayed after "show answers" is pressed
solution_tags = ['solution']
# fully accessible capa response types
ACCESSIBLE_CAPA_RESPONSE_TYPES = [
'choiceresponse',
'multiplechoiceresponse',
'optionresponse',
'numericalresponse',
'stringresponse',
'formularesponse',
]
# these get captured as student responses
response_properties = ["codeparam", "responseparam", "answer", "openendedparam"]
......@@ -61,6 +72,8 @@ log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# main class for this module
DEFAULT_QUESTION_TEXT = "Formatting error: You must explicitly specify the question text."
class LoncapaSystem(object):
"""
......@@ -855,17 +868,76 @@ class LoncapaProblem(object):
id=response_id_str
)
# assign one answer_id for each input type or solution type
# assign one answer_id for each input type
for entry in inputfields:
entry.attrib['response_id'] = str(response_id)
entry.attrib['answer_id'] = str(answer_id)
entry.attrib['id'] = "%s_%i_%i" % (self.problem_id, response_id, answer_id)
answer_id = answer_id + 1
# Find the label and save it for html transformation step
responsetype_label = response.find('label')
problem_data[self.problem_id + '_' + str(response_id)] = {
'label': responsetype_label.text if responsetype_label is not None else ''
question_id = u'{}_{}'.format(self.problem_id, response_id)
label = ''
element_to_be_deleted = None
# Extract label value from <label> tag or label attribute from inside the responsetype
responsetype_label_tag = response.find('label')
if responsetype_label_tag is not None:
label = responsetype_label_tag.text
# store the <label> tag containing the question text so it can be deleted
# later; otherwise the question would be rendered twice
element_to_be_deleted = responsetype_label_tag
elif 'label' in inputfields[0].attrib:
# Extract the label value from the label attribute.
# This is the case when we have a problem
# * with multiple questions without separation, or
# * a single question using only the old XML format
label = inputfields[0].attrib['label']
# Get the first <p> tag before the responsetype; this <p> contains the question text.
p_tag = response.xpath('preceding-sibling::p[1]')
if p_tag:
# The label attribute value may not match the text of the <p> tag.
# This happens when the author updated the question <p> tag directly in the XML but
# didn't change the label attribute value. In that case we consider the
# first <p> tag before the responsetype to be the question.
if label != p_tag[0].text:
label = p_tag[0].text
element_to_be_deleted = p_tag[0]
else:
# In this case the problem has neither a <label> tag nor a label attribute inside the responsetype,
# so we take the first <label> tag preceding this responsetype.
# This handles multi-question problems that do not use --- in their markdown.
label_tag = response.xpath("preceding-sibling::label[1]")
if label_tag:
label = label_tag[0].text
element_to_be_deleted = label_tag[0]
label = label.strip() or DEFAULT_QUESTION_TEXT
# delete label or p element only if responsetype is fully accessible
if response.tag in ACCESSIBLE_CAPA_RESPONSE_TYPES and element_to_be_deleted is not None:
element_to_be_deleted.getparent().remove(element_to_be_deleted)
# for non-accessible responsetypes the label attribute may not be present;
# in this case pass an empty label. Note that the label attribute is only used as the value for aria-label
if response.tag not in ACCESSIBLE_CAPA_RESPONSE_TYPES and label == DEFAULT_QUESTION_TEXT:
label = ''
# Extract descriptions and set unique id on each description tag
description_tags = response.findall('description')
description_id = 1
descriptions = OrderedDict()
for description in description_tags:
descriptions[
"description_%s_%i_%i" % (self.problem_id, response_id, description_id)
] = description.text
response.remove(description)
description_id += 1
problem_data[question_id] = {
'label': label,
'descriptions': descriptions
}
# instantiate capa Response
......
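The block above resolves the question text in a fixed order: a <label> child of the responsetype, then the legacy label attribute (cross-checked against the nearest preceding <p>), then a preceding <label> sibling. A minimal standalone sketch of that lookup order, assuming lxml is available; the sample XML and names below are invented for illustration and are not part of this commit:

```python
# Sketch only: mirrors the question-text lookup order above, assuming lxml.
from lxml import etree

PROBLEM_XML = """
<problem>
  <p>Select the fruit from the list</p>
  <multiplechoiceresponse>
    <choicegroup type="MultipleChoice" label="Select the fruit from the list">
      <choice correct="true">Apple</choice>
    </choicegroup>
  </multiplechoiceresponse>
</problem>
"""

root = etree.XML(PROBLEM_XML)
response = root.find('multiplechoiceresponse')
inputfields = [response.find('choicegroup')]

label = ''
label_tag = response.find('label')
if label_tag is not None:
    # new-style markup: <label> inside the responsetype
    label = label_tag.text
elif 'label' in inputfields[0].attrib:
    # legacy markup: label attribute on the inputtype
    label = inputfields[0].attrib['label']
    p_tag = response.xpath('preceding-sibling::p[1]')
    if p_tag and p_tag[0].text != label:
        # the preceding <p> wins if the author edited it without updating the attribute
        label = p_tag[0].text

print(label)  # -> Select the fruit from the list
```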
......@@ -225,7 +225,7 @@ class InputTypeBase(object):
self.hintmode = feedback.get('hintmode', None)
self.input_state = state.get('input_state', {})
self.answervariable = state.get('answervariable', None)
self.response_data = state.get('response_data', None)
self.response_data = state.get('response_data', {})
# put hint above msg if it should be displayed
if self.hintmode == 'always':
......@@ -319,8 +319,16 @@ class InputTypeBase(object):
'msg': self.msg,
'response_data': self.response_data,
'STATIC_URL': self.capa_system.STATIC_URL,
'describedby': '',
}
# Don't add aria-describedby attribute if there are no descriptions
if self.response_data.get('descriptions'):
description_ids = ' '.join(self.response_data.get('descriptions').keys())
context.update(
{'describedby': 'aria-describedby="{}"'.format(description_ids)}
)
context.update(
(a, v) for (a, v) in self.loaded_attributes.iteritems() if a in self.to_render
)
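For reference, a small sketch of how the describedby value above is assembled from the per-response data built in capa_problem.py; the ids and texts below are invented for illustration:

```python
# Sketch only: mirrors the aria-describedby construction above, using an
# invented response_data dict shaped like the one built in capa_problem.py.
from collections import OrderedDict

response_data = {
    'label': 'Eggplant is a _____?',
    'descriptions': OrderedDict([
        ('description_1_2_1', 'A vegetable is an edible part of a plant in tuber form.'),
        ('description_1_2_2', 'A fruit is a fertilized ovary of a plant and contains seeds.'),
    ]),
}

describedby = ''
if response_data.get('descriptions'):
    # space-separated ids matching the id attributes rendered on each description <p>
    description_ids = ' '.join(response_data['descriptions'].keys())
    describedby = 'aria-describedby="{}"'.format(description_ids)

print(describedby)
# aria-describedby="description_1_2_1 description_1_2_2"
```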
......@@ -380,7 +388,7 @@ class OptionInput(InputTypeBase):
Example:
<optioninput options="('Up','Down')" label="Where is the sky?" correct="Up"/><text>The location of the sky</text>
<optioninput options="('Up','Down')" correct="Up"/><text>The location of the sky</text>
# TODO: allow ordering to be randomized
"""
......@@ -416,7 +424,6 @@ class OptionInput(InputTypeBase):
Convert options to a convenient format.
"""
return [Attribute('options', transform=cls.parse_options),
Attribute('label', ''),
Attribute('inline', False)]
#-----------------------------------------------------------------------------
......@@ -435,7 +442,7 @@ class ChoiceGroup(InputTypeBase):
Example:
<choicegroup label="Which foil?">
<choicegroup>
<choice correct="false" name="foil1">
<text>This is foil One.</text>
</choice>
......@@ -478,7 +485,6 @@ class ChoiceGroup(InputTypeBase):
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
return [Attribute("show_correctness", "always"),
Attribute('label', ''),
Attribute("submitted_message", _("Answer received."))]
def _extra_context(self):
......@@ -640,7 +646,7 @@ class TextLine(InputTypeBase):
is used e.g. for embedding simulations turned into questions.
Example:
<textline math="1" trailing_text="m/s" label="How fast is a cheetah?" />
<textline math="1" trailing_text="m/s"/>
This example will render out a text line with a math preview and the text 'm/s'
after the end of the text line.
......@@ -656,7 +662,6 @@ class TextLine(InputTypeBase):
"""
return [
Attribute('size', None),
Attribute('label', ''),
Attribute('hidden', False),
Attribute('inline', False),
......@@ -716,7 +721,6 @@ class FileSubmission(InputTypeBase):
Convert the list of allowed files to a convenient format.
"""
return [Attribute('allowed_files', '[]', transform=cls.parse_files),
Attribute('label', ''),
Attribute('required_files', '[]', transform=cls.parse_files), ]
def setup(self):
......@@ -1030,7 +1034,6 @@ class Schematic(InputTypeBase):
Attribute('analyses', None),
Attribute('initial_value', None),
Attribute('submit_analyses', None),
Attribute('label', ''),
]
def _extra_context(self):
......@@ -1066,7 +1069,6 @@ class ImageInput(InputTypeBase):
"""
return [Attribute('src'),
Attribute('height'),
Attribute('label', ''),
Attribute('width'), ]
def setup(self):
......@@ -1157,8 +1159,7 @@ class ChemicalEquationInput(InputTypeBase):
"""
Can set size of text field.
"""
return [Attribute('size', '20'),
Attribute('label', ''), ]
return [Attribute('size', '20'), ]
def _extra_context(self):
"""
......@@ -1221,7 +1222,7 @@ class FormulaEquationInput(InputTypeBase):
Example:
<formulaequationinput size="50" label="Enter the equation for motion" />
<formulaequationinput size="50"/>
options: size -- width of the textbox.
trailing_text -- text to show after the input textbox when
......@@ -1239,7 +1240,6 @@ class FormulaEquationInput(InputTypeBase):
return [
Attribute('size', '20'),
Attribute('inline', False),
Attribute('label', ''),
Attribute('trailing_text', ''),
]
......@@ -1629,7 +1629,7 @@ class ChoiceTextGroup(InputTypeBase):
select the correct choice and fill in numbers to make it accurate.
<endouttext/>
<choicetextresponse>
<radiotextgroup label="What is the correct choice?">
<radiotextgroup>
<choice correct="false">The lowest number rolled was:
<decoy_input/> and the highest number rolled was:
<decoy_input/> .</choice>
......@@ -1652,7 +1652,7 @@ class ChoiceTextGroup(InputTypeBase):
select the correct choices and fill in numbers to make them accurate.
<endouttext/>
<choicetextresponse>
<checkboxtextgroup label="What is the answer?">
<checkboxtextgroup>
<choice correct="true">
The lowest number selected was <numtolerance_input answer="1.4142" tolerance="0.01"/>
</choice>
......@@ -1718,7 +1718,6 @@ class ChoiceTextGroup(InputTypeBase):
return [
Attribute("show_correctness", "always"),
Attribute("submitted_message", _("Answer received.")),
Attribute("label", ""),
]
def _extra_context(self):
......
......@@ -3,7 +3,7 @@
<div class="${status.classname}" id="status_${id}">
<input type="text" name="input_${id}" id="input_${id}" aria-label="${label}" aria-describedby="answer_${id}" data-input-id="${id}" value="${value|h}"
<input type="text" name="input_${id}" id="input_${id}" aria-label="${response_data['label']}" aria-describedby="answer_${id}" data-input-id="${id}" value="${value|h}"
% if size:
size="${size}"
% endif
......
......@@ -5,12 +5,11 @@
))
%>
<form class="choicegroup capa_inputtype" id="inputtype_${id}">
<fieldset>
% if response_data and response_data['label']:
<legend id="${id}-legend" class="response-fieldset-legend question-text">${response_data['label']}</legend>
% else:
<legend>Question</legend>
% endif
<fieldset ${describedby}>
<legend id="${id}-legend" class="response-fieldset-legend field-group-hd">${response_data['label']}</legend>
% for description_id, description_text in response_data['descriptions'].items():
<p class="question-description" id="${description_id}">${description_text}</p>
% endfor
% for choice_id, choice_label in choices:
<div class="field" aria-live="polite" aria-atomic="true">
<%
......@@ -33,8 +32,9 @@
<% label_class += ' choicegroup_' + correctness %>
% endif
% endif
class="${label_class}" >
class="${label_class}"
${describedby}
>
<input type="${input_type}" name="input_${id}${name_array_suffix}" id="input_${id}_${choice_id}" class="field-input input-${input_type}" value="${choice_id}"
## If the student selected this choice...
% if is_radio_input(choice_id):
......
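A rough rendering sketch for the fieldset/legend/description portion of this template, assuming the mako package can be imported directly; the trimmed template body and sample data are for illustration only:

```python
# Sketch only: renders a trimmed copy of the fieldset/legend/description markup above.
from collections import OrderedDict
from mako.template import Template

TEMPLATE = u"""<fieldset ${describedby}>
<legend id="${id}-legend" class="response-fieldset-legend field-group-hd">${response_data['label']}</legend>
% for description_id, description_text in response_data['descriptions'].items():
<p class="question-description" id="${description_id}">${description_text}</p>
% endfor
</fieldset>"""

print(Template(TEMPLATE).render(
    id='1_2_1',
    describedby='aria-describedby="description_1_2_1"',
    response_data={
        'label': 'Eggplant is a _____?',
        'descriptions': OrderedDict([('description_1_2_1', 'Check all that apply.')]),
    },
))
```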
......@@ -9,8 +9,8 @@
<section id="choicetextinput_${id}" class="choicetextinput">
<form class="choicetextgroup capa_inputtype" id="inputtype_${id}">
<div class="script_placeholder" data-src="${STATIC_URL}js/capa/choicetextinput.js"/>
<fieldset aria-label="${label}">
<fieldset aria-label="${response_data['label']}">
% for choice_id, choice_description in choices:
<%choice_id= choice_id %>
<section id="forinput${choice_id}"
......@@ -59,7 +59,7 @@
<span id="answer_${id}"></span>
</fieldset>
<input class= "choicetextvalue" type="hidden" name="input_${id}{}" id="input_${id}" value="${value|h}" />
<div class="indicator-container">
% if input_type == 'checkbox' or not element_checked:
<span class="status ${status.classname}" id="status_${id}"></span>
......
......@@ -7,7 +7,7 @@
% endif
<p class="debug">${status}</p>
<input type="file" name="input_${id}" id="input_${id}" value="${value}" multiple="multiple" data-required_files="${required_files|h}" data-allowed_files="${allowed_files|h}" aria-label="${label}" />
<input type="file" name="input_${id}" id="input_${id}" value="${value}" multiple="multiple" data-required_files="${required_files|h}" data-allowed_files="${allowed_files|h}" aria-label="${response_data['label']}"/>
</div>
<div class="message">${msg|n}</div>
</section>
......@@ -5,8 +5,7 @@
<div class="${status.classname}" id="status_${id}">
<input type="text" name="input_${id}" id="input_${id}"
data-input-id="${id}" value="${value}"
aria-label="${label}"
aria-describedby="${id}_status"
${describedby}
% if size:
size="${size}"
% endif
......
<% doinline = "inline" if inline else "" %>
<form class="inputtype option-input ${doinline}">
<select name="input_${id}" id="input_${id}" aria-label="${label}" aria-describedby="answer_${id}">
<select name="input_${id}" id="input_${id}" aria-label="${response_data['label']}" ${describedby}>
<option value="option_${id}_dummy_default"> </option>
% for option_id, option_description in options:
<option value="${option_id}"
......
......@@ -8,7 +8,7 @@
analyses="${analyses}"
name="input_${id}"
id="input_${id}"
aria-label="${label}"
aria-label="${response_data['label']}"
aria-describedby="answer_${id}"
value="${value|h}"
initial_value="${initial_value|h}"
......
......@@ -16,7 +16,7 @@
<div style="display:none;" name="${hidden}" inputid="input_${id}" />
% endif
<input type="text" name="input_${id}" id="input_${id}" aria-label="${label}" aria-describedby="answer_${id}" value="${value}"
<input type="text" name="input_${id}" id="input_${id}" aria-label="${response_data['label']}" ${describedby} value="${value}"
% if do_math:
class="math"
% endif
......@@ -29,15 +29,15 @@
/>
<span class="trailing_text">${trailing_text}</span>
<span class="status"
<span class="status"
%if status != 'unsubmitted':
%endif
aria-describedby="input_${id}" data-tooltip="${status.display_tooltip}">
<span class="sr">
%if value:
${value}
% else:
${label}
%else:
${response_data['label']}
%endif
-
${status.display_name}
......
<problem>
<p>Select all the fruits from the list. In retrospect, the wordiness of these tests increases the dizziness!</p>
<p>In retrospect, the wordiness of these tests increases the dizziness!</p>
<choiceresponse>
<checkboxgroup label="Select all the fruits from the list">
<label>Select all the fruits from the list</label>
<checkboxgroup>
<choice correct="true" id="alpha">Apple
<choicehint selected="TrUe">You are right that apple is a fruit.
</choicehint>
......@@ -33,9 +34,10 @@
</compoundhint>
</checkboxgroup>
</choiceresponse>
<p>Select all the vegetables from the list</p>
<choiceresponse>
<checkboxgroup label="Select all the vegetables from the list">
<label>Select all the vegetables from the list</label>
<checkboxgroup>
<choice correct="false">Banana
<choicehint selected="true">No, sorry, a banana is a fruit.
</choicehint>
......@@ -52,11 +54,11 @@
<choice correct="true">
Brussel Sprout
<choicehint selected="true">
Brussel sprouts are vegetables.
</choicehint>
<choicehint selected="false">
Brussel sprout is the only vegetable in this list.
</choicehint>
</choice>
......@@ -66,6 +68,7 @@
</compoundhint>
</checkboxgroup>
</choiceresponse>
<p>Compoundhint vs. correctness</p>
<choiceresponse>
<checkboxgroup>
......@@ -80,17 +83,17 @@
<choiceresponse>
<checkboxgroup>
<choice correct="true">
A
A
<choicehint selected="true" label="AA">
aa
</choicehint></choice>
<choice correct="true">
B <choicehint selected="false" label="BB">
bb
</choicehint></choice>
</checkboxgroup>
</choiceresponse>
......@@ -114,4 +117,3 @@
</problem>
<problem>
<p>(note the blank line before mushroom -- be sure to include this test case)</p>
<p>Select the fruit from the list</p>
<multiplechoiceresponse>
<choicegroup label="Select the fruit from the list" type="MultipleChoice">
<label>Select the fruit from the list</label>
<choicegroup type="MultipleChoice">
<choice correct="false">Mushroom
<choicehint label="">Mushroom is a fungus, not a fruit.
</choicehint>
......@@ -14,9 +14,10 @@
</choice>
</choicegroup>
</multiplechoiceresponse>
<p>Select the vegetables from the list</p>
<multiplechoiceresponse>
<choicegroup label="Select the vegetables from the list" type="MultipleChoice">
<label>Select the vegetables from the list</label>
<choicegroup type="MultipleChoice">
<choice correct="false">Mushroom
<choicehint>Mushroom is a fungus, not a vegetable.
</choicehint>
......
<problem>
<p>Select the fruit from the list</p>
<multiplechoiceresponse>
<choicegroup label="Select the fruit from the list" type="MultipleChoice">
<label>Select the fruit from the list</label>
<choicegroup type="MultipleChoice">
<choice correct="false">Mushroom
<choicehint>Mushroom <img src="#" ale="#"/>is a fungus, not a fruit.</choicehint>
</choice>
......
<problem>
<numericalresponse answer="1.141">
<label>What value when squared is approximately equal to 2 (give your answer to 2 decimal places)?</label>
<responseparam default=".01" type="tolerance"/>
<formulaequationinput label="What value when squared is approximately equal to 2 (give your answer to 2 decimal places)?"/>
<formulaequationinput/>
<correcthint label="Nice">
The square root of two turns up in the strangest places.
......@@ -11,8 +12,9 @@
</numericalresponse>
<numericalresponse answer="4">
<label>What is 2 + 2?</label>
<responseparam default=".01" type="tolerance"/>
<formulaequationinput label="What is 2 + 2?"/>
<formulaequationinput/>
<correcthint>
Pretty easy, uh?.
</correcthint>
......@@ -34,4 +36,3 @@ also not multiple correcthint
</lehint>
-->
</problem>
......@@ -2,7 +2,8 @@
<p>In which country would you find the city of Paris?</p>
<stringresponse answer="FranceΩ" type="ci" >
<textline label="In which country would you find the city of Paris?" size="20"/>
<label>In which country would you find the city of Paris?</label>
<textline size="20"/>
<correcthint>
Viva la France!Ω
</correcthint>
......@@ -22,16 +23,18 @@
<p>What color is the sky? A minimal example, case sensitive, not regex.</p>
<stringresponse answer="Blue">
<label>What color is the sky?</label>
<correcthint >The red light is scattered by water molecules leaving only blue light.
</correcthint>
<textline label="What color is the sky?" size="20"/>
<textline size="20"/>
</stringresponse>
<p>(This question will cause an illegal regular expression exception)</p>
<stringresponse answer="Bonk">
<label>Why not?</label>
<correcthint >This hint should never appear.
</correcthint>
<textline label="Why not?" size="20"/>
<textline size="20"/>
<regexphint answer="[">
This hint should never appear either because the regex is illegal.
</regexphint>
......@@ -56,7 +59,7 @@
<regexphint answer="FG+"> hint6 </regexphint>
<textline size="20"/>
</stringresponse>
<!-- backward compatibility for additional_answer: old and new format together in
a problem, scored correctly, and the new style has a hint -->
<stringresponse answer="A">
......
<problem>
<choiceresponse>
<checkboxgroup label="Select all the vegetables from the list">
<label>Select all the vegetables from the list</label>
<checkboxgroup>
<choice correct="false">Banana
<choicehint selected="true">No, sorry, a banana is a fruit.
</choicehint>
......
......@@ -7,6 +7,7 @@ import mock
from .response_xml_factory import StringResponseXMLFactory, CustomResponseXMLFactory
from . import test_capa_system, new_loncapa_problem
from capa.capa_problem import DEFAULT_QUESTION_TEXT
class CapaHtmlRenderTest(unittest.TestCase):
......@@ -176,7 +177,6 @@ class CapaHtmlRenderTest(unittest.TestCase):
expected_textline_context = {
'STATIC_URL': '/dummy-static/',
'status': the_system.STATUS_CLASS('unsubmitted'),
'label': '',
'value': '',
'preprocessor': None,
'msg': '',
......@@ -186,6 +186,8 @@ class CapaHtmlRenderTest(unittest.TestCase):
'id': '1_2_1',
'trailing_text': '',
'size': None,
'response_data': {'label': DEFAULT_QUESTION_TEXT, 'descriptions': {}},
'describedby': ''
}
expected_solution_context = {'id': '1_solution_1'}
......
......@@ -1256,7 +1256,6 @@ class CapaMixin(CapaFields):
of the problem. If problem related metadata cannot be located it should be replaced with empty
strings ''.
"""
input_metadata = {}
for input_id, internal_answer in answers.iteritems():
answer_input = self.lcp.inputs.get(input_id)
......@@ -1290,7 +1289,7 @@ class CapaMixin(CapaFields):
is_correct = ''
input_metadata[input_id] = {
'question': getattr(answer_input, 'loaded_attributes', {}).get('label', ''),
'question': answer_input.response_data.get('label', ''),
'answer': user_visible_answer,
'response_type': getattr(getattr(answer_response, 'xml', None), 'tag', ''),
'input_type': getattr(answer_input, 'tag', ''),
......
......@@ -153,11 +153,8 @@ div.problem {
}
}
span > label {
display: block;
margin-bottom: $baseline;
font: inherit;
color: inherit;
.question-description {
@include margin(($baseline*0.75), 0);
}
}
......
......@@ -197,7 +197,7 @@ class @MarkdownEditingDescriptor extends XModule.Descriptor
demandHintTags = [];
toXml = `function (markdown) {
var xml = markdown,
i, splits, scriptFlag;
i, splits, makeParagraph;
var responseTypes = [
'optionresponse', 'multiplechoiceresponse', 'stringresponse', 'numericalresponse', 'choiceresponse'
];
......@@ -209,6 +209,20 @@ class @MarkdownEditingDescriptor extends XModule.Descriptor
xml = xml.replace(/(^.*?$)(?=\n\=\=+$)/gm, '<h3 class="hd hd-2 problem-header">$1</h3>');
xml = xml.replace(/\n^\=\=+$/gm, '');
// extract question and description (optional)
// >>question||description<< converts to
// <label>question</label> <description>description</description>
xml = xml.replace(/>>([^]+?)<</gm, function(match, questionText) {
var result = questionText.split('||'),
label = '<label>' + result[0] + '</label>' + '\n';
// don't add empty <description> tag
if (result.length === 1 || !result[1]) {
return label;
}
return label + '<description>' + result[1] + '</description>\n'
})
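Illustration only: the same >>question||description<< conversion expressed with Python's re module, to make the replacement rule easier to follow:

```python
# Illustration only: a Python re equivalent of the markdown conversion above.
import re

def convert_question_markup(markdown):
    def repl(match):
        parts = match.group(1).split('||')
        label = '<label>' + parts[0] + '</label>\n'
        # don't add an empty <description> tag
        if len(parts) == 1 or not parts[1]:
            return label
        return label + '<description>' + parts[1] + '</description>\n'
    return re.sub(r'>>(.+?)<<', repl, markdown, flags=re.DOTALL)

print(convert_question_markup(
    '>>Which of the following is a fruit?||You can select only one option.<<'
))
# <label>Which of the following is a fruit?</label>
# <description>You can select only one option.</description>
```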
// Pull out demand hints, || a hint ||
var demandhints = '';
xml = xml.replace(/(^\s*\|\|.*?\|\|\s*$\n?)+/gm, function(match) { // $\n
......@@ -515,35 +529,6 @@ class @MarkdownEditingDescriptor extends XModule.Descriptor
return selectString;
});
// replace labels
// looks for >>arbitrary text<< and inserts it into the label attribute of the input type directly below the text.
var split = xml.split('\n');
var new_xml = [];
var line, i, curlabel, prevlabel = '';
var didinput = false;
for (i = 0; i < split.length; i++) {
line = split[i];
if (match = line.match(/>>(.*)<</)) {
curlabel = match[1].replace(/&/g, '&amp;')
.replace(/</g, '&lt;')
.replace(/>/g, '&gt;')
.replace(/"/g, '&quot;')
.replace(/'/g, '&apos;');
// extract the question text and convert it to a <p> tag
line = line.replace(/>>(.*?)<</, "<p class='qtitle'>$1</p>");
} else if (line.match(/<\w+response/) && didinput && curlabel == prevlabel) {
// reset label to prevent gobbling up previous one (if multiple questions)
curlabel = '';
didinput = false;
} else if (line.match(/<(textline|optioninput|formulaequationinput|choicegroup|checkboxgroup)/) && curlabel != '' && curlabel != undefined) {
line = line.replace(/<(textline|optioninput|formulaequationinput|choicegroup|checkboxgroup)/, '<$1 label="' + curlabel + '"');
didinput = true;
prevlabel = curlabel;
}
new_xml.push(line);
}
xml = new_xml.join('\n');
// replace code blocks
xml = xml.replace(/\[code\]\n?([^\]]*)\[\/?code\]/gmi, function(match, p1) {
var selectString = '<pre><code>\n' + p1 + '</code></pre>';
......@@ -552,20 +537,23 @@ class @MarkdownEditingDescriptor extends XModule.Descriptor
});
// split scripts and preformatted sections, and wrap paragraphs
splits = xml.split(/(\<\/?(?:script|pre).*?\>)/g);
scriptFlag = false;
splits = xml.split(/(\<\/?(?:script|pre|label|description).*?\>)/g);
// Wrap a line in a <p> tag when it is not already wrapped by another tag
// makeParagraph is true when the line is not already wrapped by another tag, false otherwise
makeParagraph = true;
for (i = 0; i < splits.length; i += 1) {
if(/\<(script|pre)/.test(splits[i])) {
scriptFlag = true;
if (/\<(script|pre|label|description)/.test(splits[i])) {
makeParagraph = false;
}
if(!scriptFlag) {
if (makeParagraph) {
splits[i] = splits[i].replace(/(^(?!\s*\<|$).*$)/gm, '<p>$1</p>');
}
if(/\<\/(script|pre)/.test(splits[i])) {
scriptFlag = false;
if (/\<\/(script|pre|label|description)/.test(splits[i])) {
makeParagraph = true;
}
}
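Sketched in Python for clarity (illustration only): the makeParagraph toggle stays false inside script, pre, label, and description blocks, so their contents are not wrapped in <p> tags:

```python
# Illustration only: Python translation of the makeParagraph toggle above.
import re

def wrap_paragraphs(xml):
    splits = re.split(r'(</?(?:script|pre|label|description).*?>)', xml)
    make_paragraph = True
    for i, piece in enumerate(splits):
        if re.search(r'<(script|pre|label|description)', piece):
            make_paragraph = False   # entering a protected block: don't wrap its contents
        if make_paragraph:
            splits[i] = re.sub(r'(^(?!\s*<|$).*$)', r'<p>\1</p>', piece, flags=re.M)
        if re.search(r'</(script|pre|label|description)', piece):
            make_paragraph = True    # protected block closed: resume wrapping
    return ''.join(splits)

print(wrap_paragraphs('<label>Which state is largest?</label>\nPick one answer.\n'))
# <label>Which state is largest?</label>
# <p>Pick one answer.</p>
```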
......@@ -600,11 +588,6 @@ class @MarkdownEditingDescriptor extends XModule.Descriptor
return;
}
// replace <p> tag for question title with <label> tag
if (child.hasAttribute('class') && child.getAttribute('class') === 'qtitle') {
child = $('<label>' + child.textContent + '</label>')[0];
}
if (beforeInputtype) {
// safe-lint: disable=javascript-jquery-insert-into-target
responseType[0].insertBefore(child, inputtype);
......
......@@ -8,15 +8,13 @@ metadata:
You can use the following example problem as a model.
>>The following languages are in the Indo-European family:<<
>>The following languages are in the Indo-European family:||Make sure you select all of the correct options—there may be more than one!<<
[x] Urdu
[ ] Finnish
[x] Marathi
[x] French
[ ] Hungarian
Note: Make sure you select all of the correct options—there may be more than one!
[explanation]
Urdu, Marathi, and French are all Indo-European languages, while Finnish and Hungarian are in the Uralic family.
[explanation]
......@@ -28,14 +26,14 @@ data: |
<p>When you add the problem, be sure to select Settings to specify a Display Name and other values that apply.</p>
<p>You can use the following example problem as a model.</p>
<label>The following languages are in the Indo-European family:</label>
<checkboxgroup label="The following languages are in the Indo-European family:">
<description>Make sure you select all of the correct options—there may be more than one!</description>
<checkboxgroup>
<choice correct="true">Urdu</choice>
<choice correct="false">Finnish</choice>
<choice correct="true">Marathi</choice>
<choice correct="true">French</choice>
<choice correct="false">Hungarian</choice>
</checkboxgroup>
<p>Note: Make sure you select all of the correct options—there may be more than one!</p>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......
......@@ -13,7 +13,7 @@ metadata:
Use the following example problem as a model.
>>Which of the following is a fruit? Check all that apply.<<
>>Which of the following is a fruit?||Make sure you select all of the correct options—there may be more than one!<<
[x] apple {{ selected: You are correct that an apple is a fruit because it is the fertilized ovary that comes from an apple tree and contains seeds. }, { unselected: Remember that an apple is also a fruit.}}
[x] pumpkin {{ selected: You are correct that a pumpkin is a fruit because it is the fertilized ovary of a squash plant and contains seeds. }, { unselected: Remember that a pumpkin is also a fruit.}}
......@@ -36,8 +36,9 @@ data: |
<p>You can also add hints for learners.</p>
<p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
<p>Use the following example problem as a model.</p>
<label>Which of the following is a fruit? Check all that apply.</label>
<checkboxgroup label="Which of the following is a fruit? Check all that apply.">
<label>Which of the following is a fruit?</label>
<description>Make sure you select all of the correct options—there may be more than one!</description>
<checkboxgroup>
<choice correct="true">apple
<choicehint selected="true">You are correct that an apple is a fruit because it is the fertilized ovary that comes from an apple tree and contains seeds.</choicehint>
<choicehint selected="false">Remember that an apple is also a fruit.</choicehint>
......
......@@ -41,8 +41,8 @@ data: |
</script>
<label>Enter two integers that sum to 10.</label>
<textline size="40" correct_answer="3" label="Integer #1"/><br/>
<textline size="40" correct_answer="7" label="Integer #2"/>
<textline size="40" correct_answer="3"/><br/>
<textline size="40" correct_answer="7"/>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......@@ -64,8 +64,8 @@ data: |
</script>
<label>Enter two integers that sum to 20.</label>
<textline size="40" correct_answer="11" label="Integer #1"/><br/>
<textline size="40" correct_answer="9" label="Integer #2"/>
<textline size="40" correct_answer="11"/><br/>
<textline size="40" correct_answer="9"/>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......
......@@ -34,8 +34,9 @@ data: |
<formularesponse type="ci" samples="R_1,R_2,R_3@1,2,3:3,4,5#10" answer="$VoVi">
<label>Write an expression for the product of \( R_1\), \( R_2\), and the inverse of \( R_3\).</label>
<description>Enter the equation</description>
<responseparam type="tolerance" default="0.00001"/>
<formulaequationinput size="40" label="Enter the equation"/>
<formulaequationinput size="40"/>
</formularesponse>
<script type="loncapa/python">
......@@ -48,7 +49,8 @@ data: |
<formularesponse type="ci" samples="x,n@1,2:3,4#10" answer="$derivative">
<label>Let \( x\) be a variable, and let \( n\) be an arbitrary constant. What is the derivative of \( x^n\)?</label>
<description>Enter the equation</description>
<responseparam type="tolerance" default="0.00001"/>
<formulaequationinput size="40" label="Enter the equation"/>
<formulaequationinput size="40"/>
</formularesponse>
</problem>
......@@ -159,7 +159,7 @@ data: |
<label>What was the first post-secondary school in China to allow both male and female students?</label>
<additional_answer>National Central University</additional_answer>
<additional_answer>Nanjing University</additional_answer>
<textline label="What was the first post-secondary school in China to allow both male and female students?" size="40"/>
<textline size="40"/>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......@@ -170,15 +170,15 @@ data: |
<br/>
<p><strong>Example Custom Python-Evaluated Input Problem</strong></p>
<customresponse cfn="test_add_to_ten">
<script type="loncapa/python">
def test_add_to_ten(expect, ans):
return test_add(10, ans)
</script>
<label>Enter two integers that sum to 10.</label>
<textline size="40" correct_answer="3" label="Integer #1"/><br/>
<textline size="40" correct_answer="7" label="Integer #2"/>
<textline size="40" correct_answer="3"/><br/>
<textline size="40" correct_answer="7"/>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......@@ -198,8 +198,8 @@ data: |
return False
</script>
<label>Enter two integers that sum to 20.</label>
<textline size="40" correct_answer="11" label="Integer #1"/><br/>
<textline size="40" correct_answer="9" label="Integer #2"/>
<textline size="40" correct_answer="11"/><br/>
<textline size="40" correct_answer="9"/>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......
......@@ -8,7 +8,7 @@ metadata:
You can use the following example problem as a model.
>>Which of the following countries has the largest population?<<
>>Which of the following countries has the largest population?||You can select only one option.<<
( ) Brazil {{ timely feedback -- explain why an almost correct answer is wrong }}
( ) Germany
(x) Indonesia
......@@ -29,7 +29,8 @@ data: |
<p>When you add the problem, be sure to select Settings to specify a Display Name and other values that apply.</p>
<p>You can use the following example problem as a model.</p>
<label>Which of the following countries has the largest population?</label>
<choicegroup label="Which of the following countries has the largest population?" type="MultipleChoice">
<description>You can select only one option.</description>
<choicegroup type="MultipleChoice">
<choice correct="false">Brazil
<choicehint>timely feedback -- explain why an almost correct answer is wrong</choicehint>
</choice>
......
......@@ -11,7 +11,7 @@ metadata:
Use the following example problem as a model.
>>Which of the following is a vegetable?<<
>>Which of the following is a vegetable?||You can select only one option.<<
( ) apple {{An apple is the fertilized ovary that comes from an apple tree and contains seeds, meaning it is a fruit.}}
( ) pumpkin {{A pumpkin is the fertilized ovary of a squash plant and contains seeds, meaning it is a fruit.}}
(x) potato {{A potato is an edible part of a plant in tuber form and is a vegetable.}}
......@@ -29,7 +29,8 @@ data: |
<p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
<p>Use the following example problem as a model.</p>
<label>Which of the following is a vegetable?</label>
<choicegroup label="Which of the following is a vegetable?" type="MultipleChoice">
<description>You can select only one option.</description>
<choicegroup>
<choice correct="false">apple
<choicehint>An apple is the fertilized ovary that comes from an apple tree and contains seeds, meaning it is a fruit.</choicehint>
</choice>
......
......@@ -10,7 +10,7 @@ metadata:
You can use the following example problems as models.
>>How many miles away from Earth is the sun? Use scientific notation to answer.<<
>>How many miles away from Earth is the sun?||Use scientific notation to answer.<<
= 9.3*10^7
or= 9.296*10^7
......@@ -21,7 +21,7 @@ metadata:
---
>>The square of what number is -100?<<
>>The square of what number is -100?||Use scientific notation to answer.<<
= 10*i
......@@ -37,8 +37,9 @@ data: |
for information about how to enter text into the field.</p>
<p>When you add the problem, be sure to select <strong>Settings</strong> to specify a <strong>Display Name</strong> and other values that apply.</p>
<p>You can use the following example problems as models.</p>
<label>How many miles away from Earth is the sun? Use scientific notation to answer.</label>
<formulaequationinput label="How many million miles are between Earth and the sun? Use scientific notation to answer."/>
<label>How many miles away from Earth is the sun?</label>
<description>Use scientific notation to answer.</description>
<formulaequationinput/>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......@@ -49,7 +50,8 @@ data: |
<numericalresponse answer="10*i">
<label>The square of what number is -100?</label>
<formulaequationinput label="The square of what number is -100?"/>
<description>Use scientific notation to answer.</description>
<formulaequationinput/>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......
......@@ -13,7 +13,7 @@ metadata:
Use the following example problem as a model.
>>What is the arithmetic mean for the following set of numbers? (1, 5, 6, 3, 5)<<
>>What is the arithmetic mean for the following set of numbers? (1, 5, 6, 3, 5)||Use scientific notation to answer.<<
= 4 {{The mean for this set of numbers is 20 / 5, which equals 4.}}
......@@ -34,7 +34,8 @@ data: |
<p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
<p>Use the following example problem as a model.</p>
<label>What is the arithmetic mean for the following set of numbers? (1, 5, 6, 3, 5)</label>
<formulaequationinput label="What is the arithmetic mean for the following set of numbers? (1, 5, 6, 3, 5)"/>
<description>Use scientific notation to answer.</description>
<formulaequationinput/>
<correcthint>The mean for this set of numbers is 20 / 5, which equals 4.</correcthint>
<solution>
<div class="detailed-solution">
......
......@@ -8,7 +8,7 @@ metadata:
You can use the following example problem as a model.
>>Which of the following countries celebrates its independence on August 15?<<
>>Which of the following countries celebrates its independence on August 15?||You can select only one option.<<
[[(India), Spain, China, Bermuda]]
......@@ -22,7 +22,8 @@ data: |
<p>When you add the problem, be sure to select Settings to specify a Display Name and other values that apply.</p>
<p>You can use the following example problem as a model.</p>
<label>Which of the following countries celebrates its independence on August 15?</label>
<optioninput label="Which of the following countries celebrates its independence on August 15?" options="('India','Spain','China','Bermuda')" correct="India"/>
<description>You can select only one option.</description>
<optioninput options="('India','Spain','China','Bermuda')" correct="India"/>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......
......@@ -11,7 +11,7 @@ metadata:
Use the following example problem as a model.
>> A/an ________ is a vegetable.<<
>> A/an ________ is a vegetable.||You can select only one option.<<
[[
apple {{An apple is the fertilized ovary that comes from an apple tree and contains seeds, meaning it is a fruit.}}
......@@ -32,7 +32,8 @@ data: |
<p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
<p>Use the following example problem as a model.</p>
<label>A/an ________ is a vegetable.</label>
<optioninput label=" A/an ________ is a vegetable.">
<description>You can select only one option.</description>
<optioninput>
<option correct="False">apple
<optionhint>An apple is the fertilized ovary that comes from an apple tree and contains seeds, meaning it is a fruit.</optionhint>
</option>
......
......@@ -8,7 +8,7 @@ metadata:
You can use the following example problem as a model.
>>What was the first post-secondary school in China to allow both male and female students?<<
>>What was the first post-secondary school in China to allow both male and female students?||Be sure to check your spelling.<<
= Nanjing Higher Normal Institute
or= National Central University
......@@ -25,9 +25,10 @@ data: |
<p>When you add the problem, be sure to select <strong>Settings</strong> to specify a <strong>Display Name</strong> and other values that apply.</p>
<p>You can use the following example problem as a model.</p>
<label>What was the first post-secondary school in China to allow both male and female students?</label>
<description>Be sure to check your spelling.</description>
<additional_answer>National Central University</additional_answer>
<additional_answer>Nanjing University</additional_answer>
<textline label="What was the first post-secondary school in China to allow both male and female students?" size="40"/>
<textline size="40"/>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
......
......@@ -11,7 +11,7 @@ metadata:
Use the following example problem as a model.
>>Which U.S. state has the largest land area?<<
>>Which U.S. state has the largest land area?||Be sure to check your spelling.<<
=Alaska {{Alaska is 576,400 square miles, more than double the land area
of the second largest state, Texas.}}
......@@ -32,10 +32,11 @@ data: |
<p>Be sure to select Settings to specify a Display Name and other values that apply.</p>
<p>Use the following example problem as a model.</p>
<label>Which U.S. state has the largest land area?</label>
<description>Be sure to check your spelling.</description>
<correcthint>Alaska is 576,400 square miles, more than double the land area of the second largest state, Texas.</correcthint>
<stringequalhint answer="Texas">While many people think Texas is the largest state, it is actually the second largest, with 261,797 square miles.</stringequalhint>
<stringequalhint answer="California">California is the third largest state, with 155,959 square miles.</stringequalhint>
<textline label="Which U.S. state has the largest land area?" size="20"/>
<textline size="20"/>
</stringresponse>
<demandhint>
<hint>Consider the square miles, not population.</hint>
......
......@@ -221,3 +221,17 @@ class ProblemPage(PageObject):
if not self.q(xpath=xpath.format(choice)).is_present():
return False
return True
@property
def problem_question(self):
"""
Return the question text of the problem.
"""
return self.q(css="div.problem .wrapper-problem-response legend").text[0]
@property
def problem_question_descriptions(self):
"""
Return a list of question descriptions of the problem.
"""
return self.q(css="div.problem .wrapper-problem-response .question-description").text
......@@ -71,9 +71,9 @@ class EntranceExamPassTest(EntranceExamTest):
"""
xml = dedent("""
<problem>
<p>What is height of eiffel tower without the antenna?.</p>
<multiplechoiceresponse>
<choicegroup label="What is height of eiffel tower without the antenna?" type="MultipleChoice">
<label>What is height of eiffel tower without the antenna?.</label>
<choicegroup type="MultipleChoice">
<choice correct="false">324 meters<choicehint>Antenna is 24 meters high</choicehint></choice>
<choice correct="true">300 meters</choice>
<choice correct="false">224 meters</choice>
......
......@@ -4,6 +4,7 @@ Bok choy acceptance tests for problems in the LMS
See also old lettuce tests in lms/djangoapps/courseware/features/problems.feature
"""
from nose.plugins.attrib import attr
from textwrap import dedent
from common.test.acceptance.tests.helpers import UniqueCourseTest
......@@ -77,7 +78,8 @@ class ProblemClarificationTest(ProblemsTest):
<clarification>Return on Investment <strong>(per year)</strong></clarification> over 20 years.
</p>
<numericalresponse answer="6.5">
<textline label="Enter the annual ROI" trailing_text="%" />
<label>Enter the annual ROI</label>
<textline trailing_text="%" />
</numericalresponse>
</text>
</problem>
......@@ -263,7 +265,8 @@ class ProblemWithMathjax(ProblemsTest):
<problem>
<p>Check mathjax has rendered [mathjax]E=mc^2[/mathjax]</p>
<multiplechoiceresponse>
<choicegroup label="Answer this?" type="MultipleChoice">
<label>Answer this?</label>
<choicegroup type="MultipleChoice">
<choice correct="true">Choice1 <choicehint>Correct choice message</choicehint></choice>
<choice correct="false">Choice2<choicehint>Wrong choice message</choicehint></choice>
</choicegroup>
......@@ -310,7 +313,8 @@ class ProblemPartialCredit(ProblemsTest):
<problem>
<p>The answer is 1. Partial credit for -1.</p>
<numericalresponse answer="1" partial_credit="list">
<formulaequationinput label="How many miles away from Earth is the sun? Use scientific notation to answer." />
<label>How many miles away from Earth is the sun? Use scientific notation to answer.</label>
<formulaequationinput/>
<responseparam type="tolerance" default="0.01" />
<responseparam partial_answers="-1" />
</numericalresponse>
......@@ -343,9 +347,9 @@ class LogoutDuringAnswering(ProblemsTest):
"""
xml = dedent("""
<problem>
<p>The answer is 1</p>
<numericalresponse answer="1">
<formulaequationinput label="where are the songs of spring?" />
<label>The answer is 1</label>
<formulaequationinput/>
<responseparam type="tolerance" default="0.01" />
</numericalresponse>
</problem>
......@@ -412,3 +416,94 @@ class LogoutDuringAnswering(ProblemsTest):
self.assertTrue(problem_page.is_browser_on_page())
self.assertEqual(problem_page.problem_name, 'TEST PROBLEM')
class ProblemQuestionDescriptionTest(ProblemsTest):
"""TestCase Class to verify question and description rendering."""
descriptions = [
"A vegetable is an edible part of a plant in tuber form.",
"A fruit is a fertilized ovary of a plant and contains seeds."
]
def get_problem(self):
"""
Create a problem with question and description.
"""
xml = dedent("""
<problem>
<choiceresponse>
<label>Eggplant is a _____?</label>
<description>{}</description>
<description>{}</description>
<checkboxgroup>
<choice correct="true">vegetable</choice>
<choice correct="false">fruit</choice>
</checkboxgroup>
</choiceresponse>
</problem>
""".format(*self.descriptions))
return XBlockFixtureDesc('problem', 'Label with Description', data=xml)
def test_question_with_description(self):
"""
Scenario: Test that question and description are rendered as expected.
Given I am enrolled in a course.
When I visit a unit page with a CAPA question.
Then the label and descriptions should be rendered correctly.
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
problem_page.wait_for_element_visibility(problem_page.CSS_PROBLEM_HEADER, 'wait for problem header')
self.assertEqual(problem_page.problem_name, 'Label with Description')
self.assertEqual(problem_page.problem_question, 'Eggplant is a _____?')
self.assertEqual(problem_page.problem_question_descriptions, self.descriptions)
@attr('a11y')
class CAPAProblemQuestionDescriptionA11yTest(ProblemsTest):
"""TestCase Class to verify CAPA problem questions accessibility."""
def get_problem(self):
"""
Problem structure.
"""
xml = dedent("""
<problem>
<choiceresponse>
<label>question 1 text here</label>
<description>description 2 text 1</description>
<description>description 2 text 2</description>
<checkboxgroup>
<choice correct="true">True</choice>
<choice correct="false">False</choice>
</checkboxgroup>
</choiceresponse>
<multiplechoiceresponse>
<label>question 2 text here</label>
<description>description 2 text 1</description>
<description>description 2 text 2</description>
<choicegroup type="MultipleChoice">
<choice correct="false">Alpha <choicehint>A hint</choicehint></choice>
<choice correct="true">Beta</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
return XBlockFixtureDesc('problem', 'Problem A11Y TEST', data=xml)
def test_a11y(self):
"""
Scenario: Verify that each question and description has a unique id.
Given I am enrolled in a course.
And I visit a unit page with two CAPA problems
Then each question and description should have a unique ID
"""
self.courseware_page.visit()
problem_page = ProblemPage(self.browser)
# Set the scope to the problem question
problem_page.a11y_audit.config.set_scope(
include=['section.wrapper-problem-response']
)
# Run the accessibility audit.
problem_page.a11y_audit.check_for_accessibility_errors()