Commit f91583c2
authored Aug 19, 2016 by muhammad-ammar
committed by muzaffaryousaf, Sep 02, 2016
make responsetypes with multiple inputtypes accessible

parent 19cc68c8
Showing 11 changed files with 141 additions and 85 deletions
common/lib/capa/capa/capa_problem.py                    +64  -46
common/lib/capa/capa/responsetypes.py                   +11  -2
common/lib/capa/capa/templates/choicetext.html          +3   -3
common/lib/capa/capa/templates/textline.html            +3   -3
common/lib/capa/capa/tests/__init__.py                  +23  -4
common/lib/capa/capa/tests/response_xml_factory.py      +8   -0
common/lib/capa/capa/tests/test_capa_problem.py         +0   -0
common/lib/capa/capa/tests/test_html_render.py          +1   -2
common/lib/xmodule/xmodule/capa_base.py                 +3   -0
common/lib/xmodule/xmodule/tests/test_capa_module.py    +24  -18
common/test/acceptance/tests/lms/test_problem_types.py  +1   -7
common/lib/capa/capa/capa_problem.py
@@ -72,8 +72,6 @@ log = logging.getLogger(__name__)
 #-----------------------------------------------------------------------------
 # main class for this module

-DEFAULT_QUESTION_TEXT = "Formatting error: You must explicitly specify the question text."
-
 class LoncapaSystem(object):
     """

@@ -765,8 +763,7 @@ class LoncapaProblem(object):
         if problemtree.tag in inputtypes.registry.registered_tags():
             # If this is an inputtype subtree, let it render itself.
-            response_id = self.problem_id + '_' + problemtree.get('response_id')
-            response_data = self.problem_data[response_id]
+            response_data = self.problem_data[problemid]

             status = 'unsubmitted'
             msg = ''
@@ -856,16 +853,16 @@ class LoncapaProblem(object):
         problem_data = {}
         self.responders = {}
         for response in tree.xpath('//' + "|//".join(responsetypes.registry.registered_tags())):
-            response_id_str = self.problem_id + "_" + str(response_id)
+            responsetype_id = self.problem_id + "_" + str(response_id)
             # create and save ID for this response
-            response.set('id', response_id_str)
+            response.set('id', responsetype_id)
             response_id += 1

             answer_id = 1
             input_tags = inputtypes.registry.registered_tags()
             inputfields = tree.xpath("|".join(['//' + response.tag + '[@id=$id]//' + x for x in input_tags]),
-                                     id=response_id_str
+                                     id=responsetype_id
                                      )
             # assign one answer_id for each input type
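For orientation, here is a tiny worked example (values are illustrative, not taken from this diff) of the id scheme these hunks rely on: each responsetype gets "<problem_id>_<response index>" and each of its inputs gets "<problem_id>_<response index>_<answer index>", which is why the tests further down use keys such as answer_key(2, 1) and '1_2_1'.

# Illustrative sketch only: problem '1', responsetype shown as index 2 (as in the tests below).
problem_id = '1'
response_index = 2
responsetype_id = "%s_%s" % (problem_id, response_index)            # -> '1_2'
input_ids = ["%s_%i_%i" % (problem_id, response_index, answer_id)   # -> '1_2_1', '1_2_2'
             for answer_id in (1, 2)]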
@@ -875,10 +872,65 @@ class LoncapaProblem(object):
                 entry.attrib['id'] = "%s_%i_%i" % (self.problem_id, response_id, answer_id)
                 answer_id = answer_id + 1

-            question_id = u'{}_{}'.format(self.problem_id, response_id)
-            label = ''
-            element_to_be_deleted = None
+            self.response_a11y_data(response, inputfields, responsetype_id, problem_data)
+
+            # instantiate capa Response
+            responsetype_cls = responsetypes.registry.get_class_for_tag(response.tag)
+            responder = responsetype_cls(response, inputfields, self.context, self.capa_system, self.capa_module)
+            # save in list in self
+            self.responders[response] = responder
+
+        # get responder answers (do this only once, since there may be a performance cost,
+        # eg with externalresponse)
+        self.responder_answers = {}
+        for response in self.responders.keys():
+            try:
+                self.responder_answers[response] = self.responders[response].get_answers()
+            except:
+                log.debug('responder %s failed to properly return get_answers()',
+                          self.responders[response])  # FIXME
+                raise
+
+        # <solution>...</solution> may not be associated with any specific response; give
+        # IDs for those separately
+        # TODO: We should make the namespaces consistent and unique (e.g. %s_problem_%i).
+        solution_id = 1
+        for solution in tree.findall('.//solution'):
+            solution.attrib['id'] = "%s_solution_%i" % (self.problem_id, solution_id)
+            solution_id += 1
+
+        return problem_data
+
+    def response_a11y_data(self, response, inputfields, responsetype_id, problem_data):
+        """
+        Construct data to be used for a11y.
+
+        Arguments:
+            response (object): xml response object
+            inputfields (list): list of inputfields in a responsetype
+            responsetype_id (str): responsetype id
+            problem_data (dict): dict to be filled with response data
+        """
+        element_to_be_deleted = None
+        label = ''
+
+        if len(inputfields) > 1:
+            response.set('multiple_inputtypes', 'true')
+            group_label_tag = response.find('label')
+            group_label_tag_text = ''
+            if group_label_tag is not None:
+                group_label_tag.tag = 'p'
+                group_label_tag.set('id', responsetype_id)
+                group_label_tag.set('class', 'multi-inputs-group-label')
+                group_label_tag_text = group_label_tag.text
+
+            for inputfield in inputfields:
+                problem_data[inputfield.get('id')] = {
+                    'group_label': group_label_tag_text,
+                    'label': inputfield.attrib.get('label', ''),
+                    'descriptions': {}
+                }
+        else:
             # Extract label value from <label> tag or label attribute from inside the responsetype
             responsetype_label_tag = response.find('label')
             if responsetype_label_tag is not None:
@@ -913,56 +965,22 @@ class LoncapaProblem(object):
                 label = label_tag[0].text
                 element_to_be_deleted = label_tag[0]

-            label = label.strip() or DEFAULT_QUESTION_TEXT
-
-            # delete label or p element only if responsetype is fully accessible
-            if response.tag in ACCESSIBLE_CAPA_RESPONSE_TYPES and element_to_be_deleted is not None:
-                element_to_be_deleted.getparent().remove(element_to_be_deleted)
-
-            # for non-accessible responsetypes it may be possible that label attribute is not present
-            # in this case pass an empty label. remember label attribute is only used as value for aria-label
-            if response.tag not in ACCESSIBLE_CAPA_RESPONSE_TYPES and label == DEFAULT_QUESTION_TEXT:
-                label = ''
-
             # Extract descriptions and set unique id on each description tag
             description_tags = response.findall('description')
             description_id = 1
             descriptions = OrderedDict()
             for description in description_tags:
                 descriptions[
-                    "description_%s_%i_%i" % (self.problem_id, response_id, description_id)
+                    "description_%s_%i" % (responsetype_id, description_id)
                 ] = description.text
                 response.remove(description)
                 description_id += 1

-            problem_data[question_id] = {
+            problem_data[inputfields[0].get('id')] = {
                 'label': label,
                 'descriptions': descriptions
             }
-
-            # instantiate capa Response
-            responsetype_cls = responsetypes.registry.get_class_for_tag(response.tag)
-            responder = responsetype_cls(response, inputfields, self.context, self.capa_system, self.capa_module)
-            # save in list in self
-            self.responders[response] = responder
-
-        # get responder answers (do this only once, since there may be a performance cost,
-        # eg with externalresponse)
-        self.responder_answers = {}
-        for response in self.responders.keys():
-            try:
-                self.responder_answers[response] = self.responders[response].get_answers()
-            except:
-                log.debug('responder %s failed to properly return get_answers()',
-                          self.responders[response])  # FIXME
-                raise
-
-        # <solution>...</solution> may not be associated with any specific response; give
-        # IDs for those separately
-        # TODO: We should make the namespaces consistent and unique (e.g. %s_problem_%i).
-        solution_id = 1
-        for solution in tree.findall('.//solution'):
-            solution.attrib['id'] = "%s_solution_%i" % (self.problem_id, solution_id)
-            solution_id += 1
-
-        return problem_data
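To illustrate what the new response_a11y_data() does for the multiple-inputtypes case, here is a rough standalone sketch (not edx-platform code; the ids and XML are made up, and it uses the same lxml etree API the diff itself uses): the responsetype's <label> is turned into a paragraph that labels the whole group, and every inputtype records that shared group_label alongside its own label.

from lxml import etree

def collect_a11y_data(response, inputfields, responsetype_id, problem_data):
    """Mimic only the multiple-inputtypes branch of response_a11y_data()."""
    if len(inputfields) > 1:
        response.set('multiple_inputtypes', 'true')
        group_label_tag = response.find('label')
        group_label_text = ''
        if group_label_tag is not None:
            group_label_tag.tag = 'p'  # keep the text visible, but as a plain paragraph
            group_label_tag.set('id', responsetype_id)
            group_label_tag.set('class', 'multi-inputs-group-label')
            group_label_text = group_label_tag.text
        for inputfield in inputfields:
            problem_data[inputfield.get('id')] = {
                'group_label': group_label_text,
                'label': inputfield.get('label', ''),
                'descriptions': {},
            }

xml = etree.XML(
    '<optionresponse id="1_2">'
    '<label>Choose the correct color</label>'
    '<optioninput id="1_2_1" label="What color is the sky?"/>'
    '<optioninput id="1_2_2" label="What color are pine needles?"/>'
    '</optionresponse>'
)
data = {}
collect_a11y_data(xml, xml.findall('optioninput'), '1_2', data)
# data['1_2_1'] -> {'group_label': 'Choose the correct color',
#                   'label': 'What color is the sky?', 'descriptions': {}}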
common/lib/capa/capa/responsetypes.py
@@ -263,6 +263,15 @@ class LoncapaResponse(object):
         tree.set('tabindex', '-1')
         tree.set('aria-label', response_label)

+        if self.xml.get('multiple_inputtypes'):
+            # add <div> to wrap all inputtypes
+            content = etree.SubElement(tree, 'div')
+            content.set('class', 'multi-inputs-group')
+            content.set('role', 'group')
+            content.set('aria-labelledby', self.xml.get('id'))
+        else:
+            content = tree
+
         # problem author can make this span display:inline
         if self.xml.get('inline', ''):
             tree.set('class', 'inline')
@@ -271,12 +280,12 @@ class LoncapaResponse(object):
             # call provided procedure to do the rendering
             item_xhtml = renderer(item)
             if item_xhtml is not None:
-                tree.append(item_xhtml)
+                content.append(item_xhtml)
         tree.tail = self.xml.tail

         # Add a <div> for the message at the end of the response
         if response_msg:
-            tree.append(self._render_response_msg_html(response_msg))
+            content.append(self._render_response_msg_html(response_msg))

         return tree
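A standalone sketch of the wrapper these two hunks introduce (illustrative markup, not the real renderer): when a responsetype carries the multiple_inputtypes flag, its rendered inputs and messages are appended to an inner <div> with role="group" whose aria-labelledby points back at the group label's id, so assistive technology announces the group label together with the inputs.

from lxml import etree

def render_into_group(tree, response_xml, rendered_items):
    """Append rendered inputtypes either to the tree itself or to a grouping <div>."""
    if response_xml.get('multiple_inputtypes'):
        # add <div> to wrap all inputtypes
        content = etree.SubElement(tree, 'div')
        content.set('class', 'multi-inputs-group')
        content.set('role', 'group')
        content.set('aria-labelledby', response_xml.get('id'))
    else:
        content = tree
    for item in rendered_items:
        content.append(item)
    return tree

response_xml = etree.XML('<optionresponse id="1_2" multiple_inputtypes="true"/>')
root = etree.Element('div')
items = [etree.Element('input', type='text'), etree.Element('input', type='text')]
print(etree.tostring(render_into_group(root, response_xml, items), pretty_print=True).decode())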
common/lib/capa/capa/templates/choicetext.html
<%! from django.utils.translation import ugettext as _ %>
<% element_checked = False %>
% for choice_id, _ in choices:
    <% choice_id = choice_id %>
    %if choice_id in value:
        <% element_checked = True %>
    %endif
%endfor

<section id="choicetextinput_${id}" class="choicetextinput">
  <form class="choicetextgroup capa_inputtype" id="inputtype_${id}">
    <div class="script_placeholder" data-src="${STATIC_URL}js/capa/choicetextinput.js"/>
    <fieldset aria-label="${response_data['label']}">
      % for choice_id, choice_description in choices:
        <% choice_id = choice_id %>
        <section id="forinput${choice_id}"
        % if input_type == 'radio' and choice_id in value:
        <%
...
common/lib/capa/capa/templates/textline.html
@@ -56,8 +56,8 @@
      </div>
    % endif
    % if msg:
      <span class="message">${HTML(msg)}</span>
    % endif
</div>
common/lib/capa/capa/tests/__init__.py
"""Tools for helping with testing capa."""
import
gettext
from
path
import
path
# pylint: disable=no-name-in-module
import
os
import
os.path
...
...
@@ -9,12 +10,29 @@ import fs.osfs
 from capa.capa_problem import LoncapaProblem, LoncapaSystem
 from capa.inputtypes import Status
 from mock import Mock, MagicMock
+from mako.lookup import TemplateLookup
 import xml.sax.saxutils as saxutils

 TEST_DIR = os.path.dirname(os.path.realpath(__file__))


+def get_template(template_name):
+    """
+    Return template for a capa inputtype.
+    """
+    return TemplateLookup(
+        directories=[path(__file__).dirname().dirname() / 'templates']
+    ).get_template(template_name)
+
+
+def capa_render_template(template, context):
+    """
+    Render template for a capa inputtype.
+    """
+    return get_template(template).render_unicode(**context)
+
+
 def tst_render_template(template, context):
     """
     A test version of render to template. Renders to the repr of the context, completely ignoring
@@ -30,7 +48,7 @@ xqueue_interface = MagicMock()
 xqueue_interface.send_to_queue.return_value = (0, 'Success!')


-def test_capa_system():
+def test_capa_system(render_template=None):
     """
     Construct a mock LoncapaSystem instance.
@@ -46,7 +64,7 @@ def test_capa_system():
         filestore=fs.osfs.OSFS(os.path.join(TEST_DIR, "test_files")),
         i18n=gettext.NullTranslations(),
         node_path=os.environ.get("NODE_PATH", "/usr/local/lib/node_modules"),
-        render_template=tst_render_template,
+        render_template=render_template or tst_render_template,
         seed=0,
         STATIC_URL='/dummy-static/',
         STATUS_CLASS=Status,
@@ -66,9 +84,10 @@ def mock_capa_module():
     return capa_module


-def new_loncapa_problem(xml, capa_system=None, seed=723):
+def new_loncapa_problem(xml, capa_system=None, seed=723, use_capa_render_template=False):
     """Construct a `LoncapaProblem` suitable for unit tests."""
-    return LoncapaProblem(xml, id='1', seed=seed, capa_system=capa_system or test_capa_system(),
-                          capa_module=mock_capa_module())
+    render_template = capa_render_template if use_capa_render_template else None
+    return LoncapaProblem(xml, id='1', seed=seed, capa_system=capa_system or test_capa_system(render_template),
+                          capa_module=mock_capa_module())
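A usage sketch of the new hook (the problem XML is illustrative, not from this commit): passing use_capa_render_template=True makes the test problem render through the real capa Mako templates via capa_render_template, instead of tst_render_template's repr-based stub, so rendered output carries the actual a11y markup.

from capa.tests import new_loncapa_problem

PROBLEM_XML = """
<problem>
    <optionresponse>
        <label>Choose the correct color</label>
        <optioninput options="('yellow','blue','green')" correct="blue" label="What color is the sky?"/>
        <optioninput options="('yellow','blue','green')" correct="green" label="What color are pine needles?"/>
    </optionresponse>
</problem>
"""

problem = new_loncapa_problem(PROBLEM_XML, use_capa_render_template=True)
html = problem.get_html()  # rendered with the real templates rather than the repr stub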
common/lib/capa/capa/tests/response_xml_factory.py
@@ -267,6 +267,9 @@ class CustomResponseXMLFactory(ResponseXMLFactory):
             *answer_attr*: The "answer" attribute on the tag itself (treated as an
                 alias to "expect", though "expect" takes priority if both are given)

+            *group_label*: Text to represent group of inputs when there are
+                multiple inputs.
         """
         # Retrieve **kwargs
@@ -276,6 +279,7 @@ class CustomResponseXMLFactory(ResponseXMLFactory):
         answer = kwargs.get('answer', None)
         options = kwargs.get('options', None)
         cfn_extra_args = kwargs.get('cfn_extra_args', None)
+        group_label = kwargs.get('group_label', None)

         # Create the response element
         response_element = etree.Element("customresponse")
@@ -293,6 +297,10 @@ class CustomResponseXMLFactory(ResponseXMLFactory):
             answer_element = etree.SubElement(response_element, "answer")
             answer_element.text = str(answer)

+        if group_label:
+            group_label_element = etree.SubElement(response_element, "label")
+            group_label_element.text = group_label
+
         if options:
             response_element.set('options', str(options))
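A hedged usage sketch of the new kwarg (the values are illustrative and it assumes the factory's usual build_xml(**kwargs) entry point): group_label ends up as a <label> child of the generated <customresponse>, which capa then treats as the group label for the multiple inputs.

from capa.tests.response_xml_factory import CustomResponseXMLFactory

factory = CustomResponseXMLFactory()
problem_xml = factory.build_xml(
    cfn='test_add_to_ten',   # checker function assumed to be defined in the problem's <script>
    expect='10',
    num_inputs=2,
    group_label='Enter two integers that sum to 10.',
)
# problem_xml should now contain a <customresponse> carrying a
# <label>Enter two integers that sum to 10.</label> child alongside its inputs.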
common/lib/capa/capa/tests/test_capa_problem.py
This diff is collapsed.
common/lib/capa/capa/tests/test_html_render.py
@@ -7,7 +7,6 @@ import mock
 from .response_xml_factory import StringResponseXMLFactory, CustomResponseXMLFactory
 from . import test_capa_system, new_loncapa_problem
-from capa.capa_problem import DEFAULT_QUESTION_TEXT


 class CapaHtmlRenderTest(unittest.TestCase):
@@ -186,7 +185,7 @@ class CapaHtmlRenderTest(unittest.TestCase):
             'id': '1_2_1',
             'trailing_text': '',
             'size': None,
-            'response_data': {'label': DEFAULT_QUESTION_TEXT, 'descriptions': {}},
+            'response_data': {'label': '', 'descriptions': {}},
             'describedby': ''
         }
common/lib/xmodule/xmodule/capa_base.py
@@ -1296,6 +1296,9 @@ class CapaMixin(CapaFields):
                 'correct': is_correct,
                 'variant': variant,
             }
+            # Add group_label in event data only if the responsetype contains multiple inputtypes
+            if answer_input.response_data.get('group_label'):
+                input_metadata[input_id]['group_label'] = answer_input.response_data.get('group_label')

         return input_metadata
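The rule this capa_base.py hunk adds, restated as a small self-contained sketch (hypothetical helper name, not the CapaMixin method itself): group_label is copied into an input's event metadata only when its response_data actually carries one, i.e. only for responsetypes with multiple inputtypes.

def build_input_metadata(question, response_type, input_type, is_correct, variant, response_data):
    """Sketch of the per-input metadata built for problem_check events."""
    metadata = {
        'question': question,
        'response_type': response_type,
        'input_type': input_type,
        'correct': is_correct,
        'variant': variant,
    }
    # Add group_label only if the responsetype contains multiple inputtypes
    if response_data.get('group_label'):
        metadata['group_label'] = response_data.get('group_label')
    return metadata

meta = build_input_metadata(
    question='What color is the sky?',
    response_type='optionresponse',
    input_type='optioninput',
    is_correct=True,
    variant='',
    response_data={'group_label': 'Choose the correct color'},
)
assert meta['group_label'] == 'Choose the correct color'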
common/lib/xmodule/xmodule/tests/test_capa_module.py
@@ -21,7 +21,6 @@ from webob.multidict import MultiDict
 import xmodule
 from xmodule.tests import DATA_DIR
 from capa import responsetypes
-from capa.capa_problem import DEFAULT_QUESTION_TEXT
 from capa.responsetypes import (StudentInputError, LoncapaProblemError, ResponseError)
 from capa.xqueue_interface import XQueueInterface

@@ -2652,7 +2651,7 @@ class TestProblemCheckTracking(unittest.TestCase):
         event = self.get_event_for_answers(module, answer_input_dict)
         self.assertEquals(event['submission'], {
             factory.answer_key(2): {
-                'question': DEFAULT_QUESTION_TEXT,
+                'question': '',
                 'answer': '3.14',
                 'response_type': 'numericalresponse',
                 'input_type': 'textline',
@@ -2662,19 +2661,19 @@ class TestProblemCheckTracking(unittest.TestCase):
     def test_multiple_inputs(self):
+        group_label = 'Choose the correct color'
+        input1_label = 'What color is the sky?'
+        input2_label = 'What color are pine needles?'
         factory = self.capa_factory_for_problem_xml("""\
             <problem display_name="Multiple Inputs">
-              <p>Choose the correct color</p>
               <optionresponse>
-                <p>What color is the sky?</p>
-                <optioninput options="('yellow','blue','green')" correct="blue"/>
-                <p>What color are pine needles?</p>
-                <optioninput options="('yellow','blue','green')" correct="green"/>
+                <label>{}</label>
+                <optioninput options="('yellow','blue','green')" correct="blue" label="{}"/>
+                <optioninput options="('yellow','blue','green')" correct="green" label="{}"/>
               </optionresponse>
             </problem>
-            """)
+            """.format(group_label, input1_label, input2_label))
         module = factory.create()

         answer_input_dict = {
             factory.input_key(2, 1): 'blue',
             factory.input_key(2, 2): 'yellow',

@@ -2683,7 +2682,8 @@ class TestProblemCheckTracking(unittest.TestCase):
         event = self.get_event_for_answers(module, answer_input_dict)
         self.assertEquals(event['submission'], {
             factory.answer_key(2, 1): {
-                'question': DEFAULT_QUESTION_TEXT,
+                'group_label': group_label,
+                'question': input1_label,
                 'answer': 'blue',
                 'response_type': 'optionresponse',
                 'input_type': 'optioninput',

@@ -2691,7 +2691,8 @@ class TestProblemCheckTracking(unittest.TestCase):
                 'variant': '',
             },
             factory.answer_key(2, 2): {
-                'question': DEFAULT_QUESTION_TEXT,
+                'group_label': group_label,
+                'question': input2_label,
                 'answer': 'yellow',
                 'response_type': 'optionresponse',
                 'input_type': 'optioninput',
@@ -2702,11 +2703,14 @@ class TestProblemCheckTracking(unittest.TestCase):
     def test_optioninput_extended_xml(self):
         """Test the new XML form of writing with <option> tag instead of options= attribute."""
+        group_label = 'Are you the Gatekeeper?'
+        input1_label = 'input 1 label'
+        input2_label = 'input 2 label'
         factory = self.capa_factory_for_problem_xml("""\
             <problem display_name="Woo Hoo">
-                <p>Are you the Gatekeeper?</p>
                 <optionresponse>
-                   <optioninput>
+                   <label>{}</label>
+                   <optioninput label="{}">
                        <option correct="True" label="Good Job">
                            apple
                            <optionhint>

@@ -2721,7 +2725,7 @@ class TestProblemCheckTracking(unittest.TestCase):
                        </option>
                    </optioninput>
-                   <optioninput>
+                   <optioninput label="{}">
                        <option correct="True">
                            apple
                            <optionhint>

@@ -2737,7 +2741,7 @@ class TestProblemCheckTracking(unittest.TestCase):
                    </optioninput>
                </optionresponse>
            </problem>
-           """)
+           """.format(group_label, input1_label, input2_label))
         module = factory.create()

         answer_input_dict = {
@@ -2748,7 +2752,8 @@ class TestProblemCheckTracking(unittest.TestCase):
         event = self.get_event_for_answers(module, answer_input_dict)
         self.assertEquals(event['submission'], {
             factory.answer_key(2, 1): {
-                'question': DEFAULT_QUESTION_TEXT,
+                'group_label': group_label,
+                'question': input1_label,
                 'answer': 'apple',
                 'response_type': 'optionresponse',
                 'input_type': 'optioninput',

@@ -2756,7 +2761,8 @@ class TestProblemCheckTracking(unittest.TestCase):
                 'variant': '',
             },
             factory.answer_key(2, 2): {
-                'question': DEFAULT_QUESTION_TEXT,
+                'group_label': group_label,
+                'question': input2_label,
                 'answer': 'cucumber',
                 'response_type': 'optionresponse',
                 'input_type': 'optioninput',

@@ -2776,7 +2782,7 @@ class TestProblemCheckTracking(unittest.TestCase):
         event = self.get_event_for_answers(module, answer_input_dict)
         self.assertEquals(event['submission'], {
             factory.answer_key(2): {
-                'question': DEFAULT_QUESTION_TEXT,
+                'question': '',
                 'answer': '3.14',
                 'response_type': 'numericalresponse',
                 'input_type': 'textline',
common/test/acceptance/tests/lms/test_problem_types.py
@@ -592,10 +592,10 @@ class ScriptProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
     factory = CustomResponseXMLFactory()

     factory_kwargs = {
-        'question_text': 'Enter two integers that sum to 10.',
         'cfn': 'test_add_to_ten',
         'expect': '10',
         'num_inputs': 2,
+        'group_label': 'Enter two integers that sum to 10.',
         'script': textwrap.dedent("""
             def test_add_to_ten(expect,ans):
                 try:
@@ -618,12 +618,6 @@ class ScriptProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
         Additional setup for ScriptProblemTypeTest
         """
         super(ScriptProblemTypeTest, self).setUp(*args, **kwargs)
-        self.problem_page.a11y_audit.config.set_rules({
-            'ignore': [
-                'section',  # TODO: AC-491
-                'label',  # TODO: AC-287
-            ]
-        })

     def answer_problem(self, correct):
         """