Commit 84281198 authored Mar 20, 2013 by Jay Zoldak
Merge pull request #1696 from MITx/feature/will/coderesponse_lettuce_tests

Feature/will/coderesponse lettuce tests

Parents: f7700dfc 5411fc76
Showing 8 changed files with 393 additions and 10 deletions
common/djangoapps/terrain/steps.py                                        +13 -0
lms/djangoapps/courseware/features/problems.feature                        +8 -4
lms/djangoapps/courseware/features/problems.py                            +39 -6
lms/djangoapps/courseware/features/xqueue_setup.py                        +32 -0
lms/djangoapps/courseware/mock_xqueue_server/__init__.py                   +0 -0
lms/djangoapps/courseware/mock_xqueue_server/mock_xqueue_server.py       +211 -0
lms/djangoapps/courseware/mock_xqueue_server/test_mock_xqueue_server.py   +78 -0
lms/envs/acceptance.py                                                     +12 -0
common/djangoapps/terrain/steps.py
@@ -9,6 +9,7 @@ from bs4 import BeautifulSoup
 import time
 import re
 import os.path
+from selenium.common.exceptions import WebDriverException
 from logging import getLogger
 logger = getLogger(__name__)
@@ -214,3 +215,15 @@ def save_the_course_content(path='/tmp'):
     f = open('%s/%s' % (path, filename), 'w')
     f.write(output)
     f.close
+
+
+@world.absorb
+def css_click(css_selector):
+    try:
+        world.browser.find_by_css(css_selector).click()
+
+    except WebDriverException:
+        # Occasionally, MathJax or other JavaScript can cover up
+        # an element temporarily.
+        # If this happens, wait a second, then try again
+        time.sleep(1)
+        world.browser.find_by_css(css_selector).click()
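Note (editorial, not part of the commit): the retry-wrapped helper above is meant to be called from step definitions in place of a raw world.browser call. A minimal usage sketch, with an illustrative step name and selector:

    from lettuce import step, world

    @step(u'I save my answer')          # illustrative step text, not in this commit
    def save_answer(step):
        # css_click retries once after a short sleep if JavaScript
        # (e.g. MathJax) briefly covers the element
        world.css_click('input.save')   # illustrative selector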
lms/djangoapps/courseware/features/problems.feature
-Feature: Answer choice problems
+Feature: Answer problems
     As a student in an edX course
     In order to test my understanding of the material
-    I want to answer choice based problems
+    I want to answer problems

     Scenario: I can answer a problem correctly
-        Given I am viewing a "<ProblemType>" problem
+        Given External graders respond "correct"
+        And I am viewing a "<ProblemType>" problem
         When I answer a "<ProblemType>" problem "correctly"
         Then My "<ProblemType>" answer is marked "correct"
@@ -17,9 +18,11 @@ Feature: Answer choice problems
         | numerical |
         | formula   |
         | script    |
+        | code      |

     Scenario: I can answer a problem incorrectly
-        Given I am viewing a "<ProblemType>" problem
+        Given External graders respond "incorrect"
+        And I am viewing a "<ProblemType>" problem
         When I answer a "<ProblemType>" problem "incorrectly"
         Then My "<ProblemType>" answer is marked "incorrect"
@@ -32,6 +35,7 @@ Feature: Answer choice problems
         | numerical |
         | formula   |
         | script    |
+        | code      |

     Scenario: I can submit a blank answer
         Given I am viewing a "<ProblemType>" problem
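Note (editorial, not part of the commit): the new "External graders respond" line is bound to a step definition in problems.py below by a lettuce regex; the quoted word is captured and passed to the step as an argument. A minimal self-contained sketch of that matching, using plain re outside lettuce:

    import re

    pattern = u'External graders respond "([^"]*)"'
    line = u'External graders respond "correct"'

    # The quoted word is the first capture group
    correctness = re.match(pattern, line).group(1)
    print(correctness)   # -> correct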
lms/djangoapps/courseware/features/problems.py
 from lettuce import world, step
 from lettuce.django import django_url
 from selenium.webdriver.support.ui import Select
 import random
 import textwrap
 import time

 from common import i_am_registered_for_the_course, TEST_SECTION_NAME, section_location
 from terrain.factories import ItemFactory
 from capa.tests.response_xml_factory import OptionResponseXMLFactory, \
     ChoiceResponseXMLFactory, MultipleChoiceResponseXMLFactory, \
     StringResponseXMLFactory, NumericalResponseXMLFactory, \
-    FormulaResponseXMLFactory, CustomResponseXMLFactory
+    FormulaResponseXMLFactory, CustomResponseXMLFactory, \
+    CodeResponseXMLFactory

 # Factories from capa.tests.response_xml_factory that we will use
 # to generate the problem XML, with the keyword args used to configure
@@ -78,6 +79,12 @@ PROBLEM_FACTORY_DICT = {
             a2=0
         return (a1+a2)==int(expect)
         """)}},
+    'code': {
+        'factory': CodeResponseXMLFactory(),
+        'kwargs': {
+            'question_text': 'Submit code to an external grader',
+            'initial_display': 'print "Hello world!"',
+            'grader_payload': '{"grader": "ps1/Spring2013/test_grader.py"}', }},
 }
@@ -116,6 +123,19 @@ def view_problem(step, problem_type):
     world.browser.visit(url)

+@step(u'External graders respond "([^"]*)"')
+def set_external_grader_response(step, correctness):
+    assert(correctness in ['correct', 'incorrect'])
+
+    response_dict = {'correct': True if correctness == 'correct' else False,
+                     'score': 1 if correctness == 'correct' else 0,
+                     'msg': 'Your problem was graded %s' % correctness}
+
+    # Set the fake xqueue server to always respond
+    # correct/incorrect when asked to grade a problem
+    world.xqueue_server.set_grade_response(response_dict)

 @step(u'I answer a "([^"]*)" problem "([^"]*)ly"')
 def answer_problem(step, problem_type, correctness):
     """ Mark a given problem type correct or incorrect, then submit it.
@@ -169,18 +189,29 @@ def answer_problem(step, problem_type, correctness):
         inputfield('script', input_num=1).fill(str(first_addend))
         inputfield('script', input_num=2).fill(str(second_addend))

+    elif problem_type == 'code':
+        # The fake xqueue server is configured to respond
+        # correct / incorrect no matter what we submit.
+        # Furthermore, since the inline code response uses
+        # JavaScript to make the code display nicely, it's difficult
+        # to programmatically input text
+        # (there's no <textarea> we can just fill text into)
+        # For this reason, we submit the initial code in the response
+        # (configured in the problem XML above)
+        pass

     # Submit the problem
     check_problem(step)

 @step(u'I check a problem')
 def check_problem(step):
-    world.browser.find_by_css("input.check").click()
+    world.css_click("input.check")

 @step(u'I reset the problem')
 def reset_problem(step):
-    world.browser.find_by_css('input.reset').click()
+    world.css_click('input.reset')

 @step(u'My "([^"]*)" answer is marked "([^"]*)"')
@@ -207,7 +238,8 @@ def assert_answer_mark(step, problem_type, correctness):
                          'string': ['div.correct'],
                          'numerical': ['div.correct'],
                          'formula': ['div.correct'],
-                         'script': ['div.correct'], }
+                         'script': ['div.correct'],
+                         'code': ['span.correct'], }

     incorrect_selectors = {'drop down': ['span.incorrect'],
                            'multiple choice': ['label.choicegroup_incorrect',
@@ -216,7 +248,8 @@ def assert_answer_mark(step, problem_type, correctness):
                            'string': ['div.incorrect'],
                            'numerical': ['div.incorrect'],
                            'formula': ['div.incorrect'],
-                           'script': ['div.incorrect']}
+                           'script': ['div.incorrect'],
+                           'code': ['span.incorrect'], }

     assert(correctness in ['correct', 'incorrect', 'unanswered'])
     assert(problem_type in correct_selectors and problem_type in incorrect_selectors)
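Note (editorial, not part of the commit): the factory entries in PROBLEM_FACTORY_DICT are used elsewhere in this file to generate the problem XML from their keyword arguments. A hedged sketch of how the new 'code' entry would be exercised, assuming the build_xml(**kwargs) interface exposed by the capa response XML factories:

    from capa.tests.response_xml_factory import CodeResponseXMLFactory

    factory = CodeResponseXMLFactory()

    # Keyword arguments taken verbatim from the 'code' entry above
    problem_xml = factory.build_xml(
        question_text='Submit code to an external grader',
        initial_display='print "Hello world!"',
        grader_payload='{"grader": "ps1/Spring2013/test_grader.py"}',
    )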
lms/djangoapps/courseware/features/xqueue_setup.py (new file, mode 100644)
from courseware.mock_xqueue_server.mock_xqueue_server import MockXQueueServer
from lettuce import before, after, world
from django.conf import settings

import threading


@before.all
def setup_mock_xqueue_server():

    # Retrieve the local port from settings
    server_port = settings.XQUEUE_PORT

    # Create the mock server instance
    server = MockXQueueServer(server_port)

    # Start the server running in a separate daemon thread
    # Because the thread is a daemon, it will terminate
    # when the main thread terminates.
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.daemon = True
    server_thread.start()

    # Store the server instance in lettuce's world
    # so that other steps can access it
    # (and we can shut it down later)
    world.xqueue_server = server


@after.all
def teardown_mock_xqueue_server(total):

    # Stop the xqueue server and free up the port
    world.xqueue_server.shutdown()
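Note (editorial, not part of the commit): the same lifecycle can be exercised outside lettuce when debugging the mock grader locally; a minimal sketch, assuming the module is importable and the port is free:

    import threading

    from courseware.mock_xqueue_server.mock_xqueue_server import MockXQueueServer

    server = MockXQueueServer(8027)   # port matches the acceptance settings below

    thread = threading.Thread(target=server.serve_forever)
    thread.daemon = True
    thread.start()

    # ... POST grade requests to http://127.0.0.1:8027/xqueue/submit ...

    server.shutdown()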
lms/djangoapps/courseware/mock_xqueue_server/__init__.py (new empty file, mode 100644)
lms/djangoapps/courseware/mock_xqueue_server/mock_xqueue_server.py (new file, mode 100644)
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import json
import urllib
import urlparse
import threading

from logging import getLogger
logger = getLogger(__name__)


class MockXQueueRequestHandler(BaseHTTPRequestHandler):
    '''
    A handler for XQueue POST requests.
    '''

    protocol = "HTTP/1.0"

    def do_HEAD(self):
        self._send_head()

    def do_POST(self):
        '''
        Handle a POST request from the client

        Sends back an immediate success/failure response.
        It then POSTS back to the client
        with grading results, as configured in MockXQueueServer.
        '''
        self._send_head()

        # Retrieve the POST data
        post_dict = self._post_dict()

        # Log the request
        logger.debug("XQueue received POST request %s to path %s" %
                     (str(post_dict), self.path))

        # Respond only to grading requests
        if self._is_grade_request():
            try:
                xqueue_header = json.loads(post_dict['xqueue_header'])
                xqueue_body = json.loads(post_dict['xqueue_body'])

                callback_url = xqueue_header['lms_callback_url']

            except KeyError:
                # If the message doesn't have a header or body,
                # then it's malformed.
                # Respond with failure
                error_msg = "XQueue received invalid grade request"
                self._send_immediate_response(False, message=error_msg)

            except ValueError:
                # If we could not decode the body or header,
                # respond with failure
                error_msg = "XQueue could not decode grade request"
                self._send_immediate_response(False, message=error_msg)

            else:
                # Send an immediate response of success
                # The grade request is formed correctly
                self._send_immediate_response(True)

                # Wait a bit before POSTing back to the callback url with the
                # grade result configured by the server
                # Otherwise, the problem will not realize it's
                # queued and it will keep waiting for a response
                # indefinitely
                delayed_grade_func = lambda: self._send_grade_response(callback_url,
                                                                       xqueue_header)

                timer = threading.Timer(2, delayed_grade_func)
                timer.start()

        # If we get a request that's not to the grading submission
        # URL, return an error
        else:
            error_message = "Invalid request URL"
            self._send_immediate_response(False, message=error_message)

    def _send_head(self):
        '''
        Send the response code and MIME headers
        '''
        if self._is_grade_request():
            self.send_response(200)
        else:
            self.send_response(500)

        self.send_header('Content-type', 'text/plain')
        self.end_headers()

    def _post_dict(self):
        '''
        Retrieve the POST parameters from the client as a dictionary
        '''
        try:
            length = int(self.headers.getheader('content-length'))

            post_dict = urlparse.parse_qs(self.rfile.read(length))

            # The POST dict will contain a list of values
            # for each key.
            # None of our parameters are lists, however,
            # so we map [val] --> val
            # If the list contains multiple entries,
            # we pick the first one
            post_dict = dict(map(lambda (key, list_val): (key, list_val[0]),
                                 post_dict.items()))

        except:
            # We return an empty dict here, on the assumption
            # that when we later check that the request has
            # the correct fields, it won't find them,
            # and will therefore send an error response
            return {}

        return post_dict

    def _send_immediate_response(self, success, message=""):
        '''
        Send an immediate success/failure message
        back to the client
        '''

        # Send the response indicating success/failure
        response_str = json.dumps({'return_code': 0 if success else 1,
                                   'content': message})

        # Log the response
        logger.debug("XQueue: sent response %s" % response_str)

        self.wfile.write(response_str)

    def _send_grade_response(self, postback_url, xqueue_header):
        '''
        POST the grade response back to the client
        using the response provided by the server configuration
        '''
        response_dict = {'xqueue_header': json.dumps(xqueue_header),
                         'xqueue_body': json.dumps(self.server.grade_response())}

        # Log the response
        logger.debug("XQueue: sent grading response %s" % str(response_dict))

        MockXQueueRequestHandler.post_to_url(postback_url, response_dict)

    def _is_grade_request(self):
        return 'xqueue/submit' in self.path

    @staticmethod
    def post_to_url(url, param_dict):
        '''
        POST *param_dict* to *url*
        We make this a separate function so we can easily patch
        it during testing.
        '''
        urllib.urlopen(url, urllib.urlencode(param_dict))


class MockXQueueServer(HTTPServer):
    '''
    A mock XQueue grading server that responds
    to POST requests to localhost.
    '''

    def __init__(self, port_num,
                 grade_response_dict={'correct': True, 'score': 1, 'msg': ''}):
        '''
        Initialize the mock XQueue server instance.

        *port_num* is the localhost port to listen to

        *grade_response_dict* is a dictionary that will be JSON-serialized
            and sent in response to XQueue grading requests.
        '''
        self.set_grade_response(grade_response_dict)

        handler = MockXQueueRequestHandler
        address = ('', port_num)
        HTTPServer.__init__(self, address, handler)

    def shutdown(self):
        '''
        Stop the server and free up the port
        '''
        # First call superclass shutdown()
        HTTPServer.shutdown(self)

        # We also need to manually close the socket
        self.socket.close()

    def grade_response(self):
        return self._grade_response

    def set_grade_response(self, grade_response_dict):

        # Check that the grade response has the right keys
        assert('correct' in grade_response_dict and
               'score' in grade_response_dict and
               'msg' in grade_response_dict)

        # Wrap the message in <div> tags to ensure that it is valid XML
        grade_response_dict['msg'] = "<div>%s</div>" % grade_response_dict['msg']

        # Save the response dictionary
        self._grade_response = grade_response_dict
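Note (editorial, not part of the commit): scenarios reconfigure the canned grade per run through set_grade_response(), exactly as the "External graders respond" step in problems.py above does; a minimal sketch:

    server = MockXQueueServer(8027)

    # Every subsequent grade request will be answered with this result
    server.set_grade_response({'correct': False,
                               'score': 0,
                               'msg': 'Your problem was graded incorrect'})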
lms/djangoapps/courseware/mock_xqueue_server/test_mock_xqueue_server.py (new file, mode 100644)
import mock
import unittest
import threading
import json
import urllib
import urlparse
import time

from mock_xqueue_server import MockXQueueServer, MockXQueueRequestHandler


class MockXQueueServerTest(unittest.TestCase):
    '''
    A mock version of the XQueue server that listens on a local
    port and responds with pre-defined grade messages.

    Used for lettuce BDD tests in lms/courseware/features/problems.feature
    and lms/courseware/features/problems.py

    This is temporary and will be removed when XQueue is
    rewritten using celery.
    '''

    def setUp(self):

        # Create the server
        server_port = 8034
        self.server_url = 'http://127.0.0.1:%d' % server_port
        self.server = MockXQueueServer(server_port,
                                       {'correct': True, 'score': 1, 'msg': ''})

        # Start the server in a separate daemon thread
        server_thread = threading.Thread(target=self.server.serve_forever)
        server_thread.daemon = True
        server_thread.start()

    def tearDown(self):

        # Stop the server, freeing up the port
        self.server.shutdown()

    def test_grade_request(self):

        # Patch post_to_url() so we can intercept
        # outgoing POST requests from the server
        MockXQueueRequestHandler.post_to_url = mock.Mock()

        # Send a grade request
        callback_url = 'http://127.0.0.1:8000/test_callback'

        grade_header = json.dumps({'lms_callback_url': callback_url,
                                   'lms_key': 'test_queuekey',
                                   'queue_name': 'test_queue'})

        grade_body = json.dumps({'student_info': 'test',
                                 'grader_payload': 'test',
                                 'student_response': 'test'})

        grade_request = {'xqueue_header': grade_header,
                         'xqueue_body': grade_body}

        response_handle = urllib.urlopen(self.server_url + '/xqueue/submit',
                                         urllib.urlencode(grade_request))

        response_dict = json.loads(response_handle.read())

        # Expect that the response is success
        self.assertEqual(response_dict['return_code'], 0)

        # Wait a bit before checking that the server posted back
        time.sleep(3)

        # Expect that the server tries to post back the grading info
        xqueue_body = json.dumps({'correct': True, 'score': 1,
                                  'msg': '<div></div>'})
        expected_callback_dict = {'xqueue_header': grade_header,
                                  'xqueue_body': xqueue_body}
        MockXQueueRequestHandler.post_to_url.assert_called_with(callback_url,
                                                                expected_callback_dict)
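Note (editorial, not part of the commit): the test works by replacing post_to_url with mock.Mock(), so outgoing POSTs are recorded instead of sent over the network. A minimal self-contained illustration of that interception pattern, with an illustrative URL and payload:

    import mock

    fake_post = mock.Mock()
    fake_post('http://example.com/callback', {'xqueue_body': '{}'})

    # Passes: the call above was recorded with exactly these arguments
    fake_post.assert_called_with('http://example.com/callback', {'xqueue_body': '{}'})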
lms/envs/acceptance.py
@@ -40,6 +40,18 @@ DATABASES = {
     }
 }

+# Set up XQueue information so that the lms will send
+# requests to a mock XQueue server running locally
+XQUEUE_PORT = 8027
+XQUEUE_INTERFACE = {
+    "url": "http://127.0.0.1:%d" % XQUEUE_PORT,
+    "django_auth": {"username": "lms",
+                    "password": "***REMOVED***"},
+    "basic_auth": ('anant', 'agarwal'),
+}
+
+# Do not display the YouTube videos in the browser while running the
+# acceptance tests. This makes them faster and more reliable
+MITX_FEATURES['STUB_VIDEO_FOR_TESTING'] = True
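Note (editorial, not part of the commit): these two settings are what tie the LMS to the mock grader: xqueue_setup.py starts MockXQueueServer on the same XQUEUE_PORT that XQUEUE_INTERFACE points at, so grading traffic never leaves the test machine. A self-contained sanity check of that relationship:

    XQUEUE_PORT = 8027
    XQUEUE_INTERFACE_URL = "http://127.0.0.1:%d" % XQUEUE_PORT

    # The LMS grading URL and the mock server's listening port must agree,
    # otherwise submissions wait on a queue that nothing is serving.
    assert XQUEUE_INTERFACE_URL == "http://127.0.0.1:8027"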