edx / edx-platform / Commits / e7f8c7c6

Commit e7f8c7c6 authored Mar 19, 2013 by Will Daly
pep8 fixes
parent 39aa25c2
Showing 4 changed files with 23 additions and 19 deletions:

  +10  -9   lms/djangoapps/courseware/features/mock_xqueue_server.py
  +2   -2   lms/djangoapps/courseware/features/problems.feature
  +9   -8   lms/djangoapps/courseware/features/problems.py
  +2   -0   lms/djangoapps/courseware/features/xqueue_setup.py
lms/djangoapps/courseware/features/mock_xqueue_server.py

@@ -7,6 +7,7 @@ import time
from logging import getLogger
logger = getLogger(__name__)


class MockXQueueRequestHandler(BaseHTTPRequestHandler):
    '''
    A handler for XQueue POST requests.

@@ -31,7 +32,7 @@ class MockXQueueRequestHandler(BaseHTTPRequestHandler):
        post_dict = self._post_dict()

        # Log the request
        logger.debug("XQueue received POST request %s to path %s" %
                     (str(post_dict), self.path))

        # Respond only to grading requests

@@ -52,12 +53,12 @@ class MockXQueueRequestHandler(BaseHTTPRequestHandler):
        except ValueError:
            # If we could not decode the body or header,
            # respond with failure
            error_msg = "XQueue could not decode grade request"
            self._send_immediate_response(False, message=error_msg)

        else:
            # Send an immediate response of success
            # The grade request is formed correctly
            self._send_immediate_response(True)

@@ -66,7 +67,7 @@ class MockXQueueRequestHandler(BaseHTTPRequestHandler):
            # Otherwise, the problem will not realize it's
            # queued and it will keep waiting for a response
            # indefinitely
            delayed_grade_func = lambda: self._send_grade_response(callback_url,
                                                                   xqueue_header)
            timer = threading.Timer(2, delayed_grade_func)
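The hunk above preserves the fake queue's timing behaviour: acknowledge the POST right away, then deliver the grade roughly two seconds later through threading.Timer so the submitting problem believes it has been queued. A minimal standalone sketch of that pattern follows; all names in it are illustrative stand-ins, not taken from edx-platform.

import threading
import time


def send_immediate_response(success):
    # Stand-in for the handler's _send_immediate_response()
    print("immediate ack, success=%s" % success)


def send_grade_response(callback_url, header):
    # Stand-in for the handler's _send_grade_response()
    print("posting grade back to %s for %s" % (callback_url, header))


# Acknowledge right away so the submitter sees the request as queued...
send_immediate_response(True)

# ...then deliver the actual grade on a short delay, as the handler above does.
delayed_grade_func = lambda: send_grade_response('http://localhost/callback',
                                                 {'submission_id': 1})
timer = threading.Timer(2, delayed_grade_func)
timer.start()

time.sleep(3)  # keep the sketch alive long enough for the timer to fire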
@@ -166,8 +167,8 @@ class MockXQueueServer(HTTPServer):
    to POST requests to localhost.
    '''

    def __init__(self, port_num,
                 grade_response_dict={'correct': True, 'score': 1, 'msg': ''}):
        '''
        Initialize the mock XQueue server instance.

@@ -251,7 +252,7 @@ class MockXQueueServerTest(unittest.TestCase):
                                     'lms_key': 'test_queuekey',
                                     'queue_name': 'test_queue'})
        grade_body = json.dumps({'student_info': 'test',
                                 'grader_payload': 'test',
                                 'student_response': 'test'})

@@ -270,9 +271,9 @@ class MockXQueueServerTest(unittest.TestCase):
        time.sleep(3)

        # Expect that the server tries to post back the grading info
        xqueue_body = json.dumps({'correct': True, 'score': 1,
                                  'msg': '<div></div>'})
        expected_callback_dict = {'xqueue_header': grade_header,
                                  'xqueue_body': xqueue_body}
        MockXQueueRequestHandler.post_to_url.assert_called_with(callback_url,
                                                                expected_callback_dict)
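The constructor and test hunks above show the server's public surface: an HTTPServer subclass that takes a port number and an optional grade_response_dict. A minimal usage sketch, assuming only that surface plus the serve_forever()/shutdown() methods inherited from HTTPServer; the port number and import path are illustrative assumptions.

import threading

from mock_xqueue_server import MockXQueueServer  # hypothetical import path

# Have the fake grader mark every submission incorrect for this run.
server = MockXQueueServer(8027,
                          grade_response_dict={'correct': False, 'score': 0, 'msg': ''})

server_thread = threading.Thread(target=server.serve_forever)
server_thread.daemon = True  # don't block interpreter exit if shutdown() is never reached
server_thread.start()

# ... POST grade requests to http://127.0.0.1:8027 here ...

server.shutdown()  # unblocks serve_forever() so the thread can finish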
lms/djangoapps/courseware/features/problems.feature

@@ -4,7 +4,7 @@ Feature: Answer problems
    I want to answer problems

    Scenario: I can answer a problem correctly
        Given External graders respond "correct"
        And I am viewing a "<ProblemType>" problem
        When I answer a "<ProblemType>" problem "correctly"
        Then My "<ProblemType>" answer is marked "correct"

@@ -21,7 +21,7 @@ Feature: Answer problems
        | code |

    Scenario: I can answer a problem incorrectly
        Given External graders respond "incorrect"
        And I am viewing a "<ProblemType>" problem
        When I answer a "<ProblemType>" problem "incorrectly"
        Then My "<ProblemType>" answer is marked "incorrect"
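Each quoted value in the scenarios above is captured by a regular expression in a lettuce step definition and passed to a Python function; problems.py (below) holds the real steps. A short illustrative sketch of that mapping; the regex and function name here are examples, not the actual edx-platform step.

from lettuce import step, world


@step(u'I answer a "([^"]*)" problem "(correctly|incorrectly)"')
def answer_problem_example(step, problem_type, correctness):
    # problem_type arrives as e.g. "code"; correctness as "correctly" or "incorrectly".
    world.last_answer = (problem_type, correctness)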
lms/djangoapps/courseware/features/problems.py

@@ -80,7 +80,7 @@ PROBLEM_FACTORY_DICT = {
                a1=0
                a2=0
                return (a1+a2)==int(expect)
            """)}},

    'code': {
        'factory': CodeResponseXMLFactory(),
        'kwargs': {

@@ -124,6 +124,7 @@ def view_problem(step, problem_type):
    world.browser.visit(url)


@step(u'External graders respond "([^"]*)"')
def set_external_grader_response(step, correctness):
    assert(correctness in ['correct', 'incorrect'])

@@ -193,14 +194,14 @@ def answer_problem(step, problem_type, correctness):
    elif problem_type == 'code':
        # The fake xqueue server is configured to respond
        # correct / incorrect no matter what we submit.
        # Furthermore, since the inline code response uses
        # JavaScript to make the code display nicely, it's difficult
        # to programatically input text
        # (there's not <textarea> we can just fill text into)
        # For this reason, we submit the initial code in the response
        # (configured in the problem XML above)
        pass

    # Submit the problem
    check_problem(step)

@@ -246,8 +247,8 @@ def assert_answer_mark(step, problem_type, correctness):
        'checkbox': ['span.correct'],
        'string': ['div.correct'],
        'numerical': ['div.correct'],
        'formula': ['div.correct'],
        'script': ['div.correct'],
        'code': ['span.correct'],
    }

    incorrect_selectors = {
        'drop down': ['span.incorrect'],

@@ -256,7 +257,7 @@ def assert_answer_mark(step, problem_type, correctness):
        'checkbox': ['span.incorrect'],
        'string': ['div.incorrect'],
        'numerical': ['div.incorrect'],
        'formula': ['div.incorrect'],
        'script': ['div.incorrect'],
        'code': ['span.incorrect'],
    }
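assert_answer_mark keys lists of CSS selectors by problem type, and the step presumably passes once one of those selectors appears on the rendered problem. A hedged sketch of that lookup using the splinter browser stored on world; the helper name, wait time, and trimmed selector table are illustrative assumptions, not the edx-platform implementation.

from lettuce import world

correct_selectors = {
    'formula': ['div.correct'],
    'code': ['span.correct'],
}


def is_marked_correct(problem_type, wait_time=4):
    # True if any selector registered for this problem type shows up on the page.
    selectors = correct_selectors.get(problem_type, [])
    return any(world.browser.is_element_present_by_css(sel, wait_time=wait_time)
               for sel in selectors)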
lms/djangoapps/courseware/features/xqueue_setup.py

@@ -3,6 +3,7 @@ from lettuce import before, after, world
from django.conf import settings
import threading


@before.all
def setup_mock_xqueue_server():

@@ -24,6 +25,7 @@ def setup_mock_xqueue_server():
    # (and we can shut it down later)
    world.xqueue_server = server


@after.all
def teardown_mock_xqueue_server(total):
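Only fragments of the two hooks are visible in the hunks above. A sketch of what the full setup/teardown pair plausibly looks like, assuming the mock server runs in a background thread; the settings name, port fallback, and import path are assumptions, while the hook names, world.xqueue_server, and the imports come from the diff.

import threading

from django.conf import settings
from lettuce import before, after, world

from mock_xqueue_server import MockXQueueServer  # hypothetical import path


@before.all
def setup_mock_xqueue_server():
    # Start the fake XQueue on a local port before any scenario runs.
    server = MockXQueueServer(getattr(settings, 'XQUEUE_PORT', 8027))  # assumed setting name
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.daemon = True
    server_thread.start()

    # Keep a handle on the server so later steps can reconfigure it
    # (and we can shut it down later)
    world.xqueue_server = server


@after.all
def teardown_mock_xqueue_server(total):
    # lettuce passes the aggregated results as `total`; we only need to stop the server.
    world.xqueue_server.shutdown()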