edx / edx-platform

Commit 725e4908, authored Nov 02, 2014 by Sarina Canelake
Parent: 09c1f524

Remove more pep8 violations

Showing 76 changed files with 355 additions and 253 deletions (+355, -253).
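Most of the hunks below are mechanical PEP 8 / pylint cleanups: symbolic pylint message names instead of numeric codes, double-quoted docstrings, two blank lines between top-level definitions, and long calls re-wrapped with hanging indentation. As a rough illustration of the pattern only (this snippet is not a hunk from the commit; the names are made up):

    # pylint: disable=missing-docstring  (was: # pylint: disable=C0111)
    import json


    def build_payload(answers, question):
        """Return a poll payload as a JSON string."""
        # Long call split with a hanging indent instead of one over-long line.
        return json.dumps({
            'answers': answers,
            'question': question,
        })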
cms/djangoapps/contentstore/features/video.py (+6, -3)
cms/djangoapps/contentstore/tests/test_course_settings.py (+4, -2)
cms/djangoapps/contentstore/tests/test_crud.py (+19, -8)
common/djangoapps/edxmako/management/commands/preprocess_assets.py (+2, -4)
common/lib/capa/capa/safe_exec/tests/test_safe_exec.py (+7, -7)
common/lib/capa/capa/tests/response_xml_factory.py (+10, -8)
common/lib/chem/chem/miller.py (+11, -6)
common/lib/extract_tar.py (+7, -2)
common/lib/symmath/symmath/symmath_check.py (+11, -10)
common/lib/symmath/symmath/test_formula.py (+2, -4)
common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py (+8, -9)
common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py (+8, -8)
common/lib/xmodule/xmodule/poll_module.py (+8, -4)
common/lib/xmodule/xmodule/tests/test_annotatable_module.py (+11, -6)
common/lib/xmodule/xmodule/tests/test_fields.py (+69, -43)
common/lib/xmodule/xmodule/tests/test_html_module.py (+1, -3)
common/lib/xmodule/xmodule/tests/test_util_open_ended.py (+3, -0)
common/lib/xmodule/xmodule/tests/test_xml_module.py (+9, -7)
common/test/acceptance/pages/lms/problem.py (+0, -2)
lms/djangoapps/branding/__init__.py (+1, -0)
lms/djangoapps/branding/tests.py (+1, -1)
lms/djangoapps/branding/views.py (+4, -3)
lms/djangoapps/bulk_email/forms.py (+1, -0)
lms/djangoapps/bulk_email/models.py (+1, -0)
lms/djangoapps/certificates/management/commands/fix_ungraded_certs.py (+10, -6)
lms/djangoapps/certificates/models.py (+2, -1)
lms/djangoapps/certificates/tests/factories.py (+1, -0)
lms/djangoapps/certificates/views.py (+6, -4)
lms/djangoapps/class_dashboard/dashboard_data.py (+1, -0)
lms/djangoapps/courseware/features/video.py (+3, -3)
lms/djangoapps/courseware/management/commands/clean_history.py (+6, -4)
lms/djangoapps/courseware/management/commands/clean_xml.py (+8, -8)
lms/djangoapps/courseware/management/commands/metadata_to_json.py (+5, -2)
lms/djangoapps/courseware/tests/test_about.py (+0, -0)
lms/djangoapps/courseware/tests/test_lti_integration.py (+2, -1)
lms/djangoapps/courseware/tests/test_registration_extra_vars.py (+3, -1)
lms/djangoapps/courseware/tests/test_video_handlers.py (+4, -1)
lms/djangoapps/courseware/tests/test_video_mongo.py (+3, -5)
lms/djangoapps/django_comment_client/forum/views.py (+4, -0)
lms/djangoapps/django_comment_client/helpers.py (+4, -0)
lms/djangoapps/foldit/models.py (+0, -2)
lms/djangoapps/instructor/tests/test_hint_manager.py (+14, -8)
lms/djangoapps/instructor/views/instructor_dashboard.py (+2, -2)
lms/djangoapps/linkedin/management/commands/linkedin_mailusers.py (+1, -3)
lms/djangoapps/mobile_api/users/views.py (+0, -0)
lms/djangoapps/mobile_api/video_outlines/views.py (+0, -0)
lms/djangoapps/open_ended_grading/tests.py (+2, -2)
lms/djangoapps/open_ended_grading/views.py (+4, -1)
lms/djangoapps/shoppingcart/processors/CyberSource.py (+8, -4)
lms/djangoapps/shoppingcart/tests/test_views.py (+1, -4)
lms/djangoapps/student_account/test/test_views.py (+0, -1)
lms/djangoapps/verify_student/ssencrypt.py (+0, -1)
lms/djangoapps/verify_student/tests/test_models.py (+3, -0)
lms/djangoapps/verify_student/views.py (+5, -4)
lms/envs/cms/acceptance.py (+2, -2)
lms/envs/cms/dev.py (+2, -2)
lms/envs/common.py (+1, -1)
lms/envs/content.py (+5, -5)
lms/envs/dev.py (+11, -11)
lms/envs/dev_int.py (+0, -0)
lms/envs/devgroups/courses.py (+0, -0)
lms/envs/devplus.py (+5, -5)
lms/envs/devstack.py (+0, -0)
lms/envs/edx4edx_aws.py (+4, -2)
lms/envs/load_test.py (+6, -5)
lms/envs/sauce.py (+4, -2)
lms/envs/test.py (+2, -2)
lms/lib/comment_client/commentable.py (+1, -0)
lms/lib/comment_client/thread.py (+1, -0)
lms/lib/comment_client/user.py (+2, -3)
lms/lib/xblock/mixin.py (+0, -0)
lms/startup.py (+0, -0)
lms/tests.py (+0, -1)
lms/urls.py (+1, -1)
lms/wsgi.py (+0, -1)
scripts/all-tests.sh (+2, -2)
cms/djangoapps/contentstore/features/video.py

-# pylint: disable=C0111
+# pylint: disable=missing-docstring

 from lettuce import world, step
 from selenium.webdriver.common.keys import Keys

@@ -20,9 +20,10 @@ SELECTORS = {
 # We should wait 300 ms for event handler invocation + 200ms for safety.
 DELAY = 0.5


 @step('youtube stub server (.*) YouTube API')
 def configure_youtube_api(_step, action):
-    action = action.strip()
+    action = action.strip()
     if action == 'proxies':
         world.youtube.config['youtube_api_blocked'] = False
     elif action == 'blocks':

@@ -30,6 +31,7 @@ def configure_youtube_api(_step, action):
     else:
         raise ValueError('Parameter `action` should be one of "proxies" or "blocks".')


 @step('I have created a Video component$')
 def i_created_a_video_component(step):
     step.given('I am in Studio editing a new unit')

@@ -47,6 +49,7 @@ def i_created_a_video_component(step):
     if not world.youtube.config.get('youtube_api_blocked'):
         world.wait_for_visible(SELECTORS['controls'])


 @step('I have created a Video component with subtitles$')
 def i_created_a_video_with_subs(_step):
     _step.given('I have created a Video component with subtitles "OEoXaMPEzfM"')

@@ -221,7 +224,7 @@ def see_a_range_slider_with_proper_range(_step):
 def do_not_see_or_not_button_video(_step, action, button_type):
     world.wait(DELAY)
     world.wait_for_ajax_complete()
-    action = action.strip()
+    action = action.strip()
     button = button_type.strip()
     if action == 'do not':
         assert not world.is_css_present(VIDEO_BUTTONS[button])
cms/djangoapps/contentstore/tests/test_course_settings.py

@@ -608,8 +608,10 @@ class CourseMetadataEditingTest(CourseTestCase):
     def test_correct_http_status(self):
         json_data = json.dumps({
             "advertised_start": {
                 "value": 1,
                 "display_name": "Course Advertised Start Date",
             },
-            "days_early_for_beta": {
-                "value": "supposed to be an integer",
-                "display_name": "Days Early for Beta Users",
-            },
+            "days_early_for_beta": {
+                "value": "supposed to be an integer",
+                "display_name": "Days Early for Beta Users",
+            },
             "advanced_modules": {
                 "value": 1,
                 "display_name": "Advanced Module List",
             },
         })
         response = self.client.ajax_post(self.course_setting_url, json_data)
cms/djangoapps/contentstore/tests/test_crud.py

@@ -66,8 +66,10 @@ class TemplateTests(unittest.TestCase):
         self.assertEqual(index_info['course'], 'course')
         self.assertEqual(index_info['run'], '2014')
-        test_chapter = persistent_factories.ItemFactory.create(display_name='chapter 1', parent_location=test_course.location)
+        test_chapter = persistent_factories.ItemFactory.create(
+            display_name='chapter 1', parent_location=test_course.location
+        )
         self.assertIsInstance(test_chapter, SequenceDescriptor)
         # refetch parent which should now point to child
         test_course = self.split_store.get_course(test_course.id.version_agnostic())

@@ -156,8 +158,10 @@ class TemplateTests(unittest.TestCase):
             course='history', run='doomed', org='edu.harvard',
             display_name='doomed test course',
             user_id='testbot')
-        persistent_factories.ItemFactory.create(display_name='chapter 1', parent_location=test_course.location)
+        persistent_factories.ItemFactory.create(
+            display_name='chapter 1', parent_location=test_course.location
+        )
         id_locator = test_course.id.for_branch(ModuleStoreEnum.BranchName.draft)
         guid_locator = test_course.location.course_agnostic()

@@ -180,10 +184,17 @@ class TemplateTests(unittest.TestCase):
             display_name='history test course',
             user_id='testbot')
-        chapter = persistent_factories.ItemFactory.create(display_name='chapter 1', parent_location=test_course.location, user_id='testbot')
-        sub = persistent_factories.ItemFactory.create(display_name='subsection 1', parent_location=chapter.location, user_id='testbot', category='vertical')
+        chapter = persistent_factories.ItemFactory.create(
+            display_name='chapter 1', parent_location=test_course.location, user_id='testbot'
+        )
+        sub = persistent_factories.ItemFactory.create(
+            display_name='subsection 1', parent_location=chapter.location, user_id='testbot', category='vertical'
+        )
         first_problem = persistent_factories.ItemFactory.create(
             display_name='problem 1', parent_location=sub.location, user_id='testbot', category='problem',
             data="<problem></problem>"
common/djangoapps/edxmako/management/commands/preprocess_assets.py

@@ -16,6 +16,7 @@ from django.conf import settings
 from mako.template import Template
 import textwrap


 class Command(NoArgsCommand):
     """
     Basic management command to preprocess asset template files.

@@ -45,7 +46,6 @@ class Command(NoArgsCommand):
                     self.__preprocess(os.path.join(root, filename), os.path.join(root, outfile))

     def __context(self):
         """
         Return a dict that contains all of the available context

@@ -55,10 +55,9 @@ class Command(NoArgsCommand):
         # TODO: do this with the django-settings-context-processor
         return {
             "FEATURES": settings.FEATURES,
-            "THEME_NAME": getattr(settings, "THEME_NAME", None),
+            "THEME_NAME": getattr(settings, "THEME_NAME", None),
         }

     def __preprocess(self, infile, outfile):
         """
         Run `infile` through the Mako template engine, storing the

@@ -73,4 +72,3 @@ class Command(NoArgsCommand):
                 */
                 """ % infile))
             _outfile.write(Template(filename=str(infile)).render(env=self.__context()))
common/lib/capa/capa/safe_exec/tests/test_safe_exec.py

@@ -168,8 +168,8 @@ class TestSafeExecCaching(unittest.TestCase):
     def test_unicode_submission(self):
         # Check that using non-ASCII unicode does not raise an encoding error.
-        # Try several non-ASCII unicode characters
-        for code in [129, 500, 2 ** 8 - 1, 2 ** 16 - 1]:
+        # Try several non-ASCII unicode characters.
+        for code in [129, 500, 2 ** 8 - 1, 2 ** 16 - 1]:
             code_with_unichr = unicode("# ") + unichr(code)
             try:
                 safe_exec(code_with_unichr, {}, cache=DictCache({}))

@@ -194,7 +194,7 @@ class TestUpdateHash(unittest.TestCase):
         make them different.
         """
-        d1 = {k: 1 for k in "abcdefghijklmnopqrstuvwxyz"}
+        d1 = {k: 1 for k in "abcdefghijklmnopqrstuvwxyz"}
         d2 = dict(d1)
         for i in xrange(10000):
             d2[i] = 1

@@ -216,8 +216,8 @@ class TestUpdateHash(unittest.TestCase):
         self.assertNotEqual(h1, hs1)

     def test_list_ordering(self):
-        h1 = self.hash_obj({'a': [1, 2, 3]})
-        h2 = self.hash_obj({'a': [3, 2, 1]})
+        h1 = self.hash_obj({'a': [1, 2, 3]})
+        h2 = self.hash_obj({'a': [3, 2, 1]})
         self.assertNotEqual(h1, h2)

     def test_dict_ordering(self):

@@ -228,8 +228,8 @@ class TestUpdateHash(unittest.TestCase):
     def test_deep_ordering(self):
         d1, d2 = self.equal_but_different_dicts()
-        o1 = {'a':[1, 2, [d1], 3, 4]}
-        o2 = {'a':[1, 2, [d2], 3, 4]}
+        o1 = {'a': [1, 2, [d1], 3, 4]}
+        o2 = {'a': [1, 2, [d2], 3, 4]}
         h1 = self.hash_obj(o1)
         h2 = self.hash_obj(o2)
         self.assertEqual(h1, h2)
common/lib/capa/capa/tests/response_xml_factory.py

@@ -134,9 +134,11 @@ class ResponseXMLFactory(object):
             len(choice_names) == len(choices)
         """
         # Names of group elements
-        group_element_names = {'checkbox': 'checkboxgroup',
+        group_element_names = {'checkbox': 'checkboxgroup',
                                'radio': 'radiogroup',
-                               'multiple': 'choicegroup'}
+                               'multiple': 'choicegroup'}

         # Retrieve **kwargs
         choices = kwargs.get('choices', [True])

@@ -441,7 +443,6 @@ class FormulaResponseXMLFactory(ResponseXMLFactory):
         sample_str = self._sample_str(sample_dict, num_samples, tolerance)
         response_element.set("samples", sample_str)

         # Set the tolerance
         responseparam_element = etree.SubElement(response_element, "responseparam")
         responseparam_element.set("type", "tolerance")

@@ -487,10 +488,12 @@ class FormulaResponseXMLFactory(ResponseXMLFactory):
         variables = [str(v) for v in sample_dict.keys()]
         low_range_vals = [str(f[0]) for f in sample_dict.values()]
         high_range_vals = [str(f[1]) for f in sample_dict.values()]
-        sample_str = (",".join(sample_dict.keys()) + "@" +
-                      ",".join(low_range_vals) + ":" + ",".join(high_range_vals) + "#" +
-                      str(num_samples))
+        sample_str = (",".join(sample_dict.keys()) + "@" +
+                      ",".join(low_range_vals) + ":" + ",".join(high_range_vals) +
+                      "#" + str(num_samples)
+                      )
         return sample_str

@@ -501,7 +504,6 @@ class ImageResponseXMLFactory(ResponseXMLFactory):
         """ Create the <imageresponse> element."""
         return etree.Element("imageresponse")

     def create_input_element(self, **kwargs):
         """ Create the <imageinput> element.

@@ -764,7 +766,7 @@ class AnnotationResponseXMLFactory(ResponseXMLFactory):
         text_children = [
             {'tag': 'title', 'text': kwargs.get('title', 'super cool annotation')},
             {'tag': 'text', 'text': kwargs.get('text', 'texty text')},
-            {'tag': 'comment', 'text': kwargs.get('comment', 'blah blah erudite comment blah blah')},
+            {'tag': 'comment', 'text': kwargs.get('comment', 'blah blah erudite comment blah blah')},
             {'tag': 'comment_prompt', 'text': kwargs.get('comment_prompt', 'type a commentary below')},
             {'tag': 'tag_prompt', 'text': kwargs.get('tag_prompt', 'select one tag')}
         ]

@@ -772,7 +774,7 @@ class AnnotationResponseXMLFactory(ResponseXMLFactory):
         for child in text_children:
             etree.SubElement(input_element, child['tag']).text = child['text']

-        default_options = [('green', 'correct'),('eggs', 'incorrect'), ('ham', 'partially-correct')]
+        default_options = [('green', 'correct'), ('eggs', 'incorrect'), ('ham', 'partially-correct')]
         options = kwargs.get('options', default_options)
         options_element = etree.SubElement(input_element, 'options')
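The `_sample_str` hunk above rebuilds the `samples` attribute in the form `var1,var2@low1,low2:high1,high2#num`. A minimal standalone sketch of that string construction, assuming a dict mapping variable names to (low, high) ranges (the function name and sample values here are illustrative, not from the factory):

    def build_sample_str(sample_dict, num_samples):
        # e.g. {'x': (1, 10), 'y': (0, 5)} with 20 samples -> "x,y@1,0:10,5#20"
        low_range_vals = [str(f[0]) for f in sample_dict.values()]
        high_range_vals = [str(f[1]) for f in sample_dict.values()]
        return (",".join(sample_dict.keys()) + "@" +
                ",".join(low_range_vals) + ":" + ",".join(high_range_vals) +
                "#" + str(num_samples))


    print(build_sample_str({'x': (1, 10), 'y': (0, 5)}, 20))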
common/lib/chem/chem/miller.py

@@ -96,9 +96,11 @@ def sub_miller(segments):
     '''
     fracts = [segment_to_fraction(segment) for segment in segments]
     common_denominator = reduce(lcm, [fract.denominator for fract in fracts])
-    miller = ([fract.numerator * math.fabs(common_denominator) / fract.denominator for fract in fracts])
-    return '(' + ','.join(map(str, map(decimal.Decimal, miller))) + ')'
+    miller_indices = ([
+        fract.numerator * math.fabs(common_denominator) / fract.denominator for fract in fracts
+    ])
+    return '(' + ','.join(map(str, map(decimal.Decimal, miller_indices))) + ')'


 def miller(points):

@@ -145,10 +147,13 @@ def miller(points):
     O = np.array([0, 0, 0])
     P = points[0]  # point of plane
     Ccs = map(np.array, [[1.0, 0, 0], [0, 1.0, 0], [0, 0, 1.0]])
-    segments = ([np.dot(P - O, N) / np.dot(ort, N) if np.dot(ort, N) != 0 else np.nan for ort in Ccs])
+    segments = ([
+        np.dot(P - O, N) / np.dot(ort, N) if np.dot(ort, N) != 0 else np.nan for ort in Ccs
+    ])
     if any(x == 0 for x in segments):
         # Plane goes through origin.
-        vertices = [  # top:
+        vertices = [
+            # top:
             np.array([1.0, 1.0, 1.0]),
             np.array([0.0, 0.0, 1.0]),
             np.array([1.0, 0.0, 1.0]),
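The `sub_miller` hunk renames the local list `miller` to `miller_indices`, so it no longer shadows the module-level `miller()` function. The underlying operation, scaling a set of intercept fractions by the least common multiple of their denominators to get integer indices, can be sketched on its own like this (a simplified illustration using the standard library, not the module's code):

    from fractions import Fraction
    from functools import reduce
    from math import lcm


    def to_integer_indices(fracts):
        """Scale Fractions by the LCM of their denominators so every entry becomes an integer."""
        common_denominator = reduce(lcm, (f.denominator for f in fracts))
        return [int(f * common_denominator) for f in fracts]


    print(to_integer_indices([Fraction(1, 2), Fraction(1, 3), Fraction(-1, 6)]))  # [3, 2, -1]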
common/lib/extract_tar.py

@@ -9,7 +9,8 @@ from os.path import abspath, realpath, dirname, join as joinpath
 from django.core.exceptions import SuspiciousOperation
 import logging

-log = logging.getLogger(__name__)
+# pylint: disable=C0103
+log = logging.getLogger(__name__)


 def resolved(rpath):
     """

@@ -17,12 +18,14 @@ def resolved(rpath):
     """
     return realpath(abspath(rpath))


 def _is_bad_path(path, base):
     """
     Is (the canonical absolute path of) `path` outside `base`?
     """
     return not resolved(joinpath(base, path)).startswith(base)


 def _is_bad_link(info, base):
     """
     Does the file sym- ord hard-link to files outside `base`?

@@ -31,6 +34,7 @@ def _is_bad_link(info, base):
     tip = resolved(joinpath(base, dirname(info.name)))
     return _is_bad_path(info.linkname, base=tip)


 def safemembers(members):
     """
     Check that all elements of a tar file are safe.

@@ -43,7 +47,7 @@ def safemembers(members):
             log.debug("File %r is blocked (illegal path)", finfo.name)
             raise SuspiciousOperation("Illegal path")
         elif finfo.issym() and _is_bad_link(finfo, base):
-            log.debug("File %r is blocked: Hard link to %r", finfo.name, finfo.linkname)
+            log.debug("File %r is blocked: Hard link to %r", finfo.name, finfo.linkname)
             raise SuspiciousOperation("Hard link")
         elif finfo.islnk() and _is_bad_link(finfo, base):
             log.debug("File %r is blocked: Symlink to %r", finfo.name,

@@ -56,6 +60,7 @@ def safemembers(members):
     return members


 def safetar_extractall(tarf, *args, **kwargs):
     """
     Safe version of `tarf.extractall()`.
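For context, `safetar_extractall` is intended as a drop-in replacement for `TarFile.extractall` that rejects members escaping the target directory. A hedged usage sketch, assuming the extra arguments are forwarded to `extractall` (the import path and the archive/target names below are assumptions, not from this diff):

    import tarfile

    from extract_tar import safetar_extractall  # import path assumed; module shown in this diff

    with tarfile.open("course_export.tar.gz") as tarf:  # hypothetical archive
        # Expected to raise SuspiciousOperation if any member resolves outside the target.
        safetar_extractall(tarf, "/tmp/course_import")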
common/lib/symmath/symmath/symmath_check.py

@@ -21,13 +21,14 @@ log = logging.getLogger(__name__)
 def symmath_check_simple(expect, ans, adict={}, symtab=None, extra_options=None):
-    '''
+    """
     Check a symbolic mathematical expression using sympy.
     The input is an ascii string (not MathML) converted to math using sympy.sympify.
-    '''
+    """

     options = {'__MATRIX__': False, '__ABC__': False, '__LOWER__': False}
-    if extra_options: options.update(extra_options)
+    if extra_options:
+        options.update(extra_options)
     for op in options:  # find options in expect string
         if op in expect:
             expect = expect.replace(op, '')

@@ -145,6 +146,7 @@ def make_error_message(msg):
     msg = '<div class="capa_alert">%s</div>' % msg
     return msg


 def is_within_tolerance(expected, actual, tolerance):
     if expected == 0:
         return (abs(actual) < tolerance)

@@ -158,7 +160,7 @@ def is_within_tolerance(expected, actual, tolerance):
 def symmath_check(expect, ans, dynamath=None, options=None, debug=None, xml=None):
-    '''
+    """
     Check a symbolic mathematical expression using sympy.
     The input may be presentation MathML. Uses formula.

@@ -181,7 +183,7 @@ def symmath_check(expect, ans, dynamath=None, options=None, debug=None, xml=None
      -qubit - passed to my_sympify
      -imaginary - used in formla, presumably to signal to use i as sqrt(-1)?
      -numerical - force numerical comparison.
-    '''
+    """

     msg = ''
     # msg += '<p/>abname=%s' % abname

@@ -208,7 +210,6 @@ def symmath_check(expect, ans, dynamath=None, options=None, debug=None, xml=None
         msg += '<p>Error %s in parsing OUR expected answer "%s"</p>' % (err, expect)
         return {'ok': False, 'msg': make_error_message(msg)}

     ###### Sympy input #######
     # if expected answer is a number, try parsing provided answer as a number also
     try:

@@ -217,8 +218,8 @@ def symmath_check(expect, ans, dynamath=None, options=None, debug=None, xml=None
         fans = None

     # do a numerical comparison if both expected and answer are numbers
-    if (hasattr(fexpect, 'is_number') and fexpect.is_number
-            and hasattr(fans, 'is_number') and fans.is_number):
+    if hasattr(fexpect, 'is_number') and fexpect.is_number \
+            and hasattr(fans, 'is_number') and fans.is_number:
         if is_within_tolerance(fexpect, fans, threshold):
             return {'ok': True, 'msg': msg}
         else:

@@ -236,7 +237,6 @@ def symmath_check(expect, ans, dynamath=None, options=None, debug=None, xml=None
         msg += '<p>You entered: %s</p>' % to_latex(fans)
         return {'ok': True, 'msg': msg}

     ###### PMathML input ######
     # convert mathml answer to formula
     try:

@@ -298,7 +298,8 @@ def symmath_check(expect, ans, dynamath=None, options=None, debug=None, xml=None
                 return {'ok': False, 'msg': make_error_message(msg)}
         except Exception, err:
             msg += "<p>Error %s in comparing expected (a list) and your answer</p>" % str(err).replace('<', '&lt;')
-            if DEBUG: msg += "<p/><pre>%s</pre>" % traceback.format_exc()
+            if DEBUG:
+                msg += "<p/><pre>%s</pre>" % traceback.format_exc()
             return {'ok': False, 'msg': make_error_message(msg)}

     #diff = (fexpect-fsym).simplify()
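The numeric-comparison hunk above relies on `is_within_tolerance`; only its zero-expected branch is visible in this diff. A minimal sketch of the overall pattern, assuming the nonzero case uses a relative tolerance (that assumption is not confirmed by the hunk):

    def is_within_tolerance(expected, actual, tolerance):
        """Return True if `actual` is close enough to `expected`."""
        if expected == 0:
            # Absolute comparison, as shown in the diff above.
            return abs(actual) < tolerance
        # Relative comparison for the nonzero case (assumption, not from the diff).
        return abs(abs(actual - expected) / expected) < tolerance


    assert is_within_tolerance(0, 0.0005, 0.001)
    assert is_within_tolerance(10.0, 10.02, 0.01)
    assert not is_within_tolerance(10.0, 11.0, 0.01)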
common/lib/symmath/symmath/test_formula.py

 """
 Tests of symbolic math
 """

 import unittest
 import formula
 import re
 from lxml import etree


 def stripXML(xml):
     xml = xml.replace('\n', '')
     xml = re.sub(r'\> +\<', '><', xml)
     return xml


 class FormulaTest(unittest.TestCase):
     # for readability later
     mathml_start = '<math xmlns="http://www.w3.org/1998/Math/MathML"><mstyle displaystyle="true">'

@@ -41,7 +41,6 @@ class FormulaTest(unittest.TestCase):
         # success?
         self.assertEqual(test, expected)

     def test_fix_simple_superscripts(self):
         expr = '''
         <msup>

@@ -91,7 +90,6 @@ class FormulaTest(unittest.TestCase):
         # success?
         self.assertEqual(test, expected)

     def test_fix_msubsup(self):
         expr = '''
         <msubsup>
common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py

@@ -257,7 +257,6 @@ class CombinedOpenEndedV1Module():
         """
         return all(self.is_initial_child_state(child) for child in task_state)

     def states_sort_key(self, idx_task_states):
         """
         Return a key for sorting a list of indexed task_states, by how far the student got

@@ -544,8 +543,8 @@ class CombinedOpenEndedV1Module():
         last_response_data = self.get_last_response(self.current_task_number - 1)
         current_response_data = self.get_current_attributes(self.current_task_number)

-        if (current_response_data['min_score_to_attempt'] > last_response_data['score']
-                or current_response_data['max_score_to_attempt'] < last_response_data['score']):
+        if current_response_data['min_score_to_attempt'] > last_response_data['score'] or \
+                current_response_data['max_score_to_attempt'] < last_response_data['score']:
             self.state = self.DONE
             self.ready_to_reset = True

@@ -818,7 +817,7 @@ class CombinedOpenEndedV1Module():
             log.error("Invalid response from grading server for location {0} and student {1}".format(self.location, student_id))
             error_message = "Received invalid response from the graders. Please notify course staff."
             return success, allowed_to_submit, error_message
-        if count_graded >= count_required or count_available == 0:
+        if count_graded >= count_required or count_available == 0:
             error_message = ""
             return success, allowed_to_submit, error_message
         else:

@@ -853,7 +852,7 @@ class CombinedOpenEndedV1Module():
         contexts = []
         rubric_number = self.current_task_number
         if self.ready_to_reset:
-            rubric_number += 1
+            rubric_number += 1
         response = self.get_last_response(rubric_number)
         score_length = len(response['grader_types'])
         for z in xrange(score_length):

@@ -861,7 +860,7 @@ class CombinedOpenEndedV1Module():
             try:
                 feedback = response['feedback_dicts'][z].get('feedback', '')
             except TypeError:
-                return {'success': False}
+                return {'success': False}
             rubric_scores = [[response['rubric_scores'][z]]]
             grader_types = [[response['grader_types'][z]]]
             feedback_items = [[response['feedback_items'][z]]]

@@ -879,14 +878,14 @@ class CombinedOpenEndedV1Module():
                 # That longer string appears when a user is viewing a graded rubric
                 # returned from one of the graders of their openended response problem.
                 'task_name': ugettext('Scored rubric'),
-                'feedback': feedback
+                'feedback': feedback
             })

         context = {
             'results': contexts,
         }
         html = self.system.render_template('{0}/combined_open_ended_results.html'.format(self.TEMPLATE_DIR), context)
-        return {'html': html, 'success': True, 'hide_reset': False}
+        return {'html': html, 'success': True, 'hide_reset': False}

     def get_legend(self, _data):
         """

@@ -978,7 +977,7 @@ class CombinedOpenEndedV1Module():
                 max_number_of_attempts=self.max_attempts)
             }
-        self.student_attempts += 1
+        self.student_attempts += 1
         self.state = self.INITIAL
         self.ready_to_reset = False
         for i in xrange(len(self.task_xml)):
common/lib/xmodule/xmodule/open_ended_grading_classes/openendedchild.py

@@ -245,7 +245,7 @@ class OpenEndedChild(object):
         Replaces "\n" newlines with <br/>
         """
         retv = re.sub(r'</p>$', '', re.sub(r'^<p>', '', html))
-        return re.sub("\n", "<br/>", retv)
+        return re.sub("\n", "<br/>", retv)

     def new_history_entry(self, answer):
         """

@@ -333,13 +333,13 @@ class OpenEndedChild(object):
                 previous_answer = latest
             else:
                 previous_answer = ""
-            previous_answer = previous_answer.replace("<br/>", "\n").replace("<br>", "\n")
+            previous_answer = previous_answer.replace("<br/>", "\n").replace("<br>", "\n")
         else:
             if latest is not None and len(latest) > 0:
                 previous_answer = latest
             else:
                 previous_answer = ""
-            previous_answer = previous_answer.replace("\n", "<br/>")
+            previous_answer = previous_answer.replace("\n", "<br/>")

         return previous_answer

@@ -439,10 +439,10 @@ class OpenEndedChild(object):
         image_tag = ""
         # Ensure that a valid file was uploaded.
-        if ('valid_files_attached' in data
-                and data['valid_files_attached'] in ['true', '1', True]
-                and data['student_file'] is not None
-                and len(data['student_file']) > 0):
+        if 'valid_files_attached' in data and \
+                data['valid_files_attached'] in ['true', '1', True] and \
+                data['student_file'] is not None and \
+                len(data['student_file']) > 0:
             has_file_to_upload = True
             student_file = data['student_file'][0]

@@ -521,7 +521,7 @@ class OpenEndedChild(object):
         # Find all links in the string.
         links = re.findall(r'(https?://\S+)', string)
-        if len(links) > 0:
+        if len(links) > 0:
             has_link = True

         # Autolink by wrapping links in anchor tags.
common/lib/xmodule/xmodule/poll_module.py

@@ -42,8 +42,10 @@ class PollModule(PollFields, XModule):
     """Poll Module"""
     js = {
         'coffee': [resource_string(__name__, 'js/src/javascript_loader.coffee')],
-        'js': [resource_string(__name__, 'js/src/poll/poll.js'),
-               resource_string(__name__, 'js/src/poll/poll_main.js')]
+        'js': [
+            resource_string(__name__, 'js/src/poll/poll.js'),
+            resource_string(__name__, 'js/src/poll/poll_main.js')
+        ]
     }
     css = {'scss': [resource_string(__name__, 'css/poll/display.scss')]}
     js_module_name = "Poll"

@@ -127,13 +129,15 @@ class PollModule(PollFields, XModule):
             answers_to_json[answer['id']] = cgi.escape(answer['text'])
         self.poll_answers = temp_poll_answers

-        return json.dumps({'answers': answers_to_json,
+        return json.dumps({
+            'answers': answers_to_json,
             'question': cgi.escape(self.question),
             # to show answered poll after reload:
             'poll_answer': self.poll_answer,
             'poll_answers': self.poll_answers if self.voted else {},
             'total': sum(self.poll_answers.values()) if self.voted else 0,
-            'reset': str(self.descriptor.xml_attributes.get('reset', 'true')).lower()})
+            'reset': str(self.descriptor.xml_attributes.get('reset', 'true')).lower()
+        })


 class PollDescriptor(PollFields, MakoModuleDescriptor, XmlDescriptor):
common/lib/xmodule/xmodule/tests/test_annotatable_module.py

@@ -12,6 +12,7 @@ from opaque_keys.edx.locations import Location
 from . import get_test_system


 class AnnotatableModuleTestCase(unittest.TestCase):
     sample_xml = '''
         <annotatable display_name="Iliad">

@@ -42,7 +43,7 @@ class AnnotatableModuleTestCase(unittest.TestCase):
         el = etree.fromstring('<annotation title="bar" body="foo" problem="0">test</annotation>')

         expected_attr = {
-            'data-comment-body': {'value': 'foo', '_delete': 'body'},
+            'data-comment-body': {'value': 'foo', '_delete': 'body'},
             'data-comment-title': {'value': 'bar', '_delete': 'title'},
             'data-problem-id': {'value': '0', '_delete': 'problem'}
         }

@@ -56,7 +57,7 @@ class AnnotatableModuleTestCase(unittest.TestCase):
         xml = '<annotation title="x" body="y" problem="0">test</annotation>'
         el = etree.fromstring(xml)

-        expected_attr = {'class': {'value': 'annotatable-span highlight'}}
+        expected_attr = {'class': {'value': 'annotatable-span highlight'}}
         actual_attr = self.annotatable._get_annotation_class_attr(0, el)

         self.assertIsInstance(actual_attr, dict)

@@ -69,9 +70,11 @@ class AnnotatableModuleTestCase(unittest.TestCase):
             el = etree.fromstring(xml.format(highlight=color))
             value = 'annotatable-span highlight highlight-{highlight}'.format(highlight=color)

-            expected_attr = {'class': {
+            expected_attr = {'class': {
                 'value': value,
-                '_delete': 'highlight'}
+                '_delete': 'highlight'}
             }
             actual_attr = self.annotatable._get_annotation_class_attr(0, el)

@@ -83,9 +86,11 @@ class AnnotatableModuleTestCase(unittest.TestCase):
         for invalid_color in ['rainbow', 'blink', 'invisible', '', None]:
             el = etree.fromstring(xml.format(highlight=invalid_color))

-            expected_attr = {'class': {
+            expected_attr = {'class': {
                 'value': 'annotatable-span highlight',
-                '_delete': 'highlight'}
+                '_delete': 'highlight'}
             }
             actual_attr = self.annotatable._get_annotation_class_attr(0, el)
common/lib/xmodule/xmodule/tests/test_fields.py

@@ -11,46 +11,60 @@ class DateTest(unittest.TestCase):
     date = Date()

     def compare_dates(self, dt1, dt2, expected_delta):
-        self.assertEqual(dt1 - dt2, expected_delta, str(dt1) + "-" + str(dt2) + "!=" + str(expected_delta))
+        self.assertEqual(dt1 - dt2, expected_delta,
+                         str(dt1) + "-" + str(dt2) + "!=" + str(expected_delta)
+                         )

     def test_from_json(self):
-        '''Test conversion from iso compatible date strings to struct_time'''
+        """Test conversion from iso compatible date strings to struct_time"""
         self.compare_dates(
             DateTest.date.from_json("2013-01-01"),
             DateTest.date.from_json("2012-12-31"),
-            datetime.timedelta(days=1))
+            datetime.timedelta(days=1)
+        )
         self.compare_dates(
             DateTest.date.from_json("2013-01-01T00"),
             DateTest.date.from_json("2012-12-31T23"),
-            datetime.timedelta(hours=1))
+            datetime.timedelta(hours=1)
+        )
         self.compare_dates(
             DateTest.date.from_json("2013-01-01T00:00"),
             DateTest.date.from_json("2012-12-31T23:59"),
-            datetime.timedelta(minutes=1))
+            datetime.timedelta(minutes=1)
+        )
         self.compare_dates(
             DateTest.date.from_json("2013-01-01T00:00:00"),
             DateTest.date.from_json("2012-12-31T23:59:59"),
-            datetime.timedelta(seconds=1))
+            datetime.timedelta(seconds=1)
+        )
         self.compare_dates(
             DateTest.date.from_json("2013-01-01T00:00:00Z"),
             DateTest.date.from_json("2012-12-31T23:59:59Z"),
-            datetime.timedelta(seconds=1))
+            datetime.timedelta(seconds=1)
+        )
         self.compare_dates(
             DateTest.date.from_json("2012-12-31T23:00:01-01:00"),
             DateTest.date.from_json("2013-01-01T00:00:00+01:00"),
-            datetime.timedelta(hours=1, seconds=1))
+            datetime.timedelta(hours=1, seconds=1)
+        )

     def test_enforce_type(self):
         self.assertEqual(DateTest.date.enforce_type(None), None)
         self.assertEqual(DateTest.date.enforce_type(""), None)
-        self.assertEqual(DateTest.date.enforce_type("2012-12-31T23:00:01"),
-                         datetime.datetime(2012, 12, 31, 23, 0, 1, tzinfo=UTC()))
-        self.assertEqual(DateTest.date.enforce_type(1234567890000),
-                         datetime.datetime(2009, 2, 13, 23, 31, 30, tzinfo=UTC()))
-        self.assertEqual(DateTest.date.enforce_type(datetime.datetime(2014, 5, 9, 21, 1, 27, tzinfo=UTC())),
-                         datetime.datetime(2014, 5, 9, 21, 1, 27, tzinfo=UTC()))
+        self.assertEqual(DateTest.date.enforce_type("2012-12-31T23:00:01"),
+                         datetime.datetime(2012, 12, 31, 23, 0, 1, tzinfo=UTC())
+                         )
+        self.assertEqual(DateTest.date.enforce_type(1234567890000),
+                         datetime.datetime(2009, 2, 13, 23, 31, 30, tzinfo=UTC())
+                         )
+        self.assertEqual(DateTest.date.enforce_type(datetime.datetime(2014, 5, 9, 21, 1, 27, tzinfo=UTC())),
+                         datetime.datetime(2014, 5, 9, 21, 1, 27, tzinfo=UTC())
+                         )
         with self.assertRaises(TypeError):
             DateTest.date.enforce_type([1])

@@ -64,10 +78,12 @@ class DateTest(unittest.TestCase):
         current = datetime.datetime.today()
         self.assertEqual(
             datetime.datetime(current.year, 3, 12, 12, tzinfo=UTC()),
-            DateTest.date.from_json("March 12 12:00"))
+            DateTest.date.from_json("March 12 12:00")
+        )
         self.assertEqual(
             datetime.datetime(current.year, 12, 4, 16, 30, tzinfo=UTC()),
-            DateTest.date.from_json("December 4 16:30"))
+            DateTest.date.from_json("December 4 16:30")
+        )
         self.assertIsNone(DateTest.date.from_json("12 12:00"))

     def test_non_std_from_json(self):

@@ -76,27 +92,29 @@ class DateTest(unittest.TestCase):
         """
         now = datetime.datetime.now(UTC())
         delta = now - datetime.datetime.fromtimestamp(0, UTC())
-        self.assertEqual(DateTest.date.from_json(delta.total_seconds() * 1000),
-                         now)
+        self.assertEqual(DateTest.date.from_json(delta.total_seconds() * 1000),  # pylint: disable=maybe-no-member
+                         now)
         yesterday = datetime.datetime.now(UTC()) - datetime.timedelta(days=-1)
         self.assertEqual(DateTest.date.from_json(yesterday), yesterday)

     def test_to_json(self):
-        '''
+        """
         Test converting time reprs to iso dates
-        '''
+        """
         self.assertEqual(
-            DateTest.date.to_json(datetime.datetime.strptime("2012-12-31T23:59:59Z", "%Y-%m-%dT%H:%M:%SZ")),
-            "2012-12-31T23:59:59Z")
+            DateTest.date.to_json(datetime.datetime.strptime("2012-12-31T23:59:59Z", "%Y-%m-%dT%H:%M:%SZ")),
+            "2012-12-31T23:59:59Z"
+        )
         self.assertEqual(
-            DateTest.date.to_json(DateTest.date.from_json("2012-12-31T23:59:59Z")),
-            "2012-12-31T23:59:59Z")
+            DateTest.date.to_json(DateTest.date.from_json("2012-12-31T23:59:59Z")),
+            "2012-12-31T23:59:59Z"
+        )
         self.assertEqual(
-            DateTest.date.to_json(DateTest.date.from_json("2012-12-31T23:00:01-01:00")),
-            "2012-12-31T23:00:01-01:00")
+            DateTest.date.to_json(DateTest.date.from_json("2012-12-31T23:00:01-01:00")),
+            "2012-12-31T23:00:01-01:00"
+        )
         with self.assertRaises(TypeError):
             DateTest.date.to_json('2012-12-31T23:00:01-01:00')

@@ -117,11 +135,14 @@ class TimedeltaTest(unittest.TestCase):
     def test_enforce_type(self):
         self.assertEqual(TimedeltaTest.delta.enforce_type(None), None)
-        self.assertEqual(TimedeltaTest.delta.enforce_type(datetime.timedelta(days=1, seconds=46799)),
-                         datetime.timedelta(days=1, seconds=46799))
-        self.assertEqual(TimedeltaTest.delta.enforce_type('1 day 46799 seconds'),
-                         datetime.timedelta(days=1, seconds=46799))
+        self.assertEqual(
+            TimedeltaTest.delta.enforce_type(datetime.timedelta(days=1, seconds=46799)),
+            datetime.timedelta(days=1, seconds=46799)
+        )
+        self.assertEqual(
+            TimedeltaTest.delta.enforce_type('1 day 46799 seconds'),
+            datetime.timedelta(days=1, seconds=46799)
+        )
         with self.assertRaises(TypeError):
             TimedeltaTest.delta.enforce_type([1])

@@ -137,8 +158,10 @@ class TimeInfoTest(unittest.TestCase):
         due_date = datetime.datetime(2000, 4, 14, 10, tzinfo=UTC())
         grace_pd_string = '1 day 12 hours 59 minutes 59 seconds'
         timeinfo = TimeInfo(due_date, grace_pd_string)
-        self.assertEqual(timeinfo.close_date, due_date + Timedelta().from_json(grace_pd_string))
+        self.assertEqual(timeinfo.close_date,
+                         due_date + Timedelta().from_json(grace_pd_string)
+                         )


 class RelativeTimeTest(unittest.TestCase):

@@ -168,11 +191,14 @@ class RelativeTimeTest(unittest.TestCase):
     def test_enforce_type(self):
         self.assertEqual(RelativeTimeTest.delta.enforce_type(None), None)
-        self.assertEqual(RelativeTimeTest.delta.enforce_type(datetime.timedelta(days=1, seconds=46799)),
-                         datetime.timedelta(days=1, seconds=46799))
-        self.assertEqual(RelativeTimeTest.delta.enforce_type('0:05:07'),
-                         datetime.timedelta(seconds=307))
+        self.assertEqual(
+            RelativeTimeTest.delta.enforce_type(datetime.timedelta(days=1, seconds=46799)),
+            datetime.timedelta(days=1, seconds=46799)
+        )
+        self.assertEqual(
+            RelativeTimeTest.delta.enforce_type('0:05:07'),
+            datetime.timedelta(seconds=307)
+        )
         with self.assertRaises(TypeError):
             RelativeTimeTest.delta.enforce_type([1])
common/lib/xmodule/xmodule/tests/test_html_module.py

@@ -7,6 +7,7 @@ from xmodule.html_module import HtmlModule
 from . import get_test_system


 class HtmlModuleSubstitutionTestCase(unittest.TestCase):
     descriptor = Mock()

@@ -17,7 +18,6 @@ class HtmlModuleSubstitutionTestCase(unittest.TestCase):
         module = HtmlModule(self.descriptor, module_system, field_data, Mock())
         self.assertEqual(module.get_html(), str(module_system.anonymous_student_id))

     def test_substitution_without_magic_string(self):
         sample_xml = '''
             <html>

@@ -29,7 +29,6 @@ class HtmlModuleSubstitutionTestCase(unittest.TestCase):
         module = HtmlModule(self.descriptor, module_system, field_data, Mock())
         self.assertEqual(module.get_html(), sample_xml)

     def test_substitution_without_anonymous_student_id(self):
         sample_xml = '''
             %%USER_ID%%
         '''
         field_data = DictFieldData({'data': sample_xml})

@@ -37,4 +36,3 @@ class HtmlModuleSubstitutionTestCase(unittest.TestCase):
         module_system.anonymous_student_id = None
         module = HtmlModule(self.descriptor, module_system, field_data, Mock())
         self.assertEqual(module.get_html(), sample_xml)
common/lib/xmodule/xmodule/tests/test_util_open_ended.py

@@ -20,6 +20,7 @@ S3_INTERFACE = {
     "storage_bucket_name": "",
 }


 class MockS3Key(object):
     """
     Mock an S3 Key object from boto. Used for file upload testing.

@@ -96,6 +97,7 @@ class DummyModulestore(object):
         descriptor.xmodule_runtime = self.get_module_system(descriptor)
         return descriptor


 def serialize_child_history(task_state):
     """
     To json serialize feedback and post_assessment in child_history of task state.

@@ -107,6 +109,7 @@ def serialize_child_history(task_state):
             attempt["post_assessment"]["feedback"] = json.dumps(attempt["post_assessment"].get("feedback"))
             task_state["child_history"][i]["post_assessment"] = json.dumps(attempt["post_assessment"])


 def serialize_open_ended_instance_state(json_str):
     """
     To json serialize task_states and old_task_states in instance state.
common/lib/xmodule/xmodule/tests/test_xml_module.py

@@ -132,7 +132,6 @@ class InheritingFieldDataTest(unittest.TestCase):
         self.assertEqual(child.not_inherited, "nothing")


 class EditableMetadataFieldsTest(unittest.TestCase):
     def test_display_name_field(self):
         editable_fields = self.get_xml_editable_fields(DictFieldData({}))

@@ -331,7 +330,6 @@ class TestDeserializeInteger(TestDeserialize):
         # 2.78 can be converted to int, so the string will be deserialized
         self.assertDeserializeEqual(-2.78, '-2.78')

     def test_deserialize_unsupported_types(self):
         self.assertDeserializeEqual('[3]', '[3]')
         # '2.78' cannot be converted to int, so input value is returned

@@ -415,7 +413,7 @@ class TestDeserializeAny(TestDeserialize):
     def test_deserialize(self):
         self.assertDeserializeEqual('hAlf', '"hAlf"')
         self.assertDeserializeEqual('false', '"false"')
-        self.assertDeserializeEqual({'bar': 'hat', 'frog': 'green'}, '{"bar": "hat", "frog": "green"}')
+        self.assertDeserializeEqual({'bar': 'hat', 'frog': 'green'}, '{"bar": "hat", "frog": "green"}')
         self.assertDeserializeEqual([3.5, 5.6], '[3.5, 5.6]')
         self.assertDeserializeEqual('[', '[')
         self.assertDeserializeEqual(False, 'false')

@@ -457,10 +455,14 @@ class TestDeserializeTimedelta(TestDeserialize):
     test_field = Timedelta

     def test_deserialize(self):
-        self.assertDeserializeEqual('1 day 12 hours 59 minutes 59 seconds',
-                                    '1 day 12 hours 59 minutes 59 seconds')
-        self.assertDeserializeEqual('1 day 12 hours 59 minutes 59 seconds',
-                                    '"1 day 12 hours 59 minutes 59 seconds"')
+        self.assertDeserializeEqual(
+            '1 day 12 hours 59 minutes 59 seconds',
+            '1 day 12 hours 59 minutes 59 seconds'
+        )
+        self.assertDeserializeEqual(
+            '1 day 12 hours 59 minutes 59 seconds',
+            '"1 day 12 hours 59 minutes 59 seconds"'
+        )
         self.assertDeserializeNonString()
common/test/acceptance/pages/lms/problem.py

@@ -46,5 +46,3 @@ class ProblemPage(PageObject):
         Is there a "correct" status showing?
         """
         return self.q(css="div.problem div.capa_inputtype.textline div.correct p.status").is_present()
lms/djangoapps/branding/__init__.py

@@ -5,6 +5,7 @@ from django.conf import settings
 from opaque_keys.edx.locations import SlashSeparatedCourseKey
 from microsite_configuration import microsite


 def get_visible_courses():
     """
     Return the set of CourseDescriptors that should be visible in this branded instance
lms/djangoapps/branding/tests.py

@@ -33,7 +33,7 @@ class AnonymousIndexPageTest(ModuleStoreTestCase):
         self.factory = RequestFactory()
         self.course = CourseFactory.create(
             days_early_for_beta=5,
-            enrollment_start=datetime.datetime.now(UTC) + datetime.timedelta(days=3),
+            enrollment_start=datetime.datetime.now(UTC) + datetime.timedelta(days=3),
             user_id=self.user.id,
         )
lms/djangoapps/branding/views.py

@@ -44,9 +44,10 @@ def index(request):
     # courses in his/her dashboard. Otherwise UX is a bit cryptic.
     # In this case, we want to have the user stay on a course catalog
     # page to make it easier to browse for courses (and register)
-    if microsite.get_value('ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER', \
-        settings.FEATURES.get('ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER', True)) or \
-        get_course_enrollments(request.user):
+    if microsite.get_value(
+        'ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER',
+        settings.FEATURES.get('ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER', True)
+    ) or get_course_enrollments(request.user):
         return redirect(reverse('dashboard'))
...
lms/djangoapps/bulk_email/forms.py
View file @
725e4908
...
...
@@ -21,6 +21,7 @@ class CourseEmailTemplateForm(forms.ModelForm): # pylint: disable=R0924
"""Form providing validation of CourseEmail templates."""
name
=
forms
.
CharField
(
required
=
False
)
class
Meta
:
# pylint: disable=C0111
model
=
CourseEmailTemplate
fields
=
(
'html_template'
,
'plain_template'
,
'name'
)
...
...
lms/djangoapps/bulk_email/models.py

@@ -130,6 +130,7 @@ class CourseEmail(Email):
         """
         return CourseEmailTemplate.get_template(name=self.template_name)


 class Optout(models.Model):
     """
     Stores users that have opted out of receiving emails from a course.
lms/djangoapps/certificates/management/commands/fix_ungraded_certs.py

@@ -13,21 +13,25 @@ class Command(BaseCommand):
     """
     option_list = BaseCommand.option_list + (
-        make_option('-n', '--noop',
+        make_option('-n', '--noop',
                     action='store_true',
                     dest='noop',
                     default=False,
-                    help="Print but do not update the GeneratedCertificate table"),
-        make_option('-c', '--course',
+                    help="Print but do not update the GeneratedCertificate table"),
+        make_option('-c', '--course',
                     metavar='COURSE_ID',
                     dest='course',
                     default=False,
-                    help='Grade ungraded users for this course'),
+                    help='Grade ungraded users for this course'),
     )

     def handle(self, *args, **options):
         course_id = options['course']
         print "Fetching ungraded students for {0}".format(course_id)
         ungraded = GeneratedCertificate.objects.filter(
lms/djangoapps/certificates/models.py

@@ -49,7 +49,6 @@ Eligibility:
     If the user and course is present in the certificate whitelist table
     then the student will be issued a certificate regardless of his grade,
     unless he has allow_certificate set to False.
 """

@@ -64,6 +63,7 @@ class CertificateStatuses(object):
     restricted = 'restricted'
     unavailable = 'unavailable'


 class CertificateWhitelist(models.Model):
     """
     Tracks students who are whitelisted, all users

@@ -117,6 +117,7 @@ class GeneratedCertificate(models.Model):
         return None


 def certificate_status_for_student(student, course_id):
     '''
     This returns a dictionary with a key for status, and other information.
lms/djangoapps/certificates/tests/factories.py

@@ -4,6 +4,7 @@ from opaque_keys.edx.locations import SlashSeparatedCourseKey
 from certificates.models import GeneratedCertificate, CertificateStatuses


 # Factories don't have __init__ methods, and are self documenting
 # pylint: disable=W0232
 class GeneratedCertificateFactory(DjangoModelFactory):
lms/djangoapps/certificates/views.py

@@ -92,7 +92,6 @@ def update_certificate(request):
             # HTTP error (reason=error(32, 'Broken pipe'), filename=None) :
             # certificate_agent.py:175
             cert.error_reason = xqueue_body['error_reason']

         else:
             if cert.status in [status.generating, status.regenerating]:

@@ -105,10 +104,13 @@ def update_certificate(request):
             else:
                 logger.critical('Invalid state for cert update: {0}'.format(cert.status))
-                return HttpResponse(json.dumps({
+                return HttpResponse(json.dumps({
                     'return_code': 1,
-                    'content': 'invalid cert status'}),
-                    mimetype='application/json')
+                    'content': 'invalid cert status'}),
+                    mimetype='application/json'
+                )

         dog_stats_api.increment(XQUEUE_METRIC_NAME, tags=[
             u'action:update_certificate',
lms/djangoapps/class_dashboard/dashboard_data.py

@@ -17,6 +17,7 @@ from opaque_keys.edx.locations import Location
 # Used to limit the length of list displayed to the screen.
 MAX_SCREEN_LIST_LENGTH = 250


 def get_problem_grade_distribution(course_id):
     """
     Returns the grade distribution per problem for the course
lms/djangoapps/courseware/features/video.py

@@ -105,6 +105,7 @@ class RequestHandlerWithSessionId(object):
             return True
         return False


 def get_metadata(parent_location, player_mode, data, display_name='Video'):
     kwargs = {
         'parent_location': parent_location,

@@ -316,7 +317,7 @@ def reload_the_page_with_video(_step):
 @step('youtube stub server (.*) YouTube API')
 def configure_youtube_api(_step, action):
-    action = action.strip()
+    action = action.strip()
     if action == 'proxies':
         world.youtube.config['youtube_api_blocked'] = False
     elif action == 'blocks':

@@ -569,7 +570,7 @@ def video_alignment(_step, transcript_visibility):
     set_window_dimensions(300, 600)
     real, expected = get_all_dimensions()
-    width = round(100 * real['width'] / expected['width']) == wrapper_width
+    width = round(100 * real['width'] / expected['width']) == wrapper_width

     set_window_dimensions(600, 300)
     real, expected = get_all_dimensions()

@@ -662,4 +663,3 @@ def i_see_active_button(_step, button, state):
         assert world.css_has_class(selector, 'active')
     else:
         assert not world.css_has_class(selector, 'active')
lms/djangoapps/courseware/management/commands/clean_history.py

@@ -160,11 +160,11 @@ class StudentModuleHistoryCleaner(object):
         """
         start = self.next_student_module_id
-        for smid in range(start, start + batch_size):
+        for smid in range(start, start + batch_size):
             if smid > self.last_student_module_id:
                 break
             yield smid
-            self.next_student_module_id = smid + 1
+            self.next_student_module_id = smid + 1

     def get_history_for_student_modules(self, student_module_id):
         """

@@ -177,7 +177,8 @@ class StudentModuleHistoryCleaner(object):
         """
         cursor = connection.cursor()
-        cursor.execute("""
+        cursor.execute(
+            """
             SELECT id, created FROM courseware_studentmodulehistory
             WHERE student_module_id = %s
             ORDER BY created, id

@@ -196,7 +197,8 @@ class StudentModuleHistoryCleaner(object):
         """
         assert ids_to_delete
         cursor = connection.cursor()
-        cursor.execute("""
+        cursor.execute(
+            """
             DELETE FROM courseware_studentmodulehistory
             WHERE id IN ({ids})
             """.format(ids=",".join(str(i) for i in ids_to_delete))
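The first `clean_history` hunk reformats a generator that hands out StudentModule ids in fixed-size batches while remembering where to resume. Stripped of the database specifics, the control flow is roughly as follows (a sketch; the attribute and method names follow the hunk, the class name `BatchWalker` is made up):

    class BatchWalker(object):
        """Yield ids in batches of `batch_size`, stopping at `last_student_module_id`."""

        def __init__(self, first_id, last_id):
            self.next_student_module_id = first_id
            self.last_student_module_id = last_id

        def module_ids_to_check(self, batch_size):
            start = self.next_student_module_id
            for smid in range(start, start + batch_size):
                if smid > self.last_student_module_id:
                    break
                yield smid
                self.next_student_module_id = smid + 1


    walker = BatchWalker(1, 7)
    print(list(walker.module_ids_to_check(batch_size=3)))  # [1, 2, 3]
    print(list(walker.module_ids_to_check(batch_size=3)))  # [4, 5, 6]
    print(list(walker.module_ids_to_check(batch_size=3)))  # [7]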
lms/djangoapps/courseware/management/commands/clean_xml.py

@@ -12,7 +12,7 @@ from xmodule.modulestore.xml import XMLModuleStore
 def traverse_tree(course):
-    '''Load every descriptor in course. Return bool success value.'''
+    """Load every descriptor in course. Return bool success value."""
     queue = [course]
     while len(queue) > 0:
         node = queue.pop()

@@ -56,9 +56,11 @@ def import_with_checks(course_dir, verbose=True):
     # No default class--want to complain if it doesn't find plugins for any
     # module.
-    modulestore = XMLModuleStore(data_dir,
+    modulestore = XMLModuleStore(data_dir,
                                  default_class=None,
-                                 course_dirs=course_dirs)
+                                 course_dirs=course_dirs)

     def str_of_err(tpl):
         (msg, exc_str) = tpl

@@ -83,8 +85,7 @@ def import_with_checks(course_dir, verbose=True):
         print "=" * 40
         print '\n'
-        #print course
+        # print course

     validators = (traverse_tree,)

@@ -96,7 +97,6 @@ def import_with_checks(course_dir, verbose=True):
         print 'Running {0}'.format(validate.__name__)
         all_ok = validate(course) and all_ok

     if all_ok:
         print 'Course passes all checks!'
     else:

@@ -105,7 +105,7 @@ def import_with_checks(course_dir, verbose=True):
 def check_roundtrip(course_dir):
-    '''Check that import->export leaves the course the same'''
+    """Check that import->export leaves the course the same"""
     print "====== Roundtrip import ======="
     (ok, course) = import_with_checks(course_dir)

@@ -135,7 +135,6 @@ def clean_xml(course_dir, export_dir, force):
         print "Did NOT export"

 class Command(BaseCommand):
     help = """Imports specified course.xml, validate it, then exports in
 a canonical format.

@@ -145,6 +144,7 @@ Usage: clean_xml PATH-TO-COURSE-DIR PATH-TO-OUTPUT-DIR [force]
 If 'force' is specified as the last argument, exports even if there
 were import errors.
 """

     def handle(self, *args, **options):
         n = len(args)
         if n < 2 or n > 3:
lms/djangoapps/courseware/management/commands/metadata_to_json.py

@@ -21,9 +21,11 @@ def import_course(course_dir, verbose=True):
     # No default class--want to complain if it doesn't find plugins for any
     # module.
-    modulestore = XMLModuleStore(data_dir,
+    modulestore = XMLModuleStore(data_dir,
                                  default_class=None,
-                                 course_dirs=course_dirs)
+                                 course_dirs=course_dirs)

     def str_of_err(tpl):
         (msg, exc_str) = tpl

@@ -89,6 +91,7 @@ Usage: metadata_to_json PATH-TO-COURSE-DIR OUTPUT-PATH
 if OUTPUT-PATH isn't given, print to stdout.
 """

     def handle(self, *args, **options):
         n = len(args)
         if n < 1 or n > 2:
lms/djangoapps/courseware/tests/test_about.py
lms/djangoapps/courseware/tests/test_lti_integration.py

@@ -21,6 +21,7 @@ from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE
 from courseware.views import get_course_lti_endpoints
 from lms.lib.xblock.runtime import quote_slashes


 class TestLTI(BaseTestXmodule):
     """
     Integration test for lti xmodule.

@@ -107,7 +108,7 @@ class TestLTI(BaseTestXmodule):
             old_parsed[u'OAuth oauth_nonce'] = mocked_nonce
             old_parsed[u'oauth_timestamp'] = mocked_timestamp
             old_parsed[u'oauth_signature'] = mocked_signature_after_sign
-            headers[u'Authorization'] = ', '.join([k + '="' + v + '"' for k, v in old_parsed.items()])
+            headers[u'Authorization'] = ', '.join([k + '="' + v + '"' for k, v in old_parsed.items()])
             return None, headers, None

         patcher = mock.patch.object(oauthlib.oauth1.Client, "sign", mocked_sign)
lms/djangoapps/courseware/tests/test_registration_extra_vars.py

@@ -10,6 +10,7 @@ from mock import patch
 from bs4 import BeautifulSoup
 from django.utils import translation


 class TestSortedCountryList(TestCase):
     """
     Test that country list is always sorted alphabetically

@@ -48,7 +49,7 @@ class TestSortedCountryList(TestCase):
         self.assertLess(options[1].text, options[10].text)

     @patch.dict(settings.REGISTRATION_EXTRA_FIELDS, {'country': 'required'})
-    def test_country_sorting_french(self):
+    def test_country_sorting_french(self):
         """
         Test that country list is always sorted alphabetically in French
         """

@@ -74,6 +75,7 @@ class TestSortedCountryList(TestCase):
         # testing two option elements to be in alphabetical order
         self.assertLess(options[1].text, options[10].text)


 class TestExtraRegistrationVariables(TestCase):
     """
     Test that extra registration variables are properly checked according to settings
lms/djangoapps/courseware/tests/test_video_handlers.py

@@ -58,6 +58,7 @@ def _check_asset(location, asset_name):
     else:
         return True


 def _clear_assets(location):
     """
     Clear all assets for location.

@@ -154,6 +155,7 @@ class TestVideo(BaseTestXmodule):
     def tearDown(self):
         _clear_assets(self.item_descriptor.location)


 class TestTranscriptAvailableTranslationsDispatch(TestVideo):
     """
     Test video handler that provide available translations info.

@@ -337,7 +339,8 @@ class TestTranscriptTranslationGetDispatch(TestVideo):
                 u'start': [12],
                 u'text': [u'\u041f\u0440\u0438\u0432\u0456\u0442, edX \u0432\u0456\u0442\u0430\u0454 \u0432\u0430\u0441.']}
             ]
         }
         self.non_en_file.seek(0)
         _upload_file(self.non_en_file, self.item_descriptor.location, os.path.split(self.non_en_file.name)[1])
         subs_id = _get_subs_id(self.non_en_file.name)
lms/djangoapps/courseware/tests/test_video_mongo.py

@@ -372,7 +372,6 @@ class TestGetHtmlMethod(BaseTestXmodule):
             self.item_descriptor.xmodule_runtime.render_template('video.html', expected_context)
         )

     def test_get_html_with_non_existant_edx_video_id(self):
         """
         Tests the VideoModule get_html where a edx_video_id is given but a video is not found

@@ -395,7 +394,7 @@ class TestGetHtmlMethod(BaseTestXmodule):
                     <source src="example.mp4"/>
                     <source src="example.webm"/>
                 """,
-                'edx_video_id': "meow",
+                'edx_video_id': "meow",
                 'result': {
                     'download_video_link': u'example_source.mp4',
                     'sources': json.dumps([u'example.mp4', u'example.webm']),

@@ -416,7 +415,7 @@ class TestGetHtmlMethod(BaseTestXmodule):
     @mock.patch('edxval.api.get_video_info')
     def test_get_html_with_mocked_edx_video_id(self, mock_get_video_info):
         mock_get_video_info.return_value = {
-            'url': '/edxval/video/example',
+            'url': '/edxval/video/example',
             'edx_video_id': u'example',
             'duration': 111.0,
             'client_video_id': u'The example video',

@@ -556,7 +555,7 @@ class TestGetHtmlMethod(BaseTestXmodule):
                     <source src="example.mp4"/>
                     <source src="example.webm"/>
                 """,
-                'edx_video_id': "thundercats",
+                'edx_video_id': "thundercats",
                 'result': {
                     'download_video_link': u'http://fake-video.edx.org/thundercats.mp4',
                     'sources': json.dumps([u'example.mp4', u'example.webm']),

@@ -717,7 +716,6 @@ class TestGetHtmlMethod(BaseTestXmodule):
             self.item_descriptor.xmodule_runtime.render_template('video.html', expected_context)
         )

     @patch('xmodule.video_module.video_module.get_video_from_cdn')
     def test_get_html_cdn_source(self, mocked_get_video):
         """
lms/djangoapps/django_comment_client/forum/views.py
...
...
@@ -39,6 +39,7 @@ def _attr_safe_json(obj):
    """
    return saxutils.escape(json.dumps(obj), {'"': '&quot;'})


@newrelic.agent.function_trace()
def make_course_settings(course):
    """
...
...
@@ -56,6 +57,7 @@ def make_course_settings(course):
    return obj


@newrelic.agent.function_trace()
def get_threads(request, course_key, discussion_id=None, per_page=THREADS_PER_PAGE):
    """
...
...
@@ -156,6 +158,7 @@ def inline_discussion(request, course_id, discussion_id):
        'course_settings': make_course_settings(course)
    })


@login_required
def forum_form_discussion(request, course_id):
    """
...
...
@@ -314,6 +317,7 @@ def single_thread(request, course_id, discussion_id, thread_id):
    }
    return render_to_response('discussion/index.html', context)


@require_GET
@login_required
def user_profile(request, course_id, user_id):
...
...
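A small standalone sketch of what _attr_safe_json in the first hunk above does: serialize to JSON, then HTML-escape the result with double quotes mapped to &quot; so it can sit safely inside an HTML attribute. The escape call mirrors the diff; the sample object is invented for illustration:

import json
from xml.sax import saxutils


def attr_safe_json(obj):
    """JSON-encode obj and escape it for use inside an HTML attribute."""
    return saxutils.escape(json.dumps(obj), {'"': '&quot;'})


# Ampersands and quotes come back as entities, so the value can be embedded
# in data-* attributes without breaking the markup.
print(attr_safe_json({'title': 'A "quoted" thread & more'}))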
lms/djangoapps/django_comment_client/helpers.py
...
...
@@ -9,12 +9,16 @@ def include_mustache_templates():
    def is_valid_file_name(file_name):
        return file_name.endswith('.mustache')

    def read_file(file_name):
        return open(mustache_dir / file_name, "r").read().decode('utf-8')

    def template_id_from_file_name(file_name):
        return file_name.rpartition('.')[0]

    def process_mako(template_content):
        return Template(template_content).render_unicode()

    def make_script_tag(id, content):
        return u"<script type='text/template' id='{0}'>{1}</script>".format(id, content)
...
...
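The nested helpers above turn each .mustache file into an inline script tag keyed by the file name minus its extension. A self-contained usage sketch, with the template body hard-coded instead of read from mustache_dir:

def template_id_from_file_name(file_name):
    return file_name.rpartition('.')[0]


def make_script_tag(id, content):
    return u"<script type='text/template' id='{0}'>{1}</script>".format(id, content)


# Produces: <script type='text/template' id='thread'>...</script>
body = u'<div class="thread">{{title}}</div>'
print(make_script_tag(template_id_from_file_name('thread.mustache'), body))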
lms/djangoapps/foldit/models.py
...
...
@@ -121,7 +121,6 @@ class PuzzleComplete(models.Model):
                 'subset': c.puzzle_subset,
                 'created': c.created}
                for c in complete]

    @staticmethod
    def is_level_complete(anonymous_user_id, level, sub_level, due=None):
        """
...
...
@@ -141,4 +140,3 @@ class PuzzleComplete(models.Model):
            complete = complete.filter(created__lte=due)
        return complete.exists()
lms/djangoapps/instructor/tests/test_hint_manager.py
...
...
@@ -28,19 +28,25 @@ class HintManagerTest(ModuleStoreTestCase):
        self.c.login(username='robot', password='test')
        self.course_id = self.course.id
        self.problem_id = self.course_id.make_usage_key('crowdsource_hinter', 'crowdsource_hinter_001')
        UserStateSummaryFactory.create(
            field_name='hints',
            usage_id=self.problem_id,
            value=json.dumps({
                '1.0': {'1': ['Hint 1', 2], '3': ['Hint 3', 12]},
                '2.0': {'4': ['Hint 4', 3]}
            })
        )
        UserStateSummaryFactory.create(
            field_name='mod_queue',
            usage_id=self.problem_id,
            value=json.dumps({'2.0': {'2': ['Hint 2', 1]}})
        )
        UserStateSummaryFactory.create(
            field_name='hint_pk',
            usage_id=self.problem_id,
            value=5
        )
        # Mock out location_to_problem_name, which ordinarily accesses the modulestore.
        # (I can't figure out how to get fake structures into the modulestore.)
        view.location_to_problem_name = lambda course_id, loc: "Test problem"
...
...
lms/djangoapps/instructor/views/instructor_dashboard.py
...
...
@@ -226,13 +226,13 @@ def _section_course_info(course, access):
    try:
        advance = lambda memo, (letter, score): "{}: {}, ".format(letter, score) + memo
        section_data['grade_cutoffs'] = reduce(advance, course.grade_cutoffs.items(), "")[:-2]
    except Exception:  # pylint: disable=broad-except
        section_data['grade_cutoffs'] = "Not Available"

    # section_data['offline_grades'] = offline_grades_available(course_key)

    try:
        section_data['course_errors'] = [(escape(a), '') for (a, _unused) in modulestore().get_course_errors(course.id)]
    except Exception:  # pylint: disable=broad-except
        section_data['course_errors'] = [('Error fetching errors', '')]

    return section_data
...
...
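The `except Exception:  # pylint: disable=broad-except` lines above keep the existing behavior (any failure falls back to a placeholder) while silencing pylint's broad-except warning on just those lines. A simplified sketch of the same pattern, using a plain join instead of the reduce/lambda in the real view:

def format_grade_cutoffs(cutoffs):
    """Return cutoffs as 'A: 0.9, B: 0.8', or a placeholder on any error."""
    try:
        return ", ".join("{}: {}".format(letter, score) for letter, score in cutoffs.items())
    except Exception:  # pylint: disable=broad-except
        return "Not Available"


print(format_grade_cutoffs({'A': 0.9, 'B': 0.8}))
print(format_grade_cutoffs(None))  # any failure falls back to "Not Available"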
lms/djangoapps/linkedin/management/commands/linkedin_mailusers.py
...
...
@@ -78,12 +78,11 @@ BULK_EMAIL_FAILURE_ERRORS = (
    SMTPException,
)

MAX_ATTEMPTS = 10

log = logging.getLogger("linkedin")


class Command(BaseCommand):
    """
    Django command for inviting users to add their course certificates to their
...
...
@@ -152,7 +151,6 @@ class Command(BaseCommand):
            transaction.commit()

    def certificate_url(self, certificate):
        """
        Generates a certificate URL based on LinkedIn's documentation. The
...
...
lms/djangoapps/mobile_api/users/views.py
lms/djangoapps/mobile_api/video_outlines/views.py
lms/djangoapps/open_ended_grading/tests.py
...
...
@@ -273,7 +273,7 @@ class TestPeerGradingService(ModuleStoreTestCase, LoginEnrollmentTestCase):
        self.location_string = self.course_id.make_usage_key('html', 'TestLocation').to_deprecated_string()
        self.toy = modulestore().get_course(self.course_id)
        location = "i4x://edX/toy/peergrading/init"
        field_data = DictFieldData({'data': "<peergrading/>", 'location': location, 'category': 'peergrading'})
        self.mock_service = peer_grading_service.MockPeerGradingService()
        self.system = LmsModuleSystem(
            static_url=settings.STATIC_URL,
...
...
@@ -321,7 +321,7 @@ class TestPeerGradingService(ModuleStoreTestCase, LoginEnrollmentTestCase):
            'feedback': 'feedback',
            'submission_flagged': 'false',
            'answer_unknown': 'false',
            'rubric_scores_complete': 'true'
        }
        qdict = MagicMock()
...
...
lms/djangoapps/open_ended_grading/views.py
...
...
@@ -61,6 +61,7 @@ ALERT_DICT = {
    'Flagged Submissions': _("Submissions have been flagged for review"),
}


@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def staff_grading(request, course_id):
    """
...
...
@@ -133,6 +134,7 @@ def peer_grading(request, course_id):
        return HttpResponseRedirect(problem_url)


@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def student_problem_list(request, course_id):
    """
...
...
@@ -179,6 +181,7 @@ def student_problem_list(request, course_id):
    return render_to_response('open_ended_problems/open_ended_problems.html', context)


@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def flagged_problem_list(request, course_id):
    '''
...
...
@@ -336,4 +339,4 @@ def take_action_on_flags(request, course_id):
            'success': False,
            'error': STAFF_ERROR_MESSAGE
        }
    return HttpResponse(json.dumps(response), mimetype="application/json")
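Each grading view above is wrapped in Django's cache_control decorator with no_cache/no_store/must_revalidate, so responses are never served from a cache. A minimal sketch of the same decorator on a stub view (the view body and names here are invented; only the decorator arguments come from the diff):

from django.http import HttpResponse
from django.views.decorators.cache import cache_control


@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def staff_grading_stub(request, course_id):
    # Stand-in for the real staff_grading view; the decorator adds
    # "Cache-Control: no-cache, no-store, must-revalidate" to the response.
    return HttpResponse("staff grading for {}".format(course_id))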
lms/djangoapps/shoppingcart/processors/CyberSource.py
...
...
@@ -250,7 +250,8 @@ def get_processor_decline_html(params):
        You were not charged. Please try a different form of payment.
        Contact us with payment-related questions at {email}.
        </p>
        """
    ))
    return msg.format(
        decision=params['decision'],
...
...
@@ -273,7 +274,8 @@ def get_processor_exception_html(exception):
            The specific error message is: <span class="exception_msg">{msg}</span>.
            Your credit card may possibly have been charged. Contact us with payment-specific questions at {email}.
            </p>
            """.format(msg=exception.message, email=payment_support_email)
        ))
        return msg
    elif isinstance(exception, CCProcessorWrongAmountException):
        msg = dedent(_(
...
...
@@ -283,7 +285,8 @@ def get_processor_exception_html(exception):
            The specific error message is: <span class="exception_msg">{msg}</span>.
            Your credit card has probably been charged. Contact us with payment-specific questions at {email}.
            </p>
            """.format(msg=exception.message, email=payment_support_email)
        ))
        return msg
    elif isinstance(exception, CCProcessorSignatureException):
        msg = dedent(_(
...
...
@@ -295,7 +298,8 @@ def get_processor_exception_html(exception):
            We apologize that we cannot verify whether the charge went through and take further action on your order.
            Your credit card may possibly have been charged. Contact us with payment-specific questions at {email}.
            </p>
            """.format(msg=exception.message, email=payment_support_email)
        ))
        return msg

    # fallthrough case, which basically never happens
...
...
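The error messages above are built with textwrap.dedent over a triple-quoted block, then .format() fills in the message and the support address (the real code also wraps the string in ugettext, omitted here). A stripped-down sketch with placeholder values:

from textwrap import dedent

# Placeholder message and address; the real values come from the exception and
# the payment_support_email setting.
msg = dedent("""
    <p class="error_msg">
    The specific error message is: <span class="exception_msg">{msg}</span>.
    Contact us with payment-specific questions at {email}.
    </p>
    """.format(msg="example decline reason", email="payment@example.com"))
print(msg)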
lms/djangoapps/shoppingcart/tests/test_views.py
...
...
@@ -49,6 +49,7 @@ def mock_render_purchase_form_html(*args, **kwargs):
form_mock = Mock(side_effect=mock_render_purchase_form_html)


def mock_render_to_response(*args, **kwargs):
    return render_to_response(*args, **kwargs)
...
...
@@ -633,7 +634,6 @@ class ShoppingCartViewsTests(ModuleStoreTestCase):
        self.assertEqual(resp.status_code, 200)
        self.assertTrue(PaidCourseRegistration.contained_in_order(self.cart, self.course_key))

    @patch('shoppingcart.views.render_purchase_form_html', form_mock)
    @patch('shoppingcart.views.render_to_response', render_mock)
    def test_show_cart(self):
...
...
@@ -911,12 +911,10 @@ class ShoppingCartViewsTests(ModuleStoreTestCase):
        self.assertIn('FirstNameTesting123', resp.content)
        self.assertIn('80.00', resp.content)

        ((template, context), _) = render_mock.call_args

        # When we come from the upgrade flow, we get these context variables
        self.assertEqual(template, 'shoppingcart/receipt.html')
        self.assertEqual(context['order'], self.cart)
        self.assertIn(reg_item, context['shoppingcart_items'][0])
...
...
@@ -1167,7 +1165,6 @@ class DonationViewTest(ModuleStoreTestCase):
        )
        self.assertEqual(response.status_code, 405)

    def test_donations_disabled(self):
        config = DonationConfiguration.current()
        config.enabled = False
...
...
lms/djangoapps/student_account/test/test_views.py
...
...
@@ -253,7 +253,6 @@ class StudentAccountViewTest(UrlResetMixin, TestCase):
        result = self.client.login(username=self.USERNAME, password=self.NEW_PASSWORD)
        self.assertTrue(result)

    @ddt.data(True, False)
    def test_password_change_logged_out(self, send_email):
        # Log the user out
...
...
lms/djangoapps/verify_student/ssencrypt.py
...
...
@@ -205,4 +205,3 @@ def body_string(body_dict, prefix=""):
        body_list.append(u"{}{}:{}\n".format(prefix, key, value).encode('utf-8'))

    return "".join(body_list)  # Note that trailing \n's are important
lms/djangoapps/verify_student/tests/test_models.py
...
...
@@ -75,6 +75,7 @@ class MockS3Connection(object):
    def get_bucket(self, bucket_name):
        return MockBucket(bucket_name)


def mock_software_secure_post(url, headers=None, data=None, **kwargs):
    """
    Mocks our interface when we post to Software Secure. Does basic assertions
...
...
@@ -103,6 +104,7 @@ def mock_software_secure_post(url, headers=None, data=None, **kwargs):
    return response


def mock_software_secure_post_error(url, headers=None, data=None, **kwargs):
    """
    Simulates what happens if our post to Software Secure is rejected, for
...
...
@@ -112,6 +114,7 @@ def mock_software_secure_post_error(url, headers=None, data=None, **kwargs):
    response.status_code = 400
    return response


def mock_software_secure_post_unavailable(url, headers=None, data=None, **kwargs):
    """Simulates a connection failure when we try to submit to Software Secure."""
    raise requests.exceptions.ConnectionError
...
...
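The helpers above are drop-in replacements for the HTTP post used in the verification tests; the last one simulates an outage by raising ConnectionError. A small sketch of how such a stub can be patched in (the patch target here is the generic requests.post; which callable the real tests patch is not shown in this diff):

import requests
from mock import patch


def mock_software_secure_post_unavailable(url, headers=None, data=None, **kwargs):
    """Simulates a connection failure when we try to submit to Software Secure."""
    raise requests.exceptions.ConnectionError


# While the patch is active, any requests.post call hits the stub instead of
# the network and raises the simulated connection failure.
with patch('requests.post', mock_software_secure_post_unavailable):
    try:
        requests.post('https://verify.example.com/submit', data={'id': 1})
    except requests.exceptions.ConnectionError:
        print('simulated Software Secure outage')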
lms/djangoapps/verify_student/views.py
...
...
@@ -46,6 +46,7 @@ EVENT_NAME_USER_ENTERED_MIDCOURSE_REVERIFY_VIEW = 'edx.course.enrollment.reverif
EVENT_NAME_USER_SUBMITTED_MIDCOURSE_REVERIFY = 'edx.course.enrollment.reverify.submitted'
EVENT_NAME_USER_REVERIFICATION_REVIEWED_BY_SOFTWARESECURE = 'edx.course.enrollment.reverify.reviewed'


class VerifyView(View):

    @method_decorator(login_required)
...
...
@@ -134,7 +135,6 @@ class VerifiedView(View):
        if CourseEnrollment.enrollment_mode_for_user(request.user, course_id) == ('verified', True):
            return redirect(reverse('dashboard'))

        modes_dict = CourseMode.modes_for_course_dict(course_id)

        # we prefer professional over verify
...
...
@@ -335,8 +335,10 @@ def show_requirements(request, course_id):
        return redirect(reverse('dashboard'))
    if SoftwareSecurePhotoVerification.user_has_valid_or_pending(request.user):
        return redirect(
            reverse(
                'verify_student_verified',
                kwargs={'course_id': course_id.to_deprecated_string()}
            ) + "?upgrade={}".format(upgrade)
        )

    upgrade = request.GET.get('upgrade', False)
...
...
@@ -523,7 +525,6 @@ def toggle_failed_banner_off(request):
    return HttpResponse('Success')


@login_required
def reverification_submission_confirmation(_request):
    """
...
...
lms/envs/cms/acceptance.py
...
...
@@ -6,7 +6,7 @@ Toolbar. I it suitable to run against acceptance tests.
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
-# pylint: disable=W0401, W0614
+# pylint: disable=wildcard-import, unused-wildcard-import, undefined-variable
from .dev import *
...
...
@@ -15,7 +15,7 @@ from .dev import *
INSTALLED_APPS = tuple(e for e in INSTALLED_APPS if e != 'debug_toolbar')
INSTALLED_APPS = tuple(e for e in INSTALLED_APPS if e != 'debug_toolbar_mongo')
-MIDDLEWARE_CLASSES = tuple(e for e in MIDDLEWARE_CLASSES \
+MIDDLEWARE_CLASSES = tuple(e for e in MIDDLEWARE_CLASSES  # pylint: disable=used-before-assignment
                           if e != 'debug_toolbar.middleware.DebugToolbarMiddleware')
...
...
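The generator-expression idiom above strips the debug-toolbar entries from the settings tuples inherited via the wildcard import. A toy version with a hard-coded tuple, just to show the shape of the result:

INSTALLED_APPS = ('django.contrib.auth', 'debug_toolbar', 'debug_toolbar_mongo')

# Rebuild the tuple without the entries we want to drop.
INSTALLED_APPS = tuple(e for e in INSTALLED_APPS if e != 'debug_toolbar')
INSTALLED_APPS = tuple(e for e in INSTALLED_APPS if e != 'debug_toolbar_mongo')

print(INSTALLED_APPS)  # ('django.contrib.auth',)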
lms/envs/cms/dev.py
...
...
@@ -33,12 +33,12 @@ CONTENTSTORE = {
INSTALLED_APPS += (
    # Mongo perf stats
    'debug_toolbar_mongo',
)

DEBUG_TOOLBAR_PANELS += ('debug_toolbar_mongo.panel.MongoDebugPanel',)

# HOSTNAME_MODULESTORE_DEFAULT_MAPPINGS defines, as dictionary of regex's, a set of mappings of HTTP request hostnames to
# what the 'default' modulestore to use while processing the request
...
...
lms/envs/common.py
...
...
@@ -1237,7 +1237,7 @@ STATICFILES_IGNORE_PATTERNS = (
    "common_static",
)

PIPELINE_UGLIFYJS_BINARY = 'node_modules/.bin/uglifyjs'

# Setting that will only affect the edX version of django-pipeline until our changes are merged upstream
PIPELINE_COMPILE_INPLACE = True
...
...
lms/envs/content.py
...
...
@@ -28,9 +28,9 @@ DEBUG_TOOLBAR_PANELS = (
    'debug_toolbar.panels.signals.SignalDebugPanel',
    'debug_toolbar.panels.logger.LoggingPanel',

    # Enabling the profiler has a weird bug as of django-debug-toolbar==0.9.4 and
    # Django=1.3.1/1.4 where requests to views get duplicated (your method gets
    # hit twice). So you can uncomment when you need to diagnose performance
    # problems, but you shouldn't leave it on.
    # 'debug_toolbar.panels.profiling.ProfilingDebugPanel',
)
lms/envs/dev.py
...
...
@@ -171,12 +171,12 @@ EDX_PLATFORM_VERSION_STRING = os.popen('cd %s; git describe' % REPO_ROOT).read()
############################ Open ended grading config #####################

OPEN_ENDED_GRADING_INTERFACE = {
    'url': 'http://127.0.0.1:3033/',
    'username': 'lms',
    'password': 'abcd',
    'staff_grading': 'staff_grading',
    'peer_grading': 'peer_grading',
    'grading_controller': 'grading_controller'
}

############################## LMS Migration ##################################
...
...
@@ -239,11 +239,11 @@ DEBUG_TOOLBAR_PANELS = (
    'debug_toolbar.panels.signals.SignalDebugPanel',
    'debug_toolbar.panels.logger.LoggingPanel',

    # Enabling the profiler has a weird bug as of django-debug-toolbar==0.9.4 and
    # Django=1.3.1/1.4 where requests to views get duplicated (your method gets
    # hit twice). So you can uncomment when you need to diagnose performance
    # problems, but you shouldn't leave it on.
    # 'debug_toolbar.panels.profiling.ProfilingDebugPanel',
)

DEBUG_TOOLBAR_CONFIG = {
...
...
lms/envs/dev_int.py
lms/envs/devgroups/courses.py
lms/envs/devplus.py
...
...
@@ -16,7 +16,7 @@ Dir structure:
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
-# pylint: disable=W0401, W0614
+# pylint: disable=wildcard-import, unused-wildcard-import
from .dev import *
...
...
@@ -66,10 +66,10 @@ DEBUG_TOOLBAR_PANELS = (
    'debug_toolbar.panels.signals.SignalDebugPanel',
    'debug_toolbar.panels.logger.LoggingPanel',

    # Enabling the profiler has a weird bug as of django-debug-toolbar==0.9.4 and
    # Django=1.3.1/1.4 where requests to views get duplicated (your method gets
    # hit twice). So you can uncomment when you need to diagnose performance
    # problems, but you shouldn't leave it on.
    'debug_toolbar.panels.profiling.ProfilingDebugPanel',
)
...
...
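The pylint comments above switch from numeric message IDs to their symbolic names, which pylint treats identically but which are self-documenting. A minimal sketch of both spellings on a wildcard import:

# Numeric IDs (the old style being replaced in this diff):
# pylint: disable=W0401, W0614

# Symbolic names (the style this commit moves to):
# pylint: disable=wildcard-import, unused-wildcard-import
from os.path import *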
lms/envs/devstack.py
lms/envs/edx4edx_aws.py
...
...
@@ -16,7 +16,9 @@ PIPELINE_CSS_COMPRESSOR = None
PIPELINE_JS_COMPRESSOR = None

COURSE_DEFAULT = 'edx4edx'
COURSE_SETTINGS = {
    'edx4edx': {
        'number': 'edX.01',
        'title': 'edx4edx: edX Author Course',
        'xmlpath': '/edx4edx/',
        'github_url': 'https://github.com/MITx/edx4edx',
...
...
@@ -24,7 +26,7 @@ COURSE_SETTINGS = {'edx4edx': {'number' : 'edX.01',
        'default_chapter': 'Introduction',
        'default_section': 'edx4edx_Course',
    },
}

STATICFILES_DIRS = [
    PROJECT_ROOT / "static",
...
...
lms/envs/load_test.py
...
...
@@ -9,8 +9,9 @@ Settings for load testing.
from .aws import *

# Disable CSRF for load testing
-exclude_csrf = lambda elem: not elem in \
-    ['django.core.context_processors.csrf', 'django.middleware.csrf.CsrfViewMiddleware']
-TEMPLATE_CONTEXT_PROCESSORS = filter(exclude_csrf, TEMPLATE_CONTEXT_PROCESSORS)
-MIDDLEWARE_CLASSES = filter(exclude_csrf, MIDDLEWARE_CLASSES)
+EXCLUDE_CSRF = lambda elem: not elem in [
+    'django.core.context_processors.csrf', 'django.middleware.csrf.CsrfViewMiddleware'
+]
+TEMPLATE_CONTEXT_PROCESSORS = filter(EXCLUDE_CSRF, TEMPLATE_CONTEXT_PROCESSORS)
+MIDDLEWARE_CLASSES = filter(EXCLUDE_CSRF, MIDDLEWARE_CLASSES)
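EXCLUDE_CSRF above is a predicate used with filter() to drop the CSRF context processor and middleware for load tests. A self-contained sketch against a throwaway middleware list (the real settings come from .aws and are not shown here):

EXCLUDE_CSRF = lambda elem: elem not in [
    'django.core.context_processors.csrf',
    'django.middleware.csrf.CsrfViewMiddleware',
]

MIDDLEWARE_CLASSES = [
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
]

# Only the non-CSRF entries survive the filter.
print(list(filter(EXCLUDE_CSRF, MIDDLEWARE_CLASSES)))
# ['django.middleware.common.CommonMiddleware']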
lms/envs/sauce.py
...
...
@@ -10,11 +10,13 @@ so that we can run the lettuce acceptance tests on SauceLabs.
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
import os

PORTS = [
    2000, 2001, 2020, 2109, 2222, 2310, 3000, 3001,
    3030, 3210, 3333, 4000, 4001, 4040, 4321, 4502, 4503, 5050,
    5555, 5432, 6060, 6666, 6543, 7000, 7070, 7774, 7777, 8003,
    8031, 8080, 8081, 8765, 8888, 9080, 9090, 9876, 9999, 49221, 55001
]

DESIRED_CAPABILITIES = {
    'chrome': DesiredCapabilities.CHROME,
...
...
lms/envs/test.py
...
...
@@ -119,8 +119,8 @@ STATICFILES_DIRS += [
# If we don't add these settings, then Django templates that can't
# find pipelined assets will raise a ValueError.
# http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
PIPELINE_ENABLED = False

update_module_store_settings(
    MODULESTORE,
...
...
lms/lib/comment_client/commentable.py
...
...
@@ -2,6 +2,7 @@
import models
import settings


class Commentable(models.Model):

    base_url = "{prefix}/commentables".format(prefix=settings.PREFIX)
...
...
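Most of the one-line additions in this commit, like the one above, are pep8 E302/E303 fixes: two blank lines before a top-level class or def. A tiny standalone sketch of the rule (the class body here is a placeholder):

import logging

log = logging.getLogger(__name__)


class Commentable(object):
    # Two blank lines before this top-level class satisfy pep8 E302; a single
    # blank line would be flagged.
    pass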
lms/lib/comment_client/thread.py
...
...
@@ -8,6 +8,7 @@ import settings
log = logging.getLogger(__name__)


class Thread(models.Model):

    accessible_fields = [
...
...
lms/lib/comment_client/user.py
...
...
@@ -6,7 +6,8 @@ import settings
class User(models.Model):

    accessible_fields = [
        'username', 'follower_ids', 'upvoted_ids', 'downvoted_ids',
        'id', 'external_id', 'subscribed_user_ids', 'children', 'course_id',
        'group_id', 'subscribed_thread_ids', 'subscribed_commentable_ids',
        'subscribed_course_ids', 'threads_count', 'comments_count',
...
...
@@ -165,5 +166,3 @@ def _url_for_user_active_threads(user_id):
def _url_for_user_subscribed_threads(user_id):
    return "{prefix}/users/{user_id}/subscribed_threads".format(prefix=settings.PREFIX, user_id=user_id)
lms/lib/xblock/mixin.py
lms/startup.py
lms/tests.py
...
...
@@ -4,7 +4,6 @@ import mimetypes
from mock import patch
from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from edxmako import add_lookup, LOOKUP
...
...
lms/urls.py
...
...
@@ -153,7 +153,7 @@ if not settings.FEATURES["USE_CUSTOM_THEME"]:
        # Press releases
        url(r'^press/([_a-zA-Z0-9-]+)$', 'static_template_view.views.render_press_release', name='press_release'),
    )

# Only enable URLs for those marketing links actually enabled in the
# settings. Disable URLs by marking them as None.
...
...
lms/wsgi.py
...
...
@@ -25,4 +25,3 @@ modulestore()
# as well as any WSGI server configured to use this file.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
scripts/all-tests.sh
...
...
@@ -56,8 +56,8 @@ set -e
###############################################################################

# Violations thresholds for failing the build
-PYLINT_THRESHOLD=4800
-PEP8_THRESHOLD=675
+PYLINT_THRESHOLD=4725
+PEP8_THRESHOLD=400

source $HOME/jenkins_env
...
...