edx / edx-platform

Commit 7ada9569
authored Nov 11, 2014 by Sarina Canelake

Merge pull request #5918 from stvstnfrd/pep8/whitespace

Fix whitespace-related PEP8 issues

Parents: d02747ff, bd4658e6

Showing 43 changed files with 81 additions and 49 deletions (+81 / -49).

Note: because most of this commit is whitespace cleanup, several files show no visible diff (+0 / -0), and some removed/added line pairs below render identically; in those cases the difference is whitespace only (trailing spaces or continuation-line indentation).
+0 -0   cms/djangoapps/contentstore/features/video_handout.py
+7 -2   cms/djangoapps/contentstore/management/commands/prompt.py
+1 -1   cms/djangoapps/contentstore/views/preview.py
+8 -2   cms/djangoapps/contentstore/views/transcripts_ajax.py
+0 -0   cms/envs/common.py
+0 -0   cms/envs/devstack.py
+2 -2   cms/envs/test.py
+1 -1   common/djangoapps/microsite_configuration/templatetags/microsite.py
+4 -1   common/djangoapps/student/tests/test_course_listing.py
+4 -2   common/djangoapps/terrain/browser.py
+2 -2   common/djangoapps/terrain/stubs/http.py
+1 -1   common/djangoapps/terrain/stubs/ora.py
+3 -1   common/djangoapps/terrain/stubs/tests/test_http.py
+1 -1   common/djangoapps/terrain/stubs/tests/test_lti_stub.py
+2 -1   common/djangoapps/terrain/stubs/xqueue.py
+0 -0   common/djangoapps/track/middleware.py
+1 -1   common/lib/capa/capa/safe_exec/safe_exec.py
+0 -0   common/lib/capa/capa/xqueue_interface.py
+8 -8   common/lib/sandbox-packages/verifiers/draganddrop.py
+1 -1   common/lib/xmodule/xmodule/combined_open_ended_module.py
+1 -1   common/lib/xmodule/xmodule/lti_module.py
+1 -1   common/lib/xmodule/xmodule/modulestore/tests/test_xml_importer.py
+2 -2   common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_rubric.py
+9 -2   common/lib/xmodule/xmodule/open_ended_grading_classes/peer_grading_service.py
+1 -1   common/lib/xmodule/xmodule/open_ended_grading_classes/self_assessment_module.py
+0 -0   common/lib/xmodule/xmodule/split_test_module.py
+7 -1   common/lib/xmodule/xmodule/tests/__init__.py
+2 -2   common/lib/xmodule/xmodule/tests/test_video.py
+0 -0   common/lib/xmodule/xmodule/video_module/video_module.py
+1 -1   common/test/acceptance/pages/lms/annotation_component.py
+1 -1   common/test/data/uploads/python_lib_zip/number_helpers.py
+1 -1   lms/djangoapps/courseware/features/high-level-tabs.py
+1 -1   lms/djangoapps/courseware/masquerade.py
+1 -1   lms/djangoapps/courseware/tests/test_navigation.py
+0 -0   lms/djangoapps/instructor/tests/test_legacy_xss.py
+2 -2   lms/djangoapps/notification_prefs/tests.py
+1 -1   lms/djangoapps/notification_prefs/views.py
+1 -1   lms/djangoapps/open_ended_grading/staff_grading_service.py
+0 -0   lms/djangoapps/shoppingcart/processors/CyberSource2.py
+1 -1   lms/djangoapps/staticbook/views.py
+0 -0   pavelib/utils/test/suites/acceptance_suite.py
+1 -1   scripts/all-tests.sh
+1 -1   scripts/cov_merge.py
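
Most of the hunks below follow a couple of mechanical patterns: a long single-line dict literal is split to one item per line, and a trailing comma is added before the closing bracket of a multi-line literal or call. As an illustrative composite (based on the xqueue dict in common/lib/xmodule/xmodule/tests/__init__.py, trimmed and standalone rather than copied verbatim):

before = {'interface': None, 'callback_url': '/', 'default_queuename': 'testqueue', 'waittime': 10}

after = {
    'interface': None,
    'callback_url': '/',
    'default_queuename': 'testqueue',
    'waittime': 10,
}

# The reformatting is purely cosmetic; both literals build the same dict.
assert before == after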

cms/djangoapps/contentstore/features/video_handout.py
(no visible changes)

cms/djangoapps/contentstore/management/commands/prompt.py
@@ -11,8 +11,13 @@ def query_yes_no(question, default="yes"):
     The "answer" return value is one of "yes" or "no".
     """
-    valid = {"yes": True, "y": True, "ye": True,
-             "no": False, "n": False}
+    valid = {
+        "yes": True,
+        "y": True,
+        "ye": True,
+        "no": False,
+        "n": False,
+    }
     if default is None:
         prompt = " [y/n] "
     elif default == "yes":
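
Only the `valid` dictionary and the start of the prompt logic are visible in the hunk above. For context, query_yes_no follows the familiar interactive yes/no prompt recipe; the sketch below is a minimal self-contained version under that assumption, not the exact edx implementation:

import sys


def query_yes_no(question, default="yes"):
    """Ask `question` on stdin and return True for yes, False for no (illustrative sketch)."""
    valid = {
        "yes": True,
        "y": True,
        "ye": True,
        "no": False,
        "n": False,
    }
    prompts = {None: " [y/n] ", "yes": " [Y/n] ", "no": " [y/N] "}
    if default not in prompts:
        raise ValueError("invalid default answer: %r" % default)
    while True:
        sys.stdout.write(question + prompts[default])
        choice = raw_input().strip().lower()  # Python 2, matching the 2014 codebase
        if choice == "" and default is not None:
            return valid[default]
        if choice in valid:
            return valid[choice]
        sys.stdout.write("Please respond with 'yes' or 'no' (or 'y'/'n').\n")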

cms/djangoapps/contentstore/views/preview.py
@@ -151,7 +151,7 @@ def _preview_module_system(request, descriptor):
         replace_urls=partial(static_replace.replace_static_urls, data_directory=None, course_id=course_id),
         user=request.user,
         can_execute_unsafe_code=(lambda: can_execute_unsafe_code(course_id)),
-        get_python_lib_zip=(lambda: get_python_lib_zip(contentstore, course_id)),
+        get_python_lib_zip=(lambda: get_python_lib_zip(contentstore, course_id)),
         mixins=settings.XBLOCK_MIXINS,
         course_id=course_id,
         anonymous_student_id='student',

cms/djangoapps/contentstore/views/transcripts_ajax.py
@@ -377,7 +377,10 @@ def choose_transcripts(request):
     if item.sub != html5_id:  # update sub value
         item.sub = html5_id
         item.save_with_metadata(request.user)
-    response = {'status': 'Success', 'subs': item.sub}
+    response = {
+        'status': 'Success',
+        'subs': item.sub,
+    }
     return JsonResponse(response)

@@ -408,7 +411,10 @@ def replace_transcripts(request):
     item.sub = youtube_id
     item.save_with_metadata(request.user)
-    response = {'status': 'Success', 'subs': item.sub}
+    response = {
+        'status': 'Success',
+        'subs': item.sub,
+    }
     return JsonResponse(response)

cms/envs/common.py
(no visible changes)

cms/envs/devstack.py
(no visible changes)

cms/envs/test.py
@@ -69,9 +69,9 @@ STATICFILES_DIRS += [
 # If we don't add these settings, then Django templates that can't
 # find pipelined assets will raise a ValueError.
 # http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
-STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
+STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
 STATIC_URL = "/static/"
-PIPELINE_ENABLED = False
+PIPELINE_ENABLED = False

 # Update module store settings per defaults for tests
 update_module_store_settings(

common/djangoapps/microsite_configuration/templatetags/microsite.py
@@ -43,7 +43,7 @@ def platform_name():
 @register.simple_tag(name="favicon_path")
-def favicon_path(default=getattr(settings, 'FAVICON_PATH', 'images/favicon.ico')):
+def favicon_path(default=getattr(settings, 'FAVICON_PATH', 'images/favicon.ico')):
     """
     Django template tag that outputs the configured favicon:
     {% favicon_path %}

common/djangoapps/student/tests/test_course_listing.py
@@ -105,7 +105,10 @@ class TestCourseListing(ModuleStoreTestCase):
         course_location = SlashSeparatedCourseKey('testOrg', 'erroredCourse', 'RunBabyRun')
         course = self._create_course_with_access_groups(course_location)
         course_db_record = mongo_store._find_one(course.location)
-        course_db_record.setdefault('metadata', {}).get('tabs', []).append({"type": "wiko", "name": "Wiki"})
+        course_db_record.setdefault('metadata', {}).get('tabs', []).append({
+            "type": "wiko",
+            "name": "Wiki",
+        })
         mongo_store.collection.update(
             {'_id': course.location.to_deprecated_son()},
             {'$set': {

common/djangoapps/terrain/browser.py
@@ -95,7 +95,9 @@ def initial_setup(server):
     if browser_driver == 'chrome':
         desired_capabilities = DesiredCapabilities.CHROME
-        desired_capabilities['loggingPrefs'] = {'browser': 'ALL'}
+        desired_capabilities['loggingPrefs'] = {
+            'browser': 'ALL',
+        }
     elif browser_driver == 'firefox':
         desired_capabilities = DesiredCapabilities.FIREFOX
     else:

@@ -239,7 +241,7 @@ def capture_console_log(scenario):
     output_dir = '{}/log'.format(settings.TEST_ROOT)
     file_name = '{}/{}.log'.format(output_dir, scenario.name.replace(' ', '_'))
-    with open(file_name, 'w') as output_file:
+    with open(file_name, 'w') as output_file:
         for line in log:
             output_file.write("{}{}".format(dumps(line), '\n'))

common/djangoapps/terrain/stubs/http.py
@@ -123,8 +123,8 @@ class StubHttpRequestHandler(BaseHTTPRequestHandler, object):
         # By default, `parse_qs` returns a list of values for each param
         # For convenience, we replace lists of 1 element with just the element
         return {
-            k: v[0] if len(v) == 1 else v
-            for k, v in urlparse.parse_qs(query).items()
+            key: value[0] if len(value) == 1 else value
+            for key, value in urlparse.parse_qs(query).items()
         }

     @lazy
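
The comment in this hunk explains the intent: parse_qs always returns a list of values per parameter, and the comprehension unwraps single-element lists. A standalone illustration of that behaviour (not taken from the repository; note the 2014 code imports the Python 2 urlparse module):

try:
    import urlparse  # Python 2, as in the codebase at the time
except ImportError:
    from urllib import parse as urlparse  # Python 3 equivalent

raw = urlparse.parse_qs("color=red&tag=a&tag=b")
# raw == {'color': ['red'], 'tag': ['a', 'b']}

flat = {
    key: value[0] if len(value) == 1 else value
    for key, value in raw.items()
}
assert flat == {'color': 'red', 'tag': ['a', 'b']}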

common/djangoapps/terrain/stubs/ora.py
@@ -45,7 +45,7 @@ class StudentState(object):
     @property
     def num_pending(self):
-        return max(self.INITIAL_ESSAYS_AVAILABLE - self.num_graded, 0)
+        return max(self.INITIAL_ESSAYS_AVAILABLE - self.num_graded, 0)

     @property
     def num_required(self):

common/djangoapps/terrain/stubs/tests/test_http.py
@@ -25,7 +25,9 @@ class StubHttpServiceTest(unittest.TestCase):
             'test_empty': '',
             'test_int': 12345,
             'test_float': 123.45,
-            'test_dict': {'test_key': 'test_val'},
+            'test_dict': {
+                'test_key': 'test_val',
+            },
             'test_empty_dict': {},
             'test_unicode': u'\u2603 the snowman',
             'test_none': None,

common/djangoapps/terrain/stubs/tests/test_lti_stub.py
@@ -35,7 +35,7 @@ class StubLtiServiceTest(unittest.TestCase):
             'launch_presentation_return_url': '',
             'lis_outcome_service_url': 'http://localhost:8001/test_callback',
             'lis_result_sourcedid': '',
-            'resource_link_id': '',
+            'resource_link_id': '',
         }

     def test_invalid_request_url(self):

common/djangoapps/terrain/stubs/xqueue.py
@@ -214,6 +214,7 @@ class StubXQueueService(StubHttpService):
         except for 'default' and 'register_submission_url' which have special meaning
         """
         return {
-            key: val for key, val in self.config.iteritems()
+            key: value
+            for key, value in self.config.iteritems()
             if key not in self.NON_QUEUE_CONFIG_KEYS
         }.items()

common/djangoapps/track/middleware.py
(no visible changes)

common/lib/capa/capa/safe_exec/safe_exec.py
@@ -21,7 +21,7 @@ random.Random = random_module.Random
 sys.modules['random'] = random
 """

-ASSUMED_IMPORTS = [
+ASSUMED_IMPORTS = [
     ("numpy", "numpy"),
     ("math", "math"),
     ("scipy", "scipy"),

common/lib/capa/capa/xqueue_interface.py
(no visible changes)

common/lib/sandbox-packages/verifiers/draganddrop.py
@@ -21,7 +21,7 @@ or:
         { "molecule": "[100, 200]" },
     ]
 }

-values are (x,y) coordinates of centers of dragged images.
+values are (x, y) coordinates of centers of dragged images.
 """

 import json

@@ -77,7 +77,7 @@ class PositionsCompare(list):
     list or string::
        "abc" - target
        [10, 20] - list of integers
-       [[10,20], 200] list of list and integer
+       [[10, 20], 200] list of list and integer
     """

     def __eq__(self, other):

@@ -223,10 +223,10 @@ class DragAndDrop(object):
         Examples:
         - many draggables per position:
-            user ['1', '2','2', '2'] is 'anyof' equal to ['1', '2', '3']
+            user ['1', '2', '2', '2'] is 'anyof' equal to ['1', '2', '3']
         - draggables can be placed in any order:
-            user ['1', '2','3', '4'] is 'anyof' equal to ['4', '2', '1', 3']
+            user ['1', '2', '3', '4'] is 'anyof' equal to ['4', '2', '1', 3']
         'unordered_equal' is same as 'exact' but disregards on order

@@ -235,7 +235,7 @@ class DragAndDrop(object):
         Equality functon depends on type of element. They declared in
         PositionsCompare class. For position like targets
         ids ("t1", "t2", etc..) it is string equality function. For coordinate
-        positions ([1, 2] or [[1, 2], 15]) it is coordinate_positions_compare
+        positions ([1, 2] or [[1, 2], 15]) it is coordinate_positions_compare
         function (see docstrings in PositionsCompare class)

         Args:

@@ -352,7 +352,7 @@ class DragAndDrop(object):
         # correct_answer entries. If the draggable is mentioned in at least one
         # correct_answer entry, the value is False.
         # default to consider every user answer excess until proven otherwise.
-        self.excess_draggables = dict((users_draggable.keys()[0], True)
+        self.excess_draggables = dict((users_draggable.keys()[0], True)
                                       for users_draggable in user_answer)

         # Convert nested `user_answer` to flat format.

@@ -414,8 +414,8 @@ def grade(user_input, correct_answer):
             'rule': 'anyof'
         },
         {
-            'draggables': ['l1_c','l8_c'],
-            'targets': ['t5_c','t6_c'],
+            'draggables': ['l1_c', 'l8_c'],
+            'targets': ['t5_c', 't6_c'],
             'rule': 'anyof'
         }
     ]

common/lib/xmodule/xmodule/combined_open_ended_module.py
@@ -208,7 +208,7 @@ class CombinedOpenEndedFields(object):
             "This field is only populated if the instructor changes tasks after "
             "the module is created and students have attempted it (for example, if a self assessed problem is "
             "changed to self and peer assessed)."
         ),
-        scope=Scope.user_state
+        scope=Scope.user_state,
     )
     task_states = List(
         help=_("List of state dictionaries of each task within this module."),

common/lib/xmodule/xmodule/lti_module.py
@@ -773,7 +773,7 @@ oauth_consumer_key="", oauth_signature="frVp4JuvT1mVXlxktiAUjQ7%2F1cw%3D"'}
         imsx_messageIdentifier = root.xpath("//def:imsx_messageIdentifier", namespaces=namespaces)[0].text or ''
         sourcedId = root.xpath("//def:sourcedId", namespaces=namespaces)[0].text
         score = root.xpath("//def:textString", namespaces=namespaces)[0].text
-        action = root.xpath("//def:imsx_POXBody", namespaces=namespaces)[0].getchildren()[0].tag.replace('{' + lti_spec_namespace + '}', '')
+        action = root.xpath("//def:imsx_POXBody", namespaces=namespaces)[0].getchildren()[0].tag.replace('{' + lti_spec_namespace + '}', '')

         # Raise exception if score is not float or not in range 0.0-1.0 regarding spec.
         score = float(score)
         if not 0 <= score <= 1:

common/lib/xmodule/xmodule/modulestore/tests/test_xml_importer.py
@@ -88,7 +88,7 @@ def modulestore():
     ModuleStoreNoSettings.modulestore = class_(
         None,  # contentstore
         ModuleStoreNoSettings.MODULESTORE['DOC_STORE_CONFIG'],
-        branch_setting_func=lambda: ModuleStoreEnum.Branch.draft_preferred,
+        branch_setting_func=lambda: ModuleStoreEnum.Branch.draft_preferred,
         **options
     )

common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_rubric.py
@@ -236,7 +236,7 @@ class CombinedOpenEndedRubric(object):
                 rubric_categories[i]['options'][j]['grader_types'].append(grader_type)
                 #Grab the score and add it to the actual scores. J will be the score for the selected
                 #grader type
-                if len(actual_scores) <= i:
+                if len(actual_scores) <= i:
                     #Initialize a new list in the list of lists
                     actual_scores.append([j])
                 else:

@@ -249,7 +249,7 @@ class CombinedOpenEndedRubric(object):
         for (i, a) in enumerate(actual_scores):
             if int(a) == max_scores[i]:
                 correct.append(1)
-            elif int(a) == 0:
+            elif int(a) == 0:
                 correct.append(0)
             else:
                 correct.append(.5)

common/lib/xmodule/xmodule/open_ended_grading_classes/peer_grading_service.py
@@ -103,7 +103,7 @@ class PeerGradingService(GradingService):
         self._record_result('get_problem_list', result)
         dog_stats_api.histogram(
             self._metric_name('get_problem_list.result.length'),
-            len(result.get('problem_list', []))
+            len(result.get('problem_list', [])),
         )
         return result

@@ -160,4 +160,11 @@ class MockPeerGradingService(object):
         ]}

     def get_data_for_location(self, problem_location, student_id):
-        return {"version": 1, "count_graded": 3, "count_required": 3, "success": True, "student_sub_count": 1, 'submissions_available': 0}
+        return {
+            "version": 1,
+            "count_graded": 3,
+            "count_required": 3,
+            "success": True,
+            "student_sub_count": 1,
+            'submissions_available': 0,
+        }

common/lib/xmodule/xmodule/open_ended_grading_classes/self_assessment_module.py
@@ -198,7 +198,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
             'success': success,
             'rubric_html': self.get_rubric_html(system),
             'error': error_message,
-            'student_response': data['student_answer'].replace("\n", "<br/>")
+            'student_response': data['student_answer'].replace("\n", "<br/>"),
         }

     def save_assessment(self, data, _system):

common/lib/xmodule/xmodule/split_test_module.py
(no visible changes)

common/lib/xmodule/xmodule/tests/__init__.py
@@ -89,7 +89,13 @@ def get_test_system(course_id=SlashSeparatedCourseKey('org', 'course', 'run')):
         filestore=Mock(),
         debug=True,
         hostname="edx.org",
-        xqueue={'interface': None, 'callback_url': '/', 'default_queuename': 'testqueue', 'waittime': 10, 'construct_callback': Mock(side_effect="/")},
+        xqueue={
+            'interface': None,
+            'callback_url': '/',
+            'default_queuename': 'testqueue',
+            'waittime': 10,
+            'construct_callback': Mock(side_effect="/"),
+        },
         node_path=os.environ.get("NODE_PATH", "/usr/local/lib/node_modules"),
         anonymous_student_id='student',
         open_ended_grading_interface=open_ended_grading_interface,

common/lib/xmodule/xmodule/tests/test_video.py
@@ -566,7 +566,7 @@ class VideoCdnTest(unittest.TestCase):
         original_video_url = "http://www.original_video.com/original_video.mp4"
         cdn_response_video_url = "http://www.cdn_video.com/cdn_video.mp4"
         cdn_response_content = '{{"sources":["{cdn_url}"]}}'.format(cdn_url=cdn_response_video_url)
-        cdn_response.return_value = Mock(status_code=200, content=cdn_response_content)
+        cdn_response.return_value = Mock(status_code=200, content=cdn_response_content)
         fake_cdn_url = 'http://fake_cdn.com/'
         self.assertEqual(
             get_video_from_cdn(fake_cdn_url, original_video_url),

@@ -579,6 +579,6 @@ class VideoCdnTest(unittest.TestCase):
         Test if no alternative video in CDN exists.
         """
         original_video_url = "http://www.original_video.com/original_video.mp4"
-        cdn_response.return_value = Mock(status_code=404)
+        cdn_response.return_value = Mock(status_code=404)
         fake_cdn_url = 'http://fake_cdn.com/'
         self.assertIsNone(get_video_from_cdn(fake_cdn_url, original_video_url))

common/lib/xmodule/xmodule/video_module/video_module.py
(no visible changes)

common/test/acceptance/pages/lms/annotation_component.py
@@ -41,7 +41,7 @@ class AnnotationComponentPage(PageObject):
         Return css selector for current active problem with sub_selector.
         """
         return 'div[data-problem-id="{}"] {}'.format(
-            self.q(css='.vert-{}'.format(self.active_problem + 1)).map(
+            self.q(css='.vert-{}'.format(self.active_problem + 1)).map(
                 lambda el: el.get_attribute('data-id')).results[0],
             sub_selector,
         )

common/test/data/uploads/python_lib_zip/number_helpers.py
@@ -3,4 +3,4 @@ def seventeen():

 def fortytwo(x):
-    return 42 + x
+    return 42 + x

lms/djangoapps/courseware/features/high-level-tabs.py
@@ -7,5 +7,5 @@ def i_click_on_the_tab_and_check(step):
         tab_text = tab_title['TabName']
         title = tab_title['PageTitle']
         world.click_link(tab_text)
-        world.wait_for(lambda _driver: title in world.browser.title)
+        world.wait_for(lambda _driver: title in world.browser.title)
         assert(title in world.browser.title)

lms/djangoapps/courseware/masquerade.py
@@ -62,4 +62,4 @@ def is_masquerading_as_student(user):
     Return True if user is masquerading as a student, False otherwise
     '''
     masq = getattr(user, 'masquerade_as_student', False)
-    return masq == True
+    return masq is True
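
This hunk tightens a comparison rather than whitespace: `masq == True` (flagged by the pep8 checker as E712) becomes `masq is True`. The two are not interchangeable for arbitrary truthy values; here the value comes from a getattr call with a bool default. A quick standalone illustration:

masq = 1
print(masq == True)   # True  -- equality: the int 1 compares equal to True
print(masq is True)   # False -- identity: 1 is not the bool singleton True

masq = True
print(masq == True)   # True
print(masq is True)   # True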

lms/djangoapps/courseware/tests/test_navigation.py
@@ -79,7 +79,7 @@ class TestNavigation(ModuleStoreTestCase, LoginEnrollmentTestCase):
         ''' Check if the progress tab is active in the tab set '''
         for line in response.content.split('\n'):
             if tabname in line and 'active' in line:
-                raise AssertionError("assertTabInactive failed: " + tabname + " active")
+                raise AssertionError("assertTabInactive failed: " + tabname + " active")
         return

     def test_chrome_settings(self):

lms/djangoapps/instructor/tests/test_legacy_xss.py
(no visible changes)

lms/djangoapps/notification_prefs/tests.py
@@ -67,7 +67,7 @@ class NotificationPrefViewTest(UrlResetMixin, TestCase):
         request.user = self.user
         response = ajax_status(request)
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(json.loads(response.content), {"status": 0})
+        self.assertEqual(json.loads(response.content), {"status": 0})

     def test_ajax_status_get_1(self):
         self.create_prefs()

@@ -75,7 +75,7 @@ class NotificationPrefViewTest(UrlResetMixin, TestCase):
         request.user = self.user
         response = ajax_status(request)
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(json.loads(response.content), {"status": 1})
+        self.assertEqual(json.loads(response.content), {"status": 1})

     def test_ajax_status_post(self):
         request = self.request_factory.post("dummy")

lms/djangoapps/notification_prefs/views.py
@@ -149,7 +149,7 @@ def ajax_status(request):
         key=NOTIFICATION_PREF_KEY
     )
-    return HttpResponse(json.dumps({"status": len(qs)}), content_type="application/json")
+    return HttpResponse(json.dumps({"status": len(qs)}), content_type="application/json")

 @require_GET

lms/djangoapps/open_ended_grading/staff_grading_service.py
@@ -321,7 +321,7 @@ def get_problem_list(request, course_id):
             u'If not, please do so and return to this page.'
         )

     valid_problem_list = []
-    for i in xrange(0, len(problem_list)):
+    for i in xrange(0, len(problem_list)):
         # Needed to ensure that the 'location' key can be accessed.
         try:
             problem_list[i] = json.loads(problem_list[i])

lms/djangoapps/shoppingcart/processors/CyberSource2.py
(no visible changes)

lms/djangoapps/staticbook/views.py
@@ -110,7 +110,7 @@ def pdf_index(request, course_id, book_index, chapter=None, page=None):
     if page is not None:
         viewer_params += '&page={}'.format(page)

-    if request.GET.get('viewer', '') == 'true':
+    if request.GET.get('viewer', '') == 'true':
         template = 'pdf_viewer.html'
     else:
         template = 'static_pdfbook.html'

pavelib/utils/test/suites/acceptance_suite.py
(no visible changes)

scripts/all-tests.sh
@@ -57,7 +57,7 @@ set -e
 # Violations thresholds for failing the build
 PYLINT_THRESHOLD=4725
-PEP8_THRESHOLD=200
+PEP8_THRESHOLD=150

 source $HOME/jenkins_env

scripts/cov_merge.py
@@ -119,7 +119,7 @@ class ReportMerge(object):
             report_path = os.path.join(self.DESTINATION, output_file)
         else:
             report_filename = path.split('reports/')[1].split('/cover')[0].replace('/', '_')
-            report_path = os.path.join(self.DESTINATION, report_filename + '_coverage.html')
+            report_path = os.path.join(self.DESTINATION, report_filename + '_coverage.html')

         # Write everything to single report file
         with open(report_path, 'w') as report_file: