edx / edx-platform · Commits

Commit 8f22b511, authored May 15, 2015 by Christine Lytwynec
Merge pull request #8056 from edx/clytwynec/TE-868

use mock_open for paver quality tests

Parents: 90daffe3 · 71bc1476
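The change re-enables several skipped quality tests by patching the Python 2 builtin open with mock's mock_open helper, so the tests never touch real report files on disk. A minimal, self-contained sketch of that pattern, with a hypothetical test name and file path (not code from this commit):

import unittest

from mock import mock_open, patch


class MockOpenExample(unittest.TestCase):
    # Replace __builtin__.open for the duration of this test only; any code
    # under test that opens a file gets the mock handle instead of the disk.
    @patch('__builtin__.open', mock_open(read_data='fake report contents'))
    def test_reads_without_touching_disk(self):
        with open('quality_report.txt') as report:  # served by mock_open
            self.assertEqual(report.read(), 'fake report contents')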
Showing 1 changed file with 21 additions and 6 deletions.
pavelib/paver_tests/test_paver_quality.py (+21 -6)
@@ -4,11 +4,12 @@ Tests for paver quality tasks
 import os
 import tempfile
 import unittest
-from mock import patch, MagicMock
+from mock import patch, MagicMock, mock_open
 from ddt import ddt, file_data
 import pavelib.quality
 import paver.easy
 import paver.tasks
 from paver.easy import BuildFailure
@@ -63,6 +64,19 @@ class TestPaverRunQuality(unittest.TestCase):
     def setUp(self):
         super(TestPaverRunQuality, self).setUp()

+        # test_no_diff_quality_failures seems to alter the way that paver
+        # executes these lines in subsequent tests.
+        # https://github.com/paver/paver/blob/master/paver/tasks.py#L175-L180
+        #
+        # The other tests don't appear to have the same impact. This was
+        # causing a test order dependency. This line resets the state of
+        # environment._task_in_progress so that the paver commands in the
+        # tests will be considered top-level tasks by paver, and we can
+        # predict which path it will choose in the code block linked above.
+        #
+        # TODO: Figure out why one test is altering the state to begin with.
+        paver.tasks.environment = paver.tasks.Environment()
+
         # mock the @needs decorator to skip it
         self._mock_paver_needs = patch.object(pavelib.quality.run_quality, 'needs').start()
         self._mock_paver_needs.return_value = 0
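The comment added in setUp above describes a test-order dependency caused by paver's module-level environment object. A small, hypothetical sketch of the same isolation idea, here also restoring whatever environment was in place before the test (this base class is illustrative, not part of the commit):

import unittest

import paver.tasks


class IsolatedPaverTestCase(unittest.TestCase):
    """Hypothetical base class giving every test a fresh paver environment."""

    def setUp(self):
        super(IsolatedPaverTestCase, self).setUp()
        self._saved_environment = paver.tasks.environment
        # A fresh Environment() means flags left over from earlier tests
        # (such as _task_in_progress) cannot change how paver dispatches
        # the tasks exercised by this test.
        paver.tasks.environment = paver.tasks.Environment()
        self.addCleanup(self._restore_environment)

    def _restore_environment(self):
        paver.tasks.environment = self._saved_environment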
@@ -71,7 +85,7 @@ class TestPaverRunQuality(unittest.TestCase):
         self.addCleanup(patcher.stop)
         self.addCleanup(self._mock_paver_needs.stop)

-    @unittest.skip("TODO: TE-868")
+    @patch('__builtin__.open', mock_open())
     def test_failure_on_diffquality_pep8(self):
         """
         If pep8 finds errors, pylint should still be run
@@ -90,7 +104,7 @@ class TestPaverRunQuality(unittest.TestCase):
         self.assertEqual(_mock_pep8_violations.call_count, 1)
         self.assertEqual(self._mock_paver_sh.call_count, 1)

-    @unittest.skip("TODO: TE-868")
+    @patch('__builtin__.open', mock_open())
     def test_failure_on_diffquality_pylint(self):
         """
         If diff-quality fails on pylint, the paver task should also fail
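The docstrings of the two re-enabled tests above spell out the behaviour under test: a pep8 failure must not prevent pylint from running, while either failure should still fail the overall task. A purely hypothetical sketch of that control flow, not pavelib's actual implementation:

import sys


def run_quality_checks(count_pep8_violations, run_pylint):
    """Run both checkers; only exit non-zero after both have had their turn."""
    failed = False
    # pep8 violations are recorded but do not stop the task...
    if count_pep8_violations() > 0:
        failed = True
    # ...so pylint always runs next. (An unexpected exception from the pep8
    # step, by contrast, propagates immediately and pylint is never reached,
    # which is what test_other_exception asserts via the sh call count.)
    if run_pylint() != 0:
        failed = True
    if failed:
        sys.exit(1)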
@@ -109,19 +123,20 @@ class TestPaverRunQuality(unittest.TestCase):
         # And assert that sh was called once (for the call to "pylint")
         self.assertEqual(self._mock_paver_sh.call_count, 1)

-    @unittest.skip("TODO: Fix order dependency on test_no_diff_quality_failures")
+    @patch('__builtin__.open', mock_open())
     def test_other_exception(self):
         """
         If diff-quality fails for an unknown reason on the first run (pep8), then
         pylint should not be run
         """
         self._mock_paver_sh.side_effect = [Exception('unrecognized failure!'), 0]
         with self.assertRaises(Exception):
             with self.assertRaises(SystemExit):
                 pavelib.quality.run_quality("")
                 self.assertRaises(Exception)
         # Test that pylint is NOT called by counting calls
         self.assertEqual(self._mock_paver_sh.call_count, 1)

-    @unittest.skip("TODO: TE-868")
+    @patch('__builtin__.open', mock_open())
     def test_no_diff_quality_failures(self):
         # Assert nothing is raised
         _mock_pep8_violations = MagicMock(return_value=(0, []))
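One detail in test_other_exception above is worth flagging: a bare self.assertRaises(Exception) call with no callable argument and no with block merely returns a context manager and asserts nothing, so the meaningful checks in that test are the enclosing with blocks and the final call-count assertion. A short illustration of the difference:

import unittest


class AssertRaisesUsage(unittest.TestCase):
    def test_context_manager_form_checks_the_exception(self):
        with self.assertRaises(ValueError):
            int('not a number')  # fails the test if this does NOT raise

    def test_bare_call_checks_nothing(self):
        # Without a callable argument, assertRaises only builds a context
        # manager object; discarding it performs no assertion at all.
        self.assertRaises(ValueError)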