Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
E
edx-platform
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
edx
edx-platform
Commits
67be6807
Commit
67be6807
authored
Nov 15, 2016
by
Ben Patterson
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Add tests. Enjoy your coverage and feel confident.
parent
f52e3cf8
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
115 additions
and
36 deletions
+115
-36
pavelib/paver_tests/test_paver_quality.py
+7
-34
pavelib/paver_tests/test_prereqs.py
+52
-1
pavelib/paver_tests/utils.py
+52
-0
pavelib/prereqs.py
+4
-1
No files found.
pavelib/paver_tests/test_paver_quality.py
View file @
67be6807
"""
"""
Tests for paver quality tasks
Tests for paver quality tasks
"""
"""
import
os
from
path
import
Path
as
path
import
tempfile
import
tempfile
import
textwrap
import
textwrap
import
unittest
import
unittest
from
mock
import
patch
,
MagicMock
,
mock_open
from
ddt
import
ddt
,
file_data
import
pavelib.quality
import
os
import
paver.easy
import
paver.easy
import
paver.tasks
import
paver.tasks
from
ddt
import
ddt
,
file_data
from
mock
import
patch
,
MagicMock
,
mock_open
from
path
import
Path
as
path
from
paver.easy
import
BuildFailure
from
paver.easy
import
BuildFailure
import
pavelib.quality
from
pavelib.paver_tests.utils
import
CustomShMock
@ddt
@ddt
class
TestPaverQualityViolations
(
unittest
.
TestCase
):
class
TestPaverQualityViolations
(
unittest
.
TestCase
):
...
@@ -351,32 +353,3 @@ class TestPaverRunQuality(unittest.TestCase):
...
@@ -351,32 +353,3 @@ class TestPaverRunQuality(unittest.TestCase):
self
.
assertEqual
(
_mock_pep8_violations
.
call_count
,
1
)
self
.
assertEqual
(
_mock_pep8_violations
.
call_count
,
1
)
# And assert that sh was called twice (for the call to "pylint" & "eslint")
# And assert that sh was called twice (for the call to "pylint" & "eslint")
self
.
assertEqual
(
self
.
_mock_paver_sh
.
call_count
,
2
)
self
.
assertEqual
(
self
.
_mock_paver_sh
.
call_count
,
2
)
class CustomShMock(object):
    """
    Diff-quality makes a number of sh calls. None of those calls should be made during tests; however, some
    of them need to have certain responses.
    """

    def fail_on_pylint(self, arg):
        """
        For our tests, the call for diff-quality running pylint needs to fail, since that is
        what is going to fail when we pass in a percentage ("p") requirement.

        Any other sh call is a no-op (returns None).
        """
        if "pylint" in arg:
            # Essentially mock diff-quality exiting with 1
            paver.easy.sh("exit 1")
        else:
            return

    def fail_on_eslint(self, arg):
        """
        For our tests, the call for diff-quality running eslint needs to fail, since that is
        what is going to fail when we pass in a percentage ("p") requirement.

        Any other sh call is a no-op (returns None).
        """
        if "eslint" in arg:
            # Essentially mock diff-quality exiting with 1
            paver.easy.sh("exit 1")
        else:
            return
pavelib/paver_tests/test_prereqs.py
View file @
67be6807
...
@@ -4,7 +4,11 @@ Tests covering the Open edX Paver prerequisites installation workflow
...
@@ -4,7 +4,11 @@ Tests covering the Open edX Paver prerequisites installation workflow
import
os
import
os
import
unittest
import
unittest
from
pavelib.prereqs
import
no_prereq_install
from
mock
import
call
,
patch
from
paver.easy
import
BuildFailure
from
pavelib.prereqs
import
no_prereq_install
,
node_prereqs_installation
from
pavelib.paver_tests.utils
import
PaverTestCase
,
CustomShMock
from
pavelib.paver_tests.test_paver_quality
import
CustomShMock
class
TestPaverPrereqInstall
(
unittest
.
TestCase
):
class
TestPaverPrereqInstall
(
unittest
.
TestCase
):
...
@@ -68,3 +72,50 @@ class TestPaverPrereqInstall(unittest.TestCase):
...
@@ -68,3 +72,50 @@ class TestPaverPrereqInstall(unittest.TestCase):
Ensure that '1' will be True.
Ensure that '1' will be True.
"""
"""
self
.
check_val
(
'1'
,
True
)
self
.
check_val
(
'1'
,
True
)
class TestPaverNodeInstall(PaverTestCase):
    """
    Test node install logic
    """

    def setUp(self):
        super(TestPaverNodeInstall, self).setUp()
        # Make sure the prereq-install short-circuit flag is off for these tests.
        os.environ['NO_PREREQ_INSTALL'] = 'false'
        sh_patcher = patch('pavelib.prereqs.sh', return_value=True)
        self._mock_paver_sh = sh_patcher.start()
        self.addCleanup(sh_patcher.stop)

    def test_npm_install_with_subprocess_error(self):
        """
        Test that we handle a subprocess 1 (proxy for cb() never called error)
        TE-1767
        """
        self._mock_paver_sh.side_effect = CustomShMock().fail_on_npm_install
        with self.assertRaises(BuildFailure):
            node_prereqs_installation()
        # npm install will be called twice
        recorded = self._mock_paver_sh.mock_calls
        self.assertEqual(recorded.count(call('npm install')), 2)

    def test_npm_install_called_once_when_successful(self):
        """
        Vanilla npm install should only be calling npm install one time
        """
        node_prereqs_installation()
        # when there's no failure, npm install is only called once
        recorded = self._mock_paver_sh.mock_calls
        self.assertEqual(recorded.count(call('npm install')), 1)

    def test_npm_install_with_unexpected_subprocess_error(self):
        """
        If there's some other error, only call npm install once, and raise a failure
        """
        self._mock_paver_sh.side_effect = CustomShMock().unexpected_fail_on_npm_install
        with self.assertRaises(BuildFailure):
            node_prereqs_installation()
        recorded = self._mock_paver_sh.mock_calls
        self.assertEqual(recorded.count(call('npm install')), 1)
pavelib/paver_tests/utils.py
View file @
67be6807
"""Unit tests for the Paver server tasks."""
"""Unit tests for the Paver server tasks."""
import
os
import
os
import
paver.easy
from
paver
import
tasks
from
paver
import
tasks
from
unittest
import
TestCase
from
unittest
import
TestCase
from
paver.easy
import
BuildFailure
class
PaverTestCase
(
TestCase
):
class
PaverTestCase
(
TestCase
):
"""
"""
...
@@ -58,3 +61,52 @@ class MockEnvironment(tasks.Environment):
...
@@ -58,3 +61,52 @@ class MockEnvironment(tasks.Environment):
output
=
message
output
=
message
if
not
output
.
startswith
(
"--->"
):
if
not
output
.
startswith
(
"--->"
):
self
.
messages
.
append
(
unicode
(
output
))
self
.
messages
.
append
(
unicode
(
output
))
class CustomShMock(object):
    """
    Diff-quality makes a number of sh calls. None of those calls should be made during tests; however, some
    of them need to have certain responses.
    """

    def fail_on_pylint(self, arg):
        """
        For our tests, the call for diff-quality running pylint needs to fail, since that is
        what is going to fail when we pass in a percentage ("p") requirement.

        Any other sh call is a no-op (returns None).
        """
        if "pylint" in arg:
            # Essentially mock diff-quality exiting with 1
            paver.easy.sh("exit 1")
        else:
            return

    def fail_on_eslint(self, arg):
        """
        For our tests, the call for diff-quality running eslint needs to fail, since that is
        what is going to fail when we pass in a percentage ("p") requirement.

        Any other sh call is a no-op (returns None).
        """
        if "eslint" in arg:
            # Essentially mock diff-quality exiting with 1
            paver.easy.sh("exit 1")
        else:
            return

    def fail_on_npm_install(self, arg):
        """
        Mock an 'npm install' failure with subprocess return code 1, the code used as a
        proxy for the "cb() never called" race, so that retry handling can be tested.

        Any other sh call is a no-op (returns None).
        """
        if "npm install" in arg:
            raise BuildFailure('Subprocess return code: 1')
        else:
            return

    def unexpected_fail_on_npm_install(self, arg):
        """
        Mock an 'npm install' failure with an unexpected return code (50), which should
        NOT trigger a retry.

        Any other sh call is a no-op (returns None).
        """
        if "npm install" in arg:
            raise BuildFailure('Subprocess return code: 50')
        else:
            return
pavelib/prereqs.py
View file @
67be6807
...
@@ -138,13 +138,16 @@ def node_prereqs_installation():
...
@@ -138,13 +138,16 @@ def node_prereqs_installation():
" {reg})"
.
format
(
reg
=
NPM_REGISTRY
))
" {reg})"
.
format
(
reg
=
NPM_REGISTRY
))
# Error handling around a race condition that produces "cb() never called" error. This
# Error handling around a race condition that produces "cb() never called" error. This
# ought to disappear when we upgrade npm to 3 or higher. TODO: clean this up when we do that.
# evinces itself as `cb_error_text` and it ought to disappear when we upgrade
# npm to 3 or higher. TODO: clean this up when we do that.
try
:
try
:
sh
(
'npm install'
)
sh
(
'npm install'
)
except
BuildFailure
,
error_text
:
except
BuildFailure
,
error_text
:
if
cb_error_text
in
error_text
:
if
cb_error_text
in
error_text
:
print
"npm install error detected. Retrying..."
print
"npm install error detected. Retrying..."
sh
(
'npm install'
)
sh
(
'npm install'
)
else
:
raise
BuildFailure
(
error_text
)
def
python_prereqs_installation
():
def
python_prereqs_installation
():
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment