Commit ec442789, authored Apr 18, 2016 by Robert Raposa

Add Jenkins support for Safe Templates Linter

parent 28b0d993
Showing 4 changed files with 111 additions and 0 deletions:

  pavelib/paver_tests/test_safelint.py   +62  -0
  pavelib/quality.py                     +45  -0
  scripts/all-tests.sh                   +1   -0
  scripts/generic-ci-tests.sh            +3   -0
pavelib/paver_tests/test_safelint.py (new file, mode 100755)

"""
Tests for paver quality tasks
"""
from mock import patch

import pavelib.quality
from paver.easy import call_task

from .utils import PaverTestCase


class PaverSafeLintTest(PaverTestCase):
    """
    Test run_safelint with a mocked environment in order to pass in opts
    """

    def setUp(self):
        super(PaverSafeLintTest, self).setUp()
        self.reset_task_messages()

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_count_from_last_line')
    def test_safelint_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safelint encounters an error parsing the safelint output log
        """
        _mock_count.return_value = None
        with self.assertRaises(SystemExit):
            call_task('pavelib.quality.run_safelint')

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_count_from_last_line')
    def test_safelint_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safelint finds violations, but a limit was not set
        """
        _mock_count.return_value = 1
        call_task('pavelib.quality.run_safelint')

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_count_from_last_line')
    def test_safelint_too_many_violations(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safelint finds more violations than are allowed
        """
        _mock_count.return_value = 4
        with self.assertRaises(SystemExit):
            call_task('pavelib.quality.run_safelint', options={"limit": "3"})

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_count_from_last_line')
    def test_safelint_under_limit(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safelint finds fewer violations than are allowed
        """
        _mock_count.return_value = 4
        # No SystemExit is expected
        call_task('pavelib.quality.run_safelint', options={"limit": "5"})
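
A usage note, not part of the commit: the options dict handed to call_task stands in for the task's command-line flags, so the last test above is the in-process equivalent of invoking the task with -l 5 and expecting a clean exit. A minimal sketch, assuming paver's standard cmdopts handling:

from paver.easy import call_task

# In-process equivalent of `paver run_safelint --limit=5` (or `-l 5`).
# run_safelint reads the flag via getattr(options, 'limit', -1), so an
# omitted limit falls back to -1, which disables the failure check.
call_task('pavelib.quality.run_safelint', options={"limit": "5"})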
pavelib/quality.py

...
@@ -298,6 +298,51 @@ def run_jshint(options):
            )
        )


@task
@needs('pavelib.prereqs.install_python_prereqs')
@cmdopts([
    ("limit=", "l", "limit for number of acceptable violations"),
])
def run_safelint(options):
    """
    Runs safe_template_linter.py on the codebase
    """
    violations_limit = int(getattr(options, 'limit', -1))

    safelint_report_dir = (Env.REPORT_DIR / "safelint")
    safelint_report = safelint_report_dir / "safelint.report"
    _prepare_report_dir(safelint_report_dir)

    sh(
        "{repo_root}/scripts/safe_template_linter.py >> {safelint_report}".format(
            repo_root=Env.REPO_ROOT,
            safelint_report=safelint_report,
        ),
        ignore_error=True
    )

    try:
        num_violations = int(_get_count_from_last_line(safelint_report, "safelint"))
    except TypeError:
        raise BuildFailure(
            "Error. Number of safelint violations could not be found in {safelint_report}".format(
                safelint_report=safelint_report
            )
        )

    # Record the metric
    _write_metric(num_violations, (Env.METRICS_DIR / "safelint"))

    # Fail if number of violations is greater than the limit
    if num_violations > violations_limit > -1:
        raise Exception(
            "SafeTemplateLinter Failed. Too many violations ({count}).\n"
            "The limit is {violations_limit}.".format(
                count=num_violations, violations_limit=violations_limit
            )
        )


def _write_metric(metric, filename):
    """
    Write a given metric to a given file
...
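
Two details of run_safelint are worth spelling out. First, _get_count_from_last_line is an existing helper defined elsewhere in pavelib/quality.py and is not part of this diff; the stand-in below is a hypothetical, simplified sketch meant only to show why the task catches TypeError (the helper yields None when no count can be parsed, and int(None) raises TypeError). Second, the chained comparison num_violations > violations_limit > -1 fails the build only when a non-negative limit was actually supplied:

import re

def _get_count_from_last_line(report_file, file_type):
    """
    Hypothetical, simplified stand-in for the real helper in
    pavelib/quality.py: parse a violations count from the last
    line of the report, returning None when none is found.
    (file_type is unused in this sketch.)
    """
    with open(report_file) as report:
        lines = report.readlines()
    match = re.search(r'(\d+)', lines[-1]) if lines else None
    # A None return makes int(...) in run_safelint raise TypeError,
    # which the task converts into a BuildFailure.
    return int(match.group(1)) if match else None

# The chained comparison in run_safelint is equivalent to
# (num_violations > violations_limit) and (violations_limit > -1):
assert (4 > 3 > -1) is True    # limit 3 set and exceeded -> build fails
assert (4 > 5 > -1) is False   # limit 5 set, count under it -> build passes
assert (1 > -1 > -1) is False  # default limit -1 -> check is disabled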
scripts/all-tests.sh

...
@@ -13,6 +13,7 @@ set -e
# Violations thresholds for failing the build
export PYLINT_THRESHOLD=4175
export JSHINT_THRESHOLD=9080
export SAFELINT_THRESHOLD=2550

doCheckVars() {
    if [ -n "$CIRCLECI" ] ; then
...
scripts/generic-ci-tests.sh

...
@@ -84,7 +84,10 @@ case "$TEST_SUITE" in
        paver run_jshint -l $JSHINT_THRESHOLD > jshint.log || { cat jshint.log; EXIT=1; }

        echo "Running code complexity report (python)."
        paver run_complexity > reports/code_complexity.log || echo "Unable to calculate code complexity. Ignoring error."

        echo "Running safe template linter report."
        paver run_safelint -l $SAFELINT_THRESHOLD > safelint.log || { cat safelint.log; EXIT=1; }

        # Run quality task. Pass in the 'fail-under' percentage to diff-quality
        echo "Running diff quality."
        paver run_quality -p 100 || EXIT=1

        # Need to create an empty test result so the post-build
...