Commit ec442789 by Robert Raposa

Add Jenkins support for Safe Templates Linter

parent 28b0d993
"""
Tests for paver quality tasks
"""
from mock import patch
import pavelib.quality
from paver.easy import call_task
from .utils import PaverTestCase
class PaverSafeLintTest(PaverTestCase):
    """
    Test run_safelint with a mocked environment in order to pass in opts
    """

    def setUp(self):
        super(PaverSafeLintTest, self).setUp()
        # Clear any messages recorded by earlier tasks so each test starts clean.
        self.reset_task_messages()

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_count_from_last_line')
    def test_safelint_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safelint encounters an error parsing the safelint output log
        """
        # A count of None simulates an unparseable report; the task must abort.
        _mock_count.return_value = None
        with self.assertRaises(SystemExit):
            call_task('pavelib.quality.run_safelint')

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_count_from_last_line')
    def test_safelint_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safelint finds violations, but a limit was not set
        """
        # With no limit option, any violation count should succeed.
        _mock_count.return_value = 1
        call_task('pavelib.quality.run_safelint')

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_count_from_last_line')
    def test_safelint_too_many_violations(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safelint finds more violations than are allowed
        """
        # 4 violations against a limit of 3 must fail the task.
        _mock_count.return_value = 4
        with self.assertRaises(SystemExit):
            call_task('pavelib.quality.run_safelint', options={"limit": "3"})

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_count_from_last_line')
    def test_safelint_under_limit(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safelint finds fewer violations than are allowed
        """
        # 4 violations against a limit of 5 is acceptable.
        _mock_count.return_value = 4
        # No System Exit is expected
        call_task('pavelib.quality.run_safelint', options={"limit": "5"})
...@@ -298,6 +298,51 @@ def run_jshint(options):
    )
@task
@needs('pavelib.prereqs.install_python_prereqs')
@cmdopts([
    ("limit=", "l", "limit for number of acceptable violations"),
])
def run_safelint(options):
    """
    Runs safe_template_linter.py on the codebase.

    Writes the linter output to reports/safelint/safelint.report, records the
    violation count as a metric, and fails the build (via BuildFailure) when
    the count exceeds the optional --limit / -l threshold.
    """
    # -1 means "no limit was supplied"; the chained comparison below then never fails.
    violations_limit = int(getattr(options, 'limit', -1))

    safelint_report_dir = (Env.REPORT_DIR / "safelint")
    safelint_report = safelint_report_dir / "safelint.report"
    _prepare_report_dir(safelint_report_dir)

    # The linter's exit status reflects violations found; ignore it and parse
    # the report file instead.
    sh(
        "{repo_root}/scripts/safe_template_linter.py >> {safelint_report}".format(
            repo_root=Env.REPO_ROOT,
            safelint_report=safelint_report,
        ),
        ignore_error=True
    )

    try:
        # _get_count_from_last_line returns None when the count cannot be
        # parsed from the report; int(None) raises TypeError.
        num_violations = int(_get_count_from_last_line(safelint_report, "safelint"))
    except TypeError:
        raise BuildFailure(
            "Error. Number of safelint violations could not be found in {safelint_report}".format(
                safelint_report=safelint_report
            )
        )

    # Record the metric
    _write_metric(num_violations, (Env.METRICS_DIR / "safelint"))

    # Fail if number of violations is greater than the limit (and a limit was set).
    # Raise BuildFailure (as the parse-error path does) rather than a bare
    # Exception, so paver reports the failure consistently; BuildFailure is an
    # Exception subclass, so existing callers are unaffected.
    if num_violations > violations_limit > -1:
        raise BuildFailure(
            "SafeTemplateLinter Failed. Too many violations ({count}).\nThe limit is {violations_limit}.".format(
                count=num_violations, violations_limit=violations_limit
            )
        )
def _write_metric(metric, filename):
    """
    Write a given metric to a given file
    ...
...@@ -13,6 +13,7 @@ set -e
# Violations thresholds for failing the build
export PYLINT_THRESHOLD=4175
export JSHINT_THRESHOLD=9080
export SAFELINT_THRESHOLD=2550

doCheckVars() {
    if [ -n "$CIRCLECI" ] ; then
...
...@@ -84,7 +84,10 @@ case "$TEST_SUITE" in
        paver run_jshint -l $JSHINT_THRESHOLD > jshint.log || { cat jshint.log; EXIT=1; }
        echo "Running code complexity report (python)."
        paver run_complexity > reports/code_complexity.log || echo "Unable to calculate code complexity. Ignoring error."
        echo "Running safe template linter report."
        paver run_safelint -l $SAFELINT_THRESHOLD > safelint.log || { cat safelint.log; EXIT=1; }
        # Run quality task. Pass in the 'fail-under' percentage to diff-quality
        echo "Running diff quality."
        paver run_quality -p 100 || EXIT=1
        # Need to create an empty test result so the post-build
...
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment