Commit afdec6a9 by Robert Raposa

Enhance Jenkins integration of safe template linting

- Add rule-based thresholds
- Add safecommit to Jenkins
parent 80c3f842
@@ -4,6 +4,7 @@ Tests for paver quality tasks
 import os
 from path import Path as path
 import tempfile
+import textwrap
 import unittest
 from mock import patch, MagicMock, mock_open
 from ddt import ddt, file_data
@@ -132,6 +133,64 @@ class TestPaverReportViolationsCounts(unittest.TestCase):
         actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "foo")  # pylint: disable=protected-access
         self.assertEqual(actual_count, None)
+
+    def test_get_safelint_counts_happy(self):
+        report = textwrap.dedent("""
+            test.html: 30:53: javascript-jquery-append: $('#test').append(print_tos);
+            javascript-concat-html: 310 violations
+            javascript-escape: 7 violations
+            2608 violations total
+        """)
+        with open(self.f.name, 'w') as f:
+            f.write(report)
+        counts = pavelib.quality._get_safelint_counts(self.f.name)  # pylint: disable=protected-access
+        self.assertDictEqual(counts, {
+            'rules': {
+                'javascript-concat-html': 310,
+                'javascript-escape': 7,
+            },
+            'total': 2608,
+        })
+
+    def test_get_safelint_counts_bad_counts(self):
+        report = textwrap.dedent("""
+            javascript-concat-html: violations
+        """)
+        with open(self.f.name, 'w') as f:
+            f.write(report)
+        counts = pavelib.quality._get_safelint_counts(self.f.name)  # pylint: disable=protected-access
+        self.assertDictEqual(counts, {
+            'rules': {},
+            'total': None,
+        })
+
+    def test_get_safecommit_count_happy(self):
+        report = textwrap.dedent("""
+            Linting lms/templates/navigation.html:
+            2 violations total
+            Linting scripts/tests/templates/test.underscore:
+            3 violations total
+        """)
+        with open(self.f.name, 'w') as f:
+            f.write(report)
+        count = pavelib.quality._get_safecommit_count(self.f.name)  # pylint: disable=protected-access
+        self.assertEqual(count, 5)
+
+    def test_get_safecommit_count_bad_counts(self):
+        report = textwrap.dedent("""
+            Linting lms/templates/navigation.html:
+        """)
+        with open(self.f.name, 'w') as f:
+            f.write(report)
+        count = pavelib.quality._get_safecommit_count(self.f.name)  # pylint: disable=protected-access
+        self.assertIsNone(count)

 class TestPrepareReportDir(unittest.TestCase):
     """
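The new tests above fix the parsing contract for the safelint report: per-rule lines such as "javascript-escape: 7 violations" feed a 'rules' dict, a trailing "2608 violations total" line feeds 'total', and unparseable counts yield an empty rules dict and a None total. As a rough orientation only, a parser satisfying that contract could look like the sketch below; the function name and regexes are illustrative assumptions, not the actual pavelib.quality._get_safelint_counts implementation.

    import re

    def get_safelint_counts_sketch(filename):
        """Sketch only: pull per-rule counts and the overall total out of
        a safelint report, in the shape the tests above expect."""
        rule_re = re.compile(r'^(?P<rule>[a-z-]+):\s+(?P<count>\d+) violations$')
        total_re = re.compile(r'^(?P<count>\d+) violations total$')
        counts = {'rules': {}, 'total': None}
        with open(filename) as report:
            for line in report:
                line = line.strip()
                rule_match = rule_re.match(line)
                if rule_match:
                    counts['rules'][rule_match.group('rule')] = int(rule_match.group('count'))
                total_match = total_re.match(line)
                if total_match:
                    counts['total'] = int(total_match.group('count'))
        return counts

The bad-counts test falls out naturally from this shape: "javascript-concat-html: violations" contains no digits, so neither pattern matches and the result is {'rules': {}, 'total': None}.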
"""
Tests for paver safecommit quality tasks
"""
from mock import patch
import pavelib.quality
from paver.easy import call_task
from .utils import PaverTestCase
class PaverSafeCommitTest(PaverTestCase):
"""
Test run_safecommit_report with a mocked environment in order to pass in
opts.
"""
def setUp(self):
super(PaverSafeCommitTest, self).setUp()
self.reset_task_messages()
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safecommit_count')
def test_safecommit_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
"""
run_safecommit_report encounters an error parsing the safecommit output
log.
"""
_mock_count.return_value = None
with self.assertRaises(SystemExit):
call_task('pavelib.quality.run_safecommit_report')
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safecommit_count')
def test_safecommit_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
"""
run_safecommit_report finds violations.
"""
_mock_count.return_value = 0
call_task('pavelib.quality.run_safecommit_report')
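Unlike the safelint helper, the safecommit count must distinguish "no violations" (0) from "count not found" (None), since only the latter should abort the task. A hedged sketch of counting logic consistent with the tests in both files (illustrative names, not the shipped helper):

    import re

    def get_safecommit_count_sketch(filename):
        """Sketch only: sum the per-file 'N violations total' lines of a
        safecommit report; return None when a linted file has no count."""
        lint_re = re.compile(r'^Linting\s+')
        count_re = re.compile(r'^(?P<count>\d+) violations total$')
        files_linted = 0
        counts_found = 0
        total = 0
        with open(filename) as report:
            for line in report:
                line = line.strip()
                if lint_re.match(line):
                    files_linted += 1
                match = count_re.match(line)
                if match:
                    counts_found += 1
                    total += int(match.group('count'))
        if counts_found != files_linted:
            return None  # at least one linted file is missing its count
        return total

Returning None rather than 0 here is what lets test_safecommit_violation_number_not_found expect SystemExit while test_safecommit_vanilla treats a parsed count of 0 as success.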
""" """
Tests for paver quality tasks Tests for paver safelint quality tasks
""" """
from mock import patch from mock import patch
@@ -20,43 +20,99 @@ class PaverSafeLintTest(PaverTestCase):
     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_count_from_last_line')
-    def test_safelint_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_violation_number_not_found(self, _mock_counts, _mock_report_dir, _mock_write_metric):
         """
         run_safelint encounters an error parsing the safelint output log
         """
-        _mock_count.return_value = None
+        _mock_counts.return_value = {}
         with self.assertRaises(SystemExit):
             call_task('pavelib.quality.run_safelint')

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_count_from_last_line')
-    def test_safelint_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_vanilla(self, _mock_counts, _mock_report_dir, _mock_write_metric):
         """
         run_safelint finds violations, but a limit was not set
         """
-        _mock_count.return_value = 1
+        _mock_counts.return_value = {'total': 0}
         call_task('pavelib.quality.run_safelint')

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_count_from_last_line')
-    def test_safelint_too_many_violations(self, _mock_count, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_invalid_thresholds_option(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint fails when the thresholds option is poorly formatted
+        """
+        _mock_counts.return_value = {'total': 0}
+        with self.assertRaises(SystemExit):
+            call_task('pavelib.quality.run_safelint', options={"thresholds": "invalid"})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_invalid_thresholds_option_key(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint fails when the thresholds option contains an unknown key
+        """
+        _mock_counts.return_value = {'total': 0}
+        with self.assertRaises(SystemExit):
+            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"invalid": 3}'})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_too_many_violations(self, _mock_counts, _mock_report_dir, _mock_write_metric):
         """
         run_safelint finds more violations than are allowed
         """
-        _mock_count.return_value = 4
+        _mock_counts.return_value = {'total': 4}
         with self.assertRaises(SystemExit):
-            call_task('pavelib.quality.run_safelint', options={"limit": "3"})
+            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"total": 3}'})

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_count_from_last_line')
-    def test_safelint_under_limit(self, _mock_count, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_under_limit(self, _mock_counts, _mock_report_dir, _mock_write_metric):
         """
         run_safelint finds fewer violations than are allowed
         """
-        _mock_count.return_value = 4
+        _mock_counts.return_value = {'total': 4}
         # No System Exit is expected
-        call_task('pavelib.quality.run_safelint', options={"limit": "5"})
+        call_task('pavelib.quality.run_safelint', options={"thresholds": '{"total": 5}'})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_rule_violation_number_not_found(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint encounters an error parsing the safelint output log for
+        a rule threshold that was set.
+        """
+        _mock_counts.return_value = {'total': 4}
+        with self.assertRaises(SystemExit):
+            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 3}}'})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_too_many_rule_violations(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint finds more rule violations than are allowed
+        """
+        _mock_counts.return_value = {'total': 4, 'rules': {'javascript-escape': 4}}
+        with self.assertRaises(SystemExit):
+            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 3}}'})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_under_rule_limit(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint finds fewer rule violations than are allowed
+        """
+        _mock_counts.return_value = {'total': 4, 'rules': {'javascript-escape': 4}}
+        # No System Exit is expected
+        call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 5}}'})
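Read together, these tests define the thresholds contract for run_safelint: the option is a JSON object whose only allowed keys are 'total' and 'rules'; malformed JSON, an unknown key, an exceeded threshold, or a threshold whose count could not be parsed from the report all fail the build. A simplified checker consistent with that contract (an assumed shape for illustration, not the actual paver task):

    import json

    def check_safelint_thresholds(thresholds_option, counts):
        """Sketch only: validate the JSON thresholds option against parsed
        counts; returns error messages (the task would sys.exit on any)."""
        try:
            thresholds = json.loads(thresholds_option)
        except ValueError:
            return ["Thresholds option is not valid JSON."]
        if not isinstance(thresholds, dict) or not set(thresholds.keys()) <= {'total', 'rules'}:
            return ["Thresholds must be an object with only 'total' and/or 'rules' keys."]
        errors = []
        if 'total' in thresholds:
            if counts.get('total') is None:
                errors.append("Couldn't parse the total violation count.")
            elif counts['total'] > thresholds['total']:
                errors.append("Too many violations in total: %d > %d." % (counts['total'], thresholds['total']))
        for rule, limit in thresholds.get('rules', {}).items():
            count = counts.get('rules', {}).get(rule)
            if count is None:
                errors.append("Couldn't parse a count for rule '%s'." % rule)
            elif count > limit:
                errors.append("Too many '%s' violations: %d > %d." % (rule, count, limit))
        return errors

For example, check_safelint_thresholds('{"rules": {"javascript-escape": 3}}', {'total': 4}) reports that no count was parsed for javascript-escape, mirroring test_safelint_rule_violation_number_not_found above.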
@@ -13,7 +13,7 @@ set -e
 # Violations thresholds for failing the build
 export PYLINT_THRESHOLD=4175
 export JSHINT_THRESHOLD=7550
-export SAFELINT_THRESHOLD=2700
+source scripts/safelint_thresholds.sh

 doCheckVars() {
     if [ -n "$CIRCLECI" ] ; then
@@ -85,7 +85,9 @@ case "$TEST_SUITE" in
         echo "Running code complexity report (python)."
         paver run_complexity > reports/code_complexity.log || echo "Unable to calculate code complexity. Ignoring error."
         echo "Running safe template linter report."
-        paver run_safelint -l $SAFELINT_THRESHOLD > safelint.log || { cat safelint.log; EXIT=1; }
+        paver run_safelint -t $SAFELINT_THRESHOLDS > safelint.log || { cat safelint.log; EXIT=1; }
+        echo "Running safe commit linter report."
+        paver run_safecommit_report > safecommit.log || { cat safecommit.log; EXIT=1; }

         # Run quality task. Pass in the 'fail-under' percentage to diff-quality
         echo "Running diff quality."
         paver run_quality -p 100 || EXIT=1
#!/usr/bin/env bash
set -e
###############################################################################
#
# safelint_thresholds.sh
#
# The thresholds used for paver run_safelint when used with various CI
# systems.
#
###############################################################################
# Violations thresholds for failing the build
export SAFELINT_THRESHOLDS='
{
"rules": {
"javascript-concat-html": 313,
"javascript-escape": 7,
"javascript-interpolate": 71,
"javascript-jquery-append": 120,
"javascript-jquery-html": 313,
"javascript-jquery-insert-into-target": 26,
"javascript-jquery-insertion": 30,
"javascript-jquery-prepend": 12,
"mako-html-entities": 0,
"mako-invalid-html-filter": 33,
"mako-invalid-js-filter": 249,
"mako-js-html-string": 0,
"mako-js-missing-quotes": 0,
"mako-missing-default": 248,
"mako-multiple-page-tags": 0,
"mako-unknown-context": 0,
"mako-unparseable-expression": 0,
"mako-unwanted-html-filter": 0,
"python-close-before-format": 0,
"python-concat-html": 28,
"python-custom-escape": 13,
"python-deprecated-display-name": 53,
"python-interpolate-html": 68,
"python-parse-error": 0,
"python-requires-html-or-text": 0,
"python-wrap-html": 289,
"underscore-not-escaped": 709
},
"total": 2565
}'
export SAFELINT_THRESHOLDS=${SAFELINT_THRESHOLDS//[[:space:]]/}
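The last line is bash pattern substitution: ${SAFELINT_THRESHOLDS//[[:space:]]/} deletes every whitespace character, so the JSON becomes one unbroken token and survives being passed unquoted as "paver run_safelint -t $SAFELINT_THRESHOLDS" in the script above. Stripping whitespace does not change what the JSON means, as this small illustrative check shows (abbreviated to two rules):

    import json

    # Abbreviated, whitespace-stripped form of SAFELINT_THRESHOLDS (values from the script above)
    compact = '{"rules":{"javascript-escape":7,"underscore-not-escaped":709},"total":2565}'
    thresholds = json.loads(compact)  # still valid JSON after whitespace removal
    assert thresholds['total'] == 2565
    assert thresholds['rules']['underscore-not-escaped'] == 709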