Commit 143a22fb by Robert Raposa

Enhance Jenkins integration of safe templates take 2

- Handle case when no files are linted
- Skip deleted files for safe commit script
- Add verbose options for safe commit
parent ebc11849
@@ -4,6 +4,7 @@ Tests for paver quality tasks
 import os
 from path import Path as path
 import tempfile
+import textwrap
 import unittest
 from mock import patch, MagicMock, mock_open
 from ddt import ddt, file_data
@@ -59,7 +60,8 @@ class TestPaverQualityViolations(unittest.TestCase):
 class TestPaverReportViolationsCounts(unittest.TestCase):
     """
-    For testing run_jshint and run_complexity utils
+    For testing utility functions for getting counts from reports for
+    run_jshint, run_complexity, run_safelint, and run_safecommit_report.
     """
     def setUp(self):
@@ -132,6 +134,91 @@ class TestPaverReportViolationsCounts(unittest.TestCase):
         actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "foo")  # pylint: disable=protected-access
         self.assertEqual(actual_count, None)

+    def test_get_safelint_counts_happy(self):
+        """
+        Test happy path getting violation counts from safelint report.
+        """
+        report = textwrap.dedent("""
+            test.html: 30:53: javascript-jquery-append: $('#test').append(print_tos);
+            javascript-concat-html: 310 violations
+            javascript-escape: 7 violations
+            2608 violations total
+        """)
+        with open(self.f.name, 'w') as f:
+            f.write(report)
+        counts = pavelib.quality._get_safelint_counts(self.f.name)  # pylint: disable=protected-access
+        self.assertDictEqual(counts, {
+            'rules': {
+                'javascript-concat-html': 310,
+                'javascript-escape': 7,
+            },
+            'total': 2608,
+        })
+
+    def test_get_safelint_counts_bad_counts(self):
+        """
+        Test getting violation counts from a truncated and malformed safelint
+        report.
+        """
+        report = textwrap.dedent("""
+            javascript-concat-html: violations
+        """)
+        with open(self.f.name, 'w') as f:
+            f.write(report)
+        counts = pavelib.quality._get_safelint_counts(self.f.name)  # pylint: disable=protected-access
+        self.assertDictEqual(counts, {
+            'rules': {},
+            'total': None,
+        })
+
+    def test_get_safecommit_count_happy(self):
+        """
+        Test happy path getting violation count from safecommit report.
+        """
+        report = textwrap.dedent("""
+            Linting lms/templates/navigation.html:
+            2 violations total
+            Linting scripts/tests/templates/test.underscore:
+            3 violations total
+        """)
+        with open(self.f.name, 'w') as f:
+            f.write(report)
+        count = pavelib.quality._get_safecommit_count(self.f.name)  # pylint: disable=protected-access
+        self.assertEqual(count, 5)
+
+    def test_get_safecommit_count_bad_counts(self):
+        """
+        Test getting violation count from a truncated safecommit report.
+        """
+        report = textwrap.dedent("""
+            Linting lms/templates/navigation.html:
+        """)
+        with open(self.f.name, 'w') as f:
+            f.write(report)
+        count = pavelib.quality._get_safecommit_count(self.f.name)  # pylint: disable=protected-access
+        self.assertIsNone(count)
+
+    def test_get_safecommit_count_no_files(self):
+        """
+        Test getting violation count from a safecommit report where no files
+        were linted.
+        """
+        report = textwrap.dedent("""
+            No files linted.
+        """)
+        with open(self.f.name, 'w') as f:
+            f.write(report)
+        count = pavelib.quality._get_safecommit_count(self.f.name)  # pylint: disable=protected-access
+        self.assertEqual(count, 0)
+
 class TestPrepareReportDir(unittest.TestCase):
     """
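The fixtures above pin down the two report formats being parsed. A minimal sketch of helpers that would satisfy these tests, assuming simple line-by-line regex scans; this is an illustration consistent with the fixtures, not the actual implementations in pavelib.quality:

import re


def _get_safelint_counts_sketch(filename):
    """
    Collect each 'rule-id: N violations' line plus the 'N violations total'
    line. 'total' stays None when the total line is missing or malformed.
    """
    violations = {'rules': {}, 'total': None}
    with open(filename) as report_file:
        for line in report_file:
            line = line.strip()
            rule_match = re.search(r'^([a-z\-]+):\s*(\d+) violations$', line)
            if rule_match:
                violations['rules'][rule_match.group(1)] = int(rule_match.group(2))
            total_match = re.search(r'^(\d+) violations total$', line)
            if total_match:
                violations['total'] = int(total_match.group(1))
    return violations


def _get_safecommit_count_sketch(filename):
    """
    Sum the per-file totals. Returns 0 when no files were linted, and None
    when a 'Linting <file>:' header has no matching total (truncated report).
    """
    files_pending = 0
    total = 0
    with open(filename) as report_file:
        for line in report_file:
            line = line.strip()
            if line == 'No files linted.':
                return 0
            if line.startswith('Linting ') and line.endswith(':'):
                files_pending += 1
            count_match = re.search(r'^(\d+) violations total$', line)
            if count_match:
                total += int(count_match.group(1))
                files_pending -= 1
    # A header without a total means the report was cut off mid-file.
    return None if files_pending else total

The tests treat a missing total differently from a genuine zero, so any implementation has to preserve that distinction for the callers that decide whether to fail the build.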
"""
Tests for paver safecommit quality tasks
"""
from mock import patch
import pavelib.quality
from paver.easy import call_task
from .utils import PaverTestCase
class PaverSafeCommitTest(PaverTestCase):
"""
Test run_safecommit_report with a mocked environment in order to pass in
opts.
"""
def setUp(self):
super(PaverSafeCommitTest, self).setUp()
self.reset_task_messages()
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safecommit_count')
def test_safecommit_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
"""
run_safecommit_report encounters an error parsing the safecommit output
log.
"""
_mock_count.return_value = None
with self.assertRaises(SystemExit):
call_task('pavelib.quality.run_safecommit_report')
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safecommit_count')
def test_safecommit_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
"""
run_safecommit_report finds violations.
"""
_mock_count.return_value = 0
call_task('pavelib.quality.run_safecommit_report')
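These two tests fix the task's contract: a count of None must abort with SystemExit, while any parsed count is recorded. A compact sketch of that flow, assuming _write_metric(count, path) roughly matches the patched helper's signature; the task body here is illustrative, not the real paver task:

import sys

import pavelib.quality


def run_safecommit_report_sketch(report_path, metrics_path):
    # _get_safecommit_count and _write_metric are the helpers patched above.
    count = pavelib.quality._get_safecommit_count(report_path)
    if count is None:
        # Unparseable report: fail the build, as the first test asserts.
        sys.exit('Error: could not find a safecommit violations count.')
    pavelib.quality._write_metric(count, metrics_path)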
"""
Tests for paver quality tasks
Tests for paver safelint quality tasks
"""
from mock import patch
......@@ -20,43 +20,99 @@ class PaverSafeLintTest(PaverTestCase):
     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_count_from_last_line')
-    def test_safelint_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_violation_number_not_found(self, _mock_counts, _mock_report_dir, _mock_write_metric):
         """
         run_safelint encounters an error parsing the safelint output log
         """
-        _mock_count.return_value = None
+        _mock_counts.return_value = {}
         with self.assertRaises(SystemExit):
             call_task('pavelib.quality.run_safelint')

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_count_from_last_line')
-    def test_safelint_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_vanilla(self, _mock_counts, _mock_report_dir, _mock_write_metric):
         """
         run_safelint finds no violations, and no limit was set
         """
-        _mock_count.return_value = 1
+        _mock_counts.return_value = {'total': 0}
         call_task('pavelib.quality.run_safelint')

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_count_from_last_line')
-    def test_safelint_too_many_violations(self, _mock_count, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_invalid_thresholds_option(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint fails when the thresholds option is not valid JSON
+        """
+        _mock_counts.return_value = {'total': 0}
+        with self.assertRaises(SystemExit):
+            call_task('pavelib.quality.run_safelint', options={"thresholds": "invalid"})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_invalid_thresholds_option_key(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint fails when the thresholds option contains an unknown key
+        """
+        _mock_counts.return_value = {'total': 0}
+        with self.assertRaises(SystemExit):
+            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"invalid": 3}'})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_too_many_violations(self, _mock_counts, _mock_report_dir, _mock_write_metric):
         """
         run_safelint finds more violations than are allowed
         """
-        _mock_count.return_value = 4
+        _mock_counts.return_value = {'total': 4}
         with self.assertRaises(SystemExit):
-            call_task('pavelib.quality.run_safelint', options={"limit": "3"})
+            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"total": 3}'})

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_count_from_last_line')
-    def test_safelint_under_limit(self, _mock_count, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_under_limit(self, _mock_counts, _mock_report_dir, _mock_write_metric):
         """
         run_safelint finds fewer violations than are allowed
         """
-        _mock_count.return_value = 4
+        _mock_counts.return_value = {'total': 4}
         # No System Exit is expected
-        call_task('pavelib.quality.run_safelint', options={"limit": "5"})
+        call_task('pavelib.quality.run_safelint', options={"thresholds": '{"total": 5}'})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_rule_violation_number_not_found(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint encounters an error parsing the safelint output log for a
+        rule threshold that was set.
+        """
+        _mock_counts.return_value = {'total': 4}
+        with self.assertRaises(SystemExit):
+            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 3}}'})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_too_many_rule_violations(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint finds more rule violations than are allowed
+        """
+        _mock_counts.return_value = {'total': 4, 'rules': {'javascript-escape': 4}}
+        with self.assertRaises(SystemExit):
+            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 3}}'})
+
+    @patch.object(pavelib.quality, '_write_metric')
+    @patch.object(pavelib.quality, '_prepare_report_dir')
+    @patch.object(pavelib.quality, '_get_safelint_counts')
+    def test_safelint_under_rule_limit(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+        """
+        run_safelint finds fewer rule violations than are allowed
+        """
+        _mock_counts.return_value = {'total': 4, 'rules': {'javascript-escape': 4}}
+        # No System Exit is expected
+        call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 5}}'})
@@ -13,7 +13,9 @@ set -e
 # Violations thresholds for failing the build
 export PYLINT_THRESHOLD=4175
 export JSHINT_THRESHOLD=7550
-export SAFELINT_THRESHOLD=2700
+SAFELINT_THRESHOLDS=`cat scripts/safelint_thresholds.json`
+export SAFELINT_THRESHOLDS=${SAFELINT_THRESHOLDS//[[:space:]]/}

 doCheckVars() {
     if [ -n "$CIRCLECI" ] ; then
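Stripping whitespace with ${SAFELINT_THRESHOLDS//[[:space:]]/} keeps the exported JSON a single shell word, so it survives unquoted word splitting when passed to paver below. The same compaction in Python, shown only to make the transformation explicit:

import json

with open('scripts/safelint_thresholds.json') as thresholds_file:
    thresholds = json.load(thresholds_file)

# separators=(',', ':') emits JSON with no spaces, matching the shell's
# whitespace stripping.
compact_json = json.dumps(thresholds, separators=(',', ':'))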
@@ -85,7 +85,9 @@ case "$TEST_SUITE" in
         echo "Running code complexity report (python)."
         paver run_complexity > reports/code_complexity.log || echo "Unable to calculate code complexity. Ignoring error."
         echo "Running safe template linter report."
-        paver run_safelint -l $SAFELINT_THRESHOLD > safelint.log || { cat safelint.log; EXIT=1; }
+        paver run_safelint -t $SAFELINT_THRESHOLDS > safelint.log || { cat safelint.log; EXIT=1; }
+        echo "Running safe commit linter report."
+        paver run_safecommit_report > safecommit.log || { cat safecommit.log; EXIT=1; }

         # Run quality task. Pass in the 'fail-under' percentage to diff-quality
         echo "Running diff quality."
         paver run_quality -p 100 || EXIT=1
@@ -19,6 +19,7 @@ show_help() {
     echo "  -m, --main-branch=COMMIT  Run against files changed between the"
     echo "                            current branch and this commit."
     echo "                            Defaults to origin/master."
+    echo "  -v, --verbose             Output details of git commands run."
     echo ""
     echo "This script does not give a grand total. Be sure to check for"
     echo "0 violations on each file."

@@ -30,12 +31,25 @@ show_help() {
 }

+show_verbose() {
+    echo "The list of files linted is based on the following:"
+    echo "- Current commit: ${current_branch_hash}"
+    echo "- Main commit: ${MAIN_COMMIT}"
+    echo "- Merge base command: ${merge_base_command}"
+    echo "- Merge base: ${merge_base}"
+    echo "- Diff command: ${diff_command}"
+}
+
 for i in "$@"; do
     case $i in
         -m=*|--main-branch=*)
             MAIN_COMMIT="${i#*=}"
             shift # past argument=value
             ;;
+        -v|--verbose)
+            VERBOSE=true
+            ;;
         -h|--help|*)
             # help or unknown option
             show_help

@@ -51,11 +65,24 @@ if [ -z "${MAIN_COMMIT+x}" ]; then
     MAIN_COMMIT="origin/master"
 fi

-merge_base=`git merge-base "$current_branch_hash" "$MAIN_COMMIT"`
-diff_files=`git diff --name-only "$current_branch_hash" "$merge_base"`
+merge_base_command="git merge-base $current_branch_hash $MAIN_COMMIT"
+merge_base=$(${merge_base_command})
+diff_command="git diff --name-only --diff-filter=ACM $current_branch_hash $merge_base"
+diff_files=$(${diff_command})

-for f in $diff_files; do
-    echo ""
-    echo "Linting $f:"
-    ./scripts/safe_template_linter.py $f
-done
+if [ "$diff_files" = "" ]; then
+    # When no files are found, automatically display verbose details to help
+    # understand why.
+    show_verbose
+    echo ""
+    echo "No files linted."
+else
+    if [ ${VERBOSE} ] ; then
+        show_verbose
+    fi
+    for f in $diff_files; do
+        echo ""
+        echo "Linting $f:"
+        ./scripts/safe_template_linter.py $f
+    done
+fi
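The rewritten diff command uses git diff --name-only --diff-filter=ACM, which keeps only Added, Copied, and Modified paths, so files deleted on the branch are never handed to the linter. A small Python equivalent of the same file selection (the function name is illustrative; the git flags are real):

import subprocess


def safecommit_changed_files(current_commit, main_commit='origin/master'):
    """
    Return files changed since the merge base, skipping deleted files,
    mirroring the shell script's git calls.
    """
    merge_base = subprocess.check_output(
        ['git', 'merge-base', current_commit, main_commit]).strip()
    # --diff-filter=ACM skips deleted files, just like the script above.
    diff_output = subprocess.check_output(
        ['git', 'diff', '--name-only', '--diff-filter=ACM',
         current_commit, merge_base])
    return diff_output.split()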
{
    "rules": {
        "javascript-concat-html": 313,
        "javascript-escape": 7,
        "javascript-interpolate": 71,
        "javascript-jquery-append": 120,
        "javascript-jquery-html": 313,
        "javascript-jquery-insert-into-target": 26,
        "javascript-jquery-insertion": 30,
        "javascript-jquery-prepend": 12,
        "mako-html-entities": 0,
        "mako-invalid-html-filter": 33,
        "mako-invalid-js-filter": 222,
        "mako-js-html-string": 0,
        "mako-js-missing-quotes": 0,
        "mako-missing-default": 234,
        "mako-multiple-page-tags": 0,
        "mako-unknown-context": 0,
        "mako-unparseable-expression": 0,
        "mako-unwanted-html-filter": 0,
        "python-close-before-format": 0,
        "python-concat-html": 28,
        "python-custom-escape": 13,
        "python-deprecated-display-name": 53,
        "python-interpolate-html": 68,
        "python-parse-error": 0,
        "python-requires-html-or-text": 0,
        "python-wrap-html": 279,
        "underscore-not-escaped": 709
    },
    "total": 2509
}