edx / edx-platform

Commit 5ea529ae
Authored May 11, 2016 by Robert Raposa
Revert: Enhance Jenkins integration of safe template linting

Parent: 8340703f
Showing 7 changed files with 17 additions and 225 deletions (+17 / -225):

    pavelib/paver_tests/test_paver_quality.py    +0   -59
    pavelib/paver_tests/test_safecommit.py       +0   -44
    pavelib/paver_tests/test_safelint.py         +15  -71
    pavelib/quality.py                           +0   -0
    scripts/all-tests.sh                         +1   -1
    scripts/generic-ci-tests.sh                  +1   -3
    scripts/safelint_thresholds.sh               +0   -47
pavelib/paver_tests/test_paver_quality.py (+0 -59)

@@ -4,7 +4,6 @@ Tests for paver quality tasks
 import os
 from path import Path as path
 import tempfile
-import textwrap
 import unittest
 from mock import patch, MagicMock, mock_open
 from ddt import ddt, file_data

@@ -133,64 +132,6 @@ class TestPaverReportViolationsCounts(unittest.TestCase):
         actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "foo")  # pylint: disable=protected-access
         self.assertEqual(actual_count, None)
-
-    def test_get_safelint_counts_happy(self):
-        report = textwrap.dedent("""
-            test.html: 30:53: javascript-jquery-append: $('#test').append(print_tos);
-            javascript-concat-html: 310 violations
-            javascript-escape: 7 violations
-            2608 violations total
-        """)
-        with open(self.f.name, 'w') as f:
-            f.write(report)
-        counts = pavelib.quality._get_safelint_counts(self.f.name)  # pylint: disable=protected-access
-        self.assertDictEqual(counts, {
-            'rules': {
-                'javascript-concat-html': 310,
-                'javascript-escape': 7,
-            },
-            'total': 2608,
-        })
-
-    def test_get_safelint_counts_bad_counts(self):
-        report = textwrap.dedent("""
-            javascript-concat-html: violations
-        """)
-        with open(self.f.name, 'w') as f:
-            f.write(report)
-        counts = pavelib.quality._get_safelint_counts(self.f.name)  # pylint: disable=protected-access
-        self.assertDictEqual(counts, {
-            'rules': {},
-            'total': None,
-        })
-
-    def test_get_safecommit_count_happy(self):
-        report = textwrap.dedent("""
-            Linting lms/templates/navigation.html:
-            2 violations total
-            Linting scripts/tests/templates/test.underscore:
-            3 violations total
-        """)
-        with open(self.f.name, 'w') as f:
-            f.write(report)
-        count = pavelib.quality._get_safecommit_count(self.f.name)  # pylint: disable=protected-access
-        self.assertEqual(count, 5)
-
-    def test_get_safecommit_count_bad_counts(self):
-        report = textwrap.dedent("""
-            Linting lms/templates/navigation.html:
-        """)
-        with open(self.f.name, 'w') as f:
-            f.write(report)
-        count = pavelib.quality._get_safecommit_count(self.f.name)  # pylint: disable=protected-access
-        self.assertIsNone(count)

 class TestPrepareReportDir(unittest.TestCase):
     """
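The tests deleted above pin down the report format that the removed count helpers in pavelib.quality were expected to parse: per-rule lines like "javascript-escape: 7 violations" plus a trailing "2608 violations total" line. Purely as a hypothetical sketch (the name get_safelint_counts and the regexes here are illustrative, not the code this commit removes), a parser satisfying those expectations could look like this:

    import re

    def get_safelint_counts(report_path):
        # Collect per-rule counts and the final total from a safelint report.
        rule_line = re.compile(r'^\s*([a-z-]+):\s*(\d+) violations$', re.MULTILINE)
        total_line = re.compile(r'^\s*(\d+) violations total$', re.MULTILINE)
        with open(report_path) as report_file:
            report = report_file.read()
        counts = {'rules': {}, 'total': None}
        for rule, count in rule_line.findall(report):
            counts['rules'][rule] = int(count)
        totals = total_line.findall(report)
        if totals:
            # The last "N violations total" line is taken as the overall count.
            counts['total'] = int(totals[-1])
        return counts

Run against the happy-path report in the deleted test, this returns {'rules': {'javascript-concat-html': 310, 'javascript-escape': 7}, 'total': 2608}; against the malformed report it returns {'rules': {}, 'total': None}, matching the deleted assertions.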
pavelib/paver_tests/test_safecommit.py (+0 -44)
deleted 100644 → 0

"""
Tests for paver safecommit quality tasks
"""
from mock import patch

import pavelib.quality
from paver.easy import call_task

from .utils import PaverTestCase


class PaverSafeCommitTest(PaverTestCase):
    """
    Test run_safecommit_report with a mocked environment in order to pass in
    opts.
    """

    def setUp(self):
        super(PaverSafeCommitTest, self).setUp()
        self.reset_task_messages()

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_safecommit_count')
    def test_safecommit_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safecommit_report encounters an error parsing the safecommit output
        log.
        """
        _mock_count.return_value = None
        with self.assertRaises(SystemExit):
            call_task('pavelib.quality.run_safecommit_report')

    @patch.object(pavelib.quality, '_write_metric')
    @patch.object(pavelib.quality, '_prepare_report_dir')
    @patch.object(pavelib.quality, '_get_safecommit_count')
    def test_safecommit_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
        """
        run_safecommit_report finds violations.
        """
        _mock_count.return_value = 0
        call_task('pavelib.quality.run_safecommit_report')
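For the safecommit side, the deleted count tests in test_paver_quality.py expect the per-file "N violations total" lines to be summed (2 + 3 = 5 in the happy-path case) and None to be returned when nothing parses, which is what makes run_safecommit_report exit in the test above. A hypothetical one-function sketch of that behaviour, again not the removed helper itself:

    import re

    def get_safecommit_count(report_path):
        # Add up every "N violations total" line; return None if none were found.
        with open(report_path) as report_file:
            totals = re.findall(r'^\s*(\d+) violations total$', report_file.read(), re.MULTILINE)
        return sum(int(total) for total in totals) if totals else None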
pavelib/paver_tests/test_safelint.py (+15 -71)

 """
-Tests for paver safelint quality tasks
+Tests for paver quality tasks
 """
 from mock import patch

@@ -20,99 +20,43 @@ class PaverSafeLintTest(PaverTestCase):

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_safelint_counts')
-    def test_safelint_violation_number_not_found(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_count_from_last_line')
+    def test_safelint_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
         """
         run_safelint encounters an error parsing the safelint output log
         """
-        _mock_counts.return_value = {}
+        _mock_count.return_value = None
         with self.assertRaises(SystemExit):
             call_task('pavelib.quality.run_safelint')

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_safelint_counts')
-    def test_safelint_vanilla(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_count_from_last_line')
+    def test_safelint_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
         """
         run_safelint finds violations, but a limit was not set
         """
-        _mock_counts.return_value = {'total': 0}
+        _mock_count.return_value = 1
         call_task('pavelib.quality.run_safelint')

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_safelint_counts')
-    def test_safelint_invalid_thresholds_option(self, _mock_counts, _mock_report_dir, _mock_write_metric):
-        """
-        run_safelint fails when thresholds option is poorly formatted
-        """
-        _mock_counts.return_value = {'total': 0}
-        with self.assertRaises(SystemExit):
-            call_task('pavelib.quality.run_safelint', options={"thresholds": "invalid"})
-
-    @patch.object(pavelib.quality, '_write_metric')
-    @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_safelint_counts')
-    def test_safelint_invalid_thresholds_option_key(self, _mock_counts, _mock_report_dir, _mock_write_metric):
-        """
-        run_safelint fails when thresholds option is poorly formatted
-        """
-        _mock_counts.return_value = {'total': 0}
-        with self.assertRaises(SystemExit):
-            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"invalid": 3}'})
-
-    @patch.object(pavelib.quality, '_write_metric')
-    @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_safelint_counts')
-    def test_safelint_too_many_violations(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_count_from_last_line')
+    def test_safelint_too_many_violations(self, _mock_count, _mock_report_dir, _mock_write_metric):
         """
         run_safelint finds more violations than are allowed
         """
-        _mock_counts.return_value = {'total': 4}
+        _mock_count.return_value = 4
         with self.assertRaises(SystemExit):
-            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"total": 3}'})
+            call_task('pavelib.quality.run_safelint', options={"limit": "3"})

     @patch.object(pavelib.quality, '_write_metric')
     @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_safelint_counts')
-    def test_safelint_under_limit(self, _mock_counts, _mock_report_dir, _mock_write_metric):
+    @patch.object(pavelib.quality, '_get_count_from_last_line')
+    def test_safelint_under_limit(self, _mock_count, _mock_report_dir, _mock_write_metric):
         """
         run_safelint finds fewer violations than are allowed
         """
-        _mock_counts.return_value = {'total': 4}
+        _mock_count.return_value = 4
         # No System Exit is expected
-        call_task('pavelib.quality.run_safelint', options={"thresholds": '{"total": 5}'})
+        call_task('pavelib.quality.run_safelint', options={"limit": "5"})
-
-    @patch.object(pavelib.quality, '_write_metric')
-    @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_safelint_counts')
-    def test_safelint_rule_violation_number_not_found(self, _mock_counts, _mock_report_dir, _mock_write_metric):
-        """
-        run_safelint encounters an error parsing the safelint output log for a
-        given rule threshold that was set.
-        """
-        _mock_counts.return_value = {'total': 4}
-        with self.assertRaises(SystemExit):
-            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 3}}'})
-
-    @patch.object(pavelib.quality, '_write_metric')
-    @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_safelint_counts')
-    def test_safelint_too_many_rule_violations(self, _mock_counts, _mock_report_dir, _mock_write_metric):
-        """
-        run_safelint finds more rule violations than are allowed
-        """
-        _mock_counts.return_value = {'total': 4, 'rules': {'javascript-escape': 4}}
-        with self.assertRaises(SystemExit):
-            call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 3}}'})
-
-    @patch.object(pavelib.quality, '_write_metric')
-    @patch.object(pavelib.quality, '_prepare_report_dir')
-    @patch.object(pavelib.quality, '_get_safelint_counts')
-    def test_safelint_under_rule_limit(self, _mock_counts, _mock_report_dir, _mock_write_metric):
-        """
-        run_safelint finds fewer rule violations than are allowed
-        """
-        _mock_counts.return_value = {'total': 4, 'rules': {'javascript-escape': 4}}
-        # No System Exit is expected
-        call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 5}}'})
pavelib/quality.py

(This diff is collapsed and not shown here.)
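Because the pavelib/quality.py diff is collapsed, the enforcement logic that the test_safelint.py changes exercise is not visible on this page. Purely as an assumption-labelled sketch of the behaviour those tests describe (the names enforce_thresholds and enforce_limit are invented for this illustration and are not the reverted code), the two option styles amount to roughly:

    import json
    import sys

    def enforce_thresholds(counts, thresholds_json):
        # Sketch of the enhanced behaviour being reverted: counts is the dict a
        # _get_safelint_counts-style helper returns, thresholds_json is the -t option.
        try:
            thresholds = json.loads(thresholds_json)
        except ValueError:
            sys.exit("Error: thresholds option is not valid JSON.")
        if not set(thresholds).issubset({'rules', 'total'}):
            sys.exit("Error: unrecognized threshold key.")
        if 'total' in thresholds and (counts.get('total') is None or counts['total'] > thresholds['total']):
            sys.exit("Error: total violations exceed the threshold.")
        for rule, cap in thresholds.get('rules', {}).items():
            rule_count = counts.get('rules', {}).get(rule)
            if rule_count is None or rule_count > cap:
                sys.exit("Error: rule count missing or above its threshold.")

    def enforce_limit(count, limit):
        # Sketch of the reverted behaviour: count is the single number that
        # _get_count_from_last_line returns, limit is the -l option.
        if count is None or count > int(limit):
            sys.exit("Error: safelint count missing or above the limit.")

In short, the revert collapses the richer per-rule thresholds interface back to the single-limit form that the -l flag in scripts/generic-ci-tests.sh (below) relies on.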
scripts/all-tests.sh (+1 -1)

@@ -13,7 +13,7 @@ set -e
 # Violations thresholds for failing the build
 export PYLINT_THRESHOLD=4175
 export JSHINT_THRESHOLD=7550
-source scripts/safelint_thresholds.sh
+export SAFELINT_THRESHOLD=2700

 doCheckVars() {
     if [ -n "$CIRCLECI" ] ; then
scripts/generic-ci-tests.sh (+1 -3)

@@ -85,9 +85,7 @@ case "$TEST_SUITE" in
         echo "Running code complexity report (python)."
         paver run_complexity > reports/code_complexity.log || echo "Unable to calculate code complexity. Ignoring error."
         echo "Running safe template linter report."
-        paver run_safelint -t $SAFELINT_THRESHOLDS > safelint.log || { cat safelint.log; EXIT=1; }
-        echo "Running safe commit linter report."
-        paver run_safecommit_report > safecommit.log || { cat safecommit.log; EXIT=1; }
+        paver run_safelint -l $SAFELINT_THRESHOLD > safelint.log || { cat safelint.log; EXIT=1; }
         # Run quality task. Pass in the 'fail-under' percentage to diff-quality
         echo "Running diff quality."
         paver run_quality -p 100 || EXIT=1
scripts/safelint_thresholds.sh (+0 -47)
deleted 100755 → 0

#!/usr/bin/env bash
set -e

###############################################################################
#
#   safelint_thresholds.sh
#
#   The thresholds used for paver run_safelint when used with various CI
#   systems.
#
###############################################################################

# Violations thresholds for failing the build
export SAFELINT_THRESHOLDS='
    {
        "rules": {
            "javascript-concat-html": 313,
            "javascript-escape": 7,
            "javascript-interpolate": 71,
            "javascript-jquery-append": 120,
            "javascript-jquery-html": 313,
            "javascript-jquery-insert-into-target": 26,
            "javascript-jquery-insertion": 30,
            "javascript-jquery-prepend": 12,
            "mako-html-entities": 0,
            "mako-invalid-html-filter": 33,
            "mako-invalid-js-filter": 249,
            "mako-js-html-string": 0,
            "mako-js-missing-quotes": 0,
            "mako-missing-default": 248,
            "mako-multiple-page-tags": 0,
            "mako-unknown-context": 0,
            "mako-unparseable-expression": 0,
            "mako-unwanted-html-filter": 0,
            "python-close-before-format": 0,
            "python-concat-html": 28,
            "python-custom-escape": 13,
            "python-deprecated-display-name": 53,
            "python-interpolate-html": 68,
            "python-parse-error": 0,
            "python-requires-html-or-text": 0,
            "python-wrap-html": 289,
            "underscore-not-escaped": 709
        },
        "total": 2565
    }'

export SAFELINT_THRESHOLDS=${SAFELINT_THRESHOLDS//[[:space:]]/}
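The final line of the deleted script relies on bash pattern substitution: ${SAFELINT_THRESHOLDS//[[:space:]]/} replaces every whitespace character in the variable with nothing, collapsing the multi-line JSON into one unbroken string, presumably so it could be passed as a single argument to paver run_safelint -t $SAFELINT_THRESHOLDS in scripts/generic-ci-tests.sh before this revert.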