Commit ad4e185f by Robert Raposa

Merge pull request #12391 from edx/robrap/jenkins-rule-thresholds

TNL-4509: Set Jenkins thresholds per rule.
parents 98f6785b afdec6a9
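
Note: the core of this change replaces the single "limit" option on paver run_safelint with a "thresholds" option, a JSON object holding an optional "total" cap plus per-rule caps under a "rules" key (validated in pavelib/quality.py below). A minimal sketch of an invocation, with values borrowed from scripts/safelint_thresholds.sh later in this diff:

    paver run_safelint -t '{"total": 2565, "rules": {"javascript-escape": 7}}'

On Jenkins the full threshold set is exported by scripts/safelint_thresholds.sh and passed through the same -t option, as shown in the shell script changes below.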
......@@ -19,9 +19,9 @@ from openedx.core.djangolib.markup import Text, HTML
${_('The page that you were looking for was not found.')}
${Text(_('Go back to the {homepage} or let us know about any pages that may have been moved at {email}.')).format(
homepage=HTML('<a href="/">homepage</a>'),
email=HTML('<a href="mailto:{address}">{address}</a>'.format(
email=HTML('<a href="mailto:{address}">{address}</a>').format(
address=Text(settings.TECH_SUPPORT_EMAIL)
))
)
)}
</p>
</article>
......
......@@ -30,9 +30,9 @@ ${Text(_("{studio_name} Server Error")).format(
)}
${_("We've logged the error and our staff is currently working to resolve this error as soon as possible.")}
${Text(_(u'If the problem persists, please email us at {email_link}.')).format(
email_link=HTML(u'<a href="mailto:{email_address}">{email_address}</a>'.format(
email_address=Text(settings.TECH_SUPPORT_EMAIL),
))
email_link=HTML(u'<a href="mailto:{email_address}">{email_address}</a>').format(
email_address=Text(settings.TECH_SUPPORT_EMAIL),
)
)}
</p>
</article>
......
......@@ -121,7 +121,7 @@ from openedx.core.djangolib.markup import Text, HTML
<h3 class="sr">${_("Page Actions")}</h3>
<ul>
<li class="nav-item">
<a href="#" class="button button-new" data-category="chapter" data-parent="${context_course.location | h}" data-default-name="${_('Section')}" title="${_('Click to add a new section')}">
<a href="#" class="button button-new" data-category="chapter" data-parent="${context_course.location}" data-default-name="${_('Section')}" title="${_('Click to add a new section')}">
<i class="icon fa fa-plus"></i>${_('New Section')}
</a>
</li>
......
......@@ -103,7 +103,7 @@ from openedx.core.djangolib.markup import Text, HTML
<h3 class="title-3">${_("Other Course Settings")}</h3>
<nav class="nav-related" aria-label="${_('Other Course Settings')}">
<ul>
<li class="nav-item"><a href="${details_url}">${_("Details &amp; Schedule")}</a></li>
<li class="nav-item"><a href="${details_url}">${_("Details & Schedule")}</a></li>
<li class="nav-item"><a href="${grading_url}">${_("Grading")}</a></li>
<li class="nav-item"><a href="${course_team_url}">${_("Course Team")}</a></li>
<li class="nav-item"><a href="${advanced_settings_url}">${_("Advanced Settings")}</a></li>
......
<%page expression_filter="h"/>
<%page expression_filter="h" args="online_help_token"/>
<%namespace name='static' file='../static_content.html'/>
<%!
from django.conf import settings
......@@ -6,7 +6,6 @@
from django.utils.translation import ugettext as _
from contentstore.context_processors import doc_url
%>
<%page args="online_help_token"/>
<div class="wrapper-header wrapper" id="view-top">
<header class="primary" role="banner">
......@@ -61,7 +60,7 @@
<a href="${tabs_url}">${_("Pages")}</a>
</li>
<li class="nav-item nav-course-courseware-uploads">
<a href="${assets_url}">${_("Files &amp; Uploads")}</a>
<a href="${assets_url}">${_("Files & Uploads")}</a>
</li>
<li class="nav-item nav-course-courseware-textbooks">
<a href="${textbooks_url}">${_("Textbooks")}</a>
......@@ -83,7 +82,7 @@
<div class="nav-sub">
<ul>
<li class="nav-item nav-course-settings-schedule">
<a href="${settings_url}">${_("Schedule &amp; Details")}</a>
<a href="${settings_url}">${_("Schedule & Details")}</a>
</li>
<li class="nav-item nav-course-settings-grading">
<a href="${grading_url}">${_("Grading")}</a>
......
......@@ -14,7 +14,7 @@ from openedx.core.djangolib.markup import HTML, Text
link_start=HTML('<a href="{dashboard_url}" target="_blank">').format(
dashboard_url=escape_uri_path('{base_url}/courses/{course_id}'.format(
base_url=settings.ANALYTICS_DASHBOARD_URL,
course_id=Text(section_data['course_id'])
course_id=section_data['course_id'],
))
),
analytics_dashboard_name=settings.ANALYTICS_DASHBOARD_NAME,
......
......@@ -9,7 +9,7 @@ from django.utils.translation import ugettext as _
from lms.djangoapps.ccx.overrides import get_current_ccx
from microsite_configuration import microsite
from microsite_configuration.templatetags.microsite import platform_name
from openedx.core.djangolib.markup import Text, HTML
from openedx.core.djangolib.markup import HTML, Text
# App that handles subdomain specific branding
from branding import api as branding_api
......@@ -194,15 +194,12 @@ site_status_msg = get_site_status_msg(course_id)
</header>
% if course:
<!--[if lte IE 9]>
<div class="ie-banner" aria-hidden="true">
${Text(_('{begin_strong}Warning:{end_strong} Your browser is not fully supported. We strongly recommend using {chrome_link} or {ff_link}.')).format(
<div class="ie-banner" aria-hidden="true">${Text(_('{begin_strong}Warning:{end_strong} Your browser is not fully supported. We strongly recommend using {chrome_link} or {ff_link}.')).format(
begin_strong=HTML('<strong>'),
end_strong=HTML('</strong>'),
chrome_link=HTML('<a href="https://www.google.com/chrome" target="_blank">Chrome</a>'),
ff_link=HTML('<a href="http://www.mozilla.org/firefox" target="_blank">Firefox</a>'),
)
}
</div>
)}</div>
<![endif]-->
% endif
......
......@@ -4,6 +4,7 @@ Tests for paver quality tasks
import os
from path import Path as path
import tempfile
import textwrap
import unittest
from mock import patch, MagicMock, mock_open
from ddt import ddt, file_data
......@@ -132,6 +133,64 @@ class TestPaverReportViolationsCounts(unittest.TestCase):
actual_count = pavelib.quality._get_count_from_last_line(self.f.name, "foo") # pylint: disable=protected-access
self.assertEqual(actual_count, None)
def test_get_safelint_counts_happy(self):
report = textwrap.dedent("""
test.html: 30:53: javascript-jquery-append: $('#test').append(print_tos);
javascript-concat-html: 310 violations
javascript-escape: 7 violations
2608 violations total
""")
with open(self.f.name, 'w') as f:
f.write(report)
counts = pavelib.quality._get_safelint_counts(self.f.name) # pylint: disable=protected-access
self.assertDictEqual(counts, {
'rules': {
'javascript-concat-html': 310,
'javascript-escape': 7,
},
'total': 2608,
})
def test_get_safelint_counts_bad_counts(self):
report = textwrap.dedent("""
javascript-concat-html: violations
""")
with open(self.f.name, 'w') as f:
f.write(report)
counts = pavelib.quality._get_safelint_counts(self.f.name) # pylint: disable=protected-access
self.assertDictEqual(counts, {
'rules': {},
'total': None,
})
def test_get_safecommit_count_happy(self):
report = textwrap.dedent("""
Linting lms/templates/navigation.html:
2 violations total
Linting scripts/tests/templates/test.underscore:
3 violations total
""")
with open(self.f.name, 'w') as f:
f.write(report)
count = pavelib.quality._get_safecommit_count(self.f.name) # pylint: disable=protected-access
self.assertEqual(count, 5)
def test_get_safecommit_count_bad_counts(self):
report = textwrap.dedent("""
Linting lms/templates/navigation.html:
""")
with open(self.f.name, 'w') as f:
f.write(report)
count = pavelib.quality._get_safecommit_count(self.f.name) # pylint: disable=protected-access
self.assertIsNone(count)
class TestPrepareReportDir(unittest.TestCase):
"""
......
"""
Tests for paver safecommit quality tasks
"""
from mock import patch
import pavelib.quality
from paver.easy import call_task
from .utils import PaverTestCase
class PaverSafeCommitTest(PaverTestCase):
"""
Test run_safecommit_report with a mocked environment in order to pass in
opts.
"""
def setUp(self):
super(PaverSafeCommitTest, self).setUp()
self.reset_task_messages()
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safecommit_count')
def test_safecommit_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
"""
run_safecommit_report encounters an error parsing the safecommit output
log.
"""
_mock_count.return_value = None
with self.assertRaises(SystemExit):
call_task('pavelib.quality.run_safecommit_report')
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safecommit_count')
def test_safecommit_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
"""
run_safecommit_report finds violations.
"""
_mock_count.return_value = 0
call_task('pavelib.quality.run_safecommit_report')
"""
Tests for paver quality tasks
Tests for paver safelint quality tasks
"""
from mock import patch
......@@ -20,43 +20,99 @@ class PaverSafeLintTest(PaverTestCase):
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_count_from_last_line')
def test_safelint_violation_number_not_found(self, _mock_count, _mock_report_dir, _mock_write_metric):
@patch.object(pavelib.quality, '_get_safelint_counts')
def test_safelint_violation_number_not_found(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_safelint encounters an error parsing the safelint output log
"""
_mock_count.return_value = None
_mock_counts.return_value = {}
with self.assertRaises(SystemExit):
call_task('pavelib.quality.run_safelint')
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_count_from_last_line')
def test_safelint_vanilla(self, _mock_count, _mock_report_dir, _mock_write_metric):
@patch.object(pavelib.quality, '_get_safelint_counts')
def test_safelint_vanilla(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_safelint finds violations, but a limit was not set
"""
_mock_count.return_value = 1
_mock_counts.return_value = {'total': 0}
call_task('pavelib.quality.run_safelint')
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_count_from_last_line')
def test_safelint_too_many_violations(self, _mock_count, _mock_report_dir, _mock_write_metric):
@patch.object(pavelib.quality, '_get_safelint_counts')
def test_safelint_invalid_thresholds_option(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_safelint fails when thresholds option is poorly formatted
"""
_mock_counts.return_value = {'total': 0}
with self.assertRaises(SystemExit):
call_task('pavelib.quality.run_safelint', options={"thresholds": "invalid"})
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safelint_counts')
def test_safelint_invalid_thresholds_option_key(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_safelint fails when the thresholds option contains an unrecognized key
"""
_mock_counts.return_value = {'total': 0}
with self.assertRaises(SystemExit):
call_task('pavelib.quality.run_safelint', options={"thresholds": '{"invalid": 3}'})
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safelint_counts')
def test_safelint_too_many_violations(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_safelint finds more violations than are allowed
"""
_mock_count.return_value = 4
_mock_counts.return_value = {'total': 4}
with self.assertRaises(SystemExit):
call_task('pavelib.quality.run_safelint', options={"limit": "3"})
call_task('pavelib.quality.run_safelint', options={"thresholds": '{"total": 3}'})
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_count_from_last_line')
def test_safelint_under_limit(self, _mock_count, _mock_report_dir, _mock_write_metric):
@patch.object(pavelib.quality, '_get_safelint_counts')
def test_safelint_under_limit(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_safelint finds fewer violations than are allowed
"""
_mock_count.return_value = 4
_mock_counts.return_value = {'total': 4}
# No System Exit is expected
call_task('pavelib.quality.run_safelint', options={"thresholds": '{"total": 5}'})
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safelint_counts')
def test_safelint_rule_violation_number_not_found(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_safelint encounters an error parsing the safelint output log for a
given rule threshold that was set.
"""
_mock_counts.return_value = {'total': 4}
with self.assertRaises(SystemExit):
call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 3}}'})
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safelint_counts')
def test_safelint_too_many_rule_violations(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_safelint finds more rule violations than are allowed
"""
_mock_counts.return_value = {'total': 4, 'rules': {'javascript-escape': 4}}
with self.assertRaises(SystemExit):
call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 3}}'})
@patch.object(pavelib.quality, '_write_metric')
@patch.object(pavelib.quality, '_prepare_report_dir')
@patch.object(pavelib.quality, '_get_safelint_counts')
def test_safelint_under_rule_limit(self, _mock_counts, _mock_report_dir, _mock_write_metric):
"""
run_safelint finds fewer rule violations than are allowed
"""
_mock_counts.return_value = {'total': 4, 'rules': {'javascript-escape': 4}}
# No System Exit is expected
call_task('pavelib.quality.run_safelint', options={"limit": "5"})
call_task('pavelib.quality.run_safelint', options={"thresholds": '{"rules": {"javascript-escape": 5}}'})
......@@ -2,9 +2,12 @@
Check code quality using pep8, pylint, and diff_quality.
"""
from paver.easy import sh, task, cmdopts, needs, BuildFailure
import json
import os
import re
from openedx.core.djangolib.markup import HTML
from .utils.envs import Env
ALL_SYSTEMS = 'lms,cms,common,openedx,pavelib'
......@@ -133,8 +136,8 @@ def run_pylint(options):
# Fail if the number of violations is greater than the limit
if num_violations > violations_limit > -1:
raise Exception("Failed. Too many pylint violations. "
"The limit is {violations_limit}.".format(violations_limit=violations_limit))
raise BuildFailure("Failed. Too many pylint violations. "
"The limit is {violations_limit}.".format(violations_limit=violations_limit))
def _count_pylint_violations(report_file):
......@@ -216,7 +219,7 @@ def run_pep8(options): # pylint: disable=unused-argument
if count:
failure_string = "Too many pep8 violations. " + violations_count_str
failure_string += "\n\nViolations:\n{violations_list}".format(violations_list=violations_list)
raise Exception(failure_string)
raise BuildFailure(failure_string)
@task
......@@ -291,7 +294,7 @@ def run_jshint(options):
# Fail if number of violations is greater than the limit
if num_violations > violations_limit > -1:
raise Exception(
raise BuildFailure(
"JSHint Failed. Too many violations ({count}).\nThe limit is {violations_limit}.".format(
count=num_violations, violations_limit=violations_limit
)
......@@ -301,48 +304,152 @@ def run_jshint(options):
@task
@needs('pavelib.prereqs.install_python_prereqs')
@cmdopts([
("limit=", "l", "limit for number of acceptable violations"),
("thresholds=", "t", "json containing limit for number of acceptable violations per rule"),
])
def run_safelint(options):
"""
Runs safe_template_linter.py on the codebase
"""
violations_limit = int(getattr(options, 'limit', -1))
thresholds_option = getattr(options, 'thresholds', '{}')
try:
violation_thresholds = json.loads(thresholds_option)
except ValueError:
violation_thresholds = None
if isinstance(violation_thresholds, dict) is False or \
any(key not in ("total", "rules") for key in violation_thresholds.keys()):
raise BuildFailure(
"""Error. Thresholds option "{thresholds_option}" was not supplied using proper format.\n"""
"""Here is a properly formatted example, '{{"total":100,"rules":{{"javascript-escape":0}}}}' """
"""with property names in double-quotes.""".format(
thresholds_option=thresholds_option
)
)
safelint_script = "safe_template_linter.py"
safelint_report_dir = (Env.REPORT_DIR / "safelint")
safelint_report = safelint_report_dir / "safelint.report"
_prepare_report_dir(safelint_report_dir)
sh(
"{repo_root}/scripts/safe_template_linter.py >> {safelint_report}".format(
"{repo_root}/scripts/{safelint_script} --rule-totals >> {safelint_report}".format(
repo_root=Env.REPO_ROOT,
safelint_script=safelint_script,
safelint_report=safelint_report,
),
ignore_error=True
)
safelint_counts = _get_safelint_counts(safelint_report)
try:
num_violations = int(_get_count_from_last_line(safelint_report, "safelint"))
metrics_str = "Number of {safelint_script} violations: {num_violations}\n".format(
safelint_script=safelint_script, num_violations=int(safelint_counts['total'])
)
if 'rules' in safelint_counts and any(safelint_counts['rules']):
metrics_str += "\n"
rule_keys = sorted(safelint_counts['rules'].keys())
for rule in rule_keys:
metrics_str += "{rule} violations: {count}\n".format(
rule=rule,
count=int(safelint_counts['rules'][rule])
)
except TypeError:
raise BuildFailure(
"Error. Number of safelint violations could not be found in {safelint_report}".format(
safelint_report=safelint_report
"Error. Number of {safelint_script} violations could not be found in {safelint_report}".format(
safelint_script=safelint_script, safelint_report=safelint_report
)
)
metrics_report = (Env.METRICS_DIR / "safelint")
# Record the metric
_write_metric(num_violations, (Env.METRICS_DIR / "safelint"))
_write_metric(metrics_str, metrics_report)
# Print number of violations to log.
sh("cat {metrics_report}".format(metrics_report=metrics_report), ignore_error=True)
# Fail if number of violations is greater than the limit
if num_violations > violations_limit > -1:
raise Exception(
"SafeTemplateLinter Failed. Too many violations ({count}).\nThe limit is {violations_limit}.".format(
count=num_violations, violations_limit=violations_limit
error_message = ""
# Test total violations against threshold.
if 'total' in violation_thresholds.keys():
if violation_thresholds['total'] < safelint_counts['total']:
error_message = "Too many violations total ({count}).\nThe limit is {violations_limit}.".format(
count=safelint_counts['total'], violations_limit=violation_thresholds['total']
)
# Test rule violations against thresholds.
if 'rules' in violation_thresholds:
threshold_keys = sorted(violation_thresholds['rules'].keys())
for threshold_key in threshold_keys:
if threshold_key not in safelint_counts['rules']:
error_message += (
"\nNumber of {safelint_script} violations for {rule} could not be found in "
"{safelint_report}."
).format(
safelint_script=safelint_script, rule=threshold_key, safelint_report=safelint_report
)
elif violation_thresholds['rules'][threshold_key] < safelint_counts['rules'][threshold_key]:
error_message += \
"\nToo many {rule} violations ({count}).\nThe {rule} limit is {violations_limit}.".format(
rule=threshold_key, count=safelint_counts['rules'][threshold_key],
violations_limit=violation_thresholds['rules'][threshold_key],
)
if error_message != "":
raise BuildFailure(
"SafeTemplateLinter Failed.\n{error_message}\n"
"See {safelint_report} or run the following command to hone in on the problem:\n"
" ./scripts/safe-commit-linter.sh -h".format(
error_message=error_message, safelint_report=safelint_report
)
)
@task
@needs('pavelib.prereqs.install_python_prereqs')
def run_safecommit_report():
"""
Runs safe-commit-linter.sh on the current branch.
"""
safecommit_script = "safe-commit-linter.sh"
safecommit_report_dir = (Env.REPORT_DIR / "safecommit")
safecommit_report = safecommit_report_dir / "safecommit.report"
_prepare_report_dir(safecommit_report_dir)
sh(
"{repo_root}/scripts/{safecommit_script} >> {safecommit_report}".format(
repo_root=Env.REPO_ROOT,
safecommit_script=safecommit_script,
safecommit_report=safecommit_report,
),
ignore_error=True
)
# Output report to console.
sh("cat {safecommit_report}".format(safecommit_report=safecommit_report), ignore_error=True)
safecommit_count = _get_safecommit_count(safecommit_report)
try:
num_violations = int(safecommit_count)
except TypeError:
raise BuildFailure(
"Error. Number of {safecommit_script} violations could not be found in {safecommit_report}".format(
safecommit_script=safecommit_script, safecommit_report=safecommit_report
)
)
# Print number of violations to log.
violations_count_str = "Number of {safecommit_script} violations: {num_violations}\n".format(
safecommit_script=safecommit_script, num_violations=num_violations
)
# Record the metric
metrics_report = (Env.METRICS_DIR / "safecommit")
_write_metric(violations_count_str, metrics_report)
# Output report to console.
sh("cat {metrics_report}".format(metrics_report=metrics_report), ignore_error=True)
def _write_metric(metric, filename):
"""
Write a given metric to a given file
......@@ -361,15 +468,28 @@ def _prepare_report_dir(dir_name):
dir_name.mkdir_p()
def _get_last_report_line(filename):
def _get_report_contents(filename, last_line_only=False):
"""
Returns the last line of a given file. Used for getting output from quality output files.
Returns the contents of the given file. Use last_line_only to only return
the last line, which can be used for getting output from quality output
files.
Arguments:
last_line_only: True to return the last line only, False to return a
string with full contents.
Returns:
String containing full contents of the report, or the last line.
"""
file_not_found_message = "The following log file could not be found: {file}".format(file=filename)
if os.path.isfile(filename):
with open(filename, 'r') as report_file:
lines = report_file.readlines()
return lines[len(lines) - 1]
if last_line_only:
lines = report_file.readlines()
return lines[len(lines) - 1]
else:
return report_file.read()
else:
# Raise a build error if the file is not found
raise BuildFailure(file_not_found_message)
......@@ -380,7 +500,7 @@ def _get_count_from_last_line(filename, file_type):
This will return the number in the last line of a file.
It is returning only the value (as a floating number).
"""
last_line = _get_last_report_line(filename)
last_line = _get_report_contents(filename, last_line_only=True)
if file_type is "python_complexity":
# Example of the last line of a complexity report: "Average complexity: A (1.93953443446)"
regex = r'\d+.\d+'
......@@ -396,6 +516,62 @@ def _get_count_from_last_line(filename, file_type):
return None
def _get_safelint_counts(filename):
"""
This returns a dict of violations from the safelint report.
Arguments:
filename: The name of the safelint report.
Returns:
A dict containing the following:
rules: A dict containing the count for each rule as follows:
violation-rule-id: N, where N is the number of violations
total: M, where M is the number of total violations
"""
report_contents = _get_report_contents(filename)
rule_count_regex = re.compile(r"^(?P<rule_id>[a-z-]+):\s+(?P<count>\d+) violations", re.MULTILINE)
total_count_regex = re.compile(r"^(?P<count>\d+) violations total", re.MULTILINE)
violations = {'rules': {}}
for violation_match in rule_count_regex.finditer(report_contents):
try:
violations['rules'][violation_match.group('rule_id')] = int(violation_match.group('count'))
except ValueError:
violations['rules'][violation_match.group('rule_id')] = None
try:
violations['total'] = int(total_count_regex.search(report_contents).group('count'))
# An AttributeError will occur if the regex finds no matches.
# A ValueError will occur if the matched count cannot be cast as an int.
except (AttributeError, ValueError):
violations['total'] = None
return violations
def _get_safecommit_count(filename):
"""
Returns the violation count from the safecommit report.
Arguments:
filename: The name of the safecommit report.
Returns:
The count of safecommit violations, or None if there is a problem.
"""
report_contents = _get_report_contents(filename)
validation_count = None
file_count_regex = re.compile(r"^(?P<count>\d+) violations total", re.MULTILINE)
try:
for count_match in file_count_regex.finditer(report_contents):
if validation_count is None:
validation_count = 0
validation_count += int(count_match.group('count'))
except ValueError:
validation_count = None
return validation_count
@task
@needs('pavelib.prereqs.install_python_prereqs')
@cmdopts([
......@@ -428,9 +604,9 @@ def run_quality(options):
sep = '-------------<br/>\n'
title = "<h1>Quality Report: pep8</h1>\n"
violations_bullets = ''.join(
['<li>{violation}</li><br/>\n'.format(violation=violation) for violation in violations_list]
[HTML('<li>{violation}</li><br/>\n').format(violation=violation) for violation in violations_list]
)
violations_str = '<ul>\n{bullets}</ul>\n'.format(bullets=violations_bullets)
violations_str = HTML('<ul>\n{bullets}</ul>\n').format(bullets=HTML(violations_bullets))
violations_count_str = "<b>Violations</b>: {count}<br/>\n"
fail_line = "<b>FAILURE</b>: pep8 count should be 0<br/>\n"
else:
......
......@@ -13,7 +13,7 @@ set -e
# Violations thresholds for failing the build
export PYLINT_THRESHOLD=4175
export JSHINT_THRESHOLD=7550
export SAFELINT_THRESHOLD=2700
source scripts/safelint_thresholds.sh
doCheckVars() {
if [ -n "$CIRCLECI" ] ; then
......
......@@ -85,7 +85,9 @@ case "$TEST_SUITE" in
echo "Running code complexity report (python)."
paver run_complexity > reports/code_complexity.log || echo "Unable to calculate code complexity. Ignoring error."
echo "Running safe template linter report."
paver run_safelint -l $SAFELINT_THRESHOLD > safelint.log || { cat safelint.log; EXIT=1; }
paver run_safelint -t $SAFELINT_THRESHOLDS > safelint.log || { cat safelint.log; EXIT=1; }
echo "Running safe commit linter report."
paver run_safecommit_report > safecommit.log || { cat safecommit.log; EXIT=1; }
# Run quality task. Pass in the 'fail-under' percentage to diff-quality
echo "Running diff quality."
paver run_quality -p 100 || EXIT=1
......
......@@ -15,11 +15,17 @@ show_help() {
echo "Runs the Safe Template Linter against all files in a git commit."
echo ""
echo "Mandatory arguments to long options are mandatory for short options too."
echo " -h, --help Output this help."
echo " -m, --main-branch=COMMIT Run against files changed between the"
echo " current branch and this commit."
echo " Defaults to origin/master."
echo ""
echo "For additional help:"
echo "This scripts does not give a grand total. Be sure to check for"
echo "0 violations on each file."
echo ""
echo "For more help using the safe template linter, including details on how"
echo "to understand and fix any violations, read the docs here:"
echo ""
echo " http://edx.readthedocs.org/projects/edx-developer-guide/en/latest/conventions/safe_templates.html#safe-template-linter"
}
......
......@@ -189,118 +189,38 @@ class Rules(Enum):
"""
An Enum of each rule which the linter will check.
"""
mako_missing_default = (
'mako-missing-default',
'Missing default <%page expression_filter="h"/>.'
)
mako_multiple_page_tags = (
'mako-multiple-page-tags',
'A Mako template can only have one <%page> tag.'
)
mako_unparseable_expression = (
'mako-unparseable-expression',
'The expression could not be properly parsed.'
)
mako_unwanted_html_filter = (
'mako-unwanted-html-filter',
'Remove explicit h filters when it is provided by the page directive.'
)
mako_invalid_html_filter = (
'mako-invalid-html-filter',
'The expression is using an invalid filter in an HTML context.'
)
mako_invalid_js_filter = (
'mako-invalid-js-filter',
'The expression is using an invalid filter in a JavaScript context.'
)
mako_js_missing_quotes = (
'mako-js-missing-quotes',
'An expression using js_escaped_string must be wrapped in quotes.'
)
mako_js_html_string = (
'mako-js-html-string',
'A JavaScript string containing HTML should not have an embedded Mako expression.'
)
mako_html_entities = (
'mako-html-entities',
"HTML entities should be plain text or wrapped with HTML()."
)
mako_unknown_context = (
'mako-unknown-context',
"The context could not be determined."
)
underscore_not_escaped = (
'underscore-not-escaped',
'Expressions should be escaped using <%- expression %>.'
)
javascript_jquery_append = (
'javascript-jquery-append',
'Use HtmlUtils.append() or .append(HtmlUtils.xxx().toString()).'
)
javascript_jquery_prepend = (
'javascript-jquery-prepend',
'Use HtmlUtils.prepend() or .prepend(HtmlUtils.xxx().toString()).'
)
javascript_jquery_insertion = (
'javascript-jquery-insertion',
'JQuery DOM insertion calls that take content must use HtmlUtils (e.g. $el.after(HtmlUtils.xxx().toString()).'
)
javascript_jquery_insert_into_target = (
'javascript-jquery-insert-into-target',
'JQuery DOM insertion calls that take a target can only be called from elements (e.g. .$el.appendTo()).'
)
javascript_jquery_html = (
'javascript-jquery-html',
"Use HtmlUtils.setHtml(), .html(HtmlUtils.xxx().toString()), or JQuery's text() function."
)
javascript_concat_html = (
'javascript-concat-html',
'Use HtmlUtils functions rather than concatenating strings with HTML.'
)
javascript_escape = (
'javascript-escape',
"Avoid calls to escape(), especially in Backbone. Use templates, HtmlUtils, or JQuery's text() function."
)
javascript_interpolate = (
'javascript-interpolate',
'Use StringUtils.interpolate() or HtmlUtils.interpolateHtml() as appropriate.'
)
python_concat_html = (
'python-concat-html',
'Use HTML() and Text() functions rather than concatenating strings with HTML.'
)
python_custom_escape = (
'python-custom-escape',
"Use markupsafe.escape() rather than custom escaping for '<'."
)
python_deprecated_display_name = (
'python-deprecated-display-name',
'Replace deprecated display_name_with_default_escaped with display_name_with_default.'
)
python_requires_html_or_text = (
'python-requires-html-or-text',
'You must start with Text() or HTML() if you use HTML() or Text() during interpolation.'
)
python_close_before_format = (
'python-close-before-format',
'You must close any call to Text() or HTML() before calling format().'
)
python_wrap_html = (
'python-wrap-html',
"String containing HTML should be wrapped with call to HTML()."
)
python_interpolate_html = (
'python-interpolate-html',
"Use HTML(), Text(), and format() functions for interpolating strings with HTML."
)
python_parse_error = (
'python-parse-error',
'Error parsing Python function or string.'
)
def __init__(self, rule_id, rule_summary):
# IMPORTANT: Do not edit without also updating the docs:
# - http://edx.readthedocs.io/projects/edx-developer-guide/en/latest/conventions/safe_templates.html#safe-template-linter
mako_missing_default = 'mako-missing-default'
mako_multiple_page_tags = 'mako-multiple-page-tags'
mako_unparseable_expression = 'mako-unparseable-expression'
mako_unwanted_html_filter = 'mako-unwanted-html-filter'
mako_invalid_html_filter = 'mako-invalid-html-filter'
mako_invalid_js_filter = 'mako-invalid-js-filter'
mako_js_missing_quotes = 'mako-js-missing-quotes'
mako_js_html_string = 'mako-js-html-string'
mako_html_entities = 'mako-html-entities'
mako_unknown_context = 'mako-unknown-context'
underscore_not_escaped = 'underscore-not-escaped'
javascript_jquery_append = 'javascript-jquery-append'
javascript_jquery_prepend = 'javascript-jquery-prepend'
javascript_jquery_insertion = 'javascript-jquery-insertion'
javascript_jquery_insert_into_target = 'javascript-jquery-insert-into-target'
javascript_jquery_html = 'javascript-jquery-html'
javascript_concat_html = 'javascript-concat-html'
javascript_escape = 'javascript-escape'
javascript_interpolate = 'javascript-interpolate'
python_concat_html = 'python-concat-html'
python_custom_escape = 'python-custom-escape'
python_deprecated_display_name = 'python-deprecated-display-name'
python_requires_html_or_text = 'python-requires-html-or-text'
python_close_before_format = 'python-close-before-format'
python_wrap_html = 'python-wrap-html'
python_interpolate_html = 'python-interpolate-html'
python_parse_error = 'python-parse-error'
def __init__(self, rule_id):
self.rule_id = rule_id
self.rule_summary = rule_summary
class Expression(object):
......@@ -577,6 +497,57 @@ class ExpressionRuleViolation(RuleViolation):
), file=out)
class SummaryResults(object):
"""
Contains the summary results for all violations.
"""
def __init__(self):
"""
Init method.
"""
self.total_violations = 0
self.totals_by_rule = dict.fromkeys(
[rule.rule_id for rule in Rules.__members__.values()], 0
)
def add_violation(self, violation):
"""
Adds a violation to the summary details.
Arguments:
violation: The violation to add to the summary.
"""
self.total_violations += 1
self.totals_by_rule[violation.rule.rule_id] += 1
def print_results(self, options, out):
"""
Prints the summary results (i.e. violation totals).
Arguments:
options: A list of the following options:
list_files: True to print only file names, and False to print
all violations.
rule_totals: If True include totals by rule.
out: output file
"""
if options['list_files'] is False:
if options['rule_totals']:
max_rule_id_len = max(len(rule_id) for rule_id in self.totals_by_rule)
print("", file=out)
for rule_id in sorted(self.totals_by_rule.keys()):
padding = " " * (max_rule_id_len - len(rule_id))
print("{}: {}{} violations".format(rule_id, padding, self.totals_by_rule[rule_id]), file=out)
print("", file=out)
# matches output of jshint for simplicity
print("", file=out)
print("{} violations total".format(self.total_violations), file=out)
class FileResults(object):
"""
Contains the results, or violations, for a file.
......@@ -611,7 +582,7 @@ class FileResults(object):
if line_comment_delim is not None:
self._filter_commented_code(line_comment_delim)
def print_results(self, options, out):
def print_results(self, options, summary_results, out):
"""
Prints the results (i.e. violations) in this file.
......@@ -619,26 +590,23 @@ class FileResults(object):
options: A list of the following options:
list_files: True to print only file names, and False to print
all violations.
summary_results: A SummaryResults with a summary of the violations.
verbose: True for multiple lines of context, False single line.
out: output file
Returns:
The number of violations. When using --quiet, returns number of
files with violations.
Side effect:
Updates the passed SummaryResults.
"""
num_violations = 0
if options['list_files']:
if self.violations is not None and 0 < len(self.violations):
num_violations += 1
print(self.full_path, file=out)
else:
self.violations.sort(key=lambda violation: violation.sort_key())
for violation in self.violations:
if not violation.is_disabled:
num_violations += 1
violation.print_results(options, out)
return num_violations
summary_results.add_violation(violation)
def _filter_commented_code(self, line_comment_delim):
"""
......@@ -789,15 +757,16 @@ class BaseLinter(object):
Init method.
"""
self._skip_dirs = (
'.git',
'.pycharm_helpers',
'common/static/xmodule/modules',
'perf_tests',
'node_modules',
'reports/diff_quality',
'spec',
'scripts/tests/templates',
'spec',
'test_root',
'vendor',
'perf_tests'
)
def _is_skip_dir(self, skip_dirs, directory):
......@@ -1554,7 +1523,7 @@ class HtmlStringVisitor(BaseVisitor):
node: An AST node.
"""
# Skips '<' (and '>') in regex named groups. For example, "(?P<group>)".
if re.search('[(][?]P<', node.s) is None and re.search('[<>]', node.s) is not None:
if re.search('[(][?]P<', node.s) is None and re.search('<', node.s) is not None:
self.unsafe_html_string_nodes.append(node)
if re.search(r"&[#]?[a-zA-Z0-9]+;", node.s):
self.over_escaped_entity_string_nodes.append(node)
......@@ -2505,7 +2474,7 @@ class MakoTemplateLinter(BaseLinter):
return expressions
def _process_file(full_path, template_linters, options, out):
def _process_file(full_path, template_linters, options, summary_results, out):
"""
For each linter, lints the provided file. This means finding and printing
violations.
......@@ -2514,22 +2483,19 @@ def _process_file(full_path, template_linters, options, out):
full_path: The full path of the file to lint.
template_linters: A list of linting objects.
options: A list of the options.
summary_results: A SummaryResults with a summary of the violations.
out: output file
Returns:
The number of violations.
"""
num_violations = 0
directory = os.path.dirname(full_path)
file_name = os.path.basename(full_path)
for template_linter in template_linters:
results = template_linter.process_file(directory, file_name)
num_violations += results.print_results(options, out)
return num_violations
results.print_results(options, summary_results, out)
def _process_current_walk(current_walk, template_linters, options, out):
def _process_current_walk(current_walk, template_linters, options, summary_results, out):
"""
For each linter, lints all the files in the current os walk. This means
finding and printing violations.
......@@ -2538,22 +2504,19 @@ def _process_current_walk(current_walk, template_linters, options, out):
current_walk: A walk returned by os.walk().
template_linters: A list of linting objects.
options: A list of the options.
summary_results: A SummaryResults with a summary of the violations.
out: output file
Returns:
The number of violations.
"""
num_violations = 0
walk_directory = os.path.normpath(current_walk[0])
walk_files = current_walk[2]
for walk_file in walk_files:
full_path = os.path.join(walk_directory, walk_file)
num_violations += _process_file(full_path, template_linters, options, out)
return num_violations
_process_file(full_path, template_linters, options, summary_results, out)
def _process_os_walk(starting_dir, template_linters, options, out):
def _process_os_walk(starting_dir, template_linters, options, summary_results, out):
"""
For each linter, lints all the directories in the starting directory.
......@@ -2561,16 +2524,40 @@ def _process_os_walk(starting_dir, template_linters, options, out):
starting_dir: The initial directory to begin the walk.
template_linters: A list of linting objects.
options: A list of the options.
summary_results: A SummaryResults with a summary of the violations.
out: output file
Returns:
The number of violations.
"""
num_violations = 0
for current_walk in os.walk(starting_dir):
num_violations += _process_current_walk(current_walk, template_linters, options, out)
return num_violations
_process_current_walk(current_walk, template_linters, options, summary_results, out)
def _lint(file_or_dir, template_linters, options, summary_results, out):
"""
For each linter, lints the provided file or directory.
Arguments:
file_or_dir: The file or initial directory to lint.
template_linters: A list of linting objects.
options: A list of the options.
summary_results: A SummaryResults with a summary of the violations.
out: output file
"""
if file_or_dir is not None and os.path.isfile(file_or_dir):
_process_file(file_or_dir, template_linters, options, summary_results, out)
else:
directory = "."
if file_or_dir is not None:
if os.path.exists(file_or_dir):
directory = file_or_dir
else:
raise ValueError("Path [{}] is not a valid file or directory.".format(file_or_dir))
_process_os_walk(directory, template_linters, options, summary_results, out)
summary_results.print_results(options, out)
def main():
......@@ -2579,11 +2566,9 @@ def main():
Prints all violations.
"""
epilog = 'rules:\n'
for rule in Rules.__members__.values():
epilog += " {0[0]}: {0[1]}\n".format(rule.value)
epilog = "For more help using the safe template linter, including details on how\n"
epilog += "to understand and fix any violations, read the docs here:\n"
epilog += "\n"
epilog += "additional help:\n"
# pylint: disable=line-too-long
epilog += " http://edx.readthedocs.org/projects/edx-developer-guide/en/latest/conventions/safe_templates.html#safe-template-linter\n"
......@@ -2597,6 +2582,10 @@ def main():
help='Only display the filenames that contain violations.'
)
parser.add_argument(
'--rule-totals', dest='rule_totals', action='store_true',
help='Display the totals for each rule.'
)
parser.add_argument(
'--verbose', dest='verbose', action='store_true',
help='Print multiple lines where possible for additional context of violations.'
)
......@@ -2606,25 +2595,13 @@ def main():
options = {
'list_files': args.list_files,
'verbose': args.verbose
'rule_totals': args.rule_totals,
'verbose': args.verbose,
}
template_linters = [MakoTemplateLinter(), UnderscoreTemplateLinter(), JavaScriptLinter(), PythonLinter()]
summary_results = SummaryResults()
if args.path is not None and os.path.isfile(args.path):
num_violations = _process_file(args.path, template_linters, options, out=sys.stdout)
else:
directory = "."
if args.path is not None:
if os.path.exists(args.path):
directory = args.path
else:
raise ValueError("Path [{}] is not a valid file or directory.".format(args.path))
num_violations = _process_os_walk(directory, template_linters, options, out=sys.stdout)
if options['list_files'] is False:
# matches output of jshint for simplicity
print("")
print("{} violations found".format(num_violations))
_lint(args.path, template_linters, options, summary_results, out=sys.stdout)
if __name__ == "__main__":
......
#!/usr/bin/env bash
set -e
###############################################################################
#
# safelint_thresholds.sh
#
# The thresholds used for paver run_safelint when used with various CI
# systems.
#
###############################################################################
# Violations thresholds for failing the build
export SAFELINT_THRESHOLDS='
{
"rules": {
"javascript-concat-html": 313,
"javascript-escape": 7,
"javascript-interpolate": 71,
"javascript-jquery-append": 120,
"javascript-jquery-html": 313,
"javascript-jquery-insert-into-target": 26,
"javascript-jquery-insertion": 30,
"javascript-jquery-prepend": 12,
"mako-html-entities": 0,
"mako-invalid-html-filter": 33,
"mako-invalid-js-filter": 249,
"mako-js-html-string": 0,
"mako-js-missing-quotes": 0,
"mako-missing-default": 248,
"mako-multiple-page-tags": 0,
"mako-unknown-context": 0,
"mako-unparseable-expression": 0,
"mako-unwanted-html-filter": 0,
"python-close-before-format": 0,
"python-concat-html": 28,
"python-custom-escape": 13,
"python-deprecated-display-name": 53,
"python-interpolate-html": 68,
"python-parse-error": 0,
"python-requires-html-or-text": 0,
"python-wrap-html": 289,
"underscore-not-escaped": 709
},
"total": 2565
}'
export SAFELINT_THRESHOLDS=${SAFELINT_THRESHOLDS//[[:space:]]/}
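
The final export collapses all whitespace in the JSON so that the unquoted expansion in "paver run_safelint -t $SAFELINT_THRESHOLDS" (used by the Jenkins script above) remains a single shell word. A quick local check, assuming a bash shell at the repository root:

    source scripts/safelint_thresholds.sh
    echo "$SAFELINT_THRESHOLDS"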
<%= invalid %>
<%=
'multi-line invalid'
%>
......@@ -10,8 +10,8 @@ import textwrap
from unittest import TestCase
from ..safe_template_linter import (
_process_os_walk, FileResults, JavaScriptLinter, MakoTemplateLinter, ParseString,
StringLines, PythonLinter, UnderscoreTemplateLinter, Rules
_lint, FileResults, JavaScriptLinter, MakoTemplateLinter, ParseString,
StringLines, PythonLinter, SummaryResults, UnderscoreTemplateLinter, Rules
)
......@@ -83,6 +83,15 @@ class TestSafeTemplateLinter(TestCase):
Test some top-level linter functions
"""
def setUp(self):
"""
Setup patches on linters for testing.
"""
self.patch_is_valid_directory(MakoTemplateLinter)
self.patch_is_valid_directory(JavaScriptLinter)
self.patch_is_valid_directory(UnderscoreTemplateLinter)
self.patch_is_valid_directory(PythonLinter)
def patch_is_valid_directory(self, linter_class):
"""
Creates a mock patch for _is_valid_directory on a Linter to always
......@@ -96,37 +105,137 @@ class TestSafeTemplateLinter(TestCase):
self.addCleanup(patcher.stop)
return patch_start
def test_process_os_walk(self):
def test_lint_defaults(self):
"""
Tests the top-level processing of template files, including Mako
includes.
Tests the top-level linting with default options.
"""
out = StringIO()
options = {
'list_files': False,
'verbose': False,
}
template_linters = [MakoTemplateLinter(), JavaScriptLinter(), UnderscoreTemplateLinter(), PythonLinter()]
self.patch_is_valid_directory(MakoTemplateLinter)
self.patch_is_valid_directory(JavaScriptLinter)
self.patch_is_valid_directory(UnderscoreTemplateLinter)
self.patch_is_valid_directory(PythonLinter)
num_violations = _process_os_walk('scripts/tests/templates', template_linters, options, out)
summary_results = SummaryResults()
_lint(
'scripts/tests/templates',
template_linters=[MakoTemplateLinter(), UnderscoreTemplateLinter(), JavaScriptLinter(), PythonLinter()],
options={
'list_files': False,
'verbose': False,
'rule_totals': False,
},
summary_results=summary_results,
out=out,
)
output = out.getvalue()
self.assertEqual(num_violations, 7)
# Assert violation details are displayed.
self.assertIsNotNone(re.search('test\.html.*{}'.format(Rules.mako_missing_default.rule_id), output))
self.assertIsNotNone(re.search('test\.coffee.*{}'.format(Rules.javascript_concat_html.rule_id), output))
self.assertIsNotNone(re.search('test\.coffee.*{}'.format(Rules.underscore_not_escaped.rule_id), output))
self.assertIsNotNone(re.search('test\.js.*{}'.format(Rules.javascript_concat_html.rule_id), output))
self.assertIsNotNone(re.search('test\.js.*{}'.format(Rules.underscore_not_escaped.rule_id), output))
self.assertIsNotNone(re.search('test\.underscore.*{}'.format(Rules.underscore_not_escaped.rule_id), output))
lines_with_rule = 0
lines_without_rule = 0 # Output with verbose setting only.
for underscore_match in re.finditer('test\.underscore:.*\n', output):
if re.search(Rules.underscore_not_escaped.rule_id, underscore_match.group()) is not None:
lines_with_rule += 1
else:
lines_without_rule += 1
self.assertGreaterEqual(lines_with_rule, 1)
self.assertEquals(lines_without_rule, 0)
self.assertIsNone(re.search('test\.py.*{}'.format(Rules.python_parse_error.rule_id), output))
self.assertIsNotNone(re.search('test\.py.*{}'.format(Rules.python_wrap_html.rule_id), output))
# Assert no rule totals.
self.assertIsNone(re.search('{}:\s*{} violations'.format(Rules.python_parse_error.rule_id, 0), output))
# Assert final total
self.assertIsNotNone(re.search('{} violations total'.format(7), output))
def test_lint_with_verbose(self):
"""
Tests the top-level linting with verbose option.
"""
out = StringIO()
summary_results = SummaryResults()
_lint(
'scripts/tests/templates',
template_linters=[MakoTemplateLinter(), UnderscoreTemplateLinter(), JavaScriptLinter(), PythonLinter()],
options={
'list_files': False,
'verbose': True,
'rule_totals': False,
},
summary_results=summary_results,
out=out,
)
output = out.getvalue()
lines_with_rule = 0
lines_without_rule = 0 # Output with verbose setting only.
for underscore_match in re.finditer('test\.underscore:.*\n', output):
if re.search(Rules.underscore_not_escaped.rule_id, underscore_match.group()) is not None:
lines_with_rule += 1
else:
lines_without_rule += 1
self.assertGreaterEqual(lines_with_rule, 1)
self.assertGreaterEqual(lines_without_rule, 1)
# Assert no rule totals.
self.assertIsNone(re.search('{}:\s*{} violations'.format(Rules.python_parse_error.rule_id, 0), output))
# Assert final total
self.assertIsNotNone(re.search('{} violations total'.format(7), output))
def test_lint_with_rule_totals(self):
"""
Tests the top-level linting with rule totals option.
"""
out = StringIO()
summary_results = SummaryResults()
_lint(
'scripts/tests/templates',
template_linters=[MakoTemplateLinter(), UnderscoreTemplateLinter(), JavaScriptLinter(), PythonLinter()],
options={
'list_files': False,
'verbose': False,
'rule_totals': True,
},
summary_results=summary_results,
out=out,
)
output = out.getvalue()
self.assertIsNotNone(re.search('test\.py.*{}'.format(Rules.python_wrap_html.rule_id), output))
# Assert totals output.
self.assertIsNotNone(re.search('{}:\s*{} violations'.format(Rules.python_parse_error.rule_id, 0), output))
self.assertIsNotNone(re.search('{}:\s*{} violations'.format(Rules.python_wrap_html.rule_id, 1), output))
self.assertIsNotNone(re.search('{} violations total'.format(7), output))
def test_lint_with_list_files(self):
"""
Tests the top-level linting with list files option.
"""
out = StringIO()
summary_results = SummaryResults()
_lint(
'scripts/tests/templates',
template_linters=[MakoTemplateLinter(), UnderscoreTemplateLinter(), JavaScriptLinter(), PythonLinter()],
options={
'list_files': True,
'verbose': False,
'rule_totals': False,
},
summary_results=summary_results,
out=out,
)
output = out.getvalue()
# Assert file with rule is not output.
self.assertIsNone(re.search('test\.py.*{}'.format(Rules.python_wrap_html.rule_id), output))
# Assert file is output.
self.assertIsNotNone(re.search('test\.py', output))
# Assert no totals.
self.assertIsNone(re.search('{}:\s*{} violations'.format(Rules.python_parse_error.rule_id, 0), output))
self.assertIsNone(re.search('{} violations total'.format(7), output))
@ddt
......
......@@ -6,6 +6,7 @@
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from openedx.core.djangolib.markup import HTML, Text
# App that handles subdomain specific branding
import branding
# app that handles site status messages
......@@ -59,9 +60,9 @@ site_status_msg = get_site_status_msg(course_id)
% if course and not disable_courseware_header:
<h2 class="course-header">
<span class="provider">${course.display_org_with_default | h}:</span>
<span class="course-number">${course.display_number_with_default | h}</span>
<span class="course-name">${course.display_name_with_default_escaped}</span>
<span class="provider">${course.display_org_with_default}:</span>
<span class="course-number">${course.display_number_with_default}</span>
<span class="course-name">${course.display_name_with_default}</span>
</h2>
% endif
......@@ -143,7 +144,12 @@ site_status_msg = get_site_status_msg(course_id)
</header>
% if course:
<!--[if lte IE 9]>
<div class="ie-banner" aria-hidden="true">${_('<strong>Warning:</strong> Your browser is not fully supported. We strongly recommend using {chrome_link} or {ff_link}.').format(chrome_link='<a href="https://www.google.com/chrome" target="_blank">Chrome</a>', ff_link='<a href="http://www.mozilla.org/firefox" target="_blank">Firefox</a>')}</div>
<div class="ie-banner" aria-hidden="true">${Text(_('{begin_strong}Warning:{end_strong} Your browser is not fully supported. We strongly recommend using {chrome_link} or {ff_link}.')).format(
begin_strong=HTML('<strong>'),
end_strong=HTML('</strong>'),
chrome_link=HTML('<a href="https://www.google.com/chrome" target="_blank">Chrome</a>'),
ff_link=HTML('<a href="http://www.mozilla.org/firefox" target="_blank">Firefox</a>'),
)}</div>
<![endif]-->
% endif
......