diff --git a/common/lib/xmodule/xmodule/capa_base.py b/common/lib/xmodule/xmodule/capa_base.py
index 0ebdcea..e491e72 100644
--- a/common/lib/xmodule/xmodule/capa_base.py
+++ b/common/lib/xmodule/xmodule/capa_base.py
@@ -1117,10 +1117,11 @@ class CapaMixin(CapaFields):
         if dog_stats_api:
             dog_stats_api.increment(metric_name('checks'), tags=[u'result:success'])
-            dog_stats_api.histogram(
-                metric_name('correct_pct'),
-                float(published_grade['grade']) / published_grade['max_grade'],
-            )
+            if published_grade['max_grade'] != 0:
+                dog_stats_api.histogram(
+                    metric_name('correct_pct'),
+                    float(published_grade['grade']) / published_grade['max_grade'],
+                )
             dog_stats_api.histogram(
                 metric_name('attempts'),
                 self.attempts,
             )
diff --git a/common/lib/xmodule/xmodule/tests/test_capa_module.py b/common/lib/xmodule/xmodule/tests/test_capa_module.py
index 354629c..ad13086 100644
--- a/common/lib/xmodule/xmodule/tests/test_capa_module.py
+++ b/common/lib/xmodule/xmodule/tests/test_capa_module.py
@@ -731,6 +731,20 @@ class CapaModuleTest(unittest.TestCase):
         # Expect an AJAX alert message in 'success'
         self.assertIn(error_msg, result['success'])
 
+    def test_check_problem_zero_max_grade(self):
+        """
+        Test that a capa problem with a max grade of zero doesn't generate an error.
+        """
+        # Create the module
+        module = CapaFactory.create(attempts=1)
+
+        # Override the problem score to have a total of zero.
+        module.lcp.get_score = lambda: {'score': 0, 'total': 0}
+
+        # Check the problem
+        get_request_dict = {CapaFactory.input_key(): '3.14'}
+        module.check_problem(get_request_dict)
+
     def test_check_problem_error_nonascii(self):
         # Try each exception that capa_module should handle