Commit e87388e2 by Nimisha Asthagiri

Grades cleanup before updating grade report

Remove deprecated SingleSectionGrader. TNL-5987
Remove display_name and module_id from Score objects
Update CourseGradeFactory.__init__ to not be user-specific
Update some callers to use the CourseGrade class instead of the "summary" dict
Remove the no-longer-needed course_grades.py module
Rename django signal from GRADES_UPDATED to COURSE_GRADE_CHANGED
parent 1956e2cf
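The net effect on callers, as a before/after sketch (illustrative only; user, course, and course_overview stand for the usual Django user and course objects):

    # Before: the factory was bound to one user at construction time.
    course_grade = CourseGradeFactory(user).create(course)
    persisted_grade = CourseGradeFactory(user).get_persisted(course_overview)

    # After: a single factory instance serves any user; the user is passed per call.
    course_grade = CourseGradeFactory().create(user, course)
    persisted_grade = CourseGradeFactory().get_persisted(user, course_overview)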
@@ -404,7 +404,7 @@ def _cert_info(user, course_overview, cert_status, course_mode):  # pylint: disa
         )
     if status in {'generating', 'ready', 'notpassing', 'restricted', 'auditing', 'unverified'}:
-        persisted_grade = CourseGradeFactory(user).get_persisted(course_overview)
+        persisted_grade = CourseGradeFactory().get_persisted(user, course_overview)
         if persisted_grade is not None:
             status_dict['grade'] = unicode(persisted_grade.percent)
     elif 'grade' in cert_status:
...
@@ -18,17 +18,13 @@ class ScoreBase(object):
     """
     Abstract base class for encapsulating fields of values scores.

     Field common to all scores include:
-        display_name (string) - the display name of the module
-        module_id (UsageKey) - the location of the module
         graded (boolean) - whether or not this module is graded
         attempted (boolean) - whether the module was attempted
     """
     __metaclass__ = abc.ABCMeta

-    def __init__(self, graded, display_name, module_id, attempted):
+    def __init__(self, graded, attempted):
         self.graded = graded
-        self.display_name = display_name
-        self.module_id = module_id
         self.attempted = attempted

     def __eq__(self, other):
@@ -55,10 +51,10 @@ class ProblemScore(ScoreBase):
     """
     def __init__(self, raw_earned, raw_possible, weighted_earned, weighted_possible, weight, *args, **kwargs):
         super(ProblemScore, self).__init__(*args, **kwargs)
-        self.raw_earned = raw_earned
-        self.raw_possible = raw_possible
-        self.earned = weighted_earned
-        self.possible = weighted_possible
+        self.raw_earned = float(raw_earned) if raw_earned is not None else None
+        self.raw_possible = float(raw_possible) if raw_possible is not None else None
+        self.earned = float(weighted_earned) if weighted_earned is not None else None
+        self.possible = float(weighted_possible) if weighted_possible is not None else None
         self.weight = weight
@@ -71,8 +67,8 @@ class AggregatedScore(ScoreBase):
     """
     def __init__(self, tw_earned, tw_possible, *args, **kwargs):
         super(AggregatedScore, self).__init__(*args, **kwargs)
-        self.earned = tw_earned
-        self.possible = tw_possible
+        self.earned = float(tw_earned) if tw_earned is not None else None
+        self.possible = float(tw_possible) if tw_possible is not None else None


 def float_sum(iterable):
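The effect of the new float coercion, as a quick sketch (values are illustrative; the graded/attempted keywords reach ScoreBase through **kwargs):

    ps = ProblemScore(
        raw_earned=2, raw_possible=3, weighted_earned=2, weighted_possible=3,
        weight=1, graded=True, attempted=True,
    )
    assert ps.earned == 2.0          # coerced to float at construction
    assert ps.raw_possible == 3.0
    # None values are preserved rather than coerced:
    empty = ProblemScore(None, None, None, None, weight=None, graded=False, attempted=False)
    assert empty.earned is None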
@@ -82,11 +78,9 @@ def float_sum(iterable):
     return float(sum(iterable))


-def aggregate_scores(scores, display_name="summary", location=None):
+def aggregate_scores(scores):
     """
     scores: A list of ScoreBase objects
-    display_name: The display name for the score object
-    location: The location under which all objects in scores are located

     returns: A tuple (all_total, graded_total).
         all_total: A ScoreBase representing the total score summed over all input scores
         graded_total: A ScoreBase representing the score summed over all graded input scores
@@ -100,11 +94,11 @@ def aggregate_scores(scores, display_name="summary", location=None):
     any_attempted = any(score.attempted for score in scores)

     # regardless of whether it is graded
-    all_total = AggregatedScore(total_correct, total_possible, False, display_name, location, any_attempted)
+    all_total = AggregatedScore(total_correct, total_possible, False, any_attempted)

     # selecting only graded things
     graded_total = AggregatedScore(
-        total_correct_graded, total_possible_graded, True, display_name, location, any_attempted_graded,
+        total_correct_graded, total_possible_graded, True, any_attempted_graded,
     )
     return all_total, graded_total
@@ -126,9 +120,8 @@ def grader_from_conf(conf):
     This creates a CourseGrader from a configuration (such as in course_settings.py).
     The conf can simply be an instance of CourseGrader, in which case no work is done.
-    More commonly, the conf is a list of dictionaries. A WeightedSubsectionsGrader
-    with AssignmentFormatGrader's or SingleSectionGrader's as subsections will be
-    generated. Every dictionary should contain the parameters for making either a
-    AssignmentFormatGrader or SingleSectionGrader, in addition to a 'weight' key.
+    More commonly, the conf is a list of dictionaries. A WeightedSubsectionsGrader
+    with AssignmentFormatGrader's will be generated. Every dictionary should contain
+    the parameters for making an AssignmentFormatGrader, in addition to a 'weight' key.
     """
     if isinstance(conf, CourseGrader):
         return conf
@@ -137,27 +130,14 @@ def grader_from_conf(conf):
     for subgraderconf in conf:
         subgraderconf = subgraderconf.copy()
         weight = subgraderconf.pop("weight", 0)
-        # NOTE: 'name' used to exist in SingleSectionGrader. We are deprecating SingleSectionGrader
-        # and converting everything into an AssignmentFormatGrader by adding 'min_count' and
-        # 'drop_count'. AssignmentFormatGrader does not expect 'name', so if it appears
-        # in bad_args, go ahead remove it (this causes no errors). Eventually, SingleSectionGrader
-        # should be completely removed.
-        name = 'name'
         try:
             if 'min_count' in subgraderconf:
                 #This is an AssignmentFormatGrader
                 subgrader_class = AssignmentFormatGrader
-            elif name in subgraderconf:
-                #This is an SingleSectionGrader
-                subgrader_class = SingleSectionGrader
             else:
                 raise ValueError("Configuration has no appropriate grader class.")

             bad_args = invalid_args(subgrader_class.__init__, subgraderconf)
-            # See note above concerning 'name'.
-            if bad_args.issuperset({name}):
-                bad_args = bad_args - {name}
-                del subgraderconf[name]
             if len(bad_args) > 0:
                 log.warning("Invalid arguments for a subgrader: %s", bad_args)
                 for key in bad_args:
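With the SingleSectionGrader branch gone, every entry in a grader configuration must be a valid AssignmentFormatGrader spec carrying min_count and drop_count; a sketch of a conforming conf (values are illustrative, mirroring the test fixture later in this commit):

    grader = grader_from_conf([
        {
            'type': "Homework",
            'min_count': 12,
            'drop_count': 2,
            'weight': 0.5,
        },
        {
            'type': "Midterm",   # a single-section assignment type
            'min_count': 0,
            'drop_count': 0,
            'weight': 0.5,
        },
    ])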
@@ -264,57 +244,6 @@ class WeightedSubsectionsGrader(CourseGrader):
                 'grade_breakdown': grade_breakdown}


-class SingleSectionGrader(CourseGrader):
-    """
-    This grades a single section with the format 'type' and the name 'name'.
-
-    If the name is not appropriate for the short short_label or category, they each may
-    be specified individually.
-    """
-    def __init__(self, type, name, short_label=None, category=None):  # pylint: disable=redefined-builtin
-        self.type = type
-        self.name = name
-        self.short_label = short_label or name
-        self.category = category or name
-
-    def grade(self, grade_sheet, generate_random_scores=False):
-        found_score = None
-        if self.type in grade_sheet:
-            for score in grade_sheet[self.type]:
-                if score.display_name == self.name:
-                    found_score = score
-                    break
-
-        if found_score or generate_random_scores:
-            if generate_random_scores:  # for debugging!
-                earned = random.randint(2, 15)
-                possible = random.randint(earned, 15)
-            else:  # We found the score
-                earned = found_score.earned
-                possible = found_score.possible
-            percent = earned / possible
-            detail = u"{name} - {percent:.0%} ({earned:.3n}/{possible:.3n})".format(
-                name=self.name,
-                percent=percent,
-                earned=float(earned),
-                possible=float(possible)
-            )
-        else:
-            percent = 0.0
-            detail = u"{name} - 0% (?/?)".format(name=self.name)
-
-        breakdown = [{'percent': percent, 'label': self.short_label,
-                      'detail': detail, 'category': self.category, 'prominent': True}]
-
-        return {
-            'percent': percent,
-            'section_breakdown': breakdown,
-            #No grade_breakdown here
-        }
-
-
 class AssignmentFormatGrader(CourseGrader):
     """
     Grades all sections matching the format 'type' with an equal weight. A specified
@@ -332,9 +261,9 @@ class AssignmentFormatGrader(CourseGrader):
     hide_average is to suppress the display of the total score in this grader and instead
     only show each assignment in this grader in the breakdown.

-    If there is only a single assignment in this grader, then it acts like a SingleSectionGrader
-    and returns only one entry for the grader. Since the assignment and the total are the same,
-    the total is returned but is not labeled as an average.
+    If there is only a single assignment in this grader, then it returns only one entry for the
+    grader. Since the assignment and the total are the same, the total is returned but is not
+    labeled as an average.

     category should be presentable to the user, but may not appear. When the grade breakdown is
     displayed, scores from the same category will be similar (for example, by color).
@@ -401,8 +330,8 @@ class AssignmentFormatGrader(CourseGrader):
                 section_name = "Generated"
             else:
-                earned = scores[i].earned
-                possible = scores[i].possible
+                earned = scores[i].graded_total.earned
+                possible = scores[i].graded_total.possible
                 section_name = scores[i].display_name

             percentage = earned / possible
@@ -442,8 +371,7 @@ class AssignmentFormatGrader(CourseGrader):
         if len(breakdown) == 1:
             # if there is only one entry in a section, suppress the existing individual entry and the average,
-            # and just display a single entry for the section. That way it acts automatically like a
-            # SingleSectionGrader.
+            # and just display a single entry for the section.
             total_detail = u"{section_type} = {percent:.0%}".format(
                 percent=total_percent,
                 section_type=self.section_type,
...
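The AssignmentFormatGrader change above implies a new gradesheet contract: grade_sheet now maps each assignment type to objects exposing graded_total and display_name, rather than bare scores (the tests below introduce a MockGrade for exactly this shape). A rough usage sketch under that assumption:

    homework_grader = AssignmentFormatGrader("Homework", min_count=12, drop_count=2)
    graded = homework_grader.grade(grade_sheet)   # grade_sheet: {'Homework': [objects with .graded_total, .display_name], ...}
    print graded['percent'], len(graded['section_breakdown'])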
"""Grading tests""" """Grading tests"""
import ddt
import unittest import unittest
from xmodule import graders from xmodule import graders
@@ -12,13 +13,11 @@ class GradesheetTest(unittest.TestCase):
     def test_weighted_grading(self):
         scores = []
-        agg_fields = dict(display_name="aggregated_score", module_id=None, attempted=False)
-        prob_fields = dict(
-            display_name="problem_score", module_id=None, raw_earned=0, raw_possible=0, weight=0, attempted=False,
-        )
+        agg_fields = dict(attempted=False)
+        prob_fields = dict(raw_earned=0, raw_possible=0, weight=0, attempted=False)

         # No scores
-        all_total, graded_total = aggregate_scores(scores, display_name=agg_fields['display_name'])
+        all_total, graded_total = aggregate_scores(scores)
         self.assertEqual(
             all_total,
             AggregatedScore(tw_earned=0, tw_possible=0, graded=False, **agg_fields),
@@ -30,7 +29,7 @@ class GradesheetTest(unittest.TestCase):
         # (0/5 non-graded)
         scores.append(ProblemScore(weighted_earned=0, weighted_possible=5, graded=False, **prob_fields))
-        all_total, graded_total = aggregate_scores(scores, display_name=agg_fields['display_name'])
+        all_total, graded_total = aggregate_scores(scores)
         self.assertEqual(
             all_total,
             AggregatedScore(tw_earned=0, tw_possible=5, graded=False, **agg_fields),
@@ -44,7 +43,7 @@ class GradesheetTest(unittest.TestCase):
         prob_fields['attempted'] = True
         agg_fields['attempted'] = True
         scores.append(ProblemScore(weighted_earned=3, weighted_possible=5, graded=True, **prob_fields))
-        all_total, graded_total = aggregate_scores(scores, display_name=agg_fields['display_name'])
+        all_total, graded_total = aggregate_scores(scores)
         self.assertAlmostEqual(
             all_total,
             AggregatedScore(tw_earned=3, tw_possible=10, graded=False, **agg_fields),
@@ -56,7 +55,7 @@ class GradesheetTest(unittest.TestCase):
         # (0/5 non-graded) + (3/5 graded) + (2/5 graded) = 5/15 total, 5/10 graded
         scores.append(ProblemScore(weighted_earned=2, weighted_possible=5, graded=True, **prob_fields))
-        all_total, graded_total = aggregate_scores(scores, display_name=agg_fields['display_name'])
+        all_total, graded_total = aggregate_scores(scores)
         self.assertAlmostEqual(
             all_total,
             AggregatedScore(tw_earned=5, tw_possible=15, graded=False, **agg_fields),
@@ -67,6 +66,7 @@ class GradesheetTest(unittest.TestCase):
         )


+@ddt.ddt
 class GraderTest(unittest.TestCase):
     """
     Tests grader implementations
@@ -81,50 +81,37 @@ class GraderTest(unittest.TestCase):
         'Midterm': [],
     }

-    common_fields = dict(graded=True, module_id=None, attempted=True)
+    class MockGrade(object):
+        """
+        Mock class for SubsectionGrade object.
+        """
+        def __init__(self, graded_total, display_name):
+            self.graded_total = graded_total
+            self.display_name = display_name
+
+    common_fields = dict(graded=True, attempted=True)
     test_gradesheet = {
         'Homework': [
-            AggregatedScore(tw_earned=2, tw_possible=20.0, display_name='hw1', **common_fields),
-            AggregatedScore(tw_earned=16, tw_possible=16.0, display_name='hw2', **common_fields),
+            MockGrade(AggregatedScore(tw_earned=2, tw_possible=20.0, **common_fields), display_name='hw1'),
+            MockGrade(AggregatedScore(tw_earned=16, tw_possible=16.0, **common_fields), display_name='hw2'),
         ],

         # The dropped scores should be from the assignments that don't exist yet
         'Lab': [
-            AggregatedScore(tw_earned=1, tw_possible=2.0, display_name='lab1', **common_fields),  # Dropped
-            AggregatedScore(tw_earned=1, tw_possible=1.0, display_name='lab2', **common_fields),
-            AggregatedScore(tw_earned=1, tw_possible=1.0, display_name='lab3', **common_fields),
-            AggregatedScore(tw_earned=5, tw_possible=25.0, display_name='lab4', **common_fields),  # Dropped
-            AggregatedScore(tw_earned=3, tw_possible=4.0, display_name='lab5', **common_fields),  # Dropped
-            AggregatedScore(tw_earned=6, tw_possible=7.0, display_name='lab6', **common_fields),
-            AggregatedScore(tw_earned=5, tw_possible=6.0, display_name='lab7', **common_fields),
+            MockGrade(AggregatedScore(tw_earned=1, tw_possible=2.0, **common_fields), display_name='lab1'),  # Dropped
+            MockGrade(AggregatedScore(tw_earned=1, tw_possible=1.0, **common_fields), display_name='lab2'),
+            MockGrade(AggregatedScore(tw_earned=1, tw_possible=1.0, **common_fields), display_name='lab3'),
+            MockGrade(AggregatedScore(tw_earned=5, tw_possible=25.0, **common_fields), display_name='lab4'),  # Dropped
+            MockGrade(AggregatedScore(tw_earned=3, tw_possible=4.0, **common_fields), display_name='lab5'),  # Dropped
+            MockGrade(AggregatedScore(tw_earned=6, tw_possible=7.0, **common_fields), display_name='lab6'),
+            MockGrade(AggregatedScore(tw_earned=5, tw_possible=6.0, **common_fields), display_name='lab7'),
         ],
         'Midterm': [
-            AggregatedScore(tw_earned=50.5, tw_possible=100, display_name="Midterm Exam", **common_fields),
+            MockGrade(AggregatedScore(tw_earned=50.5, tw_possible=100, **common_fields), display_name="Midterm Exam"),
         ],
     }

-    def test_single_section_grader(self):
-        midterm_grader = graders.SingleSectionGrader("Midterm", "Midterm Exam")
-        lab4_grader = graders.SingleSectionGrader("Lab", "lab4")
-        bad_lab_grader = graders.SingleSectionGrader("Lab", "lab42")
-
-        for graded in [
-            midterm_grader.grade(self.empty_gradesheet),
-            midterm_grader.grade(self.incomplete_gradesheet),
-            bad_lab_grader.grade(self.test_gradesheet),
-        ]:
-            self.assertEqual(len(graded['section_breakdown']), 1)
-            self.assertEqual(graded['percent'], 0.0)
-
-        graded = midterm_grader.grade(self.test_gradesheet)
-        self.assertAlmostEqual(graded['percent'], 0.505)
-        self.assertEqual(len(graded['section_breakdown']), 1)
-
-        graded = lab4_grader.grade(self.test_gradesheet)
-        self.assertAlmostEqual(graded['percent'], 0.2)
-        self.assertEqual(len(graded['section_breakdown']), 1)
-
     def test_assignment_format_grader(self):
         homework_grader = graders.AssignmentFormatGrader("Homework", 12, 2)
         no_drop_grader = graders.AssignmentFormatGrader("Homework", 12, 0)
@@ -179,8 +166,6 @@ class GraderTest(unittest.TestCase):
         # First, a few sub graders
         homework_grader = graders.AssignmentFormatGrader("Homework", 12, 2)
         lab_grader = graders.AssignmentFormatGrader("Lab", 7, 3)
-        # phasing out the use of SingleSectionGraders, and instead using AssignmentFormatGraders that
-        # will act like SingleSectionGraders on single sections.
         midterm_grader = graders.AssignmentFormatGrader("Midterm", 1, 0)
         weighted_grader = graders.WeightedSubsectionsGrader([
@@ -268,6 +253,8 @@ class GraderTest(unittest.TestCase):
             },
             {
                 'type': "Midterm",
+                'min_count': 0,
+                'drop_count': 0,
                 'name': "Midterm Exam",
                 'short_label': "Midterm",
                 'weight': 0.5,
@@ -294,5 +281,25 @@ class GraderTest(unittest.TestCase):
         self.assertAlmostEqual(graded['percent'], 0.11)
         self.assertEqual(len(graded['section_breakdown']), 12 + 1)

-    # TODO: How do we test failure cases? The parser only logs an error when
-    # it can't parse something. Maybe it should throw exceptions?
+    @ddt.data(
+        # empty
+        (
+            {},
+            u"Configuration has no appropriate grader class."
+        ),
+        # no min_count
+        (
+            {'type': "Homework", 'drop_count': 0},
+            u"Configuration has no appropriate grader class."
+        ),
+        # no drop_count
+        (
+            {'type': "Homework", 'min_count': 0},
+            u"__init__() takes at least 4 arguments (3 given)"
+        ),
+    )
+    @ddt.unpack
+    def test_grader_with_invalid_conf(self, invalid_conf, expected_error_message):
+        with self.assertRaises(ValueError) as error:
+            graders.grader_from_conf([invalid_conf])
+        self.assertIn(expected_error_message, error.exception.message)
@@ -33,7 +33,7 @@ from courseware.field_overrides import disable_overrides
 from django_comment_common.models import FORUM_ROLE_ADMINISTRATOR, assign_role
 from django_comment_common.utils import seed_permissions_roles
 from edxmako.shortcuts import render_to_response
-from lms.djangoapps.grades.course_grades import iterate_grades_for
+from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
 from opaque_keys.edx.keys import CourseKey
 from ccx_keys.locator import CCXLocator
 from student.roles import CourseCcxCoachRole
@@ -564,30 +564,30 @@ def ccx_grades_csv(request, course, ccx=None):
         courseenrollment__course_id=ccx_key,
         courseenrollment__is_active=1
     ).order_by('username').select_related("profile")

-    grades = iterate_grades_for(course, enrolled_students)
+    grades = CourseGradeFactory().iter(course, enrolled_students)

     header = None
     rows = []
-    for student, gradeset, __ in grades:
-        if gradeset:
+    for student, course_grade, __ in grades:
+        if course_grade:
             # We were able to successfully grade this student for this
             # course.
             if not header:
                 # Encode the header row in utf-8 encoding in case there are
                 # unicode characters
                 header = [section['label'].encode('utf-8')
-                          for section in gradeset[u'section_breakdown']]
+                          for section in course_grade.summary[u'section_breakdown']]
                 rows.append(["id", "email", "username", "grade"] + header)

             percents = {
                 section['label']: section.get('percent', 0.0)
-                for section in gradeset[u'section_breakdown']
+                for section in course_grade.summary[u'section_breakdown']
                 if 'label' in section
             }

             row_percents = [percents.get(label, 0.0) for label in header]
             rows.append([student.id, student.email, student.username,
-                         gradeset['percent']] + row_percents)
+                         course_grade.percent] + row_percents)

     buf = StringIO()
     writer = csv.writer(buf)
...
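The removed iterate_grades_for helper survives as CourseGradeFactory().iter with the same yielded shape; a minimal sketch of the new iteration pattern (enrolled_students is assumed to be an iterable of User objects):

    for student, course_grade, err_msg in CourseGradeFactory().iter(course, enrolled_students):
        if course_grade:
            print student.username, course_grade.percent
        else:
            print student.username, "could not be graded:", err_msg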
@@ -5,7 +5,7 @@ Management command which fixes ungraded certificates for students
 from certificates.models import GeneratedCertificate
 from courseware import courses
-from lms.djangoapps.grades import course_grades
+from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
 from django.test.client import RequestFactory
 from django.core.management.base import BaseCommand
 from optparse import make_option
@@ -52,8 +52,8 @@ class Command(BaseCommand):
         for cert in ungraded:
             # grade the student
-            grade = course_grades.summary(cert.user, course)
-            print "grading {0} - {1}".format(cert.user, grade['percent'])
-            cert.grade = grade['percent']
+            grade = CourseGradeFactory().create(cert.user, course)
+            print "grading {0} - {1}".format(cert.user, grade.percent)
+            cert.grade = grade.percent
             if not options['noop']:
                 cert.save()
@@ -11,7 +11,7 @@ from django.conf import settings
 from django.core.urlresolvers import reverse
 from requests.auth import HTTPBasicAuth

-from lms.djangoapps.grades import course_grades
+from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
 from xmodule.modulestore.django import modulestore
 from capa.xqueue_interface import XQueueInterface
 from capa.xqueue_interface import make_xheader, make_hashkey
@@ -271,7 +271,7 @@ class XQueueCertInterface(object):
         self.request.session = {}

         is_whitelisted = self.whitelist.filter(user=student, course_id=course_id, whitelist=True).exists()
-        grade = course_grades.summary(student, course)
+        grade = CourseGradeFactory().create(student, course).summary
         enrollment_mode, __ = CourseEnrollment.enrollment_mode_for_user(student, course_id)
         mode_is_verified = enrollment_mode in GeneratedCertificate.VERIFIED_CERTS_MODES
         user_is_verified = SoftwareSecurePhotoVerification.user_is_verified(student)
...
@@ -22,7 +22,7 @@ from capa.tests.response_xml_factory import (
 from course_modes.models import CourseMode
 from courseware.models import StudentModule, BaseStudentModuleHistory
 from courseware.tests.helpers import LoginEnrollmentTestCase
-from lms.djangoapps.grades import course_grades, progress
+from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
 from openedx.core.djangoapps.credit.api import (
     set_credit_requirements, get_credit_requirement_status
 )
@@ -270,39 +270,17 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
         self.update_course(self.course, self.student_user.id)
         self.refresh_course()

-    def get_grade_summary(self):
+    def get_course_grade(self):
         """
-        calls course_grades.summary for current user and course.
-
-        the keywords for the returned object are
-        - grade : A final letter grade.
-        - percent : The final percent for the class (rounded up).
-        - section_breakdown : A breakdown of each section that makes
-          up the grade. (For display)
-        - grade_breakdown : A breakdown of the major components that
-          make up the final grade. (For display)
-        """
-        return course_grades.summary(self.student_user, self.course)
-
-    def get_progress_summary(self):
-        """
-        Return progress summary structure for current user and course.
-
-        Returns
-        - courseware_summary is a summary of all sections with problems in the course.
-          It is organized as an array of chapters, each containing an array of sections,
-          each containing an array of scores. This contains information for graded and
-          ungraded problems, and is good for displaying a course summary with due dates,
-          etc.
+        Return CourseGrade for current user and course.
         """
-        return progress.summary(self.student_user, self.course).chapter_grades
+        return CourseGradeFactory().create(self.student_user, self.course)

     def check_grade_percent(self, percent):
         """
         Assert that percent grade is as expected.
         """
-        grade_summary = self.get_grade_summary()
-        self.assertEqual(grade_summary['percent'], percent)
+        self.assertEqual(self.get_course_grade().percent, percent)

     def earned_hw_scores(self):
         """
@@ -310,7 +288,7 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
         Returns list of scores: [<points on hw_1>, <points on hw_2>, ..., <points on hw_n>]
         """
-        return [s.earned for s in self.get_grade_summary()['totaled_scores']['Homework']]
+        return [s.graded_total.earned for s in self.get_course_grade().graded_subsections_by_format['Homework']]

     def hw_grade(self, hw_url_name):
         """
@@ -318,7 +296,7 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
         """
         # list of grade summaries for each section
         sections_list = []
-        for chapter in self.get_progress_summary():
+        for chapter in self.get_course_grade().chapter_grades:
             sections_list.extend(chapter['sections'])

         # get the first section that matches the url (there should only be one)
@@ -431,8 +409,11 @@ class TestCourseGrader(TestSubmittingProblems):
                 "drop_count": 0,
                 "short_label": "HW",
                 "weight": hw_weight
-            }, {
+            },
+            {
                 "type": "Final",
+                "min_count": 0,
+                "drop_count": 0,
                 "name": "Final Section",
                 "short_label": "Final",
                 "weight": final_weight
@@ -558,7 +539,7 @@ class TestCourseGrader(TestSubmittingProblems):
         """
         self.basic_setup()
         self.check_grade_percent(0)
-        self.assertEqual(self.get_grade_summary()['grade'], None)
+        self.assertEqual(self.get_course_grade().letter_grade, None)

     def test_b_grade_exact(self):
         """
@@ -567,7 +548,7 @@ class TestCourseGrader(TestSubmittingProblems):
         self.basic_setup()
         self.submit_question_answer('p1', {'2_1': 'Correct'})
         self.check_grade_percent(0.33)
-        self.assertEqual(self.get_grade_summary()['grade'], 'B')
+        self.assertEqual(self.get_course_grade().letter_grade, 'B')

     def test_b_grade_above(self):
         """
@@ -577,7 +558,7 @@ class TestCourseGrader(TestSubmittingProblems):
         self.submit_question_answer('p1', {'2_1': 'Correct'})
         self.submit_question_answer('p2', {'2_1': 'Correct'})
         self.check_grade_percent(0.67)
-        self.assertEqual(self.get_grade_summary()['grade'], 'B')
+        self.assertEqual(self.get_course_grade().letter_grade, 'B')

     def test_a_grade(self):
         """
@@ -588,7 +569,7 @@ class TestCourseGrader(TestSubmittingProblems):
         self.submit_question_answer('p2', {'2_1': 'Correct'})
         self.submit_question_answer('p3', {'2_1': 'Correct'})
         self.check_grade_percent(1.0)
-        self.assertEqual(self.get_grade_summary()['grade'], 'A')
+        self.assertEqual(self.get_course_grade().letter_grade, 'A')

     def test_wrong_answers(self):
         """
@@ -599,7 +580,7 @@ class TestCourseGrader(TestSubmittingProblems):
         self.submit_question_answer('p2', {'2_1': 'Correct'})
         self.submit_question_answer('p3', {'2_1': 'Incorrect'})
         self.check_grade_percent(0.67)
-        self.assertEqual(self.get_grade_summary()['grade'], 'B')
+        self.assertEqual(self.get_course_grade().letter_grade, 'B')

     def test_submissions_api_overrides_scores(self):
         """
@@ -610,7 +591,7 @@ class TestCourseGrader(TestSubmittingProblems):
         self.submit_question_answer('p2', {'2_1': 'Correct'})
         self.submit_question_answer('p3', {'2_1': 'Incorrect'})
         self.check_grade_percent(0.67)
-        self.assertEqual(self.get_grade_summary()['grade'], 'B')
+        self.assertEqual(self.get_course_grade().letter_grade, 'B')

         # But now, set the score with the submissions API and watch
         # as it overrides the score read from StudentModule and our
@@ -625,7 +606,7 @@ class TestCourseGrader(TestSubmittingProblems):
         submission = submissions_api.create_submission(student_item, 'any answer')
         submissions_api.set_score(submission['uuid'], 1, 1)
         self.check_grade_percent(1.0)
-        self.assertEqual(self.get_grade_summary()['grade'], 'A')
+        self.assertEqual(self.get_course_grade().letter_grade, 'A')

     def test_submissions_api_anonymous_student_id(self):
         """
@@ -640,7 +621,7 @@ class TestCourseGrader(TestSubmittingProblems):
         mock_get_scores.return_value = {
             self.problem_location('p3').to_deprecated_string(): (1, 1)
         }
-        self.get_grade_summary()
+        self.get_course_grade()

         # Verify that the submissions API was sent an anonymized student ID
         mock_get_scores.assert_called_with(
@@ -752,9 +733,6 @@ class TestCourseGrader(TestSubmittingProblems):
         # the Django student views, and does not update enrollment if it already exists.
         CourseEnrollment.enroll(self.student_user, self.course.id, mode)

-        self.submit_question_answer('p1', {'2_1': 'Correct'})
-        self.submit_question_answer('p2', {'2_1': 'Correct'})
-
         # Enable the course for credit
         CreditCourse.objects.create(course_key=self.course.id, enabled=True)
@@ -774,7 +752,15 @@ class TestCourseGrader(TestSubmittingProblems):
         # Add a single credit requirement (final grade)
         set_credit_requirements(self.course.id, requirements)

-        self.get_grade_summary()
+        # Credit requirement is not satisfied before passing grade
+        req_status = get_credit_requirement_status(self.course.id, self.student_user.username, 'grade', 'grade')
+        self.assertEqual(req_status[0]["status"], None)
+
+        self._stop_signal_patch()
+        self.submit_question_answer('p1', {'2_1': 'Correct'})
+        self.submit_question_answer('p2', {'2_1': 'Correct'})
+
+        # Credit requirement is now satisfied after passing grade
         req_status = get_credit_requirement_status(self.course.id, self.student_user.username, 'grade', 'grade')
         self.assertEqual(req_status[0]["status"], 'satisfied')
...
@@ -723,7 +723,7 @@ def _progress(request, course_key, student_id):
     # additional DB lookup (this kills the Progress page in particular).
     student = User.objects.prefetch_related("groups").get(id=student.id)

-    course_grade = CourseGradeFactory(student).create(course)
+    course_grade = CourseGradeFactory().create(student, course)
     courseware_summary = course_grade.chapter_grades
     grade_summary = course_grade.summary
@@ -1127,7 +1127,7 @@ def is_course_passed(course, grade_summary=None, student=None, request=None):
     success_cutoff = min(nonzero_cutoffs) if nonzero_cutoffs else None

     if grade_summary is None:
-        grade_summary = CourseGradeFactory(student).create(course).summary
+        grade_summary = CourseGradeFactory().create(student, course).summary

     return success_cutoff and grade_summary['percent'] >= success_cutoff
...
@@ -148,7 +148,7 @@ class UserGradeView(GradeViewMixin, GenericAPIView):
             return course

         prep_course_for_grading(course, request)
-        course_grade = CourseGradeFactory(request.user).create(course)
+        course_grade = CourseGradeFactory().create(request.user, course)

         return Response([{
             'username': username,
...
""" """
Grading Context Grading Context
""" """
from collections import defaultdict from collections import OrderedDict
from openedx.core.djangoapps.content.block_structure.api import get_course_in_cache from openedx.core.djangoapps.content.block_structure.api import get_course_in_cache
from .scores import possibly_scored from .scores import possibly_scored
def grading_context_for_course(course): def grading_context_for_course(course_key):
""" """
Same as grading_context, but takes in a course object. Same as grading_context, but takes in a course object.
""" """
course_structure = get_course_in_cache(course.id) course_structure = get_course_in_cache(course_key)
return grading_context(course_structure) return grading_context(course_structure)
@@ -21,16 +21,13 @@ def grading_context(course_structure):
     a student. They are used by grades.grade()

     The grading context has two keys:
-    graded_sections - This contains the sections that are graded, as
-        well as all possible children modules that can affect the
-        grading. This allows some sections to be skipped if the student
-        hasn't seen any part of it.
-
-        The format is a dictionary keyed by section-type. The values are
-        arrays of dictionaries containing
-            "section_block" : The section block
-            "scored_descendant_keys" : An array of usage keys for blocks
-                could possibly be in the section, for any student
+    all_graded_subsections_by_type - This contains all subsections that are
+        graded, keyed by subsection format (assignment type).
+
+        The values are arrays of dictionaries containing
+            "subsection_block" : The subsection block
+            "scored_descendants" : An array of usage keys for blocks
+                could possibly be in the subsection, for any student

     all_graded_blocks - This contains a list of all blocks that can
         affect grading a student. This is used to efficiently fetch
@@ -39,34 +36,36 @@ def grading_context(course_structure):
     """
     all_graded_blocks = []
-    all_graded_sections = defaultdict(list)
+    all_graded_subsections_by_type = OrderedDict()

     for chapter_key in course_structure.get_children(course_structure.root_block_usage_key):
-        for section_key in course_structure.get_children(chapter_key):
-            section = course_structure[section_key]
-            scored_descendants_of_section = [section]
-            if section.graded:
+        for subsection_key in course_structure.get_children(chapter_key):
+            subsection = course_structure[subsection_key]
+            scored_descendants_of_subsection = []
+            if subsection.graded:
                 for descendant_key in course_structure.post_order_traversal(
                         filter_func=possibly_scored,
-                        start_node=section_key,
+                        start_node=subsection_key,
                 ):
-                    scored_descendants_of_section.append(
+                    scored_descendants_of_subsection.append(
                         course_structure[descendant_key],
                     )

                 # include only those blocks that have scores, not if they are just a parent
-                section_info = {
-                    'section_block': section,
+                subsection_info = {
+                    'subsection_block': subsection,
                     'scored_descendants': [
-                        child for child in scored_descendants_of_section
+                        child for child in scored_descendants_of_subsection
                         if getattr(child, 'has_score', None)
                     ]
                 }
-                section_format = getattr(section, 'format', '')
-                all_graded_sections[section_format].append(section_info)
-                all_graded_blocks.extend(scored_descendants_of_section)
+                subsection_format = getattr(subsection, 'format', '')
+                if subsection_format not in all_graded_subsections_by_type:
+                    all_graded_subsections_by_type[subsection_format] = []
+                all_graded_subsections_by_type[subsection_format].append(subsection_info)
+                all_graded_blocks.extend(scored_descendants_of_subsection)

     return {
-        'all_graded_sections': all_graded_sections,
+        'all_graded_subsections_by_type': all_graded_subsections_by_type,
         'all_graded_blocks': all_graded_blocks,
     }
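Consumers of the grading context now iterate subsections by assignment type; a sketch of walking the renamed structure (assuming a valid course_key):

    context = grading_context_for_course(course_key)
    for assignment_type, subsections in context['all_graded_subsections_by_type'].iteritems():
        for subsection_info in subsections:
            block = subsection_info['subsection_block']
            scored = subsection_info['scored_descendants']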
"""
Functionality for course-level grades.
"""
from collections import namedtuple
from logging import getLogger
import dogstats_wrapper as dog_stats_api
from opaque_keys.edx.keys import CourseKey
from courseware.courses import get_course_by_id
from .new.course_grade import CourseGradeFactory
log = getLogger(__name__)
GradeResult = namedtuple('GradeResult', ['student', 'gradeset', 'err_msg'])
def iterate_grades_for(course_or_id, students):
"""
Given a course_id and an iterable of students (User), yield a GradeResult
for every student enrolled in the course. GradeResult is a named tuple of:
(student, gradeset, err_msg)
If an error occurred, gradeset will be an empty dict and err_msg will be an
exception message. If there was no error, err_msg is an empty string.
The gradeset is a dictionary with the following fields:
- grade : A final letter grade.
- percent : The final percent for the class (rounded up).
- section_breakdown : A breakdown of each section that makes
up the grade. (For display)
- grade_breakdown : A breakdown of the major components that
make up the final grade. (For display)
- raw_scores: contains scores for every graded module
"""
if isinstance(course_or_id, (basestring, CourseKey)):
course = get_course_by_id(course_or_id)
else:
course = course_or_id
for student in students:
with dog_stats_api.timer('lms.grades.iterate_grades_for', tags=[u'action:{}'.format(course.id)]):
try:
gradeset = summary(student, course)
yield GradeResult(student, gradeset, "")
except Exception as exc: # pylint: disable=broad-except
# Keep marching on even if this student couldn't be graded for
# some reason, but log it for future reference.
log.exception(
'Cannot grade student %s (%s) in course %s because of exception: %s',
student.username,
student.id,
course.id,
exc.message
)
yield GradeResult(student, {}, exc.message)
def summary(student, course):
"""
Returns the grade summary of the student for the given course.
Also sends a signal to update the minimum grade requirement status.
"""
return CourseGradeFactory(student).create(course).summary
@@ -7,7 +7,7 @@ from django.core.management.base import BaseCommand, CommandError
 import os

 from lms.djangoapps.courseware import courses
 from lms.djangoapps.certificates.models import GeneratedCertificate
-from lms.djangoapps.grades import course_grades
+from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
 from opaque_keys import InvalidKeyError
 from opaque_keys.edx.keys import CourseKey
 from opaque_keys.edx.locations import SlashSeparatedCourseKey
@@ -124,18 +124,18 @@ class Command(BaseCommand):
                               count, total, hours, minutes)
                 start = datetime.datetime.now()
             request.user = student
-            grade = course_grades.summary(student, course)
+            grade = CourseGradeFactory().create(student, course)
             if not header:
-                header = [section['label'] for section in grade[u'section_breakdown']]
+                header = [section['label'] for section in grade.summary[u'section_breakdown']]
                 rows.append(["email", "username", "certificate-grade", "grade"] + header)
-            percents = {section['label']: section['percent'] for section in grade[u'section_breakdown']}
+            percents = {section['label']: section['percent'] for section in grade.summary[u'section_breakdown']}
             row_percents = [percents[label] for label in header]
             if student.username in cert_grades:
                 rows.append(
-                    [student.email, student.username, cert_grades[student.username], grade['percent']] + row_percents,
+                    [student.email, student.username, cert_grades[student.username], grade.percent] + row_percents,
                 )
             else:
-                rows.append([student.email, student.username, "N/A", grade['percent']] + row_percents)
+                rows.append([student.email, student.username, "N/A", grade.percent] + row_percents)

     with open(options['output'], 'wb') as f:
         writer = csv.writer(f)
         writer.writerows(rows)
@@ -3,15 +3,17 @@ CourseGrade Class
 """
 from collections import defaultdict
+from collections import namedtuple
 from logging import getLogger

 from django.conf import settings
 from django.core.exceptions import PermissionDenied
+import dogstats_wrapper as dog_stats_api
 from lazy import lazy

 from lms.djangoapps.course_blocks.api import get_course_blocks
 from lms.djangoapps.grades.config.models import PersistentGradesEnabledFlag
-from openedx.core.djangoapps.signals.signals import GRADES_UPDATED
+from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED
 from xmodule import block_metadata_utils

 from ..models import PersistentCourseGrade
@@ -37,7 +39,7 @@ class CourseGrade(object):
         self._subsection_grade_factory = SubsectionGradeFactory(self.student, self.course, self.course_structure)

     @lazy
-    def subsection_grade_totals_by_format(self):
+    def graded_subsections_by_format(self):
         """
         Returns grades for the subsections in the course in
         a dict keyed by subsection format types.
@@ -48,7 +50,7 @@ class CourseGrade(object):
             if subsection_grade.graded:
                 graded_total = subsection_grade.graded_total
                 if graded_total.possible > 0:
-                    subsections_by_format[subsection_grade.format].append(graded_total)
+                    subsections_by_format[subsection_grade.format].append(subsection_grade)
         return subsections_by_format

     @lazy
@@ -70,7 +72,7 @@ class CourseGrade(object):
         # Grading policy might be overriden by a CCX, need to reset it
         self.course.set_grading_policy(self.course.grading_policy)
         grade_value = self.course.grader.grade(
-            self.subsection_grade_totals_by_format,
+            self.graded_subsections_by_format,
             generate_random_scores=settings.GENERATE_PROFILE_SCORES
         )
         # can't use the existing properties due to recursion issues caused by referencing self.grade_value
@@ -137,8 +139,6 @@ class CourseGrade(object):
         grade_summary = self.grade_value
         grade_summary['percent'] = self.percent
         grade_summary['grade'] = self.letter_grade
-        grade_summary['totaled_scores'] = self.subsection_grade_totals_by_format
-        grade_summary['raw_scores'] = list(self.locations_to_scores.itervalues())
         return grade_summary
@@ -150,7 +150,7 @@ class CourseGrade(object):
         """
         subsections_total = sum(len(chapter['sections']) for chapter in self.chapter_grades)
-        total_graded_subsections = sum(len(x) for x in self.subsection_grade_totals_by_format.itervalues())
+        total_graded_subsections = sum(len(x) for x in self.graded_subsections_by_format.itervalues())
         subsections_created = len(self._subsection_grade_factory._unsaved_subsection_grades)  # pylint: disable=protected-access
         subsections_read = subsections_total - subsections_created
         blocks_total = len(self.locations_to_scores)
@@ -295,10 +295,10 @@ class CourseGrade(object):
         """
         Signal all listeners when grades are computed.
         """
-        responses = GRADES_UPDATED.send_robust(
+        responses = COURSE_GRADE_CHANGED.send_robust(
             sender=None,
             user=self.student,
-            grade_summary=self.summary,
+            course_grade=self,
             course_key=self.course.id,
             deadline=self.course.end
         )
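Receivers must adapt to both the new signal name and the new payload (the CourseGrade object itself rather than the summary dict); a sketch of an updated receiver (the handler name is hypothetical):

    import logging
    from django.dispatch import receiver
    from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED

    log = logging.getLogger(__name__)

    @receiver(COURSE_GRADE_CHANGED)
    def listen_for_course_grade_changed(sender, user, course_grade, course_key, deadline, **kwargs):
        # course_grade is a CourseGrade instance; the legacy summary dict is
        # still reachable as course_grade.summary when the old shape is needed.
        log.info(u"grade changed for %s in %s: %s", user.username, course_key, course_grade.percent)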
...@@ -324,32 +324,60 @@ class CourseGradeFactory(object): ...@@ -324,32 +324,60 @@ class CourseGradeFactory(object):
""" """
Factory class to create Course Grade objects Factory class to create Course Grade objects
""" """
def __init__(self, student): def create(self, student, course, read_only=True):
self.student = student
def create(self, course, read_only=True):
""" """
Returns the CourseGrade object for the given student and course. Returns the CourseGrade object for the given student and course.
If read_only is True, doesn't save any updates to the grades. If read_only is True, doesn't save any updates to the grades.
Raises a PermissionDenied if the user does not have course access. Raises a PermissionDenied if the user does not have course access.
""" """
course_structure = get_course_blocks(self.student, course.location) course_structure = get_course_blocks(student, course.location)
# if user does not have access to this course, throw an exception # if user does not have access to this course, throw an exception
if not self._user_has_access_to_course(course_structure): if not self._user_has_access_to_course(course_structure):
raise PermissionDenied("User does not have access to this course") raise PermissionDenied("User does not have access to this course")
return ( return (
self._get_saved_grade(course, course_structure) or self._get_saved_grade(student, course, course_structure) or
self._compute_and_update_grade(course, course_structure, read_only) self._compute_and_update_grade(student, course, course_structure, read_only)
) )
def update(self, course, course_structure): GradeResult = namedtuple('GradeResult', ['student', 'course_grade', 'err_msg'])
def iter(self, course, students):
"""
Given a course and an iterable of students (User), yield a GradeResult
for every student enrolled in the course. GradeResult is a named tuple of:
(student, course_grade, err_msg)
If an error occurred, course_grade will be None and err_msg will be an
exception message. If there was no error, err_msg is an empty string.
"""
for student in students:
with dog_stats_api.timer('lms.grades.CourseGradeFactory.iter', tags=[u'action:{}'.format(course.id)]):
try:
course_grade = self.create(student, course)
yield self.GradeResult(student, course_grade, "")
except Exception as exc: # pylint: disable=broad-except
# Keep marching on even if this student couldn't be graded for
# some reason, but log it for future reference.
log.exception(
'Cannot grade student %s (%s) in course %s because of exception: %s',
student.username,
student.id,
course.id,
exc.message
)
yield self.GradeResult(student, None, exc.message)
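A usage sketch for the new iter() API; percent_rows is a hypothetical helper, and students is any iterable of User objects:

    import logging

    from lms.djangoapps.grades.new.course_grade import CourseGradeFactory

    log = logging.getLogger(__name__)

    def percent_rows(course, students):
        """Collect (username, percent) rows, skipping students that fail to grade."""
        rows = []
        for student, course_grade, err_msg in CourseGradeFactory().iter(course, students):
            if course_grade is None:
                # err_msg carries the exception message; the run keeps going.
                log.warning(u'Skipping %s: %s', student.username, err_msg)
                continue
            rows.append((student.username, course_grade.percent))
        return rows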
def update(self, student, course, course_structure):
""" """
Updates the CourseGrade for this Factory's student. Updates the CourseGrade for the given student and course.
""" """
self._compute_and_update_grade(course, course_structure) self._compute_and_update_grade(student, course, course_structure)
def get_persisted(self, course): def get_persisted(self, student, course):
""" """
Returns the saved grade for the given course and student, Returns the saved grade for the given course and student,
irrespective of whether the saved grade is up-to-date. irrespective of whether the saved grade is up-to-date.
...@@ -357,9 +385,9 @@ class CourseGradeFactory(object): ...@@ -357,9 +385,9 @@ class CourseGradeFactory(object):
if not PersistentGradesEnabledFlag.feature_enabled(course.id): if not PersistentGradesEnabledFlag.feature_enabled(course.id):
return None return None
return CourseGrade.get_persisted_grade(self.student, course) return CourseGrade.get_persisted_grade(student, course)
def _get_saved_grade(self, course, course_structure): def _get_saved_grade(self, student, course, course_structure):
""" """
Returns the saved grade for the given course and student. Returns the saved grade for the given course and student.
""" """
...@@ -367,18 +395,18 @@ class CourseGradeFactory(object): ...@@ -367,18 +395,18 @@ class CourseGradeFactory(object):
return None return None
return CourseGrade.load_persisted_grade( return CourseGrade.load_persisted_grade(
self.student, student,
course, course,
course_structure course_structure
) )
def _compute_and_update_grade(self, course, course_structure, read_only=False): def _compute_and_update_grade(self, student, course, course_structure, read_only=False):
""" """
Freshly computes and updates the grade for the student and course. Freshly computes and updates the grade for the student and course.
If read_only is True, doesn't save any updates to the grades. If read_only is True, doesn't save any updates to the grades.
""" """
course_grade = CourseGrade(self.student, course, course_structure) course_grade = CourseGrade(student, course, course_structure)
course_grade.compute_and_update(read_only) course_grade.compute_and_update(read_only)
return course_grade return course_grade
......
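The two read paths now differ in freshness guarantees. A sketch of the trade-off, assuming user and course are in scope:

    factory = CourseGradeFactory()

    # Cheap read of the last saved grade; may be stale, and is None when
    # persistent grades are disabled or nothing has been saved yet.
    persisted = factory.get_persisted(user, course)

    # Guaranteed-current value: reuses a valid saved grade, otherwise
    # recomputes (and saves, unless read_only=True).
    current = factory.create(user, course)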
...@@ -68,7 +68,7 @@ class SubsectionGrade(object): ...@@ -68,7 +68,7 @@ class SubsectionGrade(object):
): ):
self._compute_block_score(descendant_key, course_structure, submissions_scores, csm_scores) self._compute_block_score(descendant_key, course_structure, submissions_scores, csm_scores)
self.all_total, self.graded_total = graders.aggregate_scores(self.scores, self.display_name, self.location) self.all_total, self.graded_total = graders.aggregate_scores(self.scores)
self._log_event(log.debug, u"init_from_structure", student) self._log_event(log.debug, u"init_from_structure", student)
return self return self
...@@ -83,16 +83,12 @@ class SubsectionGrade(object): ...@@ -83,16 +83,12 @@ class SubsectionGrade(object):
tw_earned=model.earned_graded, tw_earned=model.earned_graded,
tw_possible=model.possible_graded, tw_possible=model.possible_graded,
graded=True, graded=True,
display_name=self.display_name,
module_id=self.location,
attempted=model.first_attempted is not None, attempted=model.first_attempted is not None,
) )
self.all_total = AggregatedScore( self.all_total = AggregatedScore(
tw_earned=model.earned_all, tw_earned=model.earned_all,
tw_possible=model.possible_all, tw_possible=model.possible_all,
graded=False, graded=False,
display_name=self.display_name,
module_id=self.location,
attempted=model.first_attempted is not None, attempted=model.first_attempted is not None,
) )
self._log_event(log.debug, u"init_from_model", student) self._log_event(log.debug, u"init_from_model", student)
......
"""
Progress Summary of a learner's course grades.
"""
from .new.course_grade import CourseGradeFactory
def summary(student, course):
"""
Returns the CourseGrade for the given course and student.
"""
return CourseGradeFactory(student).create(course)
...@@ -122,8 +122,6 @@ def get_score(submissions_scores, csm_scores, persisted_block, block): ...@@ -122,8 +122,6 @@ def get_score(submissions_scores, csm_scores, persisted_block, block):
weighted_possible, weighted_possible,
weight, weight,
graded, graded,
display_name=display_name_with_default_escaped(block),
module_id=block.location,
attempted=attempted, attempted=attempted,
) )
......
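With display_name and module_id removed, a ProblemScore now carries only the numeric fields plus the graded/attempted flags. A sketch with made-up values:

    from xmodule.graders import ProblemScore

    score = ProblemScore(
        raw_earned=1,
        raw_possible=2,
        weighted_earned=2,
        weighted_possible=4,
        weight=4,
        graded=True,
        attempted=True,
    )
    assert (score.earned, score.possible) == (2.0, 4.0)  # values coerced to float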
...@@ -182,4 +182,4 @@ def recalculate_course_grade(sender, course, course_structure, user, **kwargs): ...@@ -182,4 +182,4 @@ def recalculate_course_grade(sender, course, course_structure, user, **kwargs):
""" """
Updates a saved course grade. Updates a saved course grade.
""" """
CourseGradeFactory(user).update(course, course_structure) CourseGradeFactory().update(user, course, course_structure)
...@@ -3,11 +3,9 @@ Test grade calculation. ...@@ -3,11 +3,9 @@ Test grade calculation.
""" """
import ddt import ddt
from django.http import Http404
import itertools import itertools
from mock import patch from mock import patch
from nose.plugins.attrib import attr from nose.plugins.attrib import attr
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory
from courseware.model_data import set_score from courseware.model_data import set_score
...@@ -17,49 +15,21 @@ from lms.djangoapps.course_blocks.api import get_course_blocks ...@@ -17,49 +15,21 @@ from lms.djangoapps.course_blocks.api import get_course_blocks
from openedx.core.djangolib.testing.utils import get_mock_request from openedx.core.djangolib.testing.utils import get_mock_request
from student.tests.factories import UserFactory from student.tests.factories import UserFactory
from student.models import CourseEnrollment from student.models import CourseEnrollment
from xmodule.block_metadata_utils import display_name_with_default_escaped
from xmodule.graders import ProblemScore from xmodule.graders import ProblemScore
from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from .utils import answer_problem from .utils import answer_problem
from .. import course_grades
from ..course_grades import summary as grades_summary
from ..module_grades import get_module_score from ..module_grades import get_module_score
from ..new.course_grade import CourseGradeFactory from ..new.course_grade import CourseGradeFactory
from ..new.subsection_grade import SubsectionGradeFactory from ..new.subsection_grade import SubsectionGradeFactory
def _grade_with_errors(student, course):
"""This fake grade method will throw exceptions for student3 and
student4, but allow any other students to go through normal grading.
It's meant to simulate when something goes really wrong while trying to
grade a particular student, so we can test that we won't kill the entire
course grading run.
"""
if student.username in ['student3', 'student4']:
raise Exception("I don't like {}".format(student.username))
return grades_summary(student, course)
def _create_problem_xml():
"""
Creates and returns XML for a multiple choice response problem
"""
return MultipleChoiceResponseXMLFactory().build_xml(
question_text='The correct answer is Choice 3',
choices=[False, False, True, False],
choice_names=['choice_0', 'choice_1', 'choice_2', 'choice_3']
)
@attr(shard=1) @attr(shard=1)
class TestGradeIteration(SharedModuleStoreTestCase): class TestGradeIteration(SharedModuleStoreTestCase):
""" """
Test iteration through student gradesets. Test iteration through student course grades.
""" """
COURSE_NUM = "1000" COURSE_NUM = "1000"
COURSE_NAME = "grading_test_course" COURSE_NAME = "grading_test_course"
...@@ -87,29 +57,25 @@ class TestGradeIteration(SharedModuleStoreTestCase): ...@@ -87,29 +57,25 @@ class TestGradeIteration(SharedModuleStoreTestCase):
] ]
def test_empty_student_list(self): def test_empty_student_list(self):
"""If we don't pass in any students, it should return a zero-length """
iterator, but it shouldn't error.""" If we don't pass in any students, it should return a zero-length
gradeset_results = list(course_grades.iterate_grades_for(self.course.id, [])) iterator, but it shouldn't error.
self.assertEqual(gradeset_results, []) """
grade_results = list(CourseGradeFactory().iter(self.course, []))
def test_nonexistent_course(self): self.assertEqual(grade_results, [])
"""If the course we want to get grades for does not exist, a `Http404`
should be raised. This is a horrible crossing of abstraction boundaries
and should be fixed, but for now we're just testing the behavior. :-("""
with self.assertRaises(Http404):
gradeset_results = course_grades.iterate_grades_for(SlashSeparatedCourseKey("I", "dont", "exist"), [])
gradeset_results.next()
def test_all_empty_grades(self): def test_all_empty_grades(self):
"""No students have grade entries""" """
all_gradesets, all_errors = self._gradesets_and_errors_for(self.course.id, self.students) No students have grade entries.
"""
all_course_grades, all_errors = self._course_grades_and_errors_for(self.course, self.students)
self.assertEqual(len(all_errors), 0) self.assertEqual(len(all_errors), 0)
for gradeset in all_gradesets.values(): for course_grade in all_course_grades.values():
self.assertIsNone(gradeset['grade']) self.assertIsNone(course_grade.letter_grade)
self.assertEqual(gradeset['percent'], 0.0) self.assertEqual(course_grade.percent, 0.0)
@patch('lms.djangoapps.grades.course_grades.summary', _grade_with_errors) @patch('lms.djangoapps.grades.new.course_grade.CourseGradeFactory.create')
def test_grading_exception(self): def test_grading_exception(self, mock_course_grade):
"""Test that we correctly capture exception messages that bubble up from """Test that we correctly capture exception messages that bubble up from
grading. Note that we only see errors at this level if the grading grading. Note that we only see errors at this level if the grading
process for this student fails entirely due to an unexpected event -- process for this student fails entirely due to an unexpected event --
...@@ -118,43 +84,51 @@ class TestGradeIteration(SharedModuleStoreTestCase): ...@@ -118,43 +84,51 @@ class TestGradeIteration(SharedModuleStoreTestCase):
We patch the grade() method with our own, which will generate the errors We mock CourseGradeFactory.create so that it raises the errors
for student3 and student4. for student3 and student4.
""" """
all_gradesets, all_errors = self._gradesets_and_errors_for(self.course.id, self.students)
student1, student2, student3, student4, student5 = self.students student1, student2, student3, student4, student5 = self.students
mock_course_grade.side_effect = [
Exception("Error for {}.".format(student.username))
if student.username in ['student3', 'student4']
else mock_course_grade.return_value
for student in self.students
]
all_course_grades, all_errors = self._course_grades_and_errors_for(self.course, self.students)
self.assertEqual( self.assertEqual(
all_errors, all_errors,
{ {
student3: "I don't like student3", student3: "Error for student3.",
student4: "I don't like student4" student4: "Error for student4.",
} }
) )
# But we should still have five gradesets # But we should still have five course grades
self.assertEqual(len(all_gradesets), 5) self.assertEqual(len(all_course_grades), 5)
# Even though two will simply be empty # Even though two will simply be empty
self.assertFalse(all_gradesets[student3]) self.assertIsNone(all_course_grades[student3])
self.assertFalse(all_gradesets[student4]) self.assertIsNone(all_course_grades[student4])
# The rest will have grade information in them # The rest will have grade information in them
self.assertTrue(all_gradesets[student1]) self.assertIsNotNone(all_course_grades[student1])
self.assertTrue(all_gradesets[student2]) self.assertIsNotNone(all_course_grades[student2])
self.assertTrue(all_gradesets[student5]) self.assertIsNotNone(all_course_grades[student5])
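For reference, the patched create() above relies on mock's side_effect sequencing: given an iterable, each call consumes the next element, and an element that is an exception is raised rather than returned. A standalone illustration:

    from mock import MagicMock

    m = MagicMock(side_effect=[1, ValueError('boom'), 3])
    m()                 # returns 1
    try:
        m()             # raises ValueError('boom')
    except ValueError:
        pass
    m()                 # returns 3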
################################# Helpers ################################# def _course_grades_and_errors_for(self, course, students):
def _gradesets_and_errors_for(self, course_id, students): """
"""Simple helper method to iterate through student grades and give us Simple helper method to iterate through student grades and give us
two dictionaries -- one that has all students and their respective two dictionaries -- one that has all students and their respective
gradesets, and one that has only students that could not be graded and course grades, and one that has only students that could not be graded
their respective error messages.""" and their respective error messages.
students_to_gradesets = {} """
students_to_course_grades = {}
students_to_errors = {} students_to_errors = {}
for student, gradeset, err_msg in course_grades.iterate_grades_for(course_id, students): for student, course_grade, err_msg in CourseGradeFactory().iter(course, students):
students_to_gradesets[student] = gradeset students_to_course_grades[student] = course_grade
if err_msg: if err_msg:
students_to_errors[student] = err_msg students_to_errors[student] = err_msg
return students_to_gradesets, students_to_errors return students_to_course_grades, students_to_errors
@ddt.ddt @ddt.ddt
...@@ -169,7 +143,7 @@ class TestWeightedProblems(SharedModuleStoreTestCase): ...@@ -169,7 +143,7 @@ class TestWeightedProblems(SharedModuleStoreTestCase):
cls.chapter = ItemFactory.create(parent=cls.course, category="chapter", display_name="chapter") cls.chapter = ItemFactory.create(parent=cls.course, category="chapter", display_name="chapter")
cls.sequential = ItemFactory.create(parent=cls.chapter, category="sequential", display_name="sequential") cls.sequential = ItemFactory.create(parent=cls.chapter, category="sequential", display_name="sequential")
cls.vertical = ItemFactory.create(parent=cls.sequential, category="vertical", display_name="vertical1") cls.vertical = ItemFactory.create(parent=cls.sequential, category="vertical", display_name="vertical1")
problem_xml = _create_problem_xml() problem_xml = cls._create_problem_xml()
cls.problems = [] cls.problems = []
for i in range(2): for i in range(2):
cls.problems.append( cls.problems.append(
...@@ -186,6 +160,17 @@ class TestWeightedProblems(SharedModuleStoreTestCase): ...@@ -186,6 +160,17 @@ class TestWeightedProblems(SharedModuleStoreTestCase):
self.user = UserFactory() self.user = UserFactory()
self.request = get_mock_request(self.user) self.request = get_mock_request(self.user)
@classmethod
def _create_problem_xml(cls):
"""
Creates and returns XML for a multiple choice response problem
"""
return MultipleChoiceResponseXMLFactory().build_xml(
question_text='The correct answer is Choice 3',
choices=[False, False, True, False],
choice_names=['choice_0', 'choice_1', 'choice_2', 'choice_3']
)
def _verify_grades(self, raw_earned, raw_possible, weight, expected_score): def _verify_grades(self, raw_earned, raw_possible, weight, expected_score):
""" """
Verifies the computed grades are as expected. Verifies the computed grades are as expected.
...@@ -211,8 +196,6 @@ class TestWeightedProblems(SharedModuleStoreTestCase): ...@@ -211,8 +196,6 @@ class TestWeightedProblems(SharedModuleStoreTestCase):
# verify all problem grades # verify all problem grades
for problem in self.problems: for problem in self.problems:
problem_score = subsection_grade.locations_to_scores[problem.location] problem_score = subsection_grade.locations_to_scores[problem.location]
expected_score.display_name = display_name_with_default_escaped(problem)
expected_score.module_id = problem.location
self.assertEquals(problem_score, expected_score) self.assertEquals(problem_score, expected_score)
# verify subsection grades # verify subsection grades
...@@ -246,8 +229,6 @@ class TestWeightedProblems(SharedModuleStoreTestCase): ...@@ -246,8 +229,6 @@ class TestWeightedProblems(SharedModuleStoreTestCase):
weighted_possible=expected_w_possible, weighted_possible=expected_w_possible,
weight=weight, weight=weight,
graded=expected_graded, graded=expected_graded,
display_name=None, # problem-specific, filled in by _verify_grades
module_id=None, # problem-specific, filled in by _verify_grades
attempted=True, attempted=True,
) )
self._verify_grades(raw_earned, raw_possible, weight, expected_score) self._verify_grades(raw_earned, raw_possible, weight, expected_score)
...@@ -296,7 +277,7 @@ class TestScoreForModule(SharedModuleStoreTestCase): ...@@ -296,7 +277,7 @@ class TestScoreForModule(SharedModuleStoreTestCase):
answer_problem(cls.course, cls.request, cls.l, score=1, max_value=3) answer_problem(cls.course, cls.request, cls.l, score=1, max_value=3)
answer_problem(cls.course, cls.request, cls.n, score=3, max_value=10) answer_problem(cls.course, cls.request, cls.n, score=3, max_value=10)
cls.course_grade = CourseGradeFactory(cls.request.user).create(cls.course) cls.course_grade = CourseGradeFactory().create(cls.request.user, cls.course)
def test_score_chapter(self): def test_score_chapter(self):
earned, possible = self.course_grade.score_for_module(self.a.location) earned, possible = self.course_grade.score_for_module(self.a.location)
......
...@@ -111,7 +111,7 @@ class TestCourseGradeFactory(GradeTestBase): ...@@ -111,7 +111,7 @@ class TestCourseGradeFactory(GradeTestBase):
def test_course_grade_feature_gating(self, feature_flag, course_setting): def test_course_grade_feature_gating(self, feature_flag, course_setting):
# Grades are only saved if the feature flag and the advanced setting are # Grades are only saved if the feature flag and the advanced setting are
# both set to True. # both set to True.
grade_factory = CourseGradeFactory(self.request.user) grade_factory = CourseGradeFactory()
with persistent_grades_feature_flags( with persistent_grades_feature_flags(
global_flag=feature_flag, global_flag=feature_flag,
enabled_for_all_courses=False, enabled_for_all_courses=False,
...@@ -119,32 +119,32 @@ class TestCourseGradeFactory(GradeTestBase): ...@@ -119,32 +119,32 @@ class TestCourseGradeFactory(GradeTestBase):
enabled_for_course=course_setting enabled_for_course=course_setting
): ):
with patch('lms.djangoapps.grades.new.course_grade.CourseGrade.load_persisted_grade') as mock_save_grades: with patch('lms.djangoapps.grades.new.course_grade.CourseGrade.load_persisted_grade') as mock_save_grades:
grade_factory.create(self.course) grade_factory.create(self.request.user, self.course)
self.assertEqual(mock_save_grades.called, feature_flag and course_setting) self.assertEqual(mock_save_grades.called, feature_flag and course_setting)
def test_course_grade_creation(self): def test_course_grade_creation(self):
grade_factory = CourseGradeFactory(self.request.user) grade_factory = CourseGradeFactory()
with mock_get_score(1, 2): with mock_get_score(1, 2):
course_grade = grade_factory.create(self.course) course_grade = grade_factory.create(self.request.user, self.course)
self.assertEqual(course_grade.letter_grade, u'Pass') self.assertEqual(course_grade.letter_grade, u'Pass')
self.assertEqual(course_grade.percent, 0.5) self.assertEqual(course_grade.percent, 0.5)
def test_zero_course_grade(self): def test_zero_course_grade(self):
grade_factory = CourseGradeFactory(self.request.user) grade_factory = CourseGradeFactory()
with mock_get_score(0, 2): with mock_get_score(0, 2):
course_grade = grade_factory.create(self.course) course_grade = grade_factory.create(self.request.user, self.course)
self.assertIsNone(course_grade.letter_grade) self.assertIsNone(course_grade.letter_grade)
self.assertEqual(course_grade.percent, 0.0) self.assertEqual(course_grade.percent, 0.0)
def test_get_persisted(self): def test_get_persisted(self):
grade_factory = CourseGradeFactory(self.request.user) grade_factory = CourseGradeFactory()
# first, create a grade in the database # first, create a grade in the database
with mock_get_score(1, 2): with mock_get_score(1, 2):
grade_factory.create(self.course, read_only=False) grade_factory.create(self.request.user, self.course, read_only=False)
# retrieve the grade, ensuring it is as expected and take just one query # retrieve the grade, ensuring it is as expected and take just one query
with self.assertNumQueries(1): with self.assertNumQueries(1):
course_grade = grade_factory.get_persisted(self.course) course_grade = grade_factory.get_persisted(self.request.user, self.course)
self.assertEqual(course_grade.letter_grade, u'Pass') self.assertEqual(course_grade.letter_grade, u'Pass')
self.assertEqual(course_grade.percent, 0.5) self.assertEqual(course_grade.percent, 0.5)
...@@ -168,7 +168,7 @@ class TestCourseGradeFactory(GradeTestBase): ...@@ -168,7 +168,7 @@ class TestCourseGradeFactory(GradeTestBase):
# ensure the grade can still be retrieved via get_persisted # ensure the grade can still be retrieved via get_persisted
# despite its outdated grading policy # despite its outdated grading policy
with self.assertNumQueries(1): with self.assertNumQueries(1):
course_grade = grade_factory.get_persisted(self.course) course_grade = grade_factory.get_persisted(self.request.user, self.course)
self.assertEqual(course_grade.letter_grade, u'Pass') self.assertEqual(course_grade.letter_grade, u'Pass')
self.assertEqual(course_grade.percent, 0.5) self.assertEqual(course_grade.percent, 0.5)
...@@ -587,7 +587,7 @@ class TestCourseGradeLogging(ProblemSubmissionTestMixin, SharedModuleStoreTestCa ...@@ -587,7 +587,7 @@ class TestCourseGradeLogging(ProblemSubmissionTestMixin, SharedModuleStoreTestCa
Creates a course grade and asserts that the associated logging Creates a course grade and asserts that the associated logging
matches the expected totals passed in to the function. matches the expected totals passed in to the function.
""" """
factory.create(self.course, read_only=False) factory.create(self.request.user, self.course, read_only=False)
log_mock.assert_called_with( log_mock.assert_called_with(
u"Persistent Grades: CourseGrade.{0}, course: {1}, user: {2}".format( u"Persistent Grades: CourseGrade.{0}, course: {1}, user: {2}".format(
log_statement, log_statement,
...@@ -597,7 +597,7 @@ class TestCourseGradeLogging(ProblemSubmissionTestMixin, SharedModuleStoreTestCa ...@@ -597,7 +597,7 @@ class TestCourseGradeLogging(ProblemSubmissionTestMixin, SharedModuleStoreTestCa
) )
def test_course_grade_logging(self): def test_course_grade_logging(self):
grade_factory = CourseGradeFactory(self.request.user) grade_factory = CourseGradeFactory()
with persistent_grades_feature_flags( with persistent_grades_feature_flags(
global_flag=True, global_flag=True,
enabled_for_all_courses=False, enabled_for_all_courses=False,
......
...@@ -175,9 +175,7 @@ class TestGetScore(TestCase): ...@@ -175,9 +175,7 @@ class TestGetScore(TestCase):
self._create_persisted_block(persisted_block_value), self._create_persisted_block(persisted_block_value),
self._create_block(block_value), self._create_block(block_value),
) )
expected_score = ProblemScore( expected_score = ProblemScore(**expected_result._asdict())
display_name=self.display_name, module_id=self.location, **expected_result._asdict()
)
self.assertEquals(score, expected_score) self.assertEquals(score, expected_score)
......
...@@ -9,13 +9,15 @@ from xmodule.graders import ProblemScore ...@@ -9,13 +9,15 @@ from xmodule.graders import ProblemScore
@contextmanager @contextmanager
def mock_passing_grade(grade_pass='Pass', percent=0.75): def mock_passing_grade(grade_pass='Pass', percent=0.75):
""" """
Mock the grading function to always return a passing grade. Mock the grading function to always return a passing grade.
""" """
with patch('lms.djangoapps.grades.course_grades.summary') as mock_grade: with patch('lms.djangoapps.grades.new.course_grade.CourseGrade._compute_letter_grade') as mock_letter_grade:
mock_grade.return_value = {'grade': grade_pass, 'percent': percent} with patch('lms.djangoapps.grades.new.course_grade.CourseGrade._calc_percent') as mock_percent_grade:
yield mock_letter_grade.return_value = grade_pass
mock_percent_grade.return_value = percent
yield
@contextmanager @contextmanager
...@@ -31,8 +33,6 @@ def mock_get_score(earned=0, possible=1): ...@@ -31,8 +33,6 @@ def mock_get_score(earned=0, possible=1):
weighted_possible=possible, weighted_possible=possible,
weight=1, weight=1,
graded=True, graded=True,
display_name=None,
module_id=None,
attempted=True, attempted=True,
) )
yield mock_score yield mock_score
......
...@@ -14,7 +14,7 @@ from opaque_keys.edx.keys import CourseKey ...@@ -14,7 +14,7 @@ from opaque_keys.edx.keys import CourseKey
from edxmako.shortcuts import render_to_response from edxmako.shortcuts import render_to_response
from courseware.courses import get_course_with_access from courseware.courses import get_course_with_access
from lms.djangoapps.instructor.views.api import require_level from lms.djangoapps.instructor.views.api import require_level
from lms.djangoapps.grades import course_grades from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
from xmodule.modulestore.django import modulestore from xmodule.modulestore.django import modulestore
...@@ -91,7 +91,7 @@ def get_grade_book_page(request, course, course_key): ...@@ -91,7 +91,7 @@ def get_grade_book_page(request, course, course_key):
'username': student.username, 'username': student.username,
'id': student.id, 'id': student.id,
'email': student.email, 'email': student.email,
'grade_summary': course_grades.summary(student, course) 'grade_summary': CourseGradeFactory().create(student, course).summary
} }
for student in enrolled_students for student in enrolled_students
] ]
......
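Callers that still need the legacy dict shape go through the summary property, as the gradebook view above does. A sketch, assuming summary keeps the legacy keys such as 'percent' and 'grade':

    course_grade = CourseGradeFactory().create(student, course)
    grade_summary = course_grade.summary
    percent = grade_summary['percent']   # assumed legacy key
    letter = grade_summary['grade']      # assumed legacy key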
...@@ -491,14 +491,14 @@ def dump_grading_context(course): ...@@ -491,14 +491,14 @@ def dump_grading_context(course):
msg += hbar msg += hbar
msg += "Listing grading context for course %s\n" % course.id.to_deprecated_string() msg += "Listing grading context for course %s\n" % course.id.to_deprecated_string()
gcontext = grading_context_for_course(course) gcontext = grading_context_for_course(course.id)
msg += "graded sections:\n" msg += "graded sections:\n"
msg += '%s\n' % gcontext['all_graded_sections'].keys() msg += '%s\n' % gcontext['all_graded_subsections_by_type'].keys()
for (gsomething, gsvals) in gcontext['all_graded_sections'].items(): for (gsomething, gsvals) in gcontext['all_graded_subsections_by_type'].items():
msg += "--> Section %s:\n" % (gsomething) msg += "--> Section %s:\n" % (gsomething)
for sec in gsvals: for sec in gsvals:
sdesc = sec['section_block'] sdesc = sec['subsection_block']
frmat = getattr(sdesc, 'format', None) frmat = getattr(sdesc, 'format', None)
aname = '' aname = ''
if frmat in graders: if frmat in graders:
......
...@@ -134,9 +134,9 @@ class TestRescoringTask(TestIntegrationTask): ...@@ -134,9 +134,9 @@ class TestRescoringTask(TestIntegrationTask):
# are in sync. # are in sync.
expected_subsection_grade = expected_score expected_subsection_grade = expected_score
course_grade = CourseGradeFactory(user).create(self.course) course_grade = CourseGradeFactory().create(user, self.course)
self.assertEquals( self.assertEquals(
course_grade.subsection_grade_totals_by_format['Homework'][0].earned, course_grade.graded_subsections_by_format['Homework'][0].graded_total.earned,
expected_subsection_grade, expected_subsection_grade,
) )
......
...@@ -7,7 +7,7 @@ from django.contrib.auth.models import User ...@@ -7,7 +7,7 @@ from django.contrib.auth.models import User
from django.dispatch import receiver from django.dispatch import receiver
import logging import logging
from lms.djangoapps.grades import progress from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
from lms.djangoapps.grades.signals.signals import PROBLEM_WEIGHTED_SCORE_CHANGED from lms.djangoapps.grades.signals.signals import PROBLEM_WEIGHTED_SCORE_CHANGED
from lms import CELERY_APP from lms import CELERY_APP
from lti_provider.models import GradedAssignment from lti_provider.models import GradedAssignment
...@@ -109,8 +109,8 @@ def send_composite_outcome(user_id, course_id, assignment_id, version): ...@@ -109,8 +109,8 @@ def send_composite_outcome(user_id, course_id, assignment_id, version):
mapped_usage_key = assignment.usage_key.map_into_course(course_key) mapped_usage_key = assignment.usage_key.map_into_course(course_key)
user = User.objects.get(id=user_id) user = User.objects.get(id=user_id)
course = modulestore().get_course(course_key, depth=0) course = modulestore().get_course(course_key, depth=0)
progress_summary = progress.summary(user, course) course_grade = CourseGradeFactory().create(user, course)
earned, possible = progress_summary.score_for_module(mapped_usage_key) earned, possible = course_grade.score_for_module(mapped_usage_key)
if possible == 0: if possible == 0:
weighted_score = 0 weighted_score = 0
else: else:
......
...@@ -99,9 +99,9 @@ class SendCompositeOutcomeTest(BaseOutcomeTest): ...@@ -99,9 +99,9 @@ class SendCompositeOutcomeTest(BaseOutcomeTest):
block_type='problem', block_type='problem',
block_id='problem', block_id='problem',
) )
self.weighted_scores = MagicMock() self.course_grade = MagicMock()
self.weighted_scores_mock = self.setup_patch( self.course_grade_mock = self.setup_patch(
'lti_provider.tasks.progress.summary', self.weighted_scores 'lti_provider.tasks.CourseGradeFactory.create', self.course_grade
) )
self.module_store = MagicMock() self.module_store = MagicMock()
self.module_store.get_item = MagicMock(return_value=self.descriptor) self.module_store.get_item = MagicMock(return_value=self.descriptor)
...@@ -117,7 +117,7 @@ class SendCompositeOutcomeTest(BaseOutcomeTest): ...@@ -117,7 +117,7 @@ class SendCompositeOutcomeTest(BaseOutcomeTest):
) )
@ddt.unpack @ddt.unpack
def test_outcome_with_score_score(self, earned, possible, expected): def test_outcome_with_score_score(self, earned, possible, expected):
self.weighted_scores.score_for_module = MagicMock(return_value=(earned, possible)) self.course_grade.score_for_module = MagicMock(return_value=(earned, possible))
tasks.send_composite_outcome( tasks.send_composite_outcome(
self.user.id, unicode(self.course_key), self.assignment.id, 1 self.user.id, unicode(self.course_key), self.assignment.id, 1
) )
...@@ -129,4 +129,4 @@ class SendCompositeOutcomeTest(BaseOutcomeTest): ...@@ -129,4 +129,4 @@ class SendCompositeOutcomeTest(BaseOutcomeTest):
tasks.send_composite_outcome( tasks.send_composite_outcome(
self.user.id, unicode(self.course_key), self.assignment.id, 1 self.user.id, unicode(self.course_key), self.assignment.id, 1
) )
self.assertEqual(self.weighted_scores_mock.call_count, 0) self.assertEqual(self.course_grade_mock.call_count, 0)
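The test patches the name where send_composite_outcome looks it up (lti_provider.tasks), not where it is defined. An inline equivalent of what setup_patch presumably does, with tasks, user, course_key, and assignment assumed in scope:

    from mock import MagicMock, patch

    fake_grade = MagicMock()
    fake_grade.score_for_module.return_value = (3, 10)

    # Replace create() with a mock that hands back our fake CourseGrade.
    with patch('lti_provider.tasks.CourseGradeFactory.create',
               new=MagicMock(return_value=fake_grade)):
        tasks.send_composite_outcome(user.id, unicode(course_key), assignment.id, 1)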
...@@ -10,7 +10,7 @@ from opaque_keys.edx.keys import CourseKey ...@@ -10,7 +10,7 @@ from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import SignalHandler from xmodule.modulestore.django import SignalHandler
from openedx.core.djangoapps.credit.verification_access import update_verification_partitions from openedx.core.djangoapps.credit.verification_access import update_verification_partitions
from openedx.core.djangoapps.signals.signals import GRADES_UPDATED from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
...@@ -52,14 +52,14 @@ def on_pre_publish(sender, course_key, **kwargs): # pylint: disable=unused-argu ...@@ -52,14 +52,14 @@ def on_pre_publish(sender, course_key, **kwargs): # pylint: disable=unused-argu
log.info(u"Finished updating in-course reverification access rules") log.info(u"Finished updating in-course reverification access rules")
@receiver(GRADES_UPDATED) @receiver(COURSE_GRADE_CHANGED)
def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadline, **kwargs): # pylint: disable=unused-argument def listen_for_grade_calculation(sender, user, course_grade, course_key, deadline, **kwargs): # pylint: disable=unused-argument
"""Receive 'MIN_GRADE_REQUIREMENT_STATUS' signal and update minimum grade requirement status. """Receive 'MIN_GRADE_REQUIREMENT_STATUS' signal and update minimum grade requirement status.
Args: Args:
sender: None sender: None
user(User): User Model object user(User): User Model object
grade_summary(dict): Dict containing output from the course grader course_grade(CourseGrade): CourseGrade object
course_key(CourseKey): The key for the course course_key(CourseKey): The key for the course
deadline(datetime): Course end date or None deadline(datetime): Course end date or None
...@@ -78,7 +78,7 @@ def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadli ...@@ -78,7 +78,7 @@ def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadli
criteria = requirements[0].get('criteria') criteria = requirements[0].get('criteria')
if criteria: if criteria:
min_grade = criteria.get('min_grade') min_grade = criteria.get('min_grade')
passing_grade = grade_summary['percent'] >= min_grade passing_grade = course_grade.percent >= min_grade
now = timezone.now() now = timezone.now()
status = None status = None
reason = None reason = None
...@@ -89,7 +89,7 @@ def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadli ...@@ -89,7 +89,7 @@ def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadli
if passing_grade: if passing_grade:
# Student received a passing grade # Student received a passing grade
status = 'satisfied' status = 'satisfied'
reason = {'final_grade': grade_summary['percent']} reason = {'final_grade': course_grade.percent}
else: else:
# Submission after deadline # Submission after deadline
...@@ -104,7 +104,7 @@ def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadli ...@@ -104,7 +104,7 @@ def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadli
# Student failed to receive minimum grade # Student failed to receive minimum grade
status = 'failed' status = 'failed'
reason = { reason = {
'final_grade': grade_summary['percent'], 'final_grade': course_grade.percent,
'minimum_grade': min_grade 'minimum_grade': min_grade
} }
......
...@@ -5,6 +5,7 @@ Tests for minimum grade requirement status ...@@ -5,6 +5,7 @@ Tests for minimum grade requirement status
import ddt import ddt
import pytz import pytz
from datetime import timedelta, datetime from datetime import timedelta, datetime
from mock import MagicMock
from unittest import skipUnless from unittest import skipUnless
from django.conf import settings from django.conf import settings
...@@ -73,7 +74,9 @@ class TestMinGradedRequirementStatus(ModuleStoreTestCase): ...@@ -73,7 +74,9 @@ class TestMinGradedRequirementStatus(ModuleStoreTestCase):
def assert_requirement_status(self, grade, due_date, expected_status): def assert_requirement_status(self, grade, due_date, expected_status):
""" Verify the user's credit requirement status is as expected after simulating a grading calculation. """ """ Verify the user's credit requirement status is as expected after simulating a grading calculation. """
listen_for_grade_calculation(None, self.user, {'percent': grade}, self.course.id, due_date) course_grade = MagicMock()
course_grade.percent = grade
listen_for_grade_calculation(None, self.user, course_grade, self.course.id, due_date)
req_status = get_credit_requirement_status(self.course.id, self.request.user.username, 'grade', 'grade') req_status = get_credit_requirement_status(self.course.id, self.request.user.username, 'grade', 'grade')
self.assertEqual(req_status[0]['status'], expected_status) self.assertEqual(req_status[0]['status'], expected_status)
......
...@@ -5,8 +5,8 @@ This module contains all signals. ...@@ -5,8 +5,8 @@ This module contains all signals.
from django.dispatch import Signal from django.dispatch import Signal
# Signal that fires when a user is graded (in lms/grades/course_grades.py) # Signal that fires when a user is graded
GRADES_UPDATED = Signal(providing_args=["user", "grade_summary", "course_key", "deadline"]) COURSE_GRADE_CHANGED = Signal(providing_args=["user", "course_grade", "course_key", "deadline"])
# Signal that fires when a user is awarded a certificate in a course (in the certificates django app) # Signal that fires when a user is awarded a certificate in a course (in the certificates django app)
# TODO: runtime coupling between apps will be reduced if this event is changed to carry a username # TODO: runtime coupling between apps will be reduced if this event is changed to carry a username
......
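A minimal receiver against the renamed signal, mirroring the payload above; the handler name and log line are illustrative:

    import logging

    from django.dispatch import receiver
    from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED

    log = logging.getLogger(__name__)

    @receiver(COURSE_GRADE_CHANGED)
    def log_course_grade(sender, user, course_grade, course_key, deadline, **kwargs):  # pylint: disable=unused-argument
        """Illustrative listener: the payload is now the CourseGrade object, not a summary dict."""
        log.info(u'%s scored %s in %s', user.username, course_grade.percent, course_key)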