Commit b2cd0359 by Nimisha Asthagiri, committed by GitHub

Merge pull request #14032 from edx/beryl/unattempted-grade-report

Update grade report to distinguish between Not Attempted and 0
parents 41e852fe aca69f78
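The heart of this refactor, repeated across the hunks below, is that CourseGradeFactory no longer binds a student at construction time; the student is instead passed to each method. A minimal before/after sketch (user and course stand in for a User object and a course descriptor):

from lms.djangoapps.grades.new.course_grade import CourseGradeFactory

# Before this commit: one factory instance per student.
grade = CourseGradeFactory(user).create(course)

# After this commit: a single stateless factory grades any student.
grade = CourseGradeFactory().create(user, course)
print grade.percent, grade.letter_grade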
......@@ -404,7 +404,7 @@ def _cert_info(user, course_overview, cert_status, course_mode): # pylint: disa
)
if status in {'generating', 'ready', 'notpassing', 'restricted', 'auditing', 'unverified'}:
persisted_grade = CourseGradeFactory(user).get_persisted(course_overview)
persisted_grade = CourseGradeFactory().get_persisted(user, course_overview)
if persisted_grade is not None:
status_dict['grade'] = unicode(persisted_grade.percent)
elif 'grade' in cert_status:
......
......@@ -10,7 +10,7 @@ class CorrectMap(object):
in a capa problem. The response evaluation result for each answer_id includes
(correctness, npoints, msg, hint, hintmode).
- correctness : 'correct', 'incorrect', or 'partially-correct'
- correctness : 'correct', 'incorrect', 'partially-correct', or 'incomplete'
- npoints : None, or integer specifying number of points awarded for this answer_id
- msg : string (may have HTML) giving extra message response
(displayed below textline or textbox)
......
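For orientation, each answer_id in a CorrectMap maps to a dict carrying the fields listed above; a hand-built illustration (the answer_id and all values are invented for this sketch):

correct_map_entry = {
    'i4x-edX-demo-problem-1_2_1': {
        'correctness': 'incomplete',    # newly allowed alongside 'correct', 'incorrect', 'partially-correct'
        'npoints': None,                # None, or an integer number of points awarded
        'msg': '<p>Almost there.</p>',  # extra message (may contain HTML) shown below the input
        'hint': 'Check your signs.',
        'hintmode': 'always',
    },
}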
"""Grading tests"""
import ddt
import unittest
from xmodule import graders
......@@ -12,13 +13,11 @@ class GradesheetTest(unittest.TestCase):
def test_weighted_grading(self):
scores = []
agg_fields = dict(display_name="aggregated_score", module_id=None, attempted=False)
prob_fields = dict(
display_name="problem_score", module_id=None, raw_earned=0, raw_possible=0, weight=0, attempted=False,
)
agg_fields = dict(attempted=False)
prob_fields = dict(raw_earned=0, raw_possible=0, weight=0, attempted=False)
# No scores
all_total, graded_total = aggregate_scores(scores, display_name=agg_fields['display_name'])
all_total, graded_total = aggregate_scores(scores)
self.assertEqual(
all_total,
AggregatedScore(tw_earned=0, tw_possible=0, graded=False, **agg_fields),
......@@ -30,7 +29,7 @@ class GradesheetTest(unittest.TestCase):
# (0/5 non-graded)
scores.append(ProblemScore(weighted_earned=0, weighted_possible=5, graded=False, **prob_fields))
all_total, graded_total = aggregate_scores(scores, display_name=agg_fields['display_name'])
all_total, graded_total = aggregate_scores(scores)
self.assertEqual(
all_total,
AggregatedScore(tw_earned=0, tw_possible=5, graded=False, **agg_fields),
......@@ -44,7 +43,7 @@ class GradesheetTest(unittest.TestCase):
prob_fields['attempted'] = True
agg_fields['attempted'] = True
scores.append(ProblemScore(weighted_earned=3, weighted_possible=5, graded=True, **prob_fields))
all_total, graded_total = aggregate_scores(scores, display_name=agg_fields['display_name'])
all_total, graded_total = aggregate_scores(scores)
self.assertAlmostEqual(
all_total,
AggregatedScore(tw_earned=3, tw_possible=10, graded=False, **agg_fields),
......@@ -56,7 +55,7 @@ class GradesheetTest(unittest.TestCase):
# (0/5 non-graded) + (3/5 graded) + (2/5 graded) = 5/15 total, 5/10 graded
scores.append(ProblemScore(weighted_earned=2, weighted_possible=5, graded=True, **prob_fields))
all_total, graded_total = aggregate_scores(scores, display_name=agg_fields['display_name'])
all_total, graded_total = aggregate_scores(scores)
self.assertAlmostEqual(
all_total,
AggregatedScore(tw_earned=5, tw_possible=15, graded=False, **agg_fields),
......@@ -67,6 +66,7 @@ class GradesheetTest(unittest.TestCase):
)
@ddt.ddt
class GraderTest(unittest.TestCase):
"""
Tests grader implementations
......@@ -76,55 +76,48 @@ class GraderTest(unittest.TestCase):
}
incomplete_gradesheet = {
'Homework': [],
'Lab': [],
'Midterm': [],
'Homework': {},
'Lab': {},
'Midterm': {},
}
common_fields = dict(graded=True, module_id=None, attempted=True)
class MockGrade(object):
"""
Mock class for SubsectionGrade object.
"""
def __init__(self, graded_total, display_name):
self.graded_total = graded_total
self.display_name = display_name
common_fields = dict(graded=True, attempted=True)
test_gradesheet = {
'Homework': [
AggregatedScore(tw_earned=2, tw_possible=20.0, display_name='hw1', **common_fields),
AggregatedScore(tw_earned=16, tw_possible=16.0, display_name='hw2', **common_fields),
],
'Homework': {
'hw1': MockGrade(AggregatedScore(tw_earned=2, tw_possible=20.0, **common_fields), display_name='hw1'),
'hw2': MockGrade(AggregatedScore(tw_earned=16, tw_possible=16.0, **common_fields), display_name='hw2'),
},
# The dropped scores should be from the assignments that don't exist yet
'Lab': [
AggregatedScore(tw_earned=1, tw_possible=2.0, display_name='lab1', **common_fields), # Dropped
AggregatedScore(tw_earned=1, tw_possible=1.0, display_name='lab2', **common_fields),
AggregatedScore(tw_earned=1, tw_possible=1.0, display_name='lab3', **common_fields),
AggregatedScore(tw_earned=5, tw_possible=25.0, display_name='lab4', **common_fields), # Dropped
AggregatedScore(tw_earned=3, tw_possible=4.0, display_name='lab5', **common_fields), # Dropped
AggregatedScore(tw_earned=6, tw_possible=7.0, display_name='lab6', **common_fields),
AggregatedScore(tw_earned=5, tw_possible=6.0, display_name='lab7', **common_fields),
],
'Midterm': [
AggregatedScore(tw_earned=50.5, tw_possible=100, display_name="Midterm Exam", **common_fields),
],
'Lab': {
# Dropped
'lab1': MockGrade(AggregatedScore(tw_earned=1, tw_possible=2.0, **common_fields), display_name='lab1'),
'lab2': MockGrade(AggregatedScore(tw_earned=1, tw_possible=1.0, **common_fields), display_name='lab2'),
'lab3': MockGrade(AggregatedScore(tw_earned=1, tw_possible=1.0, **common_fields), display_name='lab3'),
# Dropped
'lab4': MockGrade(AggregatedScore(tw_earned=5, tw_possible=25.0, **common_fields), display_name='lab4'),
# Dropped
'lab5': MockGrade(AggregatedScore(tw_earned=3, tw_possible=4.0, **common_fields), display_name='lab5'),
'lab6': MockGrade(AggregatedScore(tw_earned=6, tw_possible=7.0, **common_fields), display_name='lab6'),
'lab7': MockGrade(AggregatedScore(tw_earned=5, tw_possible=6.0, **common_fields), display_name='lab7'),
},
'Midterm': {
'midterm': MockGrade(
AggregatedScore(tw_earned=50.5, tw_possible=100, **common_fields),
display_name="Midterm Exam",
),
},
}
def test_single_section_grader(self):
midterm_grader = graders.SingleSectionGrader("Midterm", "Midterm Exam")
lab4_grader = graders.SingleSectionGrader("Lab", "lab4")
bad_lab_grader = graders.SingleSectionGrader("Lab", "lab42")
for graded in [
midterm_grader.grade(self.empty_gradesheet),
midterm_grader.grade(self.incomplete_gradesheet),
bad_lab_grader.grade(self.test_gradesheet),
]:
self.assertEqual(len(graded['section_breakdown']), 1)
self.assertEqual(graded['percent'], 0.0)
graded = midterm_grader.grade(self.test_gradesheet)
self.assertAlmostEqual(graded['percent'], 0.505)
self.assertEqual(len(graded['section_breakdown']), 1)
graded = lab4_grader.grade(self.test_gradesheet)
self.assertAlmostEqual(graded['percent'], 0.2)
self.assertEqual(len(graded['section_breakdown']), 1)
def test_assignment_format_grader(self):
homework_grader = graders.AssignmentFormatGrader("Homework", 12, 2)
no_drop_grader = graders.AssignmentFormatGrader("Homework", 12, 0)
......@@ -179,8 +172,6 @@ class GraderTest(unittest.TestCase):
# First, a few sub graders
homework_grader = graders.AssignmentFormatGrader("Homework", 12, 2)
lab_grader = graders.AssignmentFormatGrader("Lab", 7, 3)
# phasing out the use of SingleSectionGraders, and instead using AssignmentFormatGraders that
# will act like SingleSectionGraders on single sections.
midterm_grader = graders.AssignmentFormatGrader("Midterm", 1, 0)
weighted_grader = graders.WeightedSubsectionsGrader([
......@@ -268,6 +259,8 @@ class GraderTest(unittest.TestCase):
},
{
'type': "Midterm",
'min_count': 0,
'drop_count': 0,
'name': "Midterm Exam",
'short_label': "Midterm",
'weight': 0.5,
......@@ -294,5 +287,25 @@ class GraderTest(unittest.TestCase):
self.assertAlmostEqual(graded['percent'], 0.11)
self.assertEqual(len(graded['section_breakdown']), 12 + 1)
# TODO: How do we test failure cases? The parser only logs an error when
# it can't parse something. Maybe it should throw exceptions?
@ddt.data(
(
# empty
{},
u"Configuration has no appropriate grader class."
),
(
# no min_count
{'type': "Homework", 'drop_count': 0},
u"Configuration has no appropriate grader class."
),
(
# no drop_count
{'type': "Homework", 'min_count': 0},
u"__init__() takes at least 4 arguments (3 given)"
),
)
@ddt.unpack
def test_grader_with_invalid_conf(self, invalid_conf, expected_error_message):
with self.assertRaises(ValueError) as error:
graders.grader_from_conf([invalid_conf])
self.assertIn(expected_error_message, error.exception.message)
......@@ -1174,11 +1174,8 @@ class TestCCXGrades(FieldOverrideTestMixin, SharedModuleStoreTestCase, LoginEnro
self.assertEqual(len(response.mako_context['students']), 1) # pylint: disable=no-member
student_info = response.mako_context['students'][0] # pylint: disable=no-member
self.assertEqual(student_info['grade_summary']['percent'], 0.5)
self.assertEqual(
student_info['grade_summary']['grade_breakdown'][0]['percent'],
0.5)
self.assertEqual(
len(student_info['grade_summary']['section_breakdown']), 4)
self.assertEqual(student_info['grade_summary']['grade_breakdown'].values()[0]['percent'], 0.5)
self.assertEqual(len(student_info['grade_summary']['section_breakdown']), 4)
def test_grades_csv(self):
self.course.enable_ccx = True
......@@ -1223,7 +1220,7 @@ class TestCCXGrades(FieldOverrideTestMixin, SharedModuleStoreTestCase, LoginEnro
self.assertEqual(response.status_code, 200)
grades = response.mako_context['grade_summary'] # pylint: disable=no-member
self.assertEqual(grades['percent'], 0.5)
self.assertEqual(grades['grade_breakdown'][0]['percent'], 0.5)
self.assertEqual(grades['grade_breakdown'].values()[0]['percent'], 0.5)
self.assertEqual(len(grades['section_breakdown']), 4)
......
......@@ -33,7 +33,7 @@ from courseware.field_overrides import disable_overrides
from django_comment_common.models import FORUM_ROLE_ADMINISTRATOR, assign_role
from django_comment_common.utils import seed_permissions_roles
from edxmako.shortcuts import render_to_response
from lms.djangoapps.grades.course_grades import iterate_grades_for
from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
from opaque_keys.edx.keys import CourseKey
from ccx_keys.locator import CCXLocator
from student.roles import CourseCcxCoachRole
......@@ -564,30 +564,30 @@ def ccx_grades_csv(request, course, ccx=None):
courseenrollment__course_id=ccx_key,
courseenrollment__is_active=1
).order_by('username').select_related("profile")
grades = iterate_grades_for(course, enrolled_students)
grades = CourseGradeFactory().iter(course, enrolled_students)
header = None
rows = []
for student, gradeset, __ in grades:
if gradeset:
for student, course_grade, __ in grades:
if course_grade:
# We were able to successfully grade this student for this
# course.
if not header:
# Encode the header row in utf-8 encoding in case there are
# unicode characters
header = [section['label'].encode('utf-8')
for section in gradeset[u'section_breakdown']]
for section in course_grade.summary[u'section_breakdown']]
rows.append(["id", "email", "username", "grade"] + header)
percents = {
section['label']: section.get('percent', 0.0)
for section in gradeset[u'section_breakdown']
for section in course_grade.summary[u'section_breakdown']
if 'label' in section
}
row_percents = [percents.get(label, 0.0) for label in header]
rows.append([student.id, student.email, student.username,
gradeset['percent']] + row_percents)
course_grade.percent] + row_percents)
buf = StringIO()
writer = csv.writer(buf)
......
"""
Management command which fixes ungraded certificates for students
"""
from django.core.management.base import BaseCommand
import logging
from optparse import make_option
from certificates.models import GeneratedCertificate
from courseware import courses
from lms.djangoapps.grades import course_grades
from django.test.client import RequestFactory
from django.core.management.base import BaseCommand
from optparse import make_option
from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
log = logging.getLogger(__name__)
class Command(BaseCommand):
......@@ -42,18 +44,15 @@ class Command(BaseCommand):
def handle(self, *args, **options):
course_id = options['course']
print "Fetching ungraded students for {0}".format(course_id)
log.info('Fetching ungraded students for %s.', course_id)
ungraded = GeneratedCertificate.objects.filter( # pylint: disable=no-member
course_id__exact=course_id
).filter(grade__exact='')
course = courses.get_course_by_id(course_id)
factory = RequestFactory()
request = factory.get('/')
for cert in ungraded:
# grade the student
grade = course_grades.summary(cert.user, course)
print "grading {0} - {1}".format(cert.user, grade['percent'])
cert.grade = grade['percent']
grade = CourseGradeFactory().create(cert.user, course)
log.info('grading %s - %s', cert.user, grade.percent)
cert.grade = grade.percent
if not options['noop']:
cert.save()
......@@ -11,7 +11,7 @@ from django.conf import settings
from django.core.urlresolvers import reverse
from requests.auth import HTTPBasicAuth
from lms.djangoapps.grades import course_grades
from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
from xmodule.modulestore.django import modulestore
from capa.xqueue_interface import XQueueInterface
from capa.xqueue_interface import make_xheader, make_hashkey
......@@ -271,7 +271,7 @@ class XQueueCertInterface(object):
self.request.session = {}
is_whitelisted = self.whitelist.filter(user=student, course_id=course_id, whitelist=True).exists()
grade = course_grades.summary(student, course)
grade = CourseGradeFactory().create(student, course).summary
enrollment_mode, __ = CourseEnrollment.enrollment_mode_for_user(student, course_id)
mode_is_verified = enrollment_mode in GeneratedCertificate.VERIFIED_CERTS_MODES
user_is_verified = SoftwareSecurePhotoVerification.user_is_verified(student)
......
......@@ -22,7 +22,7 @@ from capa.tests.response_xml_factory import (
from course_modes.models import CourseMode
from courseware.models import StudentModule, BaseStudentModuleHistory
from courseware.tests.helpers import LoginEnrollmentTestCase
from lms.djangoapps.grades import course_grades, progress
from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
from openedx.core.djangoapps.credit.api import (
set_credit_requirements, get_credit_requirement_status
)
......@@ -270,39 +270,17 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
self.update_course(self.course, self.student_user.id)
self.refresh_course()
def get_grade_summary(self):
def get_course_grade(self):
"""
calls course_grades.summary for current user and course.
the keywords for the returned object are
- grade : A final letter grade.
- percent : The final percent for the class (rounded up).
- section_breakdown : A breakdown of each section that makes
up the grade. (For display)
- grade_breakdown : A breakdown of the major components that
make up the final grade. (For display)
"""
return course_grades.summary(self.student_user, self.course)
def get_progress_summary(self):
"""
Return progress summary structure for current user and course.
Returns
- courseware_summary is a summary of all sections with problems in the course.
It is organized as an array of chapters, each containing an array of sections,
each containing an array of scores. This contains information for graded and
ungraded problems, and is good for displaying a course summary with due dates,
etc.
Return CourseGrade for current user and course.
"""
return progress.summary(self.student_user, self.course).chapter_grades
return CourseGradeFactory().create(self.student_user, self.course)
def check_grade_percent(self, percent):
"""
Assert that percent grade is as expected.
"""
grade_summary = self.get_grade_summary()
self.assertEqual(grade_summary['percent'], percent)
self.assertEqual(self.get_course_grade().percent, percent)
def earned_hw_scores(self):
"""
......@@ -310,7 +288,9 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
Returns list of scores: [<points on hw_1>, <points on hw_2>, ..., <points on hw_n>]
"""
return [s.earned for s in self.get_grade_summary()['totaled_scores']['Homework']]
return [
s.graded_total.earned for s in self.get_course_grade().graded_subsections_by_format['Homework'].itervalues()
]
def hw_grade(self, hw_url_name):
"""
......@@ -318,7 +298,7 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
"""
# list of grade summaries for each section
sections_list = []
for chapter in self.get_progress_summary():
for chapter in self.get_course_grade().chapter_grades:
sections_list.extend(chapter['sections'])
# get the first section that matches the url (there should only be one)
......@@ -431,8 +411,11 @@ class TestCourseGrader(TestSubmittingProblems):
"drop_count": 0,
"short_label": "HW",
"weight": hw_weight
}, {
},
{
"type": "Final",
"min_count": 0,
"drop_count": 0,
"name": "Final Section",
"short_label": "Final",
"weight": final_weight
......@@ -558,7 +541,7 @@ class TestCourseGrader(TestSubmittingProblems):
"""
self.basic_setup()
self.check_grade_percent(0)
self.assertEqual(self.get_grade_summary()['grade'], None)
self.assertEqual(self.get_course_grade().letter_grade, None)
def test_b_grade_exact(self):
"""
......@@ -567,7 +550,7 @@ class TestCourseGrader(TestSubmittingProblems):
self.basic_setup()
self.submit_question_answer('p1', {'2_1': 'Correct'})
self.check_grade_percent(0.33)
self.assertEqual(self.get_grade_summary()['grade'], 'B')
self.assertEqual(self.get_course_grade().letter_grade, 'B')
def test_b_grade_above(self):
"""
......@@ -577,7 +560,7 @@ class TestCourseGrader(TestSubmittingProblems):
self.submit_question_answer('p1', {'2_1': 'Correct'})
self.submit_question_answer('p2', {'2_1': 'Correct'})
self.check_grade_percent(0.67)
self.assertEqual(self.get_grade_summary()['grade'], 'B')
self.assertEqual(self.get_course_grade().letter_grade, 'B')
def test_a_grade(self):
"""
......@@ -588,7 +571,7 @@ class TestCourseGrader(TestSubmittingProblems):
self.submit_question_answer('p2', {'2_1': 'Correct'})
self.submit_question_answer('p3', {'2_1': 'Correct'})
self.check_grade_percent(1.0)
self.assertEqual(self.get_grade_summary()['grade'], 'A')
self.assertEqual(self.get_course_grade().letter_grade, 'A')
def test_wrong_answers(self):
"""
......@@ -599,7 +582,7 @@ class TestCourseGrader(TestSubmittingProblems):
self.submit_question_answer('p2', {'2_1': 'Correct'})
self.submit_question_answer('p3', {'2_1': 'Incorrect'})
self.check_grade_percent(0.67)
self.assertEqual(self.get_grade_summary()['grade'], 'B')
self.assertEqual(self.get_course_grade().letter_grade, 'B')
def test_submissions_api_overrides_scores(self):
"""
......@@ -610,7 +593,7 @@ class TestCourseGrader(TestSubmittingProblems):
self.submit_question_answer('p2', {'2_1': 'Correct'})
self.submit_question_answer('p3', {'2_1': 'Incorrect'})
self.check_grade_percent(0.67)
self.assertEqual(self.get_grade_summary()['grade'], 'B')
self.assertEqual(self.get_course_grade().letter_grade, 'B')
# But now, set the score with the submissions API and watch
# as it overrides the score read from StudentModule and our
......@@ -625,7 +608,7 @@ class TestCourseGrader(TestSubmittingProblems):
submission = submissions_api.create_submission(student_item, 'any answer')
submissions_api.set_score(submission['uuid'], 1, 1)
self.check_grade_percent(1.0)
self.assertEqual(self.get_grade_summary()['grade'], 'A')
self.assertEqual(self.get_course_grade().letter_grade, 'A')
def test_submissions_api_anonymous_student_id(self):
"""
......@@ -640,7 +623,7 @@ class TestCourseGrader(TestSubmittingProblems):
mock_get_scores.return_value = {
self.problem_location('p3').to_deprecated_string(): (1, 1)
}
self.get_grade_summary()
self.get_course_grade()
# Verify that the submissions API was sent an anonymized student ID
mock_get_scores.assert_called_with(
......@@ -752,9 +735,6 @@ class TestCourseGrader(TestSubmittingProblems):
# the Django student views, and does not update enrollment if it already exists.
CourseEnrollment.enroll(self.student_user, self.course.id, mode)
self.submit_question_answer('p1', {'2_1': 'Correct'})
self.submit_question_answer('p2', {'2_1': 'Correct'})
# Enable the course for credit
CreditCourse.objects.create(course_key=self.course.id, enabled=True)
......@@ -774,7 +754,15 @@ class TestCourseGrader(TestSubmittingProblems):
# Add a single credit requirement (final grade)
set_credit_requirements(self.course.id, requirements)
self.get_grade_summary()
# Credit requirement is not satisfied before passing grade
req_status = get_credit_requirement_status(self.course.id, self.student_user.username, 'grade', 'grade')
self.assertEqual(req_status[0]["status"], None)
self._stop_signal_patch()
self.submit_question_answer('p1', {'2_1': 'Correct'})
self.submit_question_answer('p2', {'2_1': 'Correct'})
# Credit requirement is now satisfied after passing grade
req_status = get_credit_requirement_status(self.course.id, self.student_user.username, 'grade', 'grade')
self.assertEqual(req_status[0]["status"], 'satisfied')
......
......@@ -1235,7 +1235,7 @@ class ProgressPageTests(ModuleStoreTestCase):
@patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': True})
@patch(
'lms.djangoapps.grades.new.course_grade.CourseGrade.summary',
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []}),
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': {}}),
)
def test_view_certificate_link(self):
"""
......@@ -1294,7 +1294,7 @@ class ProgressPageTests(ModuleStoreTestCase):
@patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': False})
@patch(
'lms.djangoapps.grades.new.course_grade.CourseGrade.summary',
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': {}})
)
def test_view_certificate_link_hidden(self):
"""
......@@ -1341,7 +1341,7 @@ class ProgressPageTests(ModuleStoreTestCase):
@patch(
'lms.djangoapps.grades.new.course_grade.CourseGrade.summary',
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': {}})
)
@ddt.data(
*itertools.product(
......@@ -1381,7 +1381,7 @@ class ProgressPageTests(ModuleStoreTestCase):
@patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': True})
@patch(
'lms.djangoapps.grades.new.course_grade.CourseGrade.summary',
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': {}})
)
def test_page_with_invalidated_certificate_with_html_view(self):
"""
......@@ -1415,7 +1415,7 @@ class ProgressPageTests(ModuleStoreTestCase):
@patch(
'lms.djangoapps.grades.new.course_grade.CourseGrade.summary',
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': {}})
)
def test_page_with_invalidated_certificate_with_pdf(self):
"""
......@@ -1432,7 +1432,7 @@ class ProgressPageTests(ModuleStoreTestCase):
@patch(
'lms.djangoapps.grades.new.course_grade.CourseGrade.summary',
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
PropertyMock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': {}})
)
def test_message_for_audit_mode(self):
""" Verify that message appears on progress page, if learner is enrolled
......
......@@ -723,7 +723,7 @@ def _progress(request, course_key, student_id):
# additional DB lookup (this kills the Progress page in particular).
student = User.objects.prefetch_related("groups").get(id=student.id)
course_grade = CourseGradeFactory(student).create(course)
course_grade = CourseGradeFactory().create(student, course)
courseware_summary = course_grade.chapter_grades
grade_summary = course_grade.summary
......@@ -1127,7 +1127,7 @@ def is_course_passed(course, grade_summary=None, student=None, request=None):
success_cutoff = min(nonzero_cutoffs) if nonzero_cutoffs else None
if grade_summary is None:
grade_summary = CourseGradeFactory(student).create(course).summary
grade_summary = CourseGradeFactory().create(student, course).summary
return success_cutoff and grade_summary['percent'] >= success_cutoff
......
......@@ -148,7 +148,7 @@ class UserGradeView(GradeViewMixin, GenericAPIView):
return course
prep_course_for_grading(course, request)
course_grade = CourseGradeFactory(request.user).create(course)
course_grade = CourseGradeFactory().create(request.user, course)
return Response([{
'username': username,
......
"""
Grading Context
"""
from collections import defaultdict
from collections import OrderedDict
from openedx.core.djangoapps.content.block_structure.api import get_course_in_cache
from .scores import possibly_scored
def grading_context_for_course(course):
def grading_context_for_course(course_key):
"""
Same as grading_context, but takes in a course key.
"""
course_structure = get_course_in_cache(course.id)
course_structure = get_course_in_cache(course_key)
return grading_context(course_structure)
def grading_context(course_structure):
"""
This returns a dictionary with keys necessary for quickly grading
a student. They are used by grades.grade()
a student.
The grading context has two keys:
graded_sections - This contains the sections that are graded, as
well as all possible children modules that can affect the
grading. This allows some sections to be skipped if the student
hasn't seen any part of it.
all_graded_subsections_by_type - This contains all subsections that are
graded, keyed by subsection format (assignment type).
The format is a dictionary keyed by section-type. The values are
arrays of dictionaries containing
"section_block" : The section block
"scored_descendant_keys" : An array of usage keys for blocks
could possibly be in the section, for any student
The values are arrays of dictionaries containing
"subsection_block" : The subsection block
"scored_descendants" : An array of usage keys for blocks
that could possibly be in the subsection, for any student
all_graded_blocks - This contains a list of all blocks that can
affect grading a student. This is used to efficiently fetch
......@@ -39,34 +36,36 @@ def grading_context(course_structure):
"""
all_graded_blocks = []
all_graded_sections = defaultdict(list)
all_graded_subsections_by_type = OrderedDict()
for chapter_key in course_structure.get_children(course_structure.root_block_usage_key):
for section_key in course_structure.get_children(chapter_key):
section = course_structure[section_key]
scored_descendants_of_section = [section]
if section.graded:
for subsection_key in course_structure.get_children(chapter_key):
subsection = course_structure[subsection_key]
scored_descendants_of_subsection = []
if subsection.graded:
for descendant_key in course_structure.post_order_traversal(
filter_func=possibly_scored,
start_node=section_key,
start_node=subsection_key,
):
scored_descendants_of_section.append(
scored_descendants_of_subsection.append(
course_structure[descendant_key],
)
# include only those blocks that have scores, not those that are merely parents
section_info = {
'section_block': section,
subsection_info = {
'subsection_block': subsection,
'scored_descendants': [
child for child in scored_descendants_of_section
child for child in scored_descendants_of_subsection
if getattr(child, 'has_score', None)
]
}
section_format = getattr(section, 'format', '')
all_graded_sections[section_format].append(section_info)
all_graded_blocks.extend(scored_descendants_of_section)
subsection_format = getattr(subsection, 'format', '')
if subsection_format not in all_graded_subsections_by_type:
all_graded_subsections_by_type[subsection_format] = []
all_graded_subsections_by_type[subsection_format].append(subsection_info)
all_graded_blocks.extend(scored_descendants_of_subsection)
return {
'all_graded_sections': all_graded_sections,
'all_graded_subsections_by_type': all_graded_subsections_by_type,
'all_graded_blocks': all_graded_blocks,
}
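Concretely, for a course with a single graded Homework subsection holding two problems, the returned dict might look roughly like this (a sketch; the real values are block objects from the course structure, shown here as placeholder strings):

from collections import OrderedDict

grading_context = {
    'all_graded_subsections_by_type': OrderedDict([
        ('Homework', [
            {
                'subsection_block': 'hw1_subsection_block',
                'scored_descendants': ['problem_1_block', 'problem_2_block'],
            },
        ]),
    ]),
    'all_graded_blocks': ['hw1_subsection_block', 'problem_1_block', 'problem_2_block'],
}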
"""
Functionality for course-level grades.
"""
from collections import namedtuple
from logging import getLogger
import dogstats_wrapper as dog_stats_api
from opaque_keys.edx.keys import CourseKey
from courseware.courses import get_course_by_id
from .new.course_grade import CourseGradeFactory
log = getLogger(__name__)
GradeResult = namedtuple('GradeResult', ['student', 'gradeset', 'err_msg'])
def iterate_grades_for(course_or_id, students):
"""
Given a course_id and an iterable of students (User), yield a GradeResult
for every student enrolled in the course. GradeResult is a named tuple of:
(student, gradeset, err_msg)
If an error occurred, gradeset will be an empty dict and err_msg will be an
exception message. If there was no error, err_msg is an empty string.
The gradeset is a dictionary with the following fields:
- grade : A final letter grade.
- percent : The final percent for the class (rounded up).
- section_breakdown : A breakdown of each section that makes
up the grade. (For display)
- grade_breakdown : A breakdown of the major components that
make up the final grade. (For display)
- raw_scores: contains scores for every graded module
"""
if isinstance(course_or_id, (basestring, CourseKey)):
course = get_course_by_id(course_or_id)
else:
course = course_or_id
for student in students:
with dog_stats_api.timer('lms.grades.iterate_grades_for', tags=[u'action:{}'.format(course.id)]):
try:
gradeset = summary(student, course)
yield GradeResult(student, gradeset, "")
except Exception as exc: # pylint: disable=broad-except
# Keep marching on even if this student couldn't be graded for
# some reason, but log it for future reference.
log.exception(
'Cannot grade student %s (%s) in course %s because of exception: %s',
student.username,
student.id,
course.id,
exc.message
)
yield GradeResult(student, {}, exc.message)
def summary(student, course):
"""
Returns the grade summary of the student for the given course.
Also sends a signal to update the minimum grade requirement status.
"""
return CourseGradeFactory(student).create(course).summary
......@@ -7,7 +7,7 @@ from django.core.management.base import BaseCommand, CommandError
import os
from lms.djangoapps.courseware import courses
from lms.djangoapps.certificates.models import GeneratedCertificate
from lms.djangoapps.grades import course_grades
from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
......@@ -124,18 +124,18 @@ class Command(BaseCommand):
count, total, hours, minutes)
start = datetime.datetime.now()
request.user = student
grade = course_grades.summary(student, course)
grade = CourseGradeFactory().create(student, course)
if not header:
header = [section['label'] for section in grade[u'section_breakdown']]
header = [section['label'] for section in grade.summary[u'section_breakdown']]
rows.append(["email", "username", "certificate-grade", "grade"] + header)
percents = {section['label']: section['percent'] for section in grade[u'section_breakdown']}
percents = {section['label']: section['percent'] for section in grade.summary[u'section_breakdown']}
row_percents = [percents[label] for label in header]
if student.username in cert_grades:
rows.append(
[student.email, student.username, cert_grades[student.username], grade['percent']] + row_percents,
[student.email, student.username, cert_grades[student.username], grade.percent] + row_percents,
)
else:
rows.append([student.email, student.username, "N/A", grade['percent']] + row_percents)
rows.append([student.email, student.username, "N/A", grade.percent] + row_percents)
with open(options['output'], 'wb') as f:
writer = csv.writer(f)
writer.writerows(rows)
......@@ -2,16 +2,17 @@
CourseGrade Class
"""
from collections import defaultdict
from collections import defaultdict, namedtuple, OrderedDict
from logging import getLogger
from django.conf import settings
from django.core.exceptions import PermissionDenied
import dogstats_wrapper as dog_stats_api
from lazy import lazy
from lms.djangoapps.course_blocks.api import get_course_blocks
from lms.djangoapps.grades.config.models import PersistentGradesEnabledFlag
from openedx.core.djangoapps.signals.signals import GRADES_UPDATED
from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED
from xmodule import block_metadata_utils
from ..models import PersistentCourseGrade
......@@ -37,18 +38,18 @@ class CourseGrade(object):
self._subsection_grade_factory = SubsectionGradeFactory(self.student, self.course, self.course_structure)
@lazy
def subsection_grade_totals_by_format(self):
def graded_subsections_by_format(self):
"""
Returns grades for the subsections in the course in
a dict keyed by subsection format types.
"""
subsections_by_format = defaultdict(list)
subsections_by_format = defaultdict(OrderedDict)
for chapter in self.chapter_grades:
for subsection_grade in chapter['sections']:
if subsection_grade.graded:
graded_total = subsection_grade.graded_total
if graded_total.possible > 0:
subsections_by_format[subsection_grade.format].append(graded_total)
subsections_by_format[subsection_grade.format][subsection_grade.location] = subsection_grade
return subsections_by_format
@lazy
......@@ -70,7 +71,7 @@ class CourseGrade(object):
# Grading policy might be overridden by a CCX; need to reset it
self.course.set_grading_policy(self.course.grading_policy)
grade_value = self.course.grader.grade(
self.subsection_grade_totals_by_format,
self.graded_subsections_by_format,
generate_random_scores=settings.GENERATE_PROFILE_SCORES
)
# can't use the existing properties due to recursion issues caused by referencing self.grade_value
......@@ -137,8 +138,6 @@ class CourseGrade(object):
grade_summary = self.grade_value
grade_summary['percent'] = self.percent
grade_summary['grade'] = self.letter_grade
grade_summary['totaled_scores'] = self.subsection_grade_totals_by_format
grade_summary['raw_scores'] = list(self.locations_to_scores.itervalues())
return grade_summary
......@@ -150,7 +149,7 @@ class CourseGrade(object):
"""
subsections_total = sum(len(chapter['sections']) for chapter in self.chapter_grades)
total_graded_subsections = sum(len(x) for x in self.subsection_grade_totals_by_format.itervalues())
total_graded_subsections = sum(len(x) for x in self.graded_subsections_by_format.itervalues())
subsections_created = len(self._subsection_grade_factory._unsaved_subsection_grades) # pylint: disable=protected-access
subsections_read = subsections_total - subsections_created
blocks_total = len(self.locations_to_scores)
......@@ -295,10 +294,10 @@ class CourseGrade(object):
"""
Signal all listeners when grades are computed.
"""
responses = GRADES_UPDATED.send_robust(
responses = COURSE_GRADE_CHANGED.send_robust(
sender=None,
user=self.student,
grade_summary=self.summary,
course_grade=self,
course_key=self.course.id,
deadline=self.course.end
)
......@@ -324,32 +323,60 @@ class CourseGradeFactory(object):
"""
Factory class to create Course Grade objects
"""
def __init__(self, student):
self.student = student
def create(self, course, read_only=True):
def create(self, student, course, read_only=True):
"""
Returns the CourseGrade object for the given student and course.
If read_only is True, doesn't save any updates to the grades.
Raises a PermissionDenied if the user does not have course access.
"""
course_structure = get_course_blocks(self.student, course.location)
course_structure = get_course_blocks(student, course.location)
# if user does not have access to this course, throw an exception
if not self._user_has_access_to_course(course_structure):
raise PermissionDenied("User does not have access to this course")
return (
self._get_saved_grade(course, course_structure) or
self._compute_and_update_grade(course, course_structure, read_only)
self._get_saved_grade(student, course, course_structure) or
self._compute_and_update_grade(student, course, course_structure, read_only)
)
def update(self, course, course_structure):
GradeResult = namedtuple('GradeResult', ['student', 'course_grade', 'err_msg'])
def iter(self, course, students):
"""
Given a course and an iterable of students (User), yield a GradeResult
for every student enrolled in the course. GradeResult is a named tuple of:
(student, course_grade, err_msg)
If an error occurred, course_grade will be None and err_msg will be an
exception message. If there was no error, err_msg is an empty string.
"""
for student in students:
with dog_stats_api.timer('lms.grades.CourseGradeFactory.iter', tags=[u'action:{}'.format(course.id)]):
try:
course_grade = CourseGradeFactory().create(student, course)
yield self.GradeResult(student, course_grade, "")
except Exception as exc: # pylint: disable=broad-except
# Keep marching on even if this student couldn't be graded for
# some reason, but log it for future reference.
log.exception(
'Cannot grade student %s (%s) in course %s because of exception: %s',
student.username,
student.id,
course.id,
exc.message
)
yield self.GradeResult(student, None, exc.message)
def update(self, student, course, course_structure):
"""
Updates the CourseGrade for this Factory's student.
"""
self._compute_and_update_grade(course, course_structure)
self._compute_and_update_grade(student, course, course_structure)
def get_persisted(self, course):
def get_persisted(self, student, course):
"""
Returns the saved grade for the given course and student,
irrespective of whether the saved grade is up-to-date.
......@@ -357,9 +384,9 @@ class CourseGradeFactory(object):
if not PersistentGradesEnabledFlag.feature_enabled(course.id):
return None
return CourseGrade.get_persisted_grade(self.student, course)
return CourseGrade.get_persisted_grade(student, course)
def _get_saved_grade(self, course, course_structure):
def _get_saved_grade(self, student, course, course_structure):
"""
Returns the saved grade for the given course and student.
"""
......@@ -367,18 +394,18 @@ class CourseGradeFactory(object):
return None
return CourseGrade.load_persisted_grade(
self.student,
student,
course,
course_structure
)
def _compute_and_update_grade(self, course, course_structure, read_only=False):
def _compute_and_update_grade(self, student, course, course_structure, read_only=False):
"""
Freshly computes and updates the grade for the student and course.
If read_only is True, doesn't save any updates to the grades.
"""
course_grade = CourseGrade(self.student, course, course_structure)
course_grade = CourseGrade(student, course, course_structure)
course_grade.compute_and_update(read_only)
return course_grade
......
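A usage sketch for the new iter API, mirroring how ccx_grades_csv consumes it above (course and students are assumed to be a course descriptor and an iterable of enrolled Users):

from lms.djangoapps.grades.new.course_grade import CourseGradeFactory

for student, course_grade, err_msg in CourseGradeFactory().iter(course, students):
    if course_grade is None:
        # grading failed for this student; err_msg carries the exception message
        print "could not grade %s: %s" % (student.username, err_msg)
    else:
        print student.username, course_grade.percent, course_grade.letter_grade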
......@@ -68,7 +68,7 @@ class SubsectionGrade(object):
):
self._compute_block_score(descendant_key, course_structure, submissions_scores, csm_scores)
self.all_total, self.graded_total = graders.aggregate_scores(self.scores, self.display_name, self.location)
self.all_total, self.graded_total = graders.aggregate_scores(self.scores)
self._log_event(log.debug, u"init_from_structure", student)
return self
......@@ -83,16 +83,12 @@ class SubsectionGrade(object):
tw_earned=model.earned_graded,
tw_possible=model.possible_graded,
graded=True,
display_name=self.display_name,
module_id=self.location,
attempted=model.first_attempted is not None,
)
self.all_total = AggregatedScore(
tw_earned=model.earned_all,
tw_possible=model.possible_all,
graded=False,
display_name=self.display_name,
module_id=self.location,
attempted=model.first_attempted is not None,
)
self._log_event(log.debug, u"init_from_model", student)
......
"""
Progress Summary of a learner's course grades.
"""
from .new.course_grade import CourseGradeFactory
def summary(student, course):
"""
Returns the CourseGrade for the given course and student.
"""
return CourseGradeFactory(student).create(course)
......@@ -122,8 +122,6 @@ def get_score(submissions_scores, csm_scores, persisted_block, block):
weighted_possible,
weight,
graded,
display_name=display_name_with_default_escaped(block),
module_id=block.location,
attempted=attempted,
)
......
......@@ -182,4 +182,4 @@ def recalculate_course_grade(sender, course, course_structure, user, **kwargs):
"""
Updates a saved course grade.
"""
CourseGradeFactory(user).update(course, course_structure)
CourseGradeFactory().update(user, course, course_structure)
......@@ -111,7 +111,7 @@ class TestCourseGradeFactory(GradeTestBase):
def test_course_grade_feature_gating(self, feature_flag, course_setting):
# Grades are only saved if the feature flag and the advanced setting are
# both set to True.
grade_factory = CourseGradeFactory(self.request.user)
grade_factory = CourseGradeFactory()
with persistent_grades_feature_flags(
global_flag=feature_flag,
enabled_for_all_courses=False,
......@@ -119,32 +119,32 @@ class TestCourseGradeFactory(GradeTestBase):
enabled_for_course=course_setting
):
with patch('lms.djangoapps.grades.new.course_grade.CourseGrade.load_persisted_grade') as mock_save_grades:
grade_factory.create(self.course)
grade_factory.create(self.request.user, self.course)
self.assertEqual(mock_save_grades.called, feature_flag and course_setting)
def test_course_grade_creation(self):
grade_factory = CourseGradeFactory(self.request.user)
grade_factory = CourseGradeFactory()
with mock_get_score(1, 2):
course_grade = grade_factory.create(self.course)
course_grade = grade_factory.create(self.request.user, self.course)
self.assertEqual(course_grade.letter_grade, u'Pass')
self.assertEqual(course_grade.percent, 0.5)
def test_zero_course_grade(self):
grade_factory = CourseGradeFactory(self.request.user)
grade_factory = CourseGradeFactory()
with mock_get_score(0, 2):
course_grade = grade_factory.create(self.course)
course_grade = grade_factory.create(self.request.user, self.course)
self.assertIsNone(course_grade.letter_grade)
self.assertEqual(course_grade.percent, 0.0)
def test_get_persisted(self):
grade_factory = CourseGradeFactory(self.request.user)
grade_factory = CourseGradeFactory()
# first, create a grade in the database
with mock_get_score(1, 2):
grade_factory.create(self.course, read_only=False)
grade_factory.create(self.request.user, self.course, read_only=False)
# retrieve the grade, ensuring it is as expected and take just one query
with self.assertNumQueries(1):
course_grade = grade_factory.get_persisted(self.course)
course_grade = grade_factory.get_persisted(self.request.user, self.course)
self.assertEqual(course_grade.letter_grade, u'Pass')
self.assertEqual(course_grade.percent, 0.5)
......@@ -168,7 +168,7 @@ class TestCourseGradeFactory(GradeTestBase):
# ensure the grade can still be retrieved via get_persisted
# despite its outdated grading policy
with self.assertNumQueries(1):
course_grade = grade_factory.get_persisted(self.course)
course_grade = grade_factory.get_persisted(self.request.user, self.course)
self.assertEqual(course_grade.letter_grade, u'Pass')
self.assertEqual(course_grade.percent, 0.5)
......@@ -587,7 +587,7 @@ class TestCourseGradeLogging(ProblemSubmissionTestMixin, SharedModuleStoreTestCa
Creates a course grade and asserts that the associated logging
matches the expected totals passed in to the function.
"""
factory.create(self.course, read_only=False)
factory.create(self.request.user, self.course, read_only=False)
log_mock.assert_called_with(
u"Persistent Grades: CourseGrade.{0}, course: {1}, user: {2}".format(
log_statement,
......@@ -597,7 +597,7 @@ class TestCourseGradeLogging(ProblemSubmissionTestMixin, SharedModuleStoreTestCa
)
def test_course_grade_logging(self):
grade_factory = CourseGradeFactory(self.request.user)
grade_factory = CourseGradeFactory()
with persistent_grades_feature_flags(
global_flag=True,
enabled_for_all_courses=False,
......
......@@ -175,9 +175,7 @@ class TestGetScore(TestCase):
self._create_persisted_block(persisted_block_value),
self._create_block(block_value),
)
expected_score = ProblemScore(
display_name=self.display_name, module_id=self.location, **expected_result._asdict()
)
expected_score = ProblemScore(**expected_result._asdict())
self.assertEquals(score, expected_score)
......
......@@ -9,13 +9,15 @@ from xmodule.graders import ProblemScore
@contextmanager
def mock_passing_grade(grade_pass='Pass', percent=0.75):
def mock_passing_grade(grade_pass='Pass', percent=0.75):
"""
Mock the grading function to always return a passing grade.
"""
with patch('lms.djangoapps.grades.course_grades.summary') as mock_grade:
mock_grade.return_value = {'grade': grade_pass, 'percent': percent}
yield
with patch('lms.djangoapps.grades.new.course_grade.CourseGrade._compute_letter_grade') as mock_letter_grade:
with patch('lms.djangoapps.grades.new.course_grade.CourseGrade._calc_percent') as mock_percent_grade:
mock_letter_grade.return_value = grade_pass
mock_percent_grade.return_value = percent
yield
@contextmanager
......@@ -31,8 +33,6 @@ def mock_get_score(earned=0, possible=1):
weighted_possible=possible,
weight=1,
graded=True,
display_name=None,
module_id=None,
attempted=True,
)
yield mock_score
......
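A hedged sketch of how a test might lean on the updated helper (self.student_user and self.course are assumed to come from a test mixin; note that a previously persisted grade could bypass the patched computation):

with mock_passing_grade(grade_pass=u'A', percent=0.9):
    course_grade = CourseGradeFactory().create(self.student_user, self.course)
    self.assertEqual(course_grade.letter_grade, u'A')
    self.assertEqual(course_grade.percent, 0.9)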
......@@ -14,7 +14,7 @@ from opaque_keys.edx.keys import CourseKey
from edxmako.shortcuts import render_to_response
from courseware.courses import get_course_with_access
from lms.djangoapps.instructor.views.api import require_level
from lms.djangoapps.grades import course_grades
from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
from xmodule.modulestore.django import modulestore
......@@ -91,7 +91,7 @@ def get_grade_book_page(request, course, course_key):
'username': student.username,
'id': student.id,
'email': student.email,
'grade_summary': course_grades.summary(student, course)
'grade_summary': CourseGradeFactory().create(student, course).summary
}
for student in enrolled_students
]
......
......@@ -483,7 +483,7 @@ def dump_grading_context(course):
if isinstance(course.grader, xmgraders.WeightedSubsectionsGrader):
msg += '\n'
msg += "Graded sections:\n"
for subgrader, category, weight in course.grader.sections:
for subgrader, category, weight in course.grader.subgraders:
msg += " subgrader=%s, type=%s, category=%s, weight=%s\n"\
% (subgrader.__class__, subgrader.type, category, weight)
subgrader.index = 1
......@@ -491,14 +491,14 @@ def dump_grading_context(course):
msg += hbar
msg += "Listing grading context for course %s\n" % course.id.to_deprecated_string()
gcontext = grading_context_for_course(course)
gcontext = grading_context_for_course(course.id)
msg += "graded sections:\n"
msg += '%s\n' % gcontext['all_graded_sections'].keys()
for (gsomething, gsvals) in gcontext['all_graded_sections'].items():
msg += '%s\n' % gcontext['all_graded_subsections_by_type'].keys()
for (gsomething, gsvals) in gcontext['all_graded_subsections_by_type'].items():
msg += "--> Section %s:\n" % (gsomething)
for sec in gsvals:
sdesc = sec['section_block']
sdesc = sec['subsection_block']
frmat = getattr(sdesc, 'format', None)
aname = ''
if frmat in graders:
......
......@@ -2,6 +2,7 @@
Base test classes for LMS instructor-initiated background tasks
"""
# pylint: disable=attribute-defined-outside-init
import os
import json
from mock import Mock, patch
......@@ -12,7 +13,6 @@ from uuid import uuid4
from celery.states import SUCCESS, FAILURE
from django.core.urlresolvers import reverse
from django.test.testcases import TestCase
from django.contrib.auth.models import User
from capa.tests.response_xml_factory import OptionResponseXMLFactory
......@@ -37,7 +37,8 @@ TEST_COURSE_ORG = 'edx'
TEST_COURSE_NAME = 'test_course'
TEST_COURSE_NUMBER = '1.23x'
TEST_COURSE_KEY = SlashSeparatedCourseKey(TEST_COURSE_ORG, TEST_COURSE_NUMBER, TEST_COURSE_NAME)
TEST_SECTION_NAME = "Problem"
TEST_CHAPTER_NAME = "Section"
TEST_SECTION_NAME = "Subsection"
TEST_FAILURE_MESSAGE = 'task failed horribly'
TEST_FAILURE_EXCEPTION = 'RandomCauseError'
......@@ -135,14 +136,18 @@ class InstructorTaskCourseTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase)
Add a chapter and a sequential to the current course.
"""
# Add a chapter to the course
chapter = ItemFactory.create(parent_location=self.course.location,
display_name=TEST_SECTION_NAME)
self.chapter = ItemFactory.create(
parent_location=self.course.location,
display_name=TEST_CHAPTER_NAME,
)
# add a sequence to the course to which the problems can be added
self.problem_section = ItemFactory.create(parent_location=chapter.location,
category='sequential',
metadata={'graded': True, 'format': 'Homework'},
display_name=TEST_SECTION_NAME)
self.problem_section = ItemFactory.create(
parent_location=self.chapter.location,
category='sequential',
metadata={'graded': True, 'format': 'Homework'},
display_name=TEST_SECTION_NAME,
)
@staticmethod
def get_user_email(username):
......@@ -335,10 +340,10 @@ class TestReportMixin(object):
file_index (int): Describes which report store file to
open. Files are ordered by last modified date, and 0
corresponds to the most recently modified file.
verify_order (boolean): When True, we verify that both the
content and order of `expected_rows` matches the
actual csv rows. When False (default), we only verify
that the content matches.
verify_order (boolean): When True (default), we verify that
both the content and order of `expected_rows` matches
the actual csv rows. When False, we only verify that
the content matches.
ignore_other_columns (boolean): When True, we verify that `expected_rows`
contain data which is the subset of actual csv rows.
"""
......
......@@ -134,9 +134,9 @@ class TestRescoringTask(TestIntegrationTask):
# are in sync.
expected_subsection_grade = expected_score
course_grade = CourseGradeFactory(user).create(self.course)
course_grade = CourseGradeFactory().create(user, self.course)
self.assertEquals(
course_grade.subsection_grade_totals_by_format['Homework'][0].earned,
course_grade.graded_subsections_by_format['Homework'][self.problem_section.location].graded_total.earned,
expected_subsection_grade,
)
......@@ -574,13 +574,13 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
self.verify_rows_in_csv(
[
merge_dicts(
{'id': str(student.id), 'username': student.username, 'email': student.email},
{'Student ID': str(student.id), 'Username': student.username, 'Email': student.email},
grades,
user_partition_group(student)
)
for student_grades in students_grades for student, grades in student_grades.iteritems()
],
ignore_other_columns=ignore_other_columns
ignore_other_columns=ignore_other_columns,
)
def test_both_groups_problems(self):
......@@ -604,10 +604,20 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
self.verify_csv_task_success(result)
self.verify_grades_in_csv(
[
{self.student_a: {'grade': '1.0', 'HW': '1.0'}},
{self.student_b: {'grade': '0.5', 'HW': '0.5'}}
{
self.student_a: {
u'Grade': '1.0',
u'Homework': '1.0',
}
},
{
self.student_b: {
u'Grade': '0.5',
u'Homework': '0.5',
}
},
],
ignore_other_columns=True
ignore_other_columns=True,
)
def test_one_group_problem(self):
......@@ -627,8 +637,18 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
self.verify_csv_task_success(result)
self.verify_grades_in_csv(
[
{self.student_a: {'grade': '1.0', 'HW': '1.0'}},
{self.student_b: {'grade': '0.0', 'HW': '0.0'}}
{
self.student_a: {
u'Grade': '1.0',
u'Homework': '1.0',
},
},
{
self.student_b: {
u'Grade': '0.0',
u'Homework': u'Not Accessible',
}
},
],
ignore_other_columns=True
)
......@@ -7,7 +7,7 @@ from django.contrib.auth.models import User
from django.dispatch import receiver
import logging
from lms.djangoapps.grades import progress
from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
from lms.djangoapps.grades.signals.signals import PROBLEM_WEIGHTED_SCORE_CHANGED
from lms import CELERY_APP
from lti_provider.models import GradedAssignment
......@@ -109,8 +109,8 @@ def send_composite_outcome(user_id, course_id, assignment_id, version):
mapped_usage_key = assignment.usage_key.map_into_course(course_key)
user = User.objects.get(id=user_id)
course = modulestore().get_course(course_key, depth=0)
progress_summary = progress.summary(user, course)
earned, possible = progress_summary.score_for_module(mapped_usage_key)
course_grade = CourseGradeFactory().create(user, course)
earned, possible = course_grade.score_for_module(mapped_usage_key)
if possible == 0:
weighted_score = 0
else:
......
......@@ -99,9 +99,9 @@ class SendCompositeOutcomeTest(BaseOutcomeTest):
block_type='problem',
block_id='problem',
)
self.weighted_scores = MagicMock()
self.weighted_scores_mock = self.setup_patch(
'lti_provider.tasks.progress.summary', self.weighted_scores
self.course_grade = MagicMock()
self.course_grade_mock = self.setup_patch(
'lti_provider.tasks.CourseGradeFactory.create', self.course_grade
)
self.module_store = MagicMock()
self.module_store.get_item = MagicMock(return_value=self.descriptor)
......@@ -117,7 +117,7 @@ class SendCompositeOutcomeTest(BaseOutcomeTest):
)
@ddt.unpack
def test_outcome_with_score_score(self, earned, possible, expected):
self.weighted_scores.score_for_module = MagicMock(return_value=(earned, possible))
self.course_grade.score_for_module = MagicMock(return_value=(earned, possible))
tasks.send_composite_outcome(
self.user.id, unicode(self.course_key), self.assignment.id, 1
)
......@@ -129,4 +129,4 @@ class SendCompositeOutcomeTest(BaseOutcomeTest):
tasks.send_composite_outcome(
self.user.id, unicode(self.course_key), self.assignment.id, 1
)
self.assertEqual(self.weighted_scores_mock.call_count, 0)
self.assertEqual(self.course_grade_mock.call_count, 0)
......@@ -100,7 +100,7 @@ $(function () {
extraColorIndex = len(categories) #Keeping track of the next color to use for categories not in categories[]
if show_grade_breakdown:
for section in grade_summary['grade_breakdown']:
for section in grade_summary['grade_breakdown'].itervalues():
if section['percent'] > 0:
if section['category'] in categories:
color = categories[ section['category'] ]['color']
......
......@@ -10,7 +10,7 @@ from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import SignalHandler
from openedx.core.djangoapps.credit.verification_access import update_verification_partitions
from openedx.core.djangoapps.signals.signals import GRADES_UPDATED
from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED
log = logging.getLogger(__name__)
......@@ -52,14 +52,14 @@ def on_pre_publish(sender, course_key, **kwargs): # pylint: disable=unused-argu
log.info(u"Finished updating in-course reverification access rules")
@receiver(GRADES_UPDATED)
def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadline, **kwargs): # pylint: disable=unused-argument
@receiver(COURSE_GRADE_CHANGED)
def listen_for_grade_calculation(sender, user, course_grade, course_key, deadline, **kwargs): # pylint: disable=unused-argument
"""Receive 'MIN_GRADE_REQUIREMENT_STATUS' signal and update minimum grade requirement status.
Args:
sender: None
user(User): User Model object
grade_summary(dict): Dict containing output from the course grader
course_grade(CourseGrade): CourseGrade object
course_key(CourseKey): The key for the course
deadline(datetime): Course end date or None
......@@ -78,7 +78,7 @@ def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadli
criteria = requirements[0].get('criteria')
if criteria:
min_grade = criteria.get('min_grade')
passing_grade = grade_summary['percent'] >= min_grade
passing_grade = course_grade.percent >= min_grade
now = timezone.now()
status = None
reason = None
......@@ -89,7 +89,7 @@ def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadli
if passing_grade:
# Student received a passing grade
status = 'satisfied'
reason = {'final_grade': grade_summary['percent']}
reason = {'final_grade': course_grade.percent}
else:
# Submission after deadline
......@@ -104,7 +104,7 @@ def listen_for_grade_calculation(sender, user, grade_summary, course_key, deadli
# Student failed to receive minimum grade
status = 'failed'
reason = {
'final_grade': grade_summary['percent'],
'final_grade': course_grade.percent,
'minimum_grade': min_grade
}
......
......@@ -5,6 +5,7 @@ Tests for minimum grade requirement status
import ddt
import pytz
from datetime import timedelta, datetime
from mock import MagicMock
from unittest import skipUnless
from django.conf import settings
......@@ -73,7 +74,9 @@ class TestMinGradedRequirementStatus(ModuleStoreTestCase):
def assert_requirement_status(self, grade, due_date, expected_status):
""" Verify the user's credit requirement status is as expected after simulating a grading calculation. """
listen_for_grade_calculation(None, self.user, {'percent': grade}, self.course.id, due_date)
course_grade = MagicMock()
course_grade.percent = grade
listen_for_grade_calculation(None, self.user, course_grade, self.course.id, due_date)
req_status = get_credit_requirement_status(self.course.id, self.request.user.username, 'grade', 'grade')
self.assertEqual(req_status[0]['status'], expected_status)
......
......@@ -5,8 +5,8 @@ This module contains all signals.
from django.dispatch import Signal
# Signal that fires when a user is graded (in lms/grades/course_grades.py)
GRADES_UPDATED = Signal(providing_args=["user", "grade_summary", "course_key", "deadline"])
# Signal that fires when a user is graded
COURSE_GRADE_CHANGED = Signal(providing_args=["user", "course_grade", "course_key", "deadline"])
# Signal that fires when a user is awarded a certificate in a course (in the certificates django app)
# TODO: runtime coupling between apps will be reduced if this event is changed to carry a username
......
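A minimal receiver for the renamed signal, modeled on the credit listener shown earlier (the function name and log line are hypothetical):

import logging

from django.dispatch import receiver
from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED

log = logging.getLogger(__name__)

@receiver(COURSE_GRADE_CHANGED)
def log_course_grade_change(sender, user, course_grade, course_key, deadline, **kwargs):  # pylint: disable=unused-argument
    """Hypothetical listener: record the learner's updated course percent."""
    log.info(u'Course grade for %s in %s is now %s', user.username, course_key, course_grade.percent)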
......@@ -104,7 +104,7 @@ class TestConditionalContent(ModuleStoreTestCase):
"""
Construct a course with graded problems that exist within a split test.
"""
TEST_SECTION_NAME = 'Problem'
TEST_SECTION_NAME = 'Subsection'
def setUp(self):
"""
......