Commit b04a6a6a by Nimisha Asthagiri, committed by GitHub

Merge pull request #13053 from edx/tnl/grades-app

Create Grades djangoapp
parents 467bc55d 1ada5c34
......@@ -2,11 +2,12 @@
Management command to generate a list of grades for
all students that are enrolled in a course.
"""
from courseware import grades, courses
from certificates.models import GeneratedCertificate
from django.test.client import RequestFactory
from django.core.management.base import BaseCommand, CommandError
import os
from lms.djangoapps.courseware import courses
from lms.djangoapps.certificates.models import GeneratedCertificate
from lms.djangoapps.grades import course_grades
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
......@@ -117,7 +118,7 @@ class Command(BaseCommand):
count, total, hours, minutes)
start = datetime.datetime.now()
request.user = student
grade = grades.grade(student, request, course)
grade = course_grades.summary(student, request, course)
if not header:
header = [section['label'] for section in grade[u'section_breakdown']]
rows.append(["email", "username", "certificate-grade", "grade"] + header)
......
......@@ -14,7 +14,6 @@ from cStringIO import StringIO
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import (
Http404,
HttpResponse,
HttpResponseForbidden,
)
......@@ -31,8 +30,8 @@ from courseware.access import has_access
from courseware.courses import get_course_by_id
from courseware.field_overrides import disable_overrides
from courseware.grades import iterate_grades_for
from edxmako.shortcuts import render_to_response
from grades.course_grades import iterate_grades_for
from opaque_keys.edx.keys import CourseKey
from ccx_keys.locator import CCXLocator
from student.roles import CourseCcxCoachRole
......
......@@ -4,7 +4,8 @@ Management command which fixes ungraded certificates for students
from certificates.models import GeneratedCertificate
from courseware import grades, courses
from courseware import courses
from lms.djangoapps.grades import course_grades
from django.test.client import RequestFactory
from django.core.management.base import BaseCommand
from optparse import make_option
......@@ -51,7 +52,7 @@ class Command(BaseCommand):
for cert in ungraded:
# grade the student
grade = grades.grade(cert.user, course)
grade = course_grades.summary(cert.user, course)
print "grading {0} - {1}".format(cert.user, grade['percent'])
cert.grade = grade['percent']
if not options['noop']:
......
......@@ -11,7 +11,7 @@ from django.conf import settings
from django.core.urlresolvers import reverse
from requests.auth import HTTPBasicAuth
from courseware import grades
from lms.djangoapps.grades import course_grades
from xmodule.modulestore.django import modulestore
from capa.xqueue_interface import XQueueInterface
from capa.xqueue_interface import make_xheader, make_hashkey
......@@ -257,7 +257,7 @@ class XQueueCertInterface(object):
self.request.session = {}
is_whitelisted = self.whitelist.filter(user=student, course_id=course_id, whitelist=True).exists()
grade = grades.grade(student, course)
grade = course_grades.summary(student, course)
enrollment_mode, __ = CourseEnrollment.enrollment_mode_for_user(student, course_id)
mode_is_verified = enrollment_mode in GeneratedCertificate.VERIFIED_CERTS_MODES
user_is_verified = SoftwareSecurePhotoVerification.user_is_verified(student)
......
......@@ -14,6 +14,7 @@ from config_models.models import cache
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from courseware.tests.factories import GlobalStaffFactory
from lms.djangoapps.grades.tests.utils import mock_passing_grade
from microsite_configuration import microsite
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
......@@ -48,16 +49,6 @@ class WebCertificateTestMixin(object):
Mixin with helpers for testing Web Certificates.
"""
@contextmanager
def _mock_passing_grade(self):
"""
Mock the grading function to always return a passing grade.
"""
symbol = 'courseware.grades.grade'
with patch(symbol) as mock_grade:
mock_grade.return_value = {'grade': 'Pass', 'percent': 0.75}
yield
@contextmanager
def _mock_queue(self, is_successful=True):
"""
Mock the "send to XQueue" method to return either success or an error.
......@@ -193,7 +184,7 @@ class CertificateDownloadableStatusTests(WebCertificateTestMixin, ModuleStoreTes
def test_with_downloadable_web_cert(self):
CourseEnrollment.enroll(self.student, self.course.id, mode='honor')
self._setup_course_certificate()
with self._mock_passing_grade():
with mock_passing_grade():
certs_api.generate_user_certificates(self.student, self.course.id)
cert_status = certificate_status_for_student(self.student, self.course.id)
......@@ -437,7 +428,7 @@ class GenerateUserCertificatesTest(EventTestMixin, WebCertificateTestMixin, Modu
self.request_factory = RequestFactory()
def test_new_cert_requests_into_xqueue_returns_generating(self):
with self._mock_passing_grade():
with mock_passing_grade():
with self._mock_queue():
certs_api.generate_user_certificates(self.student, self.course.id)
......@@ -455,7 +446,7 @@ class GenerateUserCertificatesTest(EventTestMixin, WebCertificateTestMixin, Modu
)
def test_xqueue_submit_task_error(self):
with self._mock_passing_grade():
with mock_passing_grade():
with self._mock_queue(is_successful=False):
certs_api.generate_user_certificates(self.student, self.course.id)
......@@ -476,7 +467,7 @@ class GenerateUserCertificatesTest(EventTestMixin, WebCertificateTestMixin, Modu
mode='verified'
)
with self._mock_passing_grade():
with mock_passing_grade():
with self._mock_queue(is_successful=False):
status = certs_api.generate_user_certificates(self.student, self.course.id)
self.assertEqual(status, None)
......@@ -487,7 +478,7 @@ class GenerateUserCertificatesTest(EventTestMixin, WebCertificateTestMixin, Modu
Test no message sent to Xqueue if HTML certificate view is enabled
"""
self._setup_course_certificate()
with self._mock_passing_grade():
with mock_passing_grade():
certs_api.generate_user_certificates(self.student, self.course.id)
# Verify that the certificate has status 'downloadable'
......
"""Tests for the resubmit_error_certificates management command. """
import ddt
from contextlib import contextmanager
from django.core.management.base import CommandError
from nose.plugins.attrib import attr
from django.test.utils import override_settings
......@@ -12,6 +11,7 @@ from opaque_keys.edx.locator import CourseLocator
from badges.events.course_complete import get_completion_badge
from badges.models import BadgeAssertion
from badges.tests.factories import BadgeAssertionFactory, CourseCompleteImageConfigurationFactory
from lms.djangoapps.grades.tests.utils import mock_passing_grade
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, check_mongo_calls, ItemFactory
from student.tests.factories import UserFactory, CourseEnrollmentFactory
......@@ -247,7 +247,7 @@ class UngenerateCertificatesTest(CertificateManagementTest):
mock_send_to_queue.return_value = (0, "Successfully queued")
key = self.course.location.course_key
self._create_cert(key, self.user, CertificateStatuses.unavailable)
with self._mock_passing_grade():
with mock_passing_grade():
self._run_command(
course=unicode(key), noop=False, insecure=True, force=False
)
......@@ -257,11 +257,3 @@ class UngenerateCertificatesTest(CertificateManagementTest):
course_id=key
)
self.assertEqual(certificate.status, CertificateStatuses.generating)
@contextmanager
def _mock_passing_grade(self):
"""Mock the grading function to always return a passing grade. """
symbol = 'courseware.grades.grade'
with patch(symbol) as mock_grade:
mock_grade.return_value = {'grade': 'Pass', 'percent': 0.75}
yield
......@@ -13,6 +13,7 @@ import freezegun
import pytz
from course_modes.models import CourseMode
from lms.djangoapps.grades.tests.utils import mock_passing_grade
from opaque_keys.edx.locator import CourseLocator
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from student.tests.factories import UserFactory, CourseEnrollmentFactory
......@@ -58,7 +59,8 @@ class XQueueCertInterfaceAddCertificateTest(ModuleStoreTestCase):
SoftwareSecurePhotoVerificationFactory.create(user=self.user_2, status='approved')
def test_add_cert_callback_url(self):
with patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75})):
with mock_passing_grade():
with patch.object(XQueueInterface, 'send_to_queue') as mock_send:
mock_send.return_value = (0, None)
self.xqueue.add_cert(self.user, self.course.id)
......@@ -73,7 +75,7 @@ class XQueueCertInterfaceAddCertificateTest(ModuleStoreTestCase):
"""
Tests that there is no certificate creation message in the queue if generate_pdf is False
"""
with patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75})):
with mock_passing_grade():
with patch.object(XQueueInterface, 'send_to_queue') as mock_send:
self.xqueue.add_cert(self.user, self.course.id, generate_pdf=False)
......@@ -121,7 +123,7 @@ class XQueueCertInterfaceAddCertificateTest(ModuleStoreTestCase):
CertificateWhitelistFactory(course_id=self.course.id, user=self.user_2)
# Generate certs
with patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75})):
with mock_passing_grade():
with patch.object(XQueueInterface, 'send_to_queue') as mock_send:
mock_send.return_value = (0, None)
self.xqueue.add_cert(self.user_2, self.course.id)
......@@ -145,7 +147,7 @@ class XQueueCertInterfaceAddCertificateTest(ModuleStoreTestCase):
is_active=True,
mode=mode,
)
with patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75})):
with mock_passing_grade():
with patch.object(XQueueInterface, 'send_to_queue') as mock_send:
mock_send.return_value = (0, None)
self.xqueue.add_cert(self.user_2, self.course.id)
......@@ -270,7 +272,7 @@ class XQueueCertInterfaceAddCertificateTest(ModuleStoreTestCase):
)
# Run grading/cert generation again
with patch('courseware.grades.grade', Mock(return_value={'grade': grade, 'percent': 0.75})):
with mock_passing_grade(grade_pass=grade):
with patch.object(XQueueInterface, 'send_to_queue') as mock_send:
mock_send.return_value = (0, None)
self.xqueue.add_cert(self.user_2, self.course.id)
......
......@@ -21,6 +21,7 @@ from lms.djangoapps.badges.tests.factories import (
CourseCompleteImageConfigurationFactory,
BadgeClassFactory,
)
from lms.djangoapps.grades.tests.utils import mock_passing_grade
from openedx.core.lib.tests.assertions.events import assert_event_matches
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from student.roles import CourseStaffRole
......@@ -836,8 +837,7 @@ class CertificatesViewsTests(CommonCertificatesTestCase):
request_certificate_url = reverse('certificates.views.request_certificate')
with patch('capa.xqueue_interface.XQueueInterface.send_to_queue') as mock_queue:
mock_queue.return_value = (0, "Successfully queued")
with patch('courseware.grades.grade') as mock_grade:
mock_grade.return_value = {'grade': 'Pass', 'percent': 0.75}
with mock_passing_grade():
response = self.client.post(request_certificate_url, {'course_id': unicode(self.course.id)})
self.assertEqual(response.status_code, 200)
response_json = json.loads(response.content)
......
......@@ -19,11 +19,8 @@ from django.contrib.auth.models import User
from django.conf import settings
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver, Signal
from model_utils.models import TimeStampedModel
from student.models import user_by_anonymous_id
from submissions.models import score_set, score_reset
import coursewarehistoryextended
from xmodule_django.models import CourseKeyField, LocationKeyField, BlockTypeKeyField
......@@ -359,101 +356,3 @@ class StudentFieldOverride(TimeStampedModel):
field = models.CharField(max_length=255)
value = models.TextField(default='null')
# Signal that indicates that a user's score for a problem has been updated.
# This signal is generated when a scoring event occurs either within the core
# platform or in the Submissions module. Note that this signal will be triggered
# regardless of the new and previous values of the score (i.e. it may be the
# case that this signal is generated when a user re-attempts a problem but
# receives the same score).
SCORE_CHANGED = Signal(
providing_args=[
'points_possible', # Maximum score available for the exercise
'points_earned', # Score obtained by the user
'user_id', # Integer User ID
'course_id', # Unicode string representing the course
'usage_id' # Unicode string indicating the courseware instance
]
)
@receiver(score_set)
def submissions_score_set_handler(sender, **kwargs): # pylint: disable=unused-argument
"""
Consume the score_set signal defined in the Submissions API, and convert it
to a SCORE_CHANGED signal defined in this module. Converts the unicode keys
for user, course and item into the standard representation for the
SCORE_CHANGED signal.
This method expects that the kwargs dictionary will contain the following
entries (See the definition of score_set):
- 'points_possible': integer,
- 'points_earned': integer,
- 'anonymous_user_id': unicode,
- 'course_id': unicode,
- 'item_id': unicode
"""
points_possible = kwargs.get('points_possible', None)
points_earned = kwargs.get('points_earned', None)
course_id = kwargs.get('course_id', None)
usage_id = kwargs.get('item_id', None)
user = None
if 'anonymous_user_id' in kwargs:
user = user_by_anonymous_id(kwargs.get('anonymous_user_id'))
# If any of the kwargs were missing, at least one of the following values
# will be None.
if all((user, points_possible, points_earned, course_id, usage_id)):
SCORE_CHANGED.send(
sender=None,
points_possible=points_possible,
points_earned=points_earned,
user_id=user.id,
course_id=course_id,
usage_id=usage_id
)
else:
log.exception(
u"Failed to process score_set signal from Submissions API. "
"points_possible: %s, points_earned: %s, user: %s, course_id: %s, "
"usage_id: %s", points_possible, points_earned, user, course_id, usage_id
)
@receiver(score_reset)
def submissions_score_reset_handler(sender, **kwargs): # pylint: disable=unused-argument
"""
Consume the score_reset signal defined in the Submissions API, and convert
it to a SCORE_CHANGED signal indicating that the score has been set to 0/0.
Converts the unicode keys for user, course and item into the standard
representation for the SCORE_CHANGED signal.
This method expects that the kwargs dictionary will contain the following
entries (See the definition of score_reset):
- 'anonymous_user_id': unicode,
- 'course_id': unicode,
- 'item_id': unicode
"""
course_id = kwargs.get('course_id', None)
usage_id = kwargs.get('item_id', None)
user = None
if 'anonymous_user_id' in kwargs:
user = user_by_anonymous_id(kwargs.get('anonymous_user_id'))
# If any of the kwargs were missing, at least one of the following values
# will be None.
if all((user, course_id, usage_id)):
SCORE_CHANGED.send(
sender=None,
points_possible=0,
points_earned=0,
user_id=user.id,
course_id=course_id,
usage_id=usage_id
)
else:
log.exception(
u"Failed to process score_reset signal from Submissions API. "
"user: %s, course_id: %s, usage_id: %s", user, course_id, usage_id
)
......@@ -46,7 +46,7 @@ from courseware.masquerade import (
setup_masquerade,
)
from courseware.model_data import DjangoKeyValueStore, FieldDataCache, set_score
from courseware.models import SCORE_CHANGED
from lms.djangoapps.grades.signals import SCORE_CHANGED
from edxmako.shortcuts import render_to_string
from lms.djangoapps.lms_xblock.field_data import LmsFieldData
from lms.djangoapps.lms_xblock.models import XBlockAsidesConfig
......
......@@ -18,11 +18,10 @@ from capa.tests.response_xml_factory import (
OptionResponseXMLFactory, CustomResponseXMLFactory, SchematicResponseXMLFactory,
CodeResponseXMLFactory,
)
from courseware import grades
from lms.djangoapps.grades import course_grades, progress
from courseware.models import StudentModule, BaseStudentModuleHistory
from courseware.tests.helpers import LoginEnrollmentTestCase
from lms.djangoapps.lms_xblock.runtime import quote_slashes
from student.tests.factories import UserFactory
from student.models import anonymous_id_for_user
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
......@@ -246,7 +245,7 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
def get_grade_summary(self):
"""
calls grades.grade for current user and course.
calls course_grades.summary for current user and course.
the keywords for the returned object are
- grade : A final letter grade.
......@@ -256,7 +255,7 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
- grade_breakdown : A breakdown of the major components that
make up the final grade. (For display)
"""
return grades.grade(self.student_user, self.course)
return course_grades.summary(self.student_user, self.course)
def get_progress_summary(self):
"""
......@@ -269,7 +268,7 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
ungraded problems, and is good for displaying a course summary with due dates,
etc.
"""
return grades.progress_summary(self.student_user, self.course)
return progress.summary(self.student_user, self.course).chapters
def check_grade_percent(self, percent):
"""
......@@ -990,162 +989,6 @@ class TestPythonGradedResponse(TestSubmittingProblems):
@attr('shard_1')
class TestAnswerDistributions(TestSubmittingProblems):
"""Check that we can pull answer distributions for problems."""
def setUp(self):
"""Set up a simple course with four problems."""
super(TestAnswerDistributions, self).setUp()
self.homework = self.add_graded_section_to_course('homework')
self.p1_html_id = self.add_dropdown_to_section(self.homework.location, 'p1', 1).location.html_id()
self.p2_html_id = self.add_dropdown_to_section(self.homework.location, 'p2', 1).location.html_id()
self.p3_html_id = self.add_dropdown_to_section(self.homework.location, 'p3', 1).location.html_id()
self.refresh_course()
def test_empty(self):
# Just make sure we can process this without errors.
empty_distribution = grades.answer_distributions(self.course.id)
self.assertFalse(empty_distribution) # should be empty
def test_one_student(self):
# Basic test to make sure we have simple behavior right for a student
# Throw in a non-ASCII answer
self.submit_question_answer('p1', {'2_1': u'ⓤⓝⓘⓒⓞⓓⓔ'})
self.submit_question_answer('p2', {'2_1': 'Correct'})
distributions = grades.answer_distributions(self.course.id)
self.assertEqual(
distributions,
{
('p1', 'p1', '{}_2_1'.format(self.p1_html_id)): {
u'ⓤⓝⓘⓒⓞⓓⓔ': 1
},
('p2', 'p2', '{}_2_1'.format(self.p2_html_id)): {
'Correct': 1
}
}
)
def test_multiple_students(self):
# Our test class is based around making requests for a particular user,
# so we're going to cheat by creating another user and copying and
# modifying StudentModule entries to make them from other users. It's
# a little hacky, but it seemed the simpler way to do this.
self.submit_question_answer('p1', {'2_1': u'Correct'})
self.submit_question_answer('p2', {'2_1': u'Incorrect'})
self.submit_question_answer('p3', {'2_1': u'Correct'})
# Make the above submissions owned by user2
user2 = UserFactory.create()
problems = StudentModule.objects.filter(
course_id=self.course.id,
student=self.student_user
)
for problem in problems:
problem.student_id = user2.id
problem.save()
# Now make more submissions by our original user
self.submit_question_answer('p1', {'2_1': u'Correct'})
self.submit_question_answer('p2', {'2_1': u'Correct'})
self.assertEqual(
grades.answer_distributions(self.course.id),
{
('p1', 'p1', '{}_2_1'.format(self.p1_html_id)): {
'Correct': 2
},
('p2', 'p2', '{}_2_1'.format(self.p2_html_id)): {
'Correct': 1,
'Incorrect': 1
},
('p3', 'p3', '{}_2_1'.format(self.p3_html_id)): {
'Correct': 1
}
}
)
def test_other_data_types(self):
# We'll submit one problem, and then muck with the student_answers
# dict inside its state to try different data types (str, int, float,
# none)
self.submit_question_answer('p1', {'2_1': u'Correct'})
# Now fetch the state entry for that problem.
student_module = StudentModule.objects.get(
course_id=self.course.id,
student=self.student_user
)
for val in ('Correct', True, False, 0, 0.0, 1, 1.0, None):
state = json.loads(student_module.state)
state["student_answers"]['{}_2_1'.format(self.p1_html_id)] = val
student_module.state = json.dumps(state)
student_module.save()
self.assertEqual(
grades.answer_distributions(self.course.id),
{
('p1', 'p1', '{}_2_1'.format(self.p1_html_id)): {
str(val): 1
},
}
)
def test_missing_content(self):
# If there's a StudentModule entry for content that no longer exists,
# we just quietly ignore it (because we can't display a meaningful url
# or name for it).
self.submit_question_answer('p1', {'2_1': 'Incorrect'})
# Now fetch the state entry for that problem and alter it so it points
# to a non-existent problem.
student_module = StudentModule.objects.get(
course_id=self.course.id,
student=self.student_user
)
student_module.module_state_key = student_module.module_state_key.replace(
name=student_module.module_state_key.name + "_fake"
)
student_module.save()
# It should be empty (ignored)
empty_distribution = grades.answer_distributions(self.course.id)
self.assertFalse(empty_distribution) # should be empty
def test_broken_state(self):
# Missing or broken state for a problem should be skipped without
# causing the whole answer_distribution call to explode.
# Submit p1
self.submit_question_answer('p1', {'2_1': u'Correct'})
# Now fetch the StudentModule entry for p1 so we can corrupt its state
prb1 = StudentModule.objects.get(
course_id=self.course.id,
student=self.student_user
)
# Submit p2
self.submit_question_answer('p2', {'2_1': u'Incorrect'})
for new_p1_state in ('{"student_answers": {}}', "invalid json!", None):
prb1.state = new_p1_state
prb1.save()
# p1 won't show up, but p2 should still work
self.assertEqual(
grades.answer_distributions(self.course.id),
{
('p2', 'p2', '{}_2_1'.format(self.p2_html_id)): {
'Incorrect': 1
},
}
)
@attr('shard_1')
class TestConditionalContent(TestSubmittingProblems):
"""
Check that conditional content works correctly with grading.
......
......@@ -1254,8 +1254,10 @@ class ProgressPageTests(ModuleStoreTestCase):
self.assertNotContains(resp, 'Request Certificate')
@patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': True})
@patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [],
'grade_breakdown': []}))
@patch(
'lms.djangoapps.grades.course_grades.summary',
Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
)
def test_view_certificate_link(self):
"""
If the certificate web view is enabled then the certificate web view button should appear for a user whose certificate is
......@@ -1315,8 +1317,10 @@ class ProgressPageTests(ModuleStoreTestCase):
self.assertContains(resp, "creating your certificate")
@patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': False})
@patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [],
'grade_breakdown': []}))
@patch(
'lms.djangoapps.grades.course_grades.summary',
Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
)
def test_view_certificate_link_hidden(self):
"""
If the certificate web view is disabled then the certificate web view button should not appear for a user whose certificate
......@@ -1355,7 +1359,7 @@ class ProgressPageTests(ModuleStoreTestCase):
)
self.assertEqual(resp.status_code, 200)
@patch('courseware.grades.grade', Mock(return_value={
@patch('lms.djangoapps.grades.course_grades.summary', Mock(return_value={
'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []
}))
@ddt.data(
......@@ -1393,8 +1397,10 @@ class ProgressPageTests(ModuleStoreTestCase):
'Request Certificate' not in resp.content)
@patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': True})
@patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [],
'grade_breakdown': []}))
@patch(
'lms.djangoapps.grades.course_grades.summary',
Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
)
def test_page_with_invalidated_certificate_with_html_view(self):
"""
Verify that for html certs, if the certificate is marked as invalidated then
......@@ -1427,8 +1433,10 @@ class ProgressPageTests(ModuleStoreTestCase):
self.assertContains(resp, u"View Certificate")
self.assert_invalidate_certificate(generated_certificate)
@patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [],
'grade_breakdown': []}))
@patch(
'lms.djangoapps.grades.course_grades.summary',
Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
)
def test_page_with_invalidated_certificate_with_pdf(self):
"""
Verify that for pdf certs, if the certificate is marked as invalidated then
......@@ -1444,8 +1452,10 @@ class ProgressPageTests(ModuleStoreTestCase):
self.assertContains(resp, u'Download Your Certificate')
self.assert_invalidate_certificate(generated_certificate)
@patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [],
'grade_breakdown': []}))
@patch(
'lms.djangoapps.grades.course_grades.summary',
Mock(return_value={'grade': 'Pass', 'percent': 0.75, 'section_breakdown': [], 'grade_breakdown': []})
)
def test_message_for_audit_mode(self):
""" Verify that message appears on progress page, if learner is enrolled
in audit mode.
......@@ -1632,19 +1642,19 @@ class IsCoursePassedTests(ModuleStoreTestCase):
# If the user has no grade then False is returned
self.assertFalse(views.is_course_passed(self.course, None, self.student, self.request))
@patch('courseware.grades.grade', Mock(return_value={'percent': 0.9}))
@patch('lms.djangoapps.grades.course_grades.summary', Mock(return_value={'percent': 0.9}))
def test_user_pass_if_percent_appears_above_passing_point(self):
# Mocking course_grades.summary
# If the user has above-passing marks then True is returned
self.assertTrue(views.is_course_passed(self.course, None, self.student, self.request))
@patch('courseware.grades.grade', Mock(return_value={'percent': 0.2}))
@patch('lms.djangoapps.grades.course_grades.summary', Mock(return_value={'percent': 0.2}))
def test_user_fail_if_percent_appears_below_passing_point(self):
# Mocking course_grades.summary
# If the user has below-passing marks then False is returned
self.assertFalse(views.is_course_passed(self.course, None, self.student, self.request))
@patch('courseware.grades.grade', Mock(return_value={'percent': SUCCESS_CUTOFF}))
@patch('lms.djangoapps.grades.course_grades.summary', Mock(return_value={'percent': SUCCESS_CUTOFF}))
def test_user_with_passing_marks_and_achieved_marks_equal(self):
# Mocking course_grades.summary
# If the user's achieved passing marks are equal to the required passing
......@@ -1678,7 +1688,7 @@ class GenerateUserCertTests(ModuleStoreTestCase):
self.assertEqual(resp.status_code, HttpResponseBadRequest.status_code)
self.assertIn("Your certificate will be available when you pass the course.", resp.content)
@patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75}))
@patch('lms.djangoapps.grades.course_grades.summary', Mock(return_value={'grade': 'Pass', 'percent': 0.75}))
@override_settings(CERT_QUEUE='certificates', LMS_SEGMENT_KEY="foobar")
def test_user_with_passing_grade(self):
# If user has above passing grading then json will return cert generating message and
......@@ -1710,7 +1720,7 @@ class GenerateUserCertTests(ModuleStoreTestCase):
)
mock_tracker.reset_mock()
@patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75}))
@patch('lms.djangoapps.grades.course_grades.summary', Mock(return_value={'grade': 'Pass', 'percent': 0.75}))
def test_user_with_passing_existing_generating_cert(self):
# If user has passing grade but also has existing generating cert
# then json will return cert generating message with bad request code
......@@ -1724,7 +1734,7 @@ class GenerateUserCertTests(ModuleStoreTestCase):
self.assertEqual(resp.status_code, HttpResponseBadRequest.status_code)
self.assertIn("Certificate is being created.", resp.content)
@patch('courseware.grades.grade', Mock(return_value={'grade': 'Pass', 'percent': 0.75}))
@patch('lms.djangoapps.grades.course_grades.summary', Mock(return_value={'grade': 'Pass', 'percent': 0.75}))
@override_settings(CERT_QUEUE='certificates', LMS_SEGMENT_KEY="foobar")
def test_user_with_passing_existing_downloadable_cert(self):
# If user has already downloadable certificate
......
......@@ -46,7 +46,7 @@ from openedx.core.djangoapps.models.course_details import CourseDetails
from commerce.utils import EcommerceService
from enrollment.api import add_enrollment
from course_modes.models import CourseMode
from courseware import grades
from lms.djangoapps.grades import course_grades, progress as grades_progress
from courseware.access import has_access, has_ccx_coach_role, _adjust_start_date_for_beta_testers
from courseware.access_response import StartDateError
from courseware.access_utils import in_preview_mode
......@@ -723,12 +723,12 @@ def _progress(request, course_key, student_id):
# Fetch course blocks once for performance reasons
course_structure = get_course_blocks(student, course.location)
courseware_summary = grades.progress_summary(student, course, course_structure)
courseware_summary = grades_progress.summary(student, course, course_structure).chapters
if courseware_summary is None:
# This means the student didn't have access to the course (which the instructor requested)
raise Http404
grade_summary = grades.grade(student, course, course_structure=course_structure)
grade_summary = course_grades.summary(student, course, course_structure=course_structure)
studio_url = get_studio_url(course, 'settings/grading')
# checking certificate generation configuration
......@@ -1123,7 +1123,7 @@ def is_course_passed(course, grade_summary=None, student=None, request=None):
success_cutoff = min(nonzero_cutoffs) if nonzero_cutoffs else None
if grade_summary is None:
grade_summary = grades.grade(student, course)
grade_summary = course_grades.summary(student, course)
return success_cutoff and grade_summary['percent'] >= success_cutoff
......
......@@ -9,6 +9,7 @@ from django.contrib.auth.models import User
from xmodule.modulestore.django import modulestore
from milestones import api as milestones_api
from openedx.core.lib.gating import api as gating_api
from lms.djangoapps.grades.module_grades import get_module_score
log = logging.getLogger(__name__)
......@@ -64,7 +65,6 @@ def evaluate_prerequisite(course, prereq_content_key, user_id):
gated_content = gated_content_milestones.get(prereq_milestone['id'])
if gated_content:
from courseware.grades import get_module_score
user = User.objects.get(id=user_id)
score = get_module_score(user, course, sequential) * 100
for milestone in gated_content:
......
......@@ -4,7 +4,7 @@ Signal handlers for the gating djangoapp
from django.dispatch import receiver
from opaque_keys.edx.keys import CourseKey, UsageKey
from xmodule.modulestore.django import modulestore
from courseware.models import SCORE_CHANGED
from lms.djangoapps.grades.signals import SCORE_CHANGED
from gating import api as gating_api
......
......@@ -136,7 +136,7 @@ class TestEvaluatePrerequisite(GatingTestCase, MilestonesTestCaseMixin):
gating_api.set_required_content(self.course.id, self.seq2.location, self.seq1.location, min_score)
self.prereq_milestone = gating_api.get_gating_milestone(self.course.id, self.seq1.location, 'fulfills')
@patch('courseware.grades.get_module_score')
@patch('gating.api.get_module_score')
@data((.5, True), (1, True), (0, False))
@unpack
def test_min_score_achieved(self, module_score, result, mock_module_score):
......@@ -149,7 +149,7 @@ class TestEvaluatePrerequisite(GatingTestCase, MilestonesTestCaseMixin):
self.assertEqual(milestones_api.user_has_milestone(self.user_dict, self.prereq_milestone), result)
@patch('gating.api.log.warning')
@patch('courseware.grades.get_module_score')
@patch('gating.api.get_module_score')
@data((.5, False), (1, True))
@unpack
def test_invalid_min_score(self, module_score, result, mock_module_score, mock_log):
......@@ -162,21 +162,21 @@ class TestEvaluatePrerequisite(GatingTestCase, MilestonesTestCaseMixin):
self.assertEqual(milestones_api.user_has_milestone(self.user_dict, self.prereq_milestone), result)
self.assertTrue(mock_log.called)
@patch('courseware.grades.get_module_score')
@patch('gating.api.get_module_score')
def test_orphaned_xblock(self, mock_module_score):
""" Test test_orphaned_xblock """
evaluate_prerequisite(self.course, self.prob2.location, self.user.id)
self.assertFalse(mock_module_score.called)
@patch('courseware.grades.get_module_score')
@patch('gating.api.get_module_score')
def test_no_prerequisites(self, mock_module_score):
""" Test test_no_prerequisites """
evaluate_prerequisite(self.course, self.prob1.location, self.user.id)
self.assertFalse(mock_module_score.called)
@patch('courseware.grades.get_module_score')
@patch('gating.api.get_module_score')
def test_no_gated_content(self, mock_module_score):
""" Test test_no_gated_content """
......
"""
Grading Context
"""
from collections import defaultdict
from openedx.core.djangoapps.content.block_structure.api import get_course_in_cache
from .scores import possibly_scored
def grading_context_for_course(course):
"""
Same as grading_context, but takes in a course object.
"""
course_structure = get_course_in_cache(course.id)
return grading_context(course_structure)
def grading_context(course_structure):
"""
This returns a dictionary with keys necessary for quickly grading
a student. They are used by course_grades.summary().
The grading context has two keys:
all_graded_sections - This contains the sections that are graded, as
well as all possible children modules that can affect the
grading. This allows some sections to be skipped if the student
hasn't seen any part of them.
The format is a dictionary keyed by section-type. The values are
arrays of dictionaries containing
"section_block" : The section block
"scored_descendants" : An array of blocks that could
possibly be scored in the section, for any student
all_graded_blocks - This contains a list of all blocks that can
affect grading a student. This is used to efficiently fetch
all the xmodule state for a FieldDataCache without walking
the descriptor tree again.
"""
all_graded_blocks = []
all_graded_sections = defaultdict(list)
for chapter_key in course_structure.get_children(course_structure.root_block_usage_key):
for section_key in course_structure.get_children(chapter_key):
section = course_structure[section_key]
scored_descendants_of_section = [section]
if section.graded:
for descendant_key in course_structure.post_order_traversal(
filter_func=possibly_scored,
start_node=section_key,
):
scored_descendants_of_section.append(
course_structure[descendant_key],
)
# Include only those blocks that have scores, not those that are just parents.
section_info = {
'section_block': section,
'scored_descendants': [
child for child in scored_descendants_of_section
if getattr(child, 'has_score', None)
]
}
section_format = getattr(section, 'format', '')
all_graded_sections[section_format].append(section_info)
all_graded_blocks.extend(scored_descendants_of_section)
return {
'all_graded_sections': all_graded_sections,
'all_graded_blocks': all_graded_blocks,
}
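To make the shape of this structure concrete, here is a minimal consumer sketch. The import path below is an assumption (the diff does not show the new file's name), and the helper is illustrative only.

# Sketch of consuming the grading context; the module path is an assumption.
from lms.djangoapps.grades.context import grading_context_for_course

def scored_block_counts(course):
    """Return a dict mapping each section format to its number of scored blocks."""
    context = grading_context_for_course(course)
    return {
        section_format: sum(len(info['scored_descendants']) for info in sections)
        for section_format, sections in context['all_graded_sections'].items()
    }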
"""
Functionality for module-level grades.
"""
# TODO The code in this file needs to be updated to use BlockTransformers. (TNL-4448)
# TODO The code here needs to be validated - may not be calculating correctly.
from django.test.client import RequestFactory
from courseware.model_data import FieldDataCache, ScoresClient
from courseware.module_render import get_module_for_descriptor
from opaque_keys.edx.locator import BlockUsageLocator
from util.module_utils import yield_dynamic_descriptor_descendants
def _get_mock_request(student):
"""
Make a fake request because grading code expects to be able to look at
the request. We have to attach the correct user to the request before
grading that student.
"""
request = RequestFactory().get('/')
request.user = student
return request
def _calculate_score_for_modules(user_id, course, modules):
"""
Calculates the cumulative score (percent) of the given modules
"""
# Materialize the module generator so it can be iterated more than once.
modules = [m for m in modules]
# Remove branch and version from the module locators; otherwise the
# StudentModule lookup would return no scores because the usage keys
# would not match.
locations = [
BlockUsageLocator(
course_key=course.id,
block_type=module.location.block_type,
block_id=module.location.block_id
)
if isinstance(module.location, BlockUsageLocator) and module.location.version
else module.location
for module in modules
]
scores_client = ScoresClient(course.id, user_id)
scores_client.fetch_scores(locations)
# Iterate over all of the modules to get the user's score percentage for each of them
module_percentages = []
ignore_categories = ['course', 'chapter', 'sequential', 'vertical', 'randomize', 'library_content']
for index, module in enumerate(modules):
if module.category not in ignore_categories and (module.graded or module.has_score):
module_score = scores_client.get(locations[index])
if module_score:
correct = module_score.correct or 0
total = module_score.total or 1
# Force float division to avoid Python 2 integer floor division.
module_percentages.append(float(correct) / total)
return sum(module_percentages) / float(len(module_percentages)) if module_percentages else 0
def get_module_score(user, course, module):
"""
Collects all children of the given module and calculates the cumulative
score for this set of modules for the given user.
Arguments:
user (User): The user
course (CourseModule): The course
module (XBlock): The module
Returns:
float: The cumulative score
"""
def inner_get_module(descriptor):
"""
Delegate to get_module_for_descriptor
"""
field_data_cache = FieldDataCache([descriptor], course.id, user)
return get_module_for_descriptor(
user,
_get_mock_request(user),
descriptor,
field_data_cache,
course.id,
course=course
)
modules = yield_dynamic_descriptor_descendants(
module,
user.id,
inner_get_module
)
return _calculate_score_for_modules(user.id, course, modules)
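For illustration, the gating change above boils down to a call like the following; `user`, `course`, and `block` are assumed to come from the caller, and the wrapper itself is invented for this sketch.

# Illustrative wrapper around get_module_score, mirroring gating/api.py above.
from lms.djangoapps.grades.module_grades import get_module_score

def percent_score_for_block(user, course, block):
    """Return the user's cumulative score on `block` as a 0-100 percentage."""
    return get_module_score(user, course, block) * 100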
"""
Progress Summary of a learner's course grades.
"""
from course_blocks.api import get_course_blocks
from courseware.model_data import ScoresClient
from openedx.core.lib.gating import api as gating_api
from student.models import anonymous_id_for_user
from util.db import outer_atomic
from xmodule import graders, block_metadata_utils
from xmodule.graders import Score
from .scores import get_score, possibly_scored
class ProgressSummary(object):
"""
Wrapper class for the computation of a user's scores across a course.
Attributes
chapters: a summary of all sections with problems in the course. It is
organized as an array of chapters, each containing an array of sections,
each containing an array of scores. This contains information for graded
and ungraded problems, and is good for displaying a course summary with
due dates, etc.
weighted_scores: a dictionary mapping module locations to weighted Score
objects.
locations_to_children: a function mapping locations to their
direct descendants.
"""
def __init__(self, chapters=None, weighted_scores=None, locations_to_children=None):
self.chapters = chapters
self.weighted_scores = weighted_scores
self.locations_to_children = locations_to_children
def score_for_module(self, location):
"""
Calculate the aggregate weighted score for any location in the course.
This method returns a tuple containing (earned_score, possible_score).
If the location is of 'problem' type, this method will return the
possible and earned scores for that problem. If the location refers to a
composite module (a vertical or section), the scores will be the sums of
all scored problems that are children of the chosen location.
"""
if location in self.weighted_scores:
score = self.weighted_scores[location]
return score.earned, score.possible
children = self.locations_to_children(location)
earned = 0.0
possible = 0.0
for child in children:
child_earned, child_possible = self.score_for_module(child)
earned += child_earned
possible += child_possible
return earned, possible
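A self-contained illustration of this recursive aggregation, using stand-in data (all names and scores below are invented for the example):

# Stand-in data: two scored problems under one vertical. score_for_module
# falls through to summing the children because the vertical itself has no
# entry in weighted_scores.
from collections import namedtuple

FakeScore = namedtuple('FakeScore', 'earned possible')  # stand-in for xmodule.graders.Score
weighted = {
    'problem_a': FakeScore(2.0, 5.0),
    'problem_b': FakeScore(3.0, 5.0),
}
children = {'vertical_1': ['problem_a', 'problem_b']}
ps = ProgressSummary(weighted_scores=weighted, locations_to_children=children.get)
assert ps.score_for_module('vertical_1') == (5.0, 10.0)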
def summary(student, course, course_structure=None):
"""
This pulls a summary of all problems in the course.
Returns
- courseware_summary is a summary of all sections with problems in the course.
It is organized as an array of chapters, each containing an array of sections,
each containing an array of scores. This contains information for graded and
ungraded problems, and is good for displaying a course summary with due dates,
etc.
- An empty ProgressSummary if the student does not have access to load the course.
Arguments:
student: A User object for the student to grade
course: A Descriptor containing the course to grade
"""
if course_structure is None:
course_structure = get_course_blocks(student, course.location)
if not len(course_structure):
return ProgressSummary()
scorable_locations = [block_key for block_key in course_structure if possibly_scored(block_key)]
with outer_atomic():
scores_client = ScoresClient.create_for_locations(course.id, student.id, scorable_locations)
# We need to import this here to avoid a circular dependency of the form:
# XBlock --> submissions --> Django Rest Framework error strings -->
# Django translation --> ... --> courseware --> submissions
from submissions import api as sub_api # installed from the edx-submissions repository
with outer_atomic():
submissions_scores = sub_api.get_scores(
unicode(course.id), anonymous_id_for_user(student, course.id)
)
# Check for gated content
gated_content = gating_api.get_gated_content(course, student)
chapters = []
locations_to_weighted_scores = {}
for chapter_key in course_structure.get_children(course_structure.root_block_usage_key):
chapter = course_structure[chapter_key]
sections = []
for section_key in course_structure.get_children(chapter_key):
if unicode(section_key) in gated_content:
continue
section = course_structure[section_key]
graded = getattr(section, 'graded', False)
scores = []
for descendant_key in course_structure.post_order_traversal(
filter_func=possibly_scored,
start_node=section_key,
):
descendant = course_structure[descendant_key]
(correct, total) = get_score(
student,
descendant,
scores_client,
submissions_scores,
)
if correct is None and total is None:
continue
weighted_location_score = Score(
correct,
total,
graded,
block_metadata_utils.display_name_with_default_escaped(descendant),
descendant.location
)
scores.append(weighted_location_score)
locations_to_weighted_scores[descendant.location] = weighted_location_score
escaped_section_name = block_metadata_utils.display_name_with_default_escaped(section)
section_total, _ = graders.aggregate_scores(scores, escaped_section_name)
sections.append({
'display_name': escaped_section_name,
'url_name': block_metadata_utils.url_name_for_block(section),
'scores': scores,
'section_total': section_total,
'format': getattr(section, 'format', ''),
'due': getattr(section, 'due', None),
'graded': graded,
})
chapters.append({
'course': course.display_name_with_default_escaped,
'display_name': block_metadata_utils.display_name_with_default_escaped(chapter),
'url_name': block_metadata_utils.url_name_for_block(chapter),
'sections': sections
})
return ProgressSummary(chapters, locations_to_weighted_scores, course_structure.get_children)
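A sketch of walking the returned chapters, roughly what the progress page does with this data; the report format here is invented.

# Illustrative consumer of ProgressSummary.chapters (report format invented).
def format_progress(student, course):
    lines = []
    for chapter in summary(student, course).chapters:
        for section in chapter['sections']:
            total = section['section_total']
            lines.append(u"{} / {}: {} of {} earned".format(
                chapter['display_name'], section['display_name'],
                total.earned, total.possible,
            ))
    return u"\n".join(lines)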
"""
Functionality for problem scores.
"""
from openedx.core.lib.cache_utils import memoized
from xblock.core import XBlock
from .transformer import GradesTransformer
@memoized
def block_types_with_scores():
"""
Returns the block types that could have a score.
Something might be a scored item if it is capable of storing a score
(has_score=True). We also have to include anything that can have children,
since those children might have scores. We can avoid things like Videos,
which have state but cannot ever impact someone's grade.
"""
return frozenset(
cat for (cat, xblock_class) in XBlock.load_classes() if (
getattr(xblock_class, 'has_score', False) or getattr(xblock_class, 'has_children', False)
)
)
def possibly_scored(usage_key):
"""
Returns whether the given block could impact grading (i.e. scored, or has children).
"""
return usage_key.block_type in block_types_with_scores()
def weighted_score(raw_correct, raw_total, weight):
"""Return a tuple that represents the weighted (correct, total) score."""
# If there is no weighting, or weighting can't be applied, return input.
if weight is None or raw_total == 0:
return (raw_correct, raw_total)
return (float(raw_correct) * weight / raw_total, float(weight))
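Worked examples of the three branches:

# A 5/7 raw score on a problem weighted to 10 points is rescaled:
assert weighted_score(5, 7, 10) == (float(5) * 10 / 7, 10.0)   # ~(7.14, 10.0)
# With no weight configured, the raw score passes through unchanged:
assert weighted_score(5, 7, None) == (5, 7)
# A zero-point problem cannot be rescaled, so the input is returned as-is:
assert weighted_score(0, 0, 10) == (0, 0)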
def get_score(user, block, scores_client, submissions_scores_cache):
"""
Return the score for a user on a problem, as a tuple (correct, total).
e.g. (5,7) if you got 5 out of 7 points.
If this problem doesn't have a score, or we couldn't load it, returns (None,
None).
user: a Student object
block: a BlockStructure's BlockData object
scores_client: an initialized ScoresClient
submissions_scores_cache: A dict of location names to (earned, possible) point tuples.
If an entry is found in this cache, it takes precedence.
"""
submissions_scores_cache = submissions_scores_cache or {}
if not user.is_authenticated():
return (None, None)
location_url = unicode(block.location)
if location_url in submissions_scores_cache:
return submissions_scores_cache[location_url]
if not getattr(block, 'has_score', False):
# These are not problems, and do not have a score
return (None, None)
# Check the score that comes from the ScoresClient (out of CSM).
# If an entry exists and has a total associated with it, we trust that
# value. This is important for cases where a student might have seen an
# older version of the problem -- they're still graded on what was possible
# when they tried the problem, not what it's worth now.
score = scores_client.get(block.location)
if score and score.total is not None:
# We have a valid score, just use it.
correct = score.correct if score.correct is not None else 0.0
total = score.total
else:
# This means we don't have a valid score entry and we don't have a
# cached_max_score on hand. We know they've earned 0.0 points on this.
correct = 0.0
total = block.transformer_data[GradesTransformer].max_score
# The problem may be an error module (if something in the problem builder
# failed), in which case total might be None.
if total is None:
return (None, None)
return weighted_score(correct, total, block.weight)
"""
Grades related signals.
"""
from django.dispatch import receiver, Signal
from logging import getLogger
from student.models import user_by_anonymous_id
from submissions.models import score_set, score_reset
log = getLogger(__name__)
# Signal that indicates that a user's score for a problem has been updated.
# This signal is generated when a scoring event occurs either within the core
# platform or in the Submissions module. Note that this signal will be triggered
# regardless of the new and previous values of the score (i.e. it may be the
# case that this signal is generated when a user re-attempts a problem but
# receives the same score).
SCORE_CHANGED = Signal(
providing_args=[
'points_possible', # Maximum score available for the exercise
'points_earned', # Score obtained by the user
'user_id', # Integer User ID
'course_id', # Unicode string representing the course
'usage_id' # Unicode string indicating the courseware instance
]
)
@receiver(score_set)
def submissions_score_set_handler(sender, **kwargs): # pylint: disable=unused-argument
"""
Consume the score_set signal defined in the Submissions API, and convert it
to a SCORE_CHANGED signal defined in this module. Converts the unicode keys
for user, course and item into the standard representation for the
SCORE_CHANGED signal.
This method expects that the kwargs dictionary will contain the following
entries (See the definition of score_set):
- 'points_possible': integer,
- 'points_earned': integer,
- 'anonymous_user_id': unicode,
- 'course_id': unicode,
- 'item_id': unicode
"""
points_possible = kwargs.get('points_possible', None)
points_earned = kwargs.get('points_earned', None)
course_id = kwargs.get('course_id', None)
usage_id = kwargs.get('item_id', None)
user = None
if 'anonymous_user_id' in kwargs:
user = user_by_anonymous_id(kwargs.get('anonymous_user_id'))
# If any of the kwargs were missing, at least one of the following values
# will be None.
if all((user, points_possible, points_earned, course_id, usage_id)):
SCORE_CHANGED.send(
sender=None,
points_possible=points_possible,
points_earned=points_earned,
user_id=user.id,
course_id=course_id,
usage_id=usage_id
)
else:
log.exception(
u"Failed to process score_set signal from Submissions API. "
"points_possible: %s, points_earned: %s, user: %s, course_id: %s, "
"usage_id: %s", points_possible, points_earned, user, course_id, usage_id
)
@receiver(score_reset)
def submissions_score_reset_handler(sender, **kwargs): # pylint: disable=unused-argument
"""
Consume the score_reset signal defined in the Submissions API, and convert
it to a SCORE_CHANGED signal indicating that the score has been set to 0/0.
Converts the unicode keys for user, course and item into the standard
representation for the SCORE_CHANGED signal.
This method expects that the kwargs dictionary will contain the following
entries (See the definition of score_reset):
- 'anonymous_user_id': unicode,
- 'course_id': unicode,
- 'item_id': unicode
"""
course_id = kwargs.get('course_id', None)
usage_id = kwargs.get('item_id', None)
user = None
if 'anonymous_user_id' in kwargs:
user = user_by_anonymous_id(kwargs.get('anonymous_user_id'))
# If any of the kwargs were missing, at least one of the following values
# will be None.
if all((user, course_id, usage_id)):
SCORE_CHANGED.send(
sender=None,
points_possible=0,
points_earned=0,
user_id=user.id,
course_id=course_id,
usage_id=usage_id
)
else:
log.exception(
u"Failed to process score_reset signal from Submissions API. "
"user: %s, course_id: %s, usage_id: %s", user, course_id, usage_id
)
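For reference, a hypothetical listener for the SCORE_CHANGED signal defined above; the handler itself is not part of this commit and is shown only to illustrate the keyword arguments a receiver sees.

# Hypothetical consumer of SCORE_CHANGED (not part of this commit).
import logging
from django.dispatch import receiver
from lms.djangoapps.grades.signals import SCORE_CHANGED

logger = logging.getLogger(__name__)

@receiver(SCORE_CHANGED)
def log_score_changed(sender, **kwargs):  # pylint: disable=unused-argument
    logger.info(
        u"score changed: user_id=%s course_id=%s usage_id=%s earned=%s/%s",
        kwargs.get('user_id'), kwargs.get('course_id'), kwargs.get('usage_id'),
        kwargs.get('points_earned'), kwargs.get('points_possible'),
    )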
......@@ -3,19 +3,12 @@ Test grade calculation.
"""
from django.http import Http404
from django.test import TestCase
from django.test.client import RequestFactory
from mock import patch, MagicMock
from nose.plugins.attrib import attr
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator, BlockUsageLocator
from courseware.grades import (
grade,
iterate_grades_for,
ProgressSummary,
get_module_score
)
from courseware.module_render import get_module
from courseware.model_data import FieldDataCache, set_score
from courseware.tests.helpers import (
......@@ -28,6 +21,11 @@ from student.models import CourseEnrollment
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from .. import course_grades
from ..course_grades import summary as grades_summary
from ..module_grades import get_module_score
from ..progress import ProgressSummary
def _grade_with_errors(student, course, keep_raw_scores=False):
"""This fake grade method will throw exceptions for student3 and
......@@ -40,7 +38,7 @@ def _grade_with_errors(student, course, keep_raw_scores=False):
if student.username in ['student3', 'student4']:
raise Exception("I don't like {}".format(student.username))
return grade(student, course, keep_raw_scores=keep_raw_scores)
return grades_summary(student, course, keep_raw_scores=keep_raw_scores)
@attr('shard_1')
......@@ -76,7 +74,7 @@ class TestGradeIteration(SharedModuleStoreTestCase):
def test_empty_student_list(self):
"""If we don't pass in any students, it should return a zero-length
iterator, but it shouldn't error."""
gradeset_results = list(iterate_grades_for(self.course.id, []))
gradeset_results = list(course_grades.iterate_grades_for(self.course.id, []))
self.assertEqual(gradeset_results, [])
def test_nonexistent_course(self):
......@@ -84,7 +82,7 @@ class TestGradeIteration(SharedModuleStoreTestCase):
should be raised. This is a horrible crossing of abstraction boundaries
and should be fixed, but for now we're just testing the behavior. :-("""
with self.assertRaises(Http404):
gradeset_results = iterate_grades_for(SlashSeparatedCourseKey("I", "dont", "exist"), [])
gradeset_results = course_grades.iterate_grades_for(SlashSeparatedCourseKey("I", "dont", "exist"), [])
gradeset_results.next()
def test_all_empty_grades(self):
......@@ -95,7 +93,7 @@ class TestGradeIteration(SharedModuleStoreTestCase):
self.assertIsNone(gradeset['grade'])
self.assertEqual(gradeset['percent'], 0.0)
@patch('courseware.grades.grade', _grade_with_errors)
@patch('lms.djangoapps.grades.course_grades.summary', _grade_with_errors)
def test_grading_exception(self):
"""Test that we correctly capture exception messages that bubble up from
grading. Note that we only see errors at this level if the grading
......@@ -136,7 +134,7 @@ class TestGradeIteration(SharedModuleStoreTestCase):
students_to_gradesets = {}
students_to_errors = {}
for student, gradeset, err_msg in iterate_grades_for(course_id, students):
for student, gradeset, err_msg in course_grades.iterate_grades_for(course_id, students):
students_to_gradesets[student] = gradeset
if err_msg:
students_to_errors[student] = err_msg
......@@ -144,30 +142,6 @@ class TestGradeIteration(SharedModuleStoreTestCase):
return students_to_gradesets, students_to_errors
class TestFieldDataCacheScorableLocations(SharedModuleStoreTestCase):
"""
Make sure we can filter the locations we pull back student state for via
the FieldDataCache.
"""
@classmethod
def setUpClass(cls):
super(TestFieldDataCacheScorableLocations, cls).setUpClass()
cls.course = CourseFactory.create()
chapter = ItemFactory.create(category='chapter', parent=cls.course)
sequential = ItemFactory.create(category='sequential', parent=chapter)
vertical = ItemFactory.create(category='vertical', parent=sequential)
ItemFactory.create(category='video', parent=vertical)
ItemFactory.create(category='html', parent=vertical)
ItemFactory.create(category='discussion', parent=vertical)
ItemFactory.create(category='problem', parent=vertical)
def setUp(self):
super(TestFieldDataCacheScorableLocations, self).setUp()
self.student = UserFactory.create()
CourseEnrollment.enroll(self.student, self.course.id)
class TestProgressSummary(TestCase):
"""
Test the method that calculates the score for a given block based on the
......
......@@ -5,7 +5,8 @@ Tests for the score change signals defined in the courseware models module.
from django.test import TestCase
from mock import patch, MagicMock
from courseware.models import submissions_score_set_handler, submissions_score_reset_handler
from ..signals import submissions_score_set_handler, submissions_score_reset_handler
SUBMISSION_SET_KWARGS = {
'points_possible': 10,
......@@ -15,6 +16,7 @@ SUBMISSION_SET_KWARGS = {
'item_id': 'i4x://org/course/usage/123456'
}
SUBMISSION_RESET_KWARGS = {
'anonymous_user_id': 'anonymous_id',
'course_id': 'CourseID',
......@@ -35,10 +37,10 @@ class SubmissionSignalRelayTest(TestCase):
Configure mocks for all the dependencies of the render method
"""
super(SubmissionSignalRelayTest, self).setUp()
self.signal_mock = self.setup_patch('courseware.models.SCORE_CHANGED.send', None)
self.signal_mock = self.setup_patch('lms.djangoapps.grades.signals.SCORE_CHANGED.send', None)
self.user_mock = MagicMock()
self.user_mock.id = 42
self.get_user_mock = self.setup_patch('courseware.models.user_by_anonymous_id', self.user_mock)
self.get_user_mock = self.setup_patch('lms.djangoapps.grades.signals.user_by_anonymous_id', self.user_mock)
def setup_patch(self, function_name, return_value):
"""
......@@ -50,15 +52,6 @@ class SubmissionSignalRelayTest(TestCase):
self.addCleanup(new_patch.stop)
return mock
def setup_patch_with_mock(self, function_name, mock):
"""
Patch a function with a given mock
"""
new_patch = patch(function_name, new=mock)
new_patch.start()
self.addCleanup(new_patch.stop)
return mock
def test_score_set_signal_handler(self):
"""
Ensure that, on receipt of a score_set signal from the Submissions API,
......@@ -103,7 +96,7 @@ class SubmissionSignalRelayTest(TestCase):
that has an invalid user ID, the courseware model does not generate a
signal.
"""
self.get_user_mock = self.setup_patch('courseware.models.user_by_anonymous_id', None)
self.get_user_mock = self.setup_patch('lms.djangoapps.grades.signals.user_by_anonymous_id', None)
submissions_score_set_handler(None, **SUBMISSION_SET_KWARGS)
self.signal_mock.assert_not_called()
......@@ -152,6 +145,6 @@ class SubmissionSignalRelayTest(TestCase):
that has an invalid user ID, the signal handler does not generate a
signal.
"""
self.get_user_mock = self.setup_patch('courseware.models.user_by_anonymous_id', None)
self.get_user_mock = self.setup_patch('lms.djangoapps.grades.signals.user_by_anonymous_id', None)
submissions_score_reset_handler(None, **SUBMISSION_RESET_KWARGS)
self.signal_mock.assert_not_called()
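The relay these tests exercise follows one pattern: resolve the anonymous id to a real user, then re-emit the event as SCORE_CHANGED. A reduced, dependency-injected sketch; the payload field names are partly assumptions, since the hunks above elide some kwargs.

def relay_submission_score(kwargs, get_user, send_score_changed):
    """Relay a submissions-API score event as a SCORE_CHANGED signal.

    get_user stands in for user_by_anonymous_id and send_score_changed for
    SCORE_CHANGED.send, matching the patch targets in setUp() above.
    """
    user = get_user(kwargs['anonymous_user_id'])
    if user is None:
        return  # invalid user id: emit nothing, as the tests assert
    send_score_changed(
        sender=None,
        user=user,
        course_id=kwargs['course_id'],
        usage_id=kwargs['item_id'],        # field names assumed beyond
        points_possible=kwargs['points_possible'],  # what the hunks show
    )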
......@@ -13,7 +13,7 @@ from xmodule.modulestore.tests.factories import check_mongo_calls
from lms.djangoapps.course_blocks.api import get_course_blocks
from lms.djangoapps.course_blocks.transformers.tests.helpers import CourseStructureTestCase
from openedx.core.djangoapps.content.block_structure.api import get_cache
from ..transformers.grades import GradesTransformer
from ..transformer import GradesTransformer
class GradesTransformerTestCase(CourseStructureTestCase):
......
"""
Utilities for grades related tests
"""
from contextlib import contextmanager
from mock import patch
@contextmanager
def mock_passing_grade(grade_pass='Pass', percent=0.75):
"""
Mock the grading function to always return a passing grade.
"""
with patch('lms.djangoapps.grades.course_grades.summary') as mock_grade:
mock_grade.return_value = {'grade': grade_pass, 'percent': percent}
yield
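A usage sketch for the new helper (the test name and body are illustrative): any code under the with block that calls course_grades.summary() sees the canned result.

from certificates.api import generate_user_certificates
from lms.djangoapps.grades.tests.utils import mock_passing_grade

def test_certificate_for_passing_student(self):  # illustrative test
    with mock_passing_grade(grade_pass='A', percent=0.9):
        # course_grades.summary() now returns {'grade': 'A', 'percent': 0.9}
        generate_user_certificates(self.user, self.course.id)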
......@@ -3,10 +3,10 @@ Grades Transformer
"""
from django.test.client import RequestFactory
from courseware.model_data import FieldDataCache
from courseware.module_render import get_module_for_descriptor
from openedx.core.lib.block_structure.transformer import BlockStructureTransformer
from openedx.core.djangoapps.util.user_utils import SystemUser
from .. import module_render
from courseware.model_data import FieldDataCache
class GradesTransformer(BlockStructureTransformer):
......@@ -98,5 +98,5 @@ class GradesTransformer(BlockStructureTransformer):
for block_locator in block_structure.post_order_traversal():
block = block_structure.get_xblock(block_locator)
if getattr(block, 'has_score', False):
module = module_render.get_module_for_descriptor(user, request, block, cache, course_key)
module = get_module_for_descriptor(user, request, block, cache, course_key)
yield module
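Stripped of the runtime plumbing, the traversal above reduces to a filter over post-order blocks; this sketch injects the module factory so it stays self-contained.

def iter_scorable_modules(block_structure, make_module):
    """Yield a runtime module for every block that can produce a score."""
    for block_locator in block_structure.post_order_traversal():
        block = block_structure.get_xblock(block_locator)
        if getattr(block, 'has_score', False):  # skip non-graded block types
            yield make_module(block)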
......@@ -10,9 +10,10 @@ import json
import time
from json import JSONEncoder
from courseware import grades, models
from courseware import models
from courseware.courses import get_course_by_id
from django.contrib.auth.models import User
from lms.djangoapps.grades import course_grades
from opaque_keys import OpaqueKey
from opaque_keys.edx.keys import UsageKey
from xmodule.graders import Score
......@@ -50,7 +51,7 @@ def offline_grade_calculation(course_key):
request.user = student
request.session = {}
gradeset = grades.grade(student, course, keep_raw_scores=True)
gradeset = course_grades.summary(student, course, keep_raw_scores=True)
# Convert Score namedtuples to dicts:
totaled_scores = gradeset['totaled_scores']
for section in totaled_scores:
......@@ -89,7 +90,7 @@ def student_grades(student, request, course, keep_raw_scores=False, use_offline=
as use_offline. If use_offline is True then this will look for an offline computed gradeset in the DB.
'''
if not use_offline:
return grades.grade(student, course, keep_raw_scores=keep_raw_scores)
return course_grades.summary(student, course, keep_raw_scores=keep_raw_scores)
try:
ocg = models.OfflineComputedGrade.objects.get(user=student, course_id=course.id)
......
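Condensed, the fallback in student_grades() looks like the sketch below; the gradeset field name on OfflineComputedGrade is an assumption here, and error handling around the DB lookup is elided.

import json

from courseware import models
from lms.djangoapps.grades import course_grades

def student_grades_sketch(student, course, keep_raw_scores=False, use_offline=False):
    if not use_offline:
        # Live path: compute the summary on demand.
        return course_grades.summary(student, course, keep_raw_scores=keep_raw_scores)
    # Offline path: read the gradeset previously stored by
    # offline_grade_calculation().
    ocg = models.OfflineComputedGrade.objects.get(user=student, course_id=course.id)
    return json.loads(ocg.gradeset)  # field name assumed; stored as JSON text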
......@@ -57,7 +57,7 @@ class TestOfflineGradeCalc(ModuleStoreTestCase):
self.user = UserFactory.create()
CourseEnrollment.enroll(self.user, self.course.id)
patcher = patch('courseware.grades.grade', new=mock_grade)
patcher = patch('lms.djangoapps.grades.course_grades.summary', new=mock_grade)
patcher.start()
self.addCleanup(patcher.stop)
......@@ -102,6 +102,6 @@ class TestOfflineGradeCalc(ModuleStoreTestCase):
def test_student_grades(self):
""" Test that the data returned by student_grades() and grades.grade() match """
offline_grade_calculation(self.course.id)
with patch('courseware.grades.grade', side_effect=AssertionError('Should not re-grade')):
with patch('lms.djangoapps.grades.course_grades.summary', side_effect=AssertionError('Should not re-grade')):
result = student_grades(self.user, None, self.course, use_offline=True)
self.assertEqual(result, mock_grade(self.user, self.course))
......@@ -24,7 +24,7 @@ from courseware.models import StudentModule
from certificates.models import GeneratedCertificate
from django.db.models import Count
from certificates.models import CertificateStatuses
from courseware.grades import grading_context_for_course
from grades.context import grading_context_for_course
STUDENT_FEATURES = ('id', 'username', 'first_name', 'last_name', 'is_staff', 'email')
......
......@@ -46,7 +46,7 @@ from certificates.models import (
)
from certificates.api import generate_user_certificates
from courseware.courses import get_course_by_id, get_problems_in_section
from courseware.grades import iterate_grades_for
from grades.course_grades import iterate_grades_for
from courseware.models import StudentModule
from courseware.model_data import DjangoKeyValueStore, FieldDataCache
from courseware.module_render import get_module_for_descriptor_internal
......
......@@ -7,8 +7,8 @@ from django.contrib.auth.models import User
from django.dispatch import receiver
import logging
from courseware.grades import get_weighted_scores
from courseware.models import SCORE_CHANGED
from lms.djangoapps.grades import progress
from lms.djangoapps.grades.signals import SCORE_CHANGED
from lms import CELERY_APP
from lti_provider.models import GradedAssignment
import lti_provider.outcomes as outcomes
......@@ -23,7 +23,7 @@ log = logging.getLogger("edx.lti_provider")
def score_changed_handler(sender, **kwargs): # pylint: disable=unused-argument
"""
Consume signals that indicate score changes. See the definition of
courseware.models.SCORE_CHANGED for a description of the signal.
lms.djangoapps.grades.signals.SCORE_CHANGED for a description of the signal.
"""
points_possible = kwargs.get('points_possible', None)
points_earned = kwargs.get('points_earned', None)
......@@ -109,7 +109,7 @@ def send_composite_outcome(user_id, course_id, assignment_id, version):
mapped_usage_key = assignment.usage_key.map_into_course(course_key)
user = User.objects.get(id=user_id)
course = modulestore().get_course(course_key, depth=0)
progress_summary = get_weighted_scores(user, course)
progress_summary = progress.summary(user, course)
earned, possible = progress_summary.score_for_module(mapped_usage_key)
if possible == 0:
weighted_score = 0
......
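The zero-denominator guard above is worth keeping as its own helper; a one-line sketch with a hypothetical name.

def weighted_score(earned, possible):
    """Return earned/possible as a float, treating an empty assignment as 0."""
    return 0 if possible == 0 else float(earned) / possible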
......@@ -101,7 +101,7 @@ class SendCompositeOutcomeTest(BaseOutcomeTest):
)
self.weighted_scores = MagicMock()
self.weighted_scores_mock = self.setup_patch(
'lti_provider.tasks.get_weighted_scores', self.weighted_scores
'lti_provider.tasks.progress.summary', self.weighted_scores
)
self.module_store = MagicMock()
self.module_store.get_item = MagicMock(return_value=self.descriptor)
......
......@@ -25,6 +25,7 @@ from courseware.access_response import (
VisibilityError,
)
from course_modes.models import CourseMode
from lms.djangoapps.grades.tests.utils import mock_passing_grade
from openedx.core.lib.courses import course_image_url
from student.models import CourseEnrollment
from util.milestones_helpers import set_prerequisite_courses
......@@ -247,8 +248,7 @@ class TestUserEnrollmentApi(UrlResetMixin, MobileAPITestCase, MobileAuthUserTest
self.course.cert_html_view_enabled = True
self.store.update_item(self.course, self.user.id)
with patch('courseware.grades.grade') as mock_grade:
mock_grade.return_value = {'grade': 'Pass', 'percent': 0.75}
with mock_passing_grade():
generate_user_certificates(self.user, self.course.id)
response = self.api_response()
......
......@@ -1890,6 +1890,7 @@ INSTALLED_APPS = (
'openedx.core.djangoapps.course_groups',
'bulk_email',
'branding',
'grades',
# Student support tools
'support',
......
......@@ -5,7 +5,7 @@ This module contains all signals.
from django.dispatch import Signal
# Signal that fires when a user is graded (in lms/courseware/grades.py)
# Signal that fires when a user is graded (in lms/djangoapps/grades/course_grades.py)
GRADES_UPDATED = Signal(providing_args=["username", "grade_summary", "course_key", "deadline"])
# Signal that fires when a user is awarded a certificate in a course (in the certificates django app)
......
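A sketch of subscribing to GRADES_UPDATED; the receiver body is illustrative and assumes it lives in (or imports from) this signals module.

import logging

from django.dispatch import receiver

log = logging.getLogger(__name__)

@receiver(GRADES_UPDATED)
def on_grades_updated(sender, username, grade_summary, course_key, deadline, **kwargs):
    # Keyword names match the providing_args declared above.
    log.info("Grades updated for %s in %s", username, course_key)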
......@@ -52,7 +52,7 @@ setup(
"hidden_content = lms.djangoapps.course_blocks.transformers.hidden_content:HiddenContentTransformer",
"course_blocks_api = lms.djangoapps.course_api.blocks.transformers.blocks_api:BlocksAPITransformer",
"proctored_exam = lms.djangoapps.course_api.blocks.transformers.proctored_exam:ProctoredExamTransformer",
"grades = lms.djangoapps.courseware.transformers.grades:GradesTransformer",
"grades = lms.djangoapps.grades.transformer:GradesTransformer",
],
}
)
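Entry points like the one above are typically discovered at runtime via pkg_resources; the group name below is an assumption for illustration, since setup.py declares the real one outside this hunk.

from pkg_resources import iter_entry_points

for entry_point in iter_entry_points('openedx.block_structure_transformer'):  # group name assumed
    transformer_class = entry_point.load()
    print(entry_point.name, transformer_class.__name__)  # e.g. "grades GradesTransformer"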