Commit 9a07227e authored by Nimisha Asthagiri, committed by GitHub

Merge pull request #16082 from edx/naa/remove-old-waffles

Remove unneeded WRITE_ONLY_IF_ENGAGED waffle switch
Parents: c8706044, 083ccdb9
......@@ -231,9 +231,9 @@ class LTI20ModuleMixin(object):
Returns:
nothing
"""
self.set_user_module_score(user, None, None)
self.set_user_module_score(user, None, None, score_deleted=True)
def set_user_module_score(self, user, score, max_score, comment=u""):
def set_user_module_score(self, user, score, max_score, comment=u"", score_deleted=False):
"""
Sets the module user state, including grades and comments, and also scoring in db's courseware_studentmodule
......@@ -261,6 +261,7 @@ class LTI20ModuleMixin(object):
'value': scaled_score,
'max_value': max_score,
'user_id': user.id,
'score_deleted': score_deleted,
},
)
self.module_score = scaled_score
......
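
The two hunks above thread a new score_deleted flag from the LTI 2.0 result-deletion path into the runtime's published 'grade' event. The standalone sketch below mirrors that flow with made-up FakeRuntime/FakeLTIBlock classes; it is illustrative only and not the real XBlock mixin or runtime.

class FakeRuntime(object):
    """Illustrative stand-in for the XBlock runtime's publish() hook."""
    def __init__(self):
        self.published = []

    def publish(self, block, event_type, event):
        self.published.append((event_type, event))


class FakeLTIBlock(object):
    """Illustrative stand-in for LTI20ModuleMixin."""
    def __init__(self, runtime, user_id):
        self.runtime = runtime
        self.user_id = user_id

    def clear_user_module_score(self):
        # Deleting a result is "set the score to None", now with score_deleted=True.
        self.set_user_module_score(None, None, score_deleted=True)

    def set_user_module_score(self, score, max_score, comment=u"", score_deleted=False):
        self.runtime.publish(self, 'grade', {
            'value': score,
            'max_value': max_score,
            'user_id': self.user_id,
            'score_deleted': score_deleted,  # the newly added event field
        })


runtime = FakeRuntime()
FakeLTIBlock(runtime, user_id=42).clear_user_module_score()
assert runtime.published[0][0] == 'grade'
assert runtime.published[0][1]['score_deleted'] is True
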
......@@ -251,7 +251,10 @@ class LTI20RESTResultServiceTest(LogicTest):
self.assertIsNone(self.xmodule.module_score)
self.assertEqual(self.xmodule.score_comment, u"")
(_, evt_type, called_grade_obj), _ = self.system.publish.call_args
self.assertEqual(called_grade_obj, {'user_id': self.USER_STANDIN.id, 'value': None, 'max_value': None})
self.assertEqual(
called_grade_obj,
{'user_id': self.USER_STANDIN.id, 'value': None, 'max_value': None, 'score_deleted': True},
)
self.assertEqual(evt_type, 'grade')
def test_lti20_delete_success(self):
......@@ -271,7 +274,10 @@ class LTI20RESTResultServiceTest(LogicTest):
self.assertIsNone(self.xmodule.module_score)
self.assertEqual(self.xmodule.score_comment, u"")
(_, evt_type, called_grade_obj), _ = self.system.publish.call_args
self.assertEqual(called_grade_obj, {'user_id': self.USER_STANDIN.id, 'value': None, 'max_value': None})
self.assertEqual(
called_grade_obj,
{'user_id': self.USER_STANDIN.id, 'value': None, 'max_value': None, 'score_deleted': True},
)
self.assertEqual(evt_type, 'grade')
def test_lti20_put_set_score_success(self):
......@@ -288,7 +294,10 @@ class LTI20RESTResultServiceTest(LogicTest):
self.assertEqual(self.xmodule.score_comment, u"ಠ益ಠ")
(_, evt_type, called_grade_obj), _ = self.system.publish.call_args
self.assertEqual(evt_type, 'grade')
self.assertEqual(called_grade_obj, {'user_id': self.USER_STANDIN.id, 'value': 0.1, 'max_value': 1.0})
self.assertEqual(
called_grade_obj,
{'user_id': self.USER_STANDIN.id, 'value': 0.1, 'max_value': 1.0, 'score_deleted': False},
)
def test_lti20_get_no_score_success(self):
"""
......
......@@ -471,6 +471,7 @@ def get_module_system_for_user(user, student_data, # TODO # pylint: disable=to
raw_earned=event['value'],
raw_possible=event['max_value'],
only_if_higher=event.get('only_if_higher'),
score_deleted=event.get('score_deleted'),
)
else:
context = contexts.course_context_from_course_id(course_id)
......
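
The module_render hunk above forwards the new event field into the raw-score signal using dict.get(), so publishers that never send score_deleted keep working. A hypothetical, simplified handler showing why .get() matters (the real handler also deals with course context and the actual PROBLEM_RAW_SCORE_CHANGED signal):

def handle_grade_event(event, send_signal):
    send_signal(
        raw_earned=event['value'],
        raw_possible=event['max_value'],
        only_if_higher=event.get('only_if_higher'),
        # .get() keeps older publishers that never send the key working:
        score_deleted=event.get('score_deleted'),
    )

received = {}
handle_grade_event(
    {'value': 1.0, 'max_value': 2.0},             # legacy event without the new keys
    lambda **kwargs: received.update(kwargs),
)
assert received['score_deleted'] is None          # missing key defaults to None
assert received['raw_earned'] == 1.0
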
......@@ -1848,6 +1848,7 @@ class TestXmoduleRuntimeEvent(TestSubmittingProblems):
'only_if_higher': None,
'modified': datetime.now().replace(tzinfo=pytz.UTC),
'score_db_table': 'csm',
'score_deleted': None,
}
send_mock.assert_called_with(**expected_signal_kwargs)
......
......@@ -8,7 +8,6 @@ from openedx.core.djangoapps.waffle_utils import WaffleSwitchNamespace, WaffleFl
WAFFLE_NAMESPACE = u'grades'
# Switches
WRITE_ONLY_IF_ENGAGED = u'write_only_if_engaged'
ASSUME_ZERO_GRADE_IF_ABSENT = u'assume_zero_grade_if_absent'
ESTIMATE_FIRST_ATTEMPTED = u'estimate_first_attempted'
DISABLE_REGRADE_ON_POLICY_CHANGE = u'disable_regrade_on_policy_change'
......
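
After this hunk, WRITE_ONLY_IF_ENGAGED is gone and only the three switches below remain in the grades namespace. As a rough sketch (this mimics, but does not import, openedx.core.djangoapps.waffle_utils), namespaced switch names are assumed to resolve to '<namespace>.<switch_name>':

WAFFLE_NAMESPACE = u'grades'

def namespaced(switch_name, namespace=WAFFLE_NAMESPACE):
    # Assumed naming scheme: '<namespace>.<switch_name>'.
    return u'{}.{}'.format(namespace, switch_name)

for switch in (u'assume_zero_grade_if_absent',
               u'estimate_first_attempted',
               u'disable_regrade_on_policy_change'):
    print(namespaced(switch))    # e.g. grades.assume_zero_grade_if_absent
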
......@@ -7,7 +7,6 @@ import dogstats_wrapper as dog_stats_api
from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED, COURSE_GRADE_NOW_PASSED
from .config import assume_zero_if_absent, should_persist_grades
from .config.waffle import WRITE_ONLY_IF_ENGAGED, waffle
from .course_data import CourseData
from .course_grade import CourseGrade, ZeroCourseGrade
from .models import PersistentCourseGrade, VisibleBlocks
......@@ -193,7 +192,7 @@ class CourseGradeFactory(object):
should_persist = (
(not read_only) and # TODO(TNL-6786) Remove the read_only boolean once all grades are back-filled.
should_persist_grades(course_data.course_key) and
(not waffle().is_enabled(WRITE_ONLY_IF_ENGAGED) or course_grade.attempted)
course_grade.attempted
)
if should_persist:
course_grade._subsection_grade_factory.bulk_create_unsaved()
......
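
The course_grade_factory hunk above is the heart of the change: persistence no longer consults the waffle switch, it simply requires that the learner attempted something. A minimal boolean sketch of the new condition, with stand-in arguments for should_persist_grades() and course_grade.attempted:

def should_persist_course_grade(read_only, persistence_enabled, attempted):
    # Before this commit the last term was:
    #   (not waffle().is_enabled(WRITE_ONLY_IF_ENGAGED) or attempted)
    # Now engagement is always required.
    return (not read_only) and persistence_enabled and attempted

assert should_persist_course_grade(read_only=False, persistence_enabled=True, attempted=True)
assert not should_persist_course_grade(read_only=False, persistence_enabled=True, attempted=False)
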
......@@ -30,7 +30,6 @@ from .signals import (
SUBSECTION_SCORE_CHANGED,
SUBSECTION_OVERRIDE_CHANGED,
)
from ..config.waffle import waffle, WRITE_ONLY_IF_ENGAGED
from ..constants import ScoreDatabaseTableEnum
from ..course_grade_factory import CourseGradeFactory
from ..scores import weighted_score
......@@ -180,6 +179,7 @@ def score_published_handler(sender, block, user, raw_earned, raw_possible, only_
only_if_higher=only_if_higher,
modified=score_modified_time,
score_db_table=ScoreDatabaseTableEnum.courseware_student_module,
score_deleted=kwargs.get('score_deleted', False),
)
return update_score
......@@ -253,11 +253,8 @@ def force_recalculate_course_and_subsection_grades(sender, user, course_key, **k
"""
Updates a saved course grade, forcing the subsection grades
from which it is calculated to update along the way.
Does not create a grade if the user has never attempted a problem,
even if the WRITE_ONLY_IF_ENGAGED waffle switch is off.
"""
if waffle().is_enabled(WRITE_ONLY_IF_ENGAGED) or CourseGradeFactory().read(user, course_key=course_key):
if CourseGradeFactory().read(user, course_key=course_key):
CourseGradeFactory().update(user=user, course_key=course_key, force_update_subsections=True)
......
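
The handlers hunk above keeps the other half of the old behaviour: forced recalculation still skips learners who have no existing course grade, but the decision now rests solely on CourseGradeFactory().read(). A toy stand-in (none of these classes are the real factory):

class FakeCourseGradeFactory(object):
    """Illustrative stand-in for CourseGradeFactory; not the real class."""
    def __init__(self, existing_grade):
        self.existing_grade = existing_grade
        self.updated = False

    def read(self, user, course_key=None):
        return self.existing_grade

    def update(self, user, course_key, force_update_subsections=False):
        self.updated = True


def force_recalculate(factory, user, course_key):
    # Mirrors the new handler: recalculate only if a grade already exists.
    if factory.read(user, course_key=course_key):
        factory.update(user=user, course_key=course_key, force_update_subsections=True)


engaged = FakeCourseGradeFactory(existing_grade=object())
force_recalculate(engaged, user='alice', course_key='course-v1:edX+Demo+2017')
assert engaged.updated

unengaged = FakeCourseGradeFactory(existing_grade=None)
force_recalculate(unengaged, user='bob', course_key='course-v1:edX+Demo+2017')
assert not unengaged.updated
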
......@@ -11,7 +11,6 @@ from lms.djangoapps.grades.scores import get_score, possibly_scored
from xmodule import block_metadata_utils, graders
from xmodule.graders import AggregatedScore, ShowCorrectness
from .config.waffle import WRITE_ONLY_IF_ENGAGED, waffle
log = getLogger(__name__)
......@@ -149,7 +148,9 @@ class SubsectionGrade(SubsectionGradeBase):
Saves the subsection grade in a persisted model.
"""
params = [
subsection_grade._persisted_model_params(student) for subsection_grade in subsection_grades # pylint: disable=protected-access
subsection_grade._persisted_model_params(student) # pylint: disable=protected-access
for subsection_grade in subsection_grades
if subsection_grade
if subsection_grade._should_persist_per_attempted() # pylint: disable=protected-access
]
return PersistentSubsectionGrade.bulk_create_grades(params, course_key)
......@@ -179,7 +180,6 @@ class SubsectionGrade(SubsectionGradeBase):
no attempts but the grade should still be persisted.
"""
return (
not waffle().is_enabled(WRITE_ONLY_IF_ENGAGED) or
self.all_total.first_attempted is not None or
score_deleted
)
......
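
With the switch gone, _should_persist_per_attempted() reduces to the rule sketched below: persist a subsection grade only if the learner attempted it, or if their state was just deleted and the zeroed grade must overwrite the stale persisted row. first_attempted is shown here as a plain value rather than a real datetime:

def should_persist_per_attempted(first_attempted, score_deleted=False):
    # Persist if the learner attempted the subsection, or if their state was
    # deleted so the zeroed grade must replace the old one.
    return first_attempted is not None or score_deleted

assert not should_persist_per_attempted(first_attempted=None)
assert should_persist_per_attempted(first_attempted=None, score_deleted=True)
assert should_persist_per_attempted(first_attempted=u'2017-09-25T00:00:00Z')
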
......@@ -14,7 +14,6 @@ from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db.utils import DatabaseError
from lms.djangoapps.course_blocks.api import get_course_blocks
from lms.djangoapps.courseware import courses
from lms.djangoapps.grades.config.models import ComputeGradesSetting
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locator import CourseLocator
......@@ -117,10 +116,10 @@ def compute_grades_for_course(course_key, offset, batch_size, **kwargs): # pyli
limited to at most <batch_size> students, starting from the specified
offset.
"""
course = courses.get_course_by_id(CourseKey.from_string(course_key))
enrollments = CourseEnrollment.objects.filter(course_id=course.id).order_by('created')
course_key = CourseKey.from_string(course_key)
enrollments = CourseEnrollment.objects.filter(course_id=course_key).order_by('created')
student_iter = (enrollment.user for enrollment in enrollments[offset:offset + batch_size])
for result in CourseGradeFactory().iter(users=student_iter, course=course, force_update=True):
for result in CourseGradeFactory().iter(users=student_iter, course_key=course_key, force_update=True):
if result.error is not None:
raise result.error
......
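
The tasks hunk above also drops the course-object lookup and works from course_key alone; the batching itself is unchanged. A simplified, list-based stand-in for the offset/batch_size slice over the ordered enrollment queryset:

def users_for_batch(enrollments, offset, batch_size):
    # One slice per task, matching enrollments[offset:offset + batch_size] above.
    return [enrollment['user'] for enrollment in enrollments[offset:offset + batch_size]]

enrollments = [{'user': u'user_%d' % i} for i in range(10)]   # assume ordered by 'created'
assert users_for_batch(enrollments, offset=4, batch_size=3) == [u'user_4', u'user_5', u'user_6']
assert users_for_batch(enrollments, offset=9, batch_size=3) == [u'user_9']
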
......@@ -23,7 +23,7 @@ from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.utils import TEST_DATA_DIR
from xmodule.modulestore.xml_importer import import_course_from_xml
from ..config.waffle import ASSUME_ZERO_GRADE_IF_ABSENT, WRITE_ONLY_IF_ENGAGED, waffle
from ..config.waffle import ASSUME_ZERO_GRADE_IF_ABSENT, waffle
from ..course_data import CourseData
from ..course_grade import CourseGrade, ZeroCourseGrade
from ..course_grade_factory import CourseGradeFactory
......@@ -207,7 +207,7 @@ class TestCourseGradeFactory(GradeTestBase):
self._assert_zero_grade(course_grade, ZeroCourseGrade if assume_zero_enabled else CourseGrade)
def test_create_zero_subs_grade_for_nonzero_course_grade(self):
with waffle().override(ASSUME_ZERO_GRADE_IF_ABSENT), waffle().override(WRITE_ONLY_IF_ENGAGED):
with waffle().override(ASSUME_ZERO_GRADE_IF_ABSENT):
subsection = self.course_structure[self.sequence.location]
with mock_get_score(1, 2):
self.subsection_grade_factory.update(subsection)
......@@ -259,6 +259,49 @@ class TestCourseGradeFactory(GradeTestBase):
self.assertTrue(desired_call.called)
self.assertFalse(undesired_call.called)
def test_course_grade_summary(self):
with mock_get_score(1, 2):
self.subsection_grade_factory.update(self.course_structure[self.sequence.location])
course_grade = CourseGradeFactory().update(self.request.user, self.course)
actual_summary = course_grade.summary
# We should have had a zero subsection grade for sequential 2, since we never
# gave it a mock score above.
expected_summary = {
'grade': None,
'grade_breakdown': {
'Homework': {
'category': 'Homework',
'percent': 0.25,
'detail': 'Homework = 25.00% of a possible 100.00%',
}
},
'percent': 0.25,
'section_breakdown': [
{
'category': 'Homework',
'detail': u'Homework 1 - Test Sequential 1 - 50% (1/2)',
'label': u'HW 01',
'percent': 0.5
},
{
'category': 'Homework',
'detail': u'Homework 2 - Test Sequential 2 - 0% (0/1)',
'label': u'HW 02',
'percent': 0.0
},
{
'category': 'Homework',
'detail': u'Homework Average = 25%',
'label': u'HW Avg',
'percent': 0.25,
'prominent': True
},
]
}
self.assertEqual(expected_summary, actual_summary)
@ddt.ddt
class TestSubsectionGradeFactory(ProblemSubmissionTestMixin, GradeTestBase):
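
A quick sanity check of the numbers in expected_summary above, assuming both homework subsections carry equal weight within the single Homework category:

hw_1 = 1.0 / 2.0            # sequential 1: mocked 1 of 2 -> 50%
hw_2 = 0.0 / 1.0            # sequential 2: never scored -> 0 of 1 -> 0%
hw_average = (hw_1 + hw_2) / 2
assert hw_average == 0.25               # 'HW Avg' and the Homework category percent
course_percent = hw_average * 1.0       # Homework is the only graded category (100%)
assert course_percent == 0.25           # top-level 'percent' in expected_summary
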
......@@ -302,7 +345,8 @@ class TestSubsectionGradeFactory(ProblemSubmissionTestMixin, GradeTestBase):
wraps=self.subsection_grade_factory._get_bulk_cached_grade
) as mock_get_bulk_cached_grade:
with self.assertNumQueries(14):
grade_a = self.subsection_grade_factory.create(self.sequence)
with mock_get_score(1, 2):
grade_a = self.subsection_grade_factory.create(self.sequence)
self.assertTrue(mock_get_bulk_cached_grade.called)
self.assertTrue(mock_create_grade.called)
......@@ -315,7 +359,7 @@ class TestSubsectionGradeFactory(ProblemSubmissionTestMixin, GradeTestBase):
self.assertFalse(mock_create_grade.called)
self.assertEqual(grade_a.url_name, grade_b.url_name)
grade_b.all_total.first_attempted = None
grade_b.all_total.first_attempted = grade_a.all_total.first_attempted = None
self.assertEqual(grade_a.all_total, grade_b.all_total)
def test_update(self):
......@@ -333,15 +377,13 @@ class TestSubsectionGradeFactory(ProblemSubmissionTestMixin, GradeTestBase):
never attempted a problem, but are persisted if the
learner's state has been deleted.
"""
with waffle().override(WRITE_ONLY_IF_ENGAGED):
with mock_get_score(0, 0, None):
self.subsection_grade_factory.update(self.sequence)
with mock_get_score(0, 0, None):
self.subsection_grade_factory.update(self.sequence)
# ensure no grades have been persisted
self.assertEqual(0, len(PersistentSubsectionGrade.objects.all()))
with waffle().override(WRITE_ONLY_IF_ENGAGED):
with mock_get_score(0, 0, None):
self.subsection_grade_factory.update(self.sequence, score_deleted=True)
with mock_get_score(0, 0, None):
self.subsection_grade_factory.update(self.sequence, score_deleted=True)
# ensure a grade has been persisted
self.assertEqual(1, len(PersistentSubsectionGrade.objects.all()))
......@@ -430,37 +472,38 @@ class SubsectionGradeTest(GradeTestBase):
Test that grades are persisted to the database properly,
and that loading saved grades returns the same data.
"""
# Create a grade that *isn't* saved to the database
input_grade = SubsectionGrade(self.sequence)
input_grade.init_from_structure(
self.request.user,
self.course_structure,
self.subsection_grade_factory._submissions_scores,
self.subsection_grade_factory._csm_scores,
)
self.assertEqual(PersistentSubsectionGrade.objects.count(), 0)
with mock_get_score(1, 2):
# Create a grade that *isn't* saved to the database
input_grade = SubsectionGrade(self.sequence)
input_grade.init_from_structure(
self.request.user,
self.course_structure,
self.subsection_grade_factory._submissions_scores,
self.subsection_grade_factory._csm_scores,
)
self.assertEqual(PersistentSubsectionGrade.objects.count(), 0)
# save to db, and verify object is in database
input_grade.create_model(self.request.user)
self.assertEqual(PersistentSubsectionGrade.objects.count(), 1)
# save to db, and verify object is in database
input_grade.create_model(self.request.user)
self.assertEqual(PersistentSubsectionGrade.objects.count(), 1)
# load from db, and ensure output matches input
loaded_grade = SubsectionGrade(self.sequence)
saved_model = PersistentSubsectionGrade.read_grade(
user_id=self.request.user.id,
usage_key=self.sequence.location,
)
loaded_grade.init_from_model(
self.request.user,
saved_model,
self.course_structure,
self.subsection_grade_factory._submissions_scores,
self.subsection_grade_factory._csm_scores,
)
# load from db, and ensure output matches input
loaded_grade = SubsectionGrade(self.sequence)
saved_model = PersistentSubsectionGrade.read_grade(
user_id=self.request.user.id,
usage_key=self.sequence.location,
)
loaded_grade.init_from_model(
self.request.user,
saved_model,
self.course_structure,
self.subsection_grade_factory._submissions_scores,
self.subsection_grade_factory._csm_scores,
)
self.assertEqual(input_grade.url_name, loaded_grade.url_name)
loaded_grade.all_total.first_attempted = None
self.assertEqual(input_grade.all_total, loaded_grade.all_total)
self.assertEqual(input_grade.url_name, loaded_grade.url_name)
loaded_grade.all_total.first_attempted = input_grade.all_total.first_attempted = None
self.assertEqual(input_grade.all_total, loaded_grade.all_total)
@ddt.ddt
......@@ -744,59 +787,15 @@ class TestCourseGradeLogging(ProblemSubmissionTestMixin, SharedModuleStoreTestCa
enabled_for_course=True
):
with patch('lms.djangoapps.grades.course_grade_factory.log') as log_mock:
# read, but not persisted
self._create_course_grade_and_check_logging(grade_factory.create, log_mock.info, u'Update')
# update and persist
self._create_course_grade_and_check_logging(grade_factory.update, log_mock.info, u'Update')
# read from persistence, using create
self._create_course_grade_and_check_logging(grade_factory.create, log_mock.debug, u'Read')
# read from persistence, using read
self._create_course_grade_and_check_logging(grade_factory.read, log_mock.debug, u'Read')
with mock_get_score(1, 2):
# read, but not persisted
self._create_course_grade_and_check_logging(grade_factory.create, log_mock.info, u'Update')
# update and persist
self._create_course_grade_and_check_logging(grade_factory.update, log_mock.info, u'Update')
class TestCourseGradeFactory(GradeTestBase):
def test_course_grade_summary(self):
with mock_get_score(1, 2):
self.subsection_grade_factory.update(self.course_structure[self.sequence.location])
course_grade = CourseGradeFactory().update(self.request.user, self.course)
actual_summary = course_grade.summary
# read from persistence, using create
self._create_course_grade_and_check_logging(grade_factory.create, log_mock.debug, u'Read')
# We should have had a zero subsection grade for sequential 2, since we never
# gave it a mock score above.
expected_summary = {
'grade': None,
'grade_breakdown': {
'Homework': {
'category': 'Homework',
'percent': 0.25,
'detail': 'Homework = 25.00% of a possible 100.00%',
}
},
'percent': 0.25,
'section_breakdown': [
{
'category': 'Homework',
'detail': u'Homework 1 - Test Sequential 1 - 50% (1/2)',
'label': u'HW 01',
'percent': 0.5
},
{
'category': 'Homework',
'detail': u'Homework 2 - Test Sequential 2 - 0% (0/1)',
'label': u'HW 02',
'percent': 0.0
},
{
'category': 'Homework',
'detail': u'Homework Average = 25%',
'label': u'HW Avg',
'percent': 0.25,
'prominent': True
},
]
}
self.assertEqual(expected_summary, actual_summary)
# read from persistence, using read
self._create_course_grade_and_check_logging(grade_factory.read, log_mock.debug, u'Read')
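
The logging assertions above rely on patching the module-level logger and checking whether the read path (log.debug) or the update path (log.info) fired. A compact, self-contained illustration of that pattern, using a throwaway logger and function rather than the real grade factory:

import logging
from mock import patch

log = logging.getLogger('grades_logging_sketch')

def create_grade(already_persisted):
    # Stand-in for CourseGradeFactory.create(): read when persisted, else update.
    if already_persisted:
        log.debug(u'Read')
    else:
        log.info(u'Update')

with patch.object(log, 'info') as info_mock, patch.object(log, 'debug') as debug_mock:
    create_grade(already_persisted=False)
    info_mock.assert_called_with(u'Update')
    assert not debug_mock.called
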
......@@ -17,7 +17,6 @@ from student.tests.factories import UserFactory
from submissions.models import score_reset, score_set
from util.date_utils import to_timestamp
from ..config.waffle import waffle, WRITE_ONLY_IF_ENGAGED
from ..constants import ScoreDatabaseTableEnum
from ..signals.handlers import (
disconnect_submissions_signal_receiver,
......@@ -256,39 +255,3 @@ class ScoreChangedSignalRelayTest(TestCase):
with self.assertRaises(ValueError):
with disconnect_submissions_signal_receiver(PROBLEM_RAW_SCORE_CHANGED):
pass
@ddt.ddt
class RecalculateUserGradeSignalsTest(TestCase):
SIGNALS = {
'COHORT_MEMBERSHIP_UPDATED': COHORT_MEMBERSHIP_UPDATED,
'ENROLLMENT_TRACK_UPDATED': ENROLLMENT_TRACK_UPDATED,
}
def setUp(self):
super(RecalculateUserGradeSignalsTest, self).setUp()
self.user = UserFactory()
self.course_key = CourseLocator("test_org", "test_course_num", "test_run")
@patch('lms.djangoapps.grades.signals.handlers.CourseGradeFactory.update')
@patch('lms.djangoapps.grades.signals.handlers.CourseGradeFactory.read')
@ddt.data(*itertools.product(('COHORT_MEMBERSHIP_UPDATED', 'ENROLLMENT_TRACK_UPDATED'),
(True, False), (True, False)))
@ddt.unpack
def test_recalculate_on_signal(self, signal_name, write_only_if_engaged, has_grade, read_mock, update_mock):
"""
Tests the grades handler for signals that trigger regrading.
The handler should call CourseGradeFactory.update() with the
args below, *except* if the WRITE_ONLY_IF_ENGAGED waffle flag
is inactive and the user does not have a grade.
"""
if not has_grade:
read_mock.return_value = None
with waffle().override(WRITE_ONLY_IF_ENGAGED, active=write_only_if_engaged):
signal = self.SIGNALS[signal_name]
signal.send(sender=None, user=self.user, course_key=self.course_key)
if not write_only_if_engaged and not has_grade:
update_mock.assert_not_called()
else:
update_mock.assert_called_with(course_key=self.course_key, user=self.user, force_update_subsections=True)
......@@ -22,7 +22,6 @@ from lms.djangoapps.grades.signals.signals import PROBLEM_WEIGHTED_SCORE_CHANGED
from lms.djangoapps.grades.tasks import (
RECALCULATE_GRADE_DELAY,
_course_task_args,
compute_all_grades_for_course,
compute_grades_for_course_v2,
recalculate_subsection_grade_v3
)
......@@ -36,6 +35,8 @@ from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
from .utils import mock_get_score
class MockGradesService(GradesService):
def __init__(self, mocked_return_value=None):
......@@ -120,7 +121,7 @@ class RecalculateSubsectionGradeTest(HasCourseWithProblemsMixin, ModuleStoreTest
PersistentGradesEnabledFlag.objects.create(enabled_for_all_courses=True, enabled=True)
@contextmanager
def mock_get_score(self, score=MagicMock(grade=1.0, max_grade=2.0)):
def mock_csm_get_score(self, score=MagicMock(grade=1.0, max_grade=2.0)):
"""
Mocks the scores needed by the SCORE_PUBLISHED signal
handler. By default, sets the returned score to 1/2.
......@@ -136,7 +137,7 @@ class RecalculateSubsectionGradeTest(HasCourseWithProblemsMixin, ModuleStoreTest
send_args = self.problem_weighted_score_changed_kwargs
local_task_args = self.recalculate_subsection_grade_kwargs.copy()
local_task_args['event_transaction_type'] = u'edx.grades.problem.submitted'
with self.mock_get_score() and patch(
with self.mock_csm_get_score() and patch(
'lms.djangoapps.grades.tasks.recalculate_subsection_grade_v3.apply_async',
return_value=None
) as mock_task_apply:
......@@ -163,10 +164,10 @@ class RecalculateSubsectionGradeTest(HasCourseWithProblemsMixin, ModuleStoreTest
self.assertEquals(mock_block_structure_create.call_count, 1)
@ddt.data(
(ModuleStoreEnum.Type.mongo, 1, 30, True),
(ModuleStoreEnum.Type.mongo, 1, 26, False),
(ModuleStoreEnum.Type.split, 3, 30, True),
(ModuleStoreEnum.Type.split, 3, 26, False),
(ModuleStoreEnum.Type.mongo, 1, 31, True),
(ModuleStoreEnum.Type.mongo, 1, 27, False),
(ModuleStoreEnum.Type.split, 3, 31, True),
(ModuleStoreEnum.Type.split, 3, 27, False),
)
@ddt.unpack
def test_query_counts(self, default_store, num_mongo_calls, num_sql_calls, create_multiple_subsections):
......@@ -178,8 +179,8 @@ class RecalculateSubsectionGradeTest(HasCourseWithProblemsMixin, ModuleStoreTest
self._apply_recalculate_subsection_grade()
@ddt.data(
(ModuleStoreEnum.Type.mongo, 1, 30),
(ModuleStoreEnum.Type.split, 3, 30),
(ModuleStoreEnum.Type.mongo, 1, 31),
(ModuleStoreEnum.Type.split, 3, 31),
)
@ddt.unpack
def test_query_counts_dont_change_with_more_content(self, default_store, num_mongo_calls, num_sql_calls):
......@@ -239,8 +240,8 @@ class RecalculateSubsectionGradeTest(HasCourseWithProblemsMixin, ModuleStoreTest
self.assertEqual(len(PersistentSubsectionGrade.bulk_read_grades(self.user.id, self.course.id)), 0)
@ddt.data(
(ModuleStoreEnum.Type.mongo, 1, 27),
(ModuleStoreEnum.Type.split, 3, 27),
(ModuleStoreEnum.Type.mongo, 1, 28),
(ModuleStoreEnum.Type.split, 3, 28),
)
@ddt.unpack
def test_persistent_grades_enabled_on_course(self, default_store, num_mongo_queries, num_sql_queries):
......@@ -373,14 +374,19 @@ class RecalculateSubsectionGradeTest(HasCourseWithProblemsMixin, ModuleStoreTest
def _apply_recalculate_subsection_grade(
self,
mock_score=MagicMock(modified=datetime.utcnow().replace(tzinfo=pytz.UTC) + timedelta(days=1))
mock_score=MagicMock(
modified=datetime.utcnow().replace(tzinfo=pytz.UTC) + timedelta(days=1),
grade=1.0,
max_grade=2.0,
)
):
"""
Calls the recalculate_subsection_grade task with necessary
mocking in place.
"""
with self.mock_get_score(mock_score):
recalculate_subsection_grade_v3.apply(kwargs=self.recalculate_subsection_grade_kwargs)
with self.mock_csm_get_score(mock_score):
with mock_get_score(1, 2):
recalculate_subsection_grade_v3.apply(kwargs=self.recalculate_subsection_grade_kwargs)
def _assert_retry_called(self, mock_retry):
"""
......@@ -414,11 +420,12 @@ class ComputeGradesForCourseTest(HasCourseWithProblemsMixin, ModuleStoreTestCase
@ddt.data(*xrange(0, 12, 3))
def test_behavior(self, batch_size):
result = compute_grades_for_course_v2.delay(
course_key=six.text_type(self.course.id),
batch_size=batch_size,
offset=4,
)
with mock_get_score(1, 2):
result = compute_grades_for_course_v2.delay(
course_key=six.text_type(self.course.id),
batch_size=batch_size,
offset=4,
)
self.assertTrue(result.successful)
self.assertEqual(
PersistentCourseGrade.objects.filter(course_id=self.course.id).count(),
......@@ -430,15 +437,6 @@ class ComputeGradesForCourseTest(HasCourseWithProblemsMixin, ModuleStoreTestCase
)
@ddt.data(*xrange(1, 12, 3))
def test_compute_all_grades_for_course(self, batch_size):
self.set_up_course()
result = compute_all_grades_for_course.delay(
course_key=six.text_type(self.course.id),
batch_size=batch_size,
)
self.assertTrue(result.successful)
@ddt.data(*xrange(1, 12, 3))
def test_course_task_args(self, test_batch_size):
offset_expected = 0
for course_key, offset, batch_size in _course_task_args(
......
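
test_course_task_args above (shown only partially) checks the (course_key, offset, batch_size) tuples used to fan out per-course grading tasks. The generator below is a hypothetical re-implementation of that batching for illustration, not the real _course_task_args:

def course_task_args_sketch(course_key, batch_size, total_enrollments):
    # Hypothetical: yield one (course_key, offset, batch_size) tuple per batch.
    for offset in range(0, total_enrollments, batch_size):
        yield course_key, offset, batch_size

assert list(course_task_args_sketch(u'course-v1:x+y+z', batch_size=4, total_enrollments=10)) == [
    (u'course-v1:x+y+z', 0, 4),
    (u'course-v1:x+y+z', 4, 4),
    (u'course-v1:x+y+z', 8, 4),
]
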
......@@ -4,7 +4,7 @@ Utilities for grades related tests
from contextlib import contextmanager
from datetime import datetime
from mock import patch
from mock import patch, MagicMock
from courseware.model_data import FieldDataCache
from courseware.module_render import get_module
......@@ -18,9 +18,11 @@ def mock_passing_grade(grade_pass='Pass', percent=0.75, ):
"""
with patch('lms.djangoapps.grades.course_grade.CourseGrade._compute_letter_grade') as mock_letter_grade:
with patch('lms.djangoapps.grades.course_grade.CourseGrade._compute_percent') as mock_percent_grade:
mock_letter_grade.return_value = grade_pass
mock_percent_grade.return_value = percent
yield
with patch('lms.djangoapps.grades.course_grade.CourseGrade.attempted') as mock_attempted:
mock_letter_grade.return_value = grade_pass
mock_percent_grade.return_value = percent
mock_attempted.return_value = True
yield
@contextmanager
......
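
mock_passing_grade above now stacks a third patch so that CourseGrade.attempted also reports True, which the new engagement-only persistence rule needs. The sketch below shows the same nested-@contextmanager patching pattern against a toy Grade class rather than the real CourseGrade:

from contextlib import contextmanager
from mock import patch

class Grade(object):
    """Toy class standing in for CourseGrade."""
    def letter(self):
        return 'F'
    def attempted(self):
        return False

@contextmanager
def mock_passing(letter_grade='Pass'):
    with patch.object(Grade, 'letter') as mock_letter:
        with patch.object(Grade, 'attempted') as mock_attempted:
            mock_letter.return_value = letter_grade
            mock_attempted.return_value = True
            yield

with mock_passing():
    assert Grade().letter() == 'Pass'
    assert Grade().attempted() is True
assert Grade().letter() == 'F'          # patches are gone outside the context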