Commit 6ee7f409 by Nimisha Asthagiri (committed by GitHub)

Merge pull request #14952 from edx/neem/refactor-grade-report

Refactor Instructor Tasks - in prep for Grade report work
parents 37fbf173 18433dab
......@@ -17,13 +17,13 @@ from smtplib import SMTPDataError, SMTPServerDisconnected, SMTPConnectError
from bulk_email.models import CourseEmail, SEND_TO_MYSELF, BulkEmailFlag
from bulk_email.tasks import perform_delegate_email_batches, send_course_email
from lms.djangoapps.instructor_task.exceptions import DuplicateTaskException
from lms.djangoapps.instructor_task.models import InstructorTask
from lms.djangoapps.instructor_task.subtasks import (
initialize_subtask_info,
SubtaskStatus,
check_subtask_is_valid,
update_subtask_status,
DuplicateTaskException,
MAX_DATABASE_LOCK_RETRIES,
)
from opaque_keys.edx.locations import SlashSeparatedCourseKey
......
......@@ -9,7 +9,7 @@ from .scores import possibly_scored
def grading_context_for_course(course_key):
"""
Same as grading_context, but takes in a course object.
Same as grading_context, but takes in a course key.
"""
course_structure = get_course_in_cache(course_key)
return grading_context(course_structure)
......
......@@ -160,7 +160,7 @@ class GradesEventIntegrationTest(ProblemSubmissionTestMixin, SharedModuleStoreTe
}
)
@patch('lms.djangoapps.instructor_task.tasks_helper.tracker')
@patch('lms.djangoapps.instructor_task.tasks_helper.module_state.tracker')
@patch('lms.djangoapps.grades.signals.handlers.tracker')
@patch('lms.djangoapps.grades.models.tracker')
def test_rescoring_events(self, models_tracker, handlers_tracker, instructor_task_tracker):
......
"""
Exception classes used by Instructor tasks.
"""
class UpdateProblemModuleStateError(Exception):
    """
    Error signaling a fatal condition while updating problem modules.

    Used when the current module cannot be processed and no more
    modules should be attempted.
    """
    pass


class DuplicateTaskException(Exception):
    """Exception indicating that a task already exists or has already completed."""
    pass
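To show how these exceptions are meant to be consumed, here is a minimal, hypothetical sketch (not code from this commit) of a caller that treats a duplicate subtask as skippable work rather than a failure; check_fn stands in for a validity check such as subtasks.check_subtask_is_valid:

# Hypothetical usage sketch; run_subtask_once and check_fn are illustrative names.
from lms.djangoapps.instructor_task.exceptions import DuplicateTaskException

def run_subtask_once(check_fn, entry_id, current_task_id):
    """Run a subtask validity check, treating duplicates as a skip, not a failure."""
    try:
        check_fn(entry_id, current_task_id)
    except DuplicateTaskException:
        # Another worker already claimed or completed this subtask.
        return 'skipped'
    return 'ok'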
......@@ -14,9 +14,12 @@ import dogstats_wrapper as dog_stats_api
from django.db import transaction, DatabaseError
from django.core.cache import cache
from lms.djangoapps.instructor_task.models import InstructorTask, PROGRESS, QUEUING
from util.db import outer_atomic
from .exceptions import DuplicateTaskException
from .models import InstructorTask, PROGRESS, QUEUING
TASK_LOG = logging.getLogger('edx.celery.task')
# Lock expiration should be long enough to allow a subtask to complete.
......@@ -26,11 +29,6 @@ SUBTASK_LOCK_EXPIRE = 60 * 10 # Lock expires in 10 minutes
MAX_DATABASE_LOCK_RETRIES = 5
class DuplicateTaskException(Exception):
"""Exception indicating that a task already exists or has already completed."""
pass
def _get_number_of_subtasks(total_num_items, items_per_task):
"""
    Determines the number of subtasks that would be generated by _generate_items_for_subtask.
......
......@@ -27,26 +27,34 @@ from django.utils.translation import ugettext_noop
from celery import task
from bulk_email.tasks import perform_delegate_email_batches
from lms.djangoapps.instructor_task.tasks_helper import (
    run_main_task,
    BaseInstructorTask,
    perform_module_state_update,
    rescore_problem_module_state,
    reset_attempts_module_state,
    delete_problem_module_state,
    upload_problem_responses_csv,
    upload_grades_csv,
    upload_problem_grade_report,
    upload_students_csv,
    cohort_students_and_upload,
)
from lms.djangoapps.instructor_task.tasks_base import BaseInstructorTask
from lms.djangoapps.instructor_task.tasks_helper.runner import run_main_task
from lms.djangoapps.instructor_task.tasks_helper.certs import (
    generate_students_certificates,
)
from lms.djangoapps.instructor_task.tasks_helper.enrollments import (
    upload_enrollment_report,
    upload_may_enroll_csv,
    upload_exec_summary_report,
    upload_students_csv,
)
from lms.djangoapps.instructor_task.tasks_helper.grades import (
    generate_course_grade_report,
    generate_problem_grade_report,
    upload_problem_responses_csv,
)
from lms.djangoapps.instructor_task.tasks_helper.misc import (
    cohort_students_and_upload,
    upload_course_survey_report,
    upload_proctored_exam_results_report,
    upload_ora2_data,
)
from lms.djangoapps.instructor_task.tasks_helper.module_state import (
    perform_module_state_update,
    rescore_problem_module_state,
    reset_attempts_module_state,
    delete_problem_module_state,
)
TASK_LOG = logging.getLogger('edx.celery.task')
......@@ -168,7 +176,7 @@ def calculate_grades_csv(entry_id, xmodule_instance_args):
xmodule_instance_args.get('task_id'), entry_id, action_name
)
task_fn = partial(upload_grades_csv, xmodule_instance_args)
task_fn = partial(generate_course_grade_report, xmodule_instance_args)
return run_main_task(entry_id, task_fn, action_name)
......@@ -185,7 +193,7 @@ def calculate_problem_grade_report(entry_id, xmodule_instance_args):
xmodule_instance_args.get('task_id'), entry_id, action_name
)
task_fn = partial(upload_problem_grade_report, xmodule_instance_args)
task_fn = partial(generate_problem_grade_report, xmodule_instance_args)
return run_main_task(entry_id, task_fn, action_name)
......
"""
Base class for Instructor celery tasks.
"""
from celery import Task
from celery.states import SUCCESS, FAILURE
import logging
from lms.djangoapps.instructor_task.models import InstructorTask
# define different loggers for use within tasks and on client side
TASK_LOG = logging.getLogger('edx.celery.task')
class BaseInstructorTask(Task):
"""
    Base task class for use with InstructorTask models.

    Permits updating information about the task in the corresponding InstructorTask entry,
    for monitoring purposes.

    Assumes that the entry_id of the InstructorTask model is the first argument to the task.
    The `entry_id` is the primary key for the InstructorTask entry representing the task. This class
    updates the entry on success and failure of the task it wraps, setting the entry's
    task_state to what Celery would set it to once the task returns to Celery:
    FAILURE if an exception is encountered, and SUCCESS if it returns normally.

    Other arguments are pass-throughs to perform_module_state_update, and are documented there.
    """
abstract = True
def on_success(self, task_progress, task_id, args, kwargs):
"""
Update InstructorTask object corresponding to this task with info about success.
        Updates task_output and task_state, but does nothing if the task
        is only creating subtasks to do the actual work.
Assumes `task_progress` is a dict containing the task's result, with the following keys:
'attempted': number of attempts made
'succeeded': number of attempts that "succeeded"
'skipped': number of attempts that "skipped"
'failed': number of attempts that "failed"
'total': number of possible subtasks to attempt
'action_name': user-visible verb to use in status messages. Should be past-tense.
Pass-through of input `action_name`.
'duration_ms': how long the task has (or had) been running.
This is JSON-serialized and stored in the task_output column of the InstructorTask entry.
"""
TASK_LOG.debug('Task %s: success returned with progress: %s', task_id, task_progress)
        # We should be able to find the InstructorTask object to update
        # based on the task_id here, without having to dig into the
        # original args to the task. On the other hand, the entry_id
        # is the first argument passed to all such tasks, so we'll use that.
        # And we assume that it exists, else we would already have had a failure.
entry_id = args[0]
entry = InstructorTask.objects.get(pk=entry_id)
# Check to see if any subtasks had been defined as part of this task.
# If not, then we know that we're done. (If so, let the subtasks
# handle updating task_state themselves.)
if len(entry.subtasks) == 0:
entry.task_output = InstructorTask.create_output_for_success(task_progress)
entry.task_state = SUCCESS
entry.save_now()
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""
Update InstructorTask object corresponding to this task with info about failure.
Fetches and updates exception and traceback information on failure.
If an exception is raised internal to the task, it is caught by celery and provided here.
The information is recorded in the InstructorTask object as a JSON-serialized dict
stored in the task_output column. It contains the following keys:
'exception': type of exception object
'message': error message from exception object
'traceback': traceback information (truncated if necessary)
Note that there is no way to record progress made within the task (e.g. attempted,
succeeded, etc.) when such failures occur.
"""
TASK_LOG.debug(u'Task %s: failure returned', task_id)
entry_id = args[0]
try:
entry = InstructorTask.objects.get(pk=entry_id)
except InstructorTask.DoesNotExist:
# if the InstructorTask object does not exist, then there's no point
# trying to update it.
TASK_LOG.error(u"Task (%s) has no InstructorTask object for id %s", task_id, entry_id)
else:
TASK_LOG.warning(u"Task (%s) failed", task_id, exc_info=True)
entry.task_output = InstructorTask.create_output_for_failure(einfo.exception, einfo.traceback)
entry.task_state = FAILURE
entry.save_now()
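For context on how this base class is consumed: a report task declares it as its Celery base and delegates to run_main_task, so that the on_success/on_failure hooks above record the outcome on the InstructorTask row. A minimal sketch following the pattern shown in tasks.py above; example_report_task and some_report_fn are hypothetical names, not functions added by this commit:

# Illustrative wiring sketch; names here are hypothetical.
from functools import partial

from celery import task
from django.utils.translation import ugettext_noop

from lms.djangoapps.instructor_task.tasks_base import BaseInstructorTask
from lms.djangoapps.instructor_task.tasks_helper.runner import run_main_task

def some_report_fn(_xmodule_instance_args, _entry_id, _course_id, _task_input, _action_name):
    """Hypothetical report function; the real ones live in tasks_helper.*."""

@task(base=BaseInstructorTask)
def example_report_task(entry_id, xmodule_instance_args):
    # Past-tense action name, used in user-visible status messages.
    action_name = ugettext_noop('generated')
    task_fn = partial(some_report_fn, xmodule_instance_args)
    return run_main_task(entry_id, task_fn, action_name)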
"""
Instructor tasks related to certificates.
"""
from django.contrib.auth.models import User
from django.db.models import Q
from time import time
from certificates.api import generate_user_certificates
from certificates.models import CertificateStatuses, GeneratedCertificate
from student.models import CourseEnrollment
from xmodule.modulestore.django import modulestore
from .runner import TaskProgress
def generate_students_certificates(
_xmodule_instance_args, _entry_id, course_id, task_input, action_name):
"""
    For a given `course_id`, generate certificates only for the students selected by the
    'student_set' key in the task_input JSON column; otherwise generate certificates
    for all enrolled students.
"""
start_time = time()
students_to_generate_certs_for = CourseEnrollment.objects.users_enrolled_in(course_id)
student_set = task_input.get('student_set')
if student_set == 'all_whitelisted':
        # Generate certificates for all whitelisted students.
students_to_generate_certs_for = students_to_generate_certs_for.filter(
certificatewhitelist__course_id=course_id,
certificatewhitelist__whitelist=True
)
elif student_set == 'whitelisted_not_generated':
        # Whitelisted students who have not already been issued passing certificates.
students_to_generate_certs_for = students_to_generate_certs_for.filter(
certificatewhitelist__course_id=course_id,
certificatewhitelist__whitelist=True
).exclude(
generatedcertificate__course_id=course_id,
generatedcertificate__status__in=CertificateStatuses.PASSED_STATUSES
)
elif student_set == "specific_student":
specific_student_id = task_input.get('specific_student_id')
students_to_generate_certs_for = students_to_generate_certs_for.filter(id=specific_student_id)
task_progress = TaskProgress(action_name, students_to_generate_certs_for.count(), start_time)
current_step = {'step': 'Calculating students already have certificates'}
task_progress.update_task_state(extra_meta=current_step)
statuses_to_regenerate = task_input.get('statuses_to_regenerate', [])
if student_set is not None and not statuses_to_regenerate:
        # Skip the students_require_certificate filtering only when a student set is given
        # and there are no statuses to regenerate.
students_require_certs = students_to_generate_certs_for
else:
students_require_certs = students_require_certificate(
course_id, students_to_generate_certs_for, statuses_to_regenerate
)
if statuses_to_regenerate:
        # Mark existing generated certificates as 'unavailable' before regenerating.
        # This must be called after students_require_certificate(); calling it first
        # would cause students_require_certificate() to return no results.
invalidate_generated_certificates(course_id, students_to_generate_certs_for, statuses_to_regenerate)
task_progress.skipped = task_progress.total - len(students_require_certs)
current_step = {'step': 'Generating Certificates'}
task_progress.update_task_state(extra_meta=current_step)
course = modulestore().get_course(course_id, depth=0)
# Generate certificate for each student
for student in students_require_certs:
task_progress.attempted += 1
status = generate_user_certificates(
student,
course_id,
course=course
)
if CertificateStatuses.is_passing_status(status):
task_progress.succeeded += 1
else:
task_progress.failed += 1
return task_progress.update_task_state(extra_meta=current_step)
def students_require_certificate(course_id, enrolled_students, statuses_to_regenerate=None):
"""
    Returns the list of students for whom certificates need to be generated.

    If 'statuses_to_regenerate' is given, returns the students that have generated
    certificates whose status lies in 'statuses_to_regenerate'.

    If 'statuses_to_regenerate' is not given, returns all enrolled students, skipping
    those whose certificates have already been generated.

    :param course_id: key of the course for which certificates are being generated
    :param enrolled_students: queryset of students enrolled in the course
    :param statuses_to_regenerate: list of certificate statuses eligible for regeneration
    """
if statuses_to_regenerate:
# Return Students that have Generated Certificates and the generated certificate status
# lies in 'statuses_to_regenerate'
students_require_certificates = enrolled_students.filter(
generatedcertificate__course_id=course_id,
generatedcertificate__status__in=statuses_to_regenerate
)
        # Evaluate the queryset now; otherwise subsequent updates to the table would change these results
return list(students_require_certificates)
else:
# compute those students whose certificates are already generated
students_already_have_certs = User.objects.filter(
~Q(generatedcertificate__status=CertificateStatuses.unavailable),
generatedcertificate__course_id=course_id)
        # Return all enrolled students, skipping those whose certificates have already been generated
return list(set(enrolled_students) - set(students_already_have_certs))
def invalidate_generated_certificates(course_id, enrolled_students, certificate_statuses): # pylint: disable=invalid-name
"""
Invalidate generated certificates for all enrolled students in the given course having status in
'certificate_statuses'.
    Generated certificates are invalidated by marking their status 'unavailable' and setting
    verify_uuid, download_uuid, download_url, and grade to the empty string.

    :param course_id: Course key for the course whose generated certificates need to be removed
    :param enrolled_students: (queryset or list) students enrolled in the course
    :param certificate_statuses: certificate statuses for which to invalidate generated certificates
"""
certificates = GeneratedCertificate.objects.filter( # pylint: disable=no-member
user__in=enrolled_students,
course_id=course_id,
status__in=certificate_statuses,
)
    # Mark generated certificates as 'unavailable' and set download_url, download_uuid,
    # verify_uuid, and grade to the empty string for each row
certificates.update(
status=CertificateStatuses.unavailable,
verify_uuid='',
download_uuid='',
download_url='',
grade='',
)
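The branching in generate_students_certificates above is driven entirely by task_input. For illustration, these are the input shapes the function expects; the keys come from the code above, while the values (including the status names) are hypothetical examples:

# Illustrative task_input values for generate_students_certificates (values are hypothetical):
example_inputs = [
    {'student_set': 'all_whitelisted'},                              # every whitelisted student
    {'student_set': 'whitelisted_not_generated'},                    # whitelisted, but no passing cert yet
    {'student_set': 'specific_student', 'specific_student_id': 42},  # a single student by id
    {'statuses_to_regenerate': ['downloadable', 'error']},           # regenerate certs in these statuses
]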
from django.db import reset_queries
from celery import Task, current_task
import dogstats_wrapper as dog_stats_api
import json
import logging
from util.db import outer_atomic
from time import time
from lms.djangoapps.instructor_task.models import InstructorTask, PROGRESS
TASK_LOG = logging.getLogger('edx.celery.task')
class TaskProgress(object):
"""
Encapsulates the current task's progress by keeping track of
'attempted', 'succeeded', 'skipped', 'failed', 'total',
'action_name', and 'duration_ms' values.
"""
def __init__(self, action_name, total, start_time):
self.action_name = action_name
self.total = total
self.start_time = start_time
self.attempted = 0
self.succeeded = 0
self.skipped = 0
self.failed = 0
def update_task_state(self, extra_meta=None):
"""
Update the current celery task's state to the progress state
specified by the current object. Returns the progress
dictionary for use by `run_main_task` and
`BaseInstructorTask.on_success`.
Arguments:
extra_meta (dict): Extra metadata to pass to `update_state`
Returns:
dict: The current task's progress dict
"""
progress_dict = {
'action_name': self.action_name,
'attempted': self.attempted,
'succeeded': self.succeeded,
'skipped': self.skipped,
'failed': self.failed,
'total': self.total,
'duration_ms': int((time() - self.start_time) * 1000),
}
if extra_meta is not None:
progress_dict.update(extra_meta)
_get_current_task().update_state(state=PROGRESS, meta=progress_dict)
return progress_dict
def run_main_task(entry_id, task_fcn, action_name):
"""
    Applies `task_fcn` to the arguments defined in the InstructorTask entry with primary key `entry_id`.
Arguments passed to `task_fcn` are:
`entry_id` : the primary key for the InstructorTask entry representing the task.
`course_id` : the id for the course.
`task_input` : dict containing task-specific arguments, JSON-decoded from InstructorTask's task_input.
`action_name` : past-tense verb to use for constructing status messages.
If no exceptions are raised, the `task_fcn` should return a dict containing
the task's result with the following keys:
'attempted': number of attempts made
'succeeded': number of attempts that "succeeded"
'skipped': number of attempts that "skipped"
'failed': number of attempts that "failed"
'total': number of possible subtasks to attempt
'action_name': user-visible verb to use in status messages.
Should be past-tense. Pass-through of input `action_name`.
'duration_ms': how long the task has (or had) been running.
"""
# Get the InstructorTask to be updated. If this fails then let the exception return to Celery.
# There's no point in catching it here.
with outer_atomic():
entry = InstructorTask.objects.get(pk=entry_id)
entry.task_state = PROGRESS
entry.save_now()
# Get inputs to use in this task from the entry
task_id = entry.task_id
course_id = entry.course_id
task_input = json.loads(entry.task_input)
# Construct log message
fmt = u'Task: {task_id}, InstructorTask ID: {entry_id}, Course: {course_id}, Input: {task_input}'
task_info_string = fmt.format(task_id=task_id, entry_id=entry_id, course_id=course_id, task_input=task_input)
TASK_LOG.info(u'%s, Starting update (nothing %s yet)', task_info_string, action_name)
# Check that the task_id submitted in the InstructorTask matches the current task
# that is running.
request_task_id = _get_current_task().request.id
if task_id != request_task_id:
fmt = u'{task_info}, Requested task did not match actual task "{actual_id}"'
message = fmt.format(task_info=task_info_string, actual_id=request_task_id)
TASK_LOG.error(message)
raise ValueError(message)
# Now do the work
with dog_stats_api.timer('instructor_tasks.time.overall', tags=[u'action:{name}'.format(name=action_name)]):
task_progress = task_fcn(entry_id, course_id, task_input, action_name)
# Release any queries that the connection has been hanging onto
reset_queries()
# Log and exit, returning task_progress info as task result
TASK_LOG.info(u'%s, Task type: %s, Finishing task: %s', task_info_string, action_name, task_progress)
return task_progress
def _get_current_task():
"""
Stub to make it easier to test without actually running Celery.
This is a wrapper around celery.current_task, which provides access
to the top of the stack of Celery's tasks. When running tests, however,
it doesn't seem to work to mock current_task directly, so this wrapper
is used to provide a hook to mock in tests, while providing the real
`current_task` in production.
"""
return current_task
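Putting runner.py together: a report function receives (entry_id, course_id, task_input, action_name), tracks its work in a TaskProgress, and returns the final progress dict. A condensed, hypothetical sketch of that contract; fetch_items and process are stand-ins for real work, not part of this commit:

# Hypothetical task_fcn matching the contract run_main_task expects.
from time import time

def fetch_items(course_id):  # hypothetical data source
    return []

def process(item):  # hypothetical unit of work; returns True on success
    return True

def example_task_fn(_entry_id, course_id, _task_input, action_name):
    start_time = time()
    items = fetch_items(course_id)
    progress = TaskProgress(action_name, len(items), start_time)
    for item in items:
        progress.attempted += 1
        if process(item):
            progress.succeeded += 1
        else:
            progress.failed += 1
        progress.update_task_state()  # push PROGRESS state to Celery as we go
    return progress.update_task_state(extra_meta={'step': 'Done'})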
from eventtracking import tracker
from lms.djangoapps.instructor_task.models import ReportStore
from util.file import course_filename_prefix_generator
REPORT_REQUESTED_EVENT_NAME = u'edx.instructor.report.requested'
# define value to use when no task_id is provided:
UNKNOWN_TASK_ID = 'unknown-task_id'
# define values for update functions to use to return status to perform_module_state_update
UPDATE_STATUS_SUCCEEDED = 'succeeded'
UPDATE_STATUS_FAILED = 'failed'
UPDATE_STATUS_SKIPPED = 'skipped'
def upload_csv_to_report_store(rows, csv_name, course_id, timestamp, config_name='GRADES_DOWNLOAD'):
"""
Upload data as a CSV using ReportStore.
Arguments:
rows: CSV data in the following format (first column may be a
header):
[
                [row1_column1, row1_column2, ...],
...
]
        csv_name: Name of the resulting CSV
        course_id: ID of the course
        timestamp: datetime used to stamp the report filename
        config_name: ReportStore configuration to use; defaults to 'GRADES_DOWNLOAD'
    """
report_store = ReportStore.from_config(config_name)
report_store.store_rows(
course_id,
u"{course_prefix}_{csv_name}_{timestamp_str}.csv".format(
course_prefix=course_filename_prefix_generator(course_id),
csv_name=csv_name,
timestamp_str=timestamp.strftime("%Y-%m-%d-%H%M")
),
rows
)
tracker_emit(csv_name)
def tracker_emit(report_name):
"""
Emits a 'report.requested' event for the given report.
"""
tracker.emit(REPORT_REQUESTED_EVENT_NAME, {"report_type": report_name, })
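For instance, a report function would finish by handing its accumulated rows to the store. A minimal sketch, assuming the upload_csv_to_report_store helper above; store_example_report and the row values are placeholders, and course_id is an opaque course key:

# Hypothetical caller of upload_csv_to_report_store.
from datetime import datetime

def store_example_report(course_id):
    rows = [
        ['email', 'grade'],               # header row
        ['student@example.com', '0.83'],  # one data row per learner
    ]
    # Writes <course_prefix>_example_report_<timestamp>.csv via the GRADES_DOWNLOAD
    # config and emits the edx.instructor.report.requested tracking event.
    upload_csv_to_report_store(rows, 'example_report', course_id, datetime.now())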
......@@ -32,7 +32,7 @@ from lms.djangoapps.instructor_task.api import (
submit_delete_problem_state_for_all_students
)
from lms.djangoapps.instructor_task.models import InstructorTask
from lms.djangoapps.instructor_task.tasks_helper import upload_grades_csv
from lms.djangoapps.instructor_task.tasks_helper.grades import generate_course_grade_report
from lms.djangoapps.instructor_task.tests.test_base import (
InstructorTaskModuleTestCase,
TestReportMixin,
......@@ -572,10 +572,10 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
def verify_csv_task_success(self, task_result):
"""
Verify that all students were successfully graded by
`upload_grades_csv`.
`generate_course_grade_report`.
Arguments:
task_result (dict): Return value of `upload_grades_csv`.
task_result (dict): Return value of `generate_course_grade_report`.
"""
self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, task_result)
......@@ -635,8 +635,8 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
self.submit_student_answer(self.student_a.username, problem_a_url, [OPTION_1, OPTION_1])
self.submit_student_answer(self.student_b.username, problem_b_url, [OPTION_1, OPTION_2])
with patch('lms.djangoapps.instructor_task.tasks_helper._get_current_task'):
result = upload_grades_csv(None, None, self.course.id, None, 'graded')
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
result = generate_course_grade_report(None, None, self.course.id, None, 'graded')
self.verify_csv_task_success(result)
self.verify_grades_in_csv(
[
......@@ -668,8 +668,8 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
self.submit_student_answer(self.student_a.username, problem_a_url, [OPTION_1, OPTION_1])
with patch('lms.djangoapps.instructor_task.tasks_helper._get_current_task'):
result = upload_grades_csv(None, None, self.course.id, None, 'graded')
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
result = generate_course_grade_report(None, None, self.course.id, None, 'graded')
self.verify_csv_task_success(result)
self.verify_grades_in_csv(
[
......
......@@ -22,6 +22,7 @@ from courseware.tests.factories import StudentModuleFactory
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from xmodule.modulestore.exceptions import ItemNotFoundError
from lms.djangoapps.instructor_task.exceptions import UpdateProblemModuleStateError
from lms.djangoapps.instructor_task.models import InstructorTask
from lms.djangoapps.instructor_task.tests.test_base import InstructorTaskModuleTestCase
from lms.djangoapps.instructor_task.tests.factories import InstructorTaskFactory
......@@ -32,10 +33,8 @@ from lms.djangoapps.instructor_task.tasks import (
generate_certificates,
export_ora2_data,
)
from lms.djangoapps.instructor_task.tasks_helper import (
UpdateProblemModuleStateError,
upload_ora2_data,
)
from lms.djangoapps.instructor_task.tasks_helper.misc import upload_ora2_data
PROBLEM_URL_NAME = "test_urlname"
......@@ -97,7 +96,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
self.current_task.update_state.side_effect = TestTaskFailure(expected_failure_message)
task_args = [entry_id, self._get_xmodule_instance_args()]
with patch('lms.djangoapps.instructor_task.tasks_helper._get_current_task') as mock_get_task:
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task') as mock_get_task:
mock_get_task.return_value = self.current_task
return task_class.apply(task_args, task_id=task_id).get()
......@@ -275,7 +274,7 @@ class TestRescoreInstructorTask(TestInstructorTasks):
mock_instance = MagicMock()
del mock_instance.rescore_problem
del mock_instance.rescore
with patch('lms.djangoapps.instructor_task.tasks_helper.get_module_for_descriptor_internal') as mock_get_module:
with patch('lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal') as mock_get_module:
mock_get_module.return_value = mock_instance
with self.assertRaises(UpdateProblemModuleStateError):
self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
......@@ -295,7 +294,7 @@ class TestRescoreInstructorTask(TestInstructorTasks):
num_students = 1
self._create_students_with_state(num_students, input_state)
task_entry = self._create_input_entry()
with patch('lms.djangoapps.instructor_task.tasks_helper.get_module_for_descriptor_internal', return_value=None):
with patch('lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal', return_value=None):
self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
self.assert_task_output(
......@@ -331,7 +330,7 @@ class TestRescoreInstructorTask(TestInstructorTasks):
num_students = 10
self._create_students_with_state(num_students)
task_entry = self._create_input_entry()
with patch('lms.djangoapps.instructor_task.tasks_helper.get_module_for_descriptor_internal') as mock_get_module:
with patch('lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal') as mock_get_module:
mock_get_module.return_value = mock_instance
self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
......@@ -356,7 +355,7 @@ class TestRescoreInstructorTask(TestInstructorTasks):
mock_instance = Mock()
mock_instance.rescore_problem = Mock(return_value={'success': 'bogus'})
del mock_instance.rescore
with patch('lms.djangoapps.instructor_task.tasks_helper.get_module_for_descriptor_internal') as mock_get_module:
with patch('lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal') as mock_get_module:
mock_get_module.return_value = mock_instance
self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
......@@ -381,7 +380,7 @@ class TestRescoreInstructorTask(TestInstructorTasks):
mock_instance = Mock()
mock_instance.rescore_problem = Mock(return_value={'bogus': 'value'})
del mock_instance.rescore
with patch('lms.djangoapps.instructor_task.tasks_helper.get_module_for_descriptor_internal') as mock_get_module:
with patch('lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal') as mock_get_module:
mock_get_module.return_value = mock_instance
self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
......