Commit 379a2c18 by Nimisha Asthagiri

Refactor Grade Report in prep for parallelization.

parent cc98111d
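
The central API change below is the signature of `CourseGradeFactory.iter`: users now come first, and the course, its structures, and the course key become optional keywords. A minimal before/after sketch (illustrative only; `course` and `enrolled_students` are placeholders from the calling code):

    factory = CourseGradeFactory()

    # Before this commit: the course object was required, positionally first.
    grades = factory.iter(course, enrolled_students)

    # After this commit: users come first; course, structures, and course_key
    # are optional keywords, so a worker can grade a batch given only the key.
    grades = factory.iter(enrolled_students, course_key=course.id)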
@@ -564,7 +564,7 @@ def ccx_grades_csv(request, course, ccx=None):
             courseenrollment__course_id=ccx_key,
             courseenrollment__is_active=1
         ).order_by('username').select_related("profile")
-        grades = CourseGradeFactory().iter(course, enrolled_students)
+        grades = CourseGradeFactory().iter(enrolled_students, course)
         header = None
         rows = []
...
 from lms.djangoapps.course_blocks.api import get_course_blocks
+from openedx.core.djangoapps.content.block_structure.api import get_block_structure_manager
 from xmodule.modulestore.django import modulestore

 from ..transformer import GradesTransformer
@@ -57,6 +58,12 @@ class CourseData(object):
         return self._structure

     @property
+    def collected_structure(self):
+        if not self._collected_block_structure:
+            self._collected_block_structure = get_block_structure_manager(self.course_key).get_collected()
+        return self._collected_block_structure
+
+    @property
     def course(self):
         if not self._course:
             self._course = modulestore().get_course(self.course_key)
...
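
The new `collected_structure` property memoizes the expensive `get_collected()` call on the instance, so one `CourseData` object can serve every per-user grade computation in a batch. A rough usage sketch, mirroring the positional constructor call visible in the factory diff below (`course` and `course_key` are placeholders; whether the remaining arguments may all be `None` is an assumption based on the call sites in this diff):

    # args: user, course, collected_block_structure, course_structure, course_key
    course_data = CourseData(None, course, None, None, course_key)
    blocks = course_data.collected_structure        # first access hits the block-structure store
    blocks_again = course_data.collected_structure  # later accesses reuse the cached value
    assert blocks is blocks_again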
@@ -2,7 +2,6 @@ from collections import namedtuple

 import dogstats_wrapper as dog_stats_api
 from logging import getLogger
-from openedx.core.djangoapps.content.block_structure.api import get_block_structure_manager
 from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED

 from ..config import assume_zero_if_absent, should_persist_grades
@@ -77,7 +76,15 @@ class CourseGradeFactory(object):
         course_data = CourseData(user, course, collected_block_structure, course_structure, course_key)
         return self._update(user, course_data, read_only=False)

-    def iter(self, course, students, force_update=False):
+    def iter(
+            self,
+            users,
+            course=None,
+            collected_block_structure=None,
+            course_structure=None,
+            course_key=None,
+            force_update=False,
+    ):
         """
         Given a course and an iterable of students (User), yield a GradeResult
         for every student enrolled in the course.  GradeResult is a named tuple of:
@@ -92,25 +99,27 @@ class CourseGradeFactory(object):
         # compute the grade for all students.
         # 2. Optimization: the collected course_structure is not
         # retrieved from the data store multiple times.
-        collected_block_structure = get_block_structure_manager(course.id).get_collected()
-        for student in students:
-            with dog_stats_api.timer('lms.grades.CourseGradeFactory.iter', tags=[u'action:{}'.format(course.id)]):
+        course_data = CourseData(None, course, collected_block_structure, course_structure, course_key)
+        for user in users:
+            with dog_stats_api.timer(
+                    'lms.grades.CourseGradeFactory.iter',
+                    tags=[u'action:{}'.format(course_data.course_key)]
+            ):
                 try:
-                    operation = CourseGradeFactory().update if force_update else CourseGradeFactory().create
-                    course_grade = operation(student, course, collected_block_structure)
-                    yield self.GradeResult(student, course_grade, "")
+                    method = CourseGradeFactory().update if force_update else CourseGradeFactory().create
+                    course_grade = method(user, course, course_data.collected_structure, course_structure, course_key)
+                    yield self.GradeResult(user, course_grade, "")
                 except Exception as exc:  # pylint: disable=broad-except
                     # Keep marching on even if this student couldn't be graded for
                     # some reason, but log it for future reference.
                     log.exception(
                         'Cannot grade student %s in course %s because of exception: %s',
-                        student.id,
-                        course.id,
+                        user.id,
+                        course_data.course_key,
                         exc.message
                     )
-                    yield self.GradeResult(student, None, exc.message)
+                    yield self.GradeResult(user, None, exc.message)

     @staticmethod
     def _create_zero(user, course_data):
...
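
Callers of the reworked `iter` receive `GradeResult` named tuples and must treat a `None` grade as a per-student failure, matching the error path above. A small consumption sketch (`students` and `course_key` are supplied by the caller; `log` is assumed to be a module-level logger):

    def grade_percentages(students, course_key):
        """Yield (username, percent), skipping students whose grading raised."""
        for student, course_grade, err_msg in CourseGradeFactory().iter(students, course_key=course_key):
            if course_grade is None:
                log.warning('Could not grade %s: %s', student.username, err_msg)
                continue
            yield student.username, course_grade.percent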
@@ -96,7 +96,7 @@ def compute_grades_for_course(course_key, offset, batch_size, **kwargs):  # pyli
     course = courses.get_course_by_id(CourseKey.from_string(course_key))
     enrollments = CourseEnrollment.objects.filter(course_id=course.id).order_by('created')
     student_iter = (enrollment.user for enrollment in enrollments[offset:offset + batch_size])
-    list(CourseGradeFactory().iter(course, students=student_iter, force_update=True))
+    list(CourseGradeFactory().iter(users=student_iter, course=course, force_update=True))

 @task(bind=True, base=_BaseTask, default_retry_delay=30, routing_key=settings.RECALCULATE_GRADES_ROUTING_KEY)
...
@@ -60,7 +60,7 @@ class TestGradeIteration(SharedModuleStoreTestCase):
         If we don't pass in any students, it should return a zero-length
         iterator, but it shouldn't error.
         """
-        grade_results = list(CourseGradeFactory().iter(self.course, []))
+        grade_results = list(CourseGradeFactory().iter([], self.course))
         self.assertEqual(grade_results, [])

     def test_all_empty_grades(self):
@@ -130,7 +130,7 @@ class TestGradeIteration(SharedModuleStoreTestCase):
         students_to_course_grades = {}
         students_to_errors = {}

-        for student, course_grade, err_msg in CourseGradeFactory().iter(course, students):
+        for student, course_grade, err_msg in CourseGradeFactory().iter(students, course):
             students_to_course_grades[student] = course_grade
             if err_msg:
                 students_to_errors[student] = err_msg
...
@@ -39,9 +39,9 @@ from lms.djangoapps.instructor_task.tasks_helper.enrollments import (
     upload_students_csv,
 )
 from lms.djangoapps.instructor_task.tasks_helper.grades import (
-    generate_course_grade_report,
-    generate_problem_grade_report,
-    upload_problem_responses_csv,
+    CourseGradeReport,
+    ProblemGradeReport,
+    ProblemResponses,
 )
 from lms.djangoapps.instructor_task.tasks_helper.misc import (
     cohort_students_and_upload,
@@ -160,7 +160,7 @@ def calculate_problem_responses_csv(entry_id, xmodule_instance_args):
     """
     # Translators: This is a past-tense verb that is inserted into task progress messages as {action}.
     action_name = ugettext_noop('generated')
-    task_fn = partial(upload_problem_responses_csv, xmodule_instance_args)
+    task_fn = partial(ProblemResponses.generate, xmodule_instance_args)

     return run_main_task(entry_id, task_fn, action_name)
@@ -176,7 +176,7 @@ def calculate_grades_csv(entry_id, xmodule_instance_args):
         xmodule_instance_args.get('task_id'), entry_id, action_name
     )
-    task_fn = partial(generate_course_grade_report, xmodule_instance_args)
+    task_fn = partial(CourseGradeReport.generate, xmodule_instance_args)

     return run_main_task(entry_id, task_fn, action_name)
@@ -193,7 +193,7 @@ def calculate_problem_grade_report(entry_id, xmodule_instance_args):
         xmodule_instance_args.get('task_id'), entry_id, action_name
     )
-    task_fn = partial(generate_problem_grade_report, xmodule_instance_args)
+    task_fn = partial(ProblemGradeReport.generate, xmodule_instance_args)

     return run_main_task(entry_id, task_fn, action_name)
...
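
`run_main_task` calls the function it is handed with the remaining task arguments, so `partial` pre-binds `xmodule_instance_args` to make the new `generate` classmethods fit that shape. A toy sketch of the same binding pattern (the four-argument call signature is inferred from the partials above, not confirmed by this diff):

    from functools import partial

    def generate(xmodule_instance_args, entry_id, course_id, task_input, action_name):
        # stands in for CourseGradeReport.generate and friends
        return (xmodule_instance_args, entry_id, course_id, task_input, action_name)

    task_fn = partial(generate, {'task_id': 'abc123'})
    # run_main_task can now invoke: task_fn(entry_id, course_id, task_input, action_name)
    print task_fn(1, 'course-v1:edX+Demo+2017', None, 'graded')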
@@ -3,7 +3,8 @@ Functionality for generating grade reports.
 """
 from collections import OrderedDict
 from datetime import datetime
-from itertools import chain
+from itertools import chain, izip_longest, izip
+from lazy import lazy
 import logging
 from pytz import UTC
 import re
@@ -29,206 +30,318 @@ from .utils import upload_csv_to_report_store

 TASK_LOG = logging.getLogger('edx.celery.task')

-def generate_course_grade_report(_xmodule_instance_args, _entry_id, course_id, _task_input, action_name):  # pylint: disable=too-many-statements
-    """
-    For a given `course_id`, generate a grades CSV file for all students that
-    are enrolled, and store using a `ReportStore`. Once created, the files can
-    be accessed by instantiating another `ReportStore` (via
-    `ReportStore.from_config()`) and calling `link_for()` on it. Writes are
-    buffered, so we'll never write part of a CSV file to S3 -- i.e. any files
-    that are visible in ReportStore will be complete ones.
-
-    As we start to add more CSV downloads, it will probably be worthwhile to
-    make a more general CSVDoc class instead of building out the rows like we
-    do here.
-    """
-    start_time = time()
-    start_date = datetime.now(UTC)
-    status_interval = 100
-    enrolled_students = CourseEnrollment.objects.users_enrolled_in(course_id)
-    total_enrolled_students = enrolled_students.count()
-    task_progress = TaskProgress(action_name, total_enrolled_students, start_time)
-
-    fmt = u'Task: {task_id}, InstructorTask ID: {entry_id}, Course: {course_id}, Input: {task_input}'
-    task_info_string = fmt.format(
-        task_id=_xmodule_instance_args.get('task_id') if _xmodule_instance_args is not None else None,
-        entry_id=_entry_id,
-        course_id=course_id,
-        task_input=_task_input
-    )
-    TASK_LOG.info(u'%s, Task type: %s, Starting task execution', task_info_string, action_name)
-
-    course = get_course_by_id(course_id)
-    course_is_cohorted = is_course_cohorted(course.id)
-    teams_enabled = course.teams_enabled
-    cohorts_header = ['Cohort Name'] if course_is_cohorted else []
-    teams_header = ['Team Name'] if teams_enabled else []
-
-    experiment_partitions = get_split_user_partitions(course.user_partitions)
-    group_configs_header = [u'Experiment Group ({})'.format(partition.name) for partition in experiment_partitions]
-
-    certificate_info_header = ['Certificate Eligible', 'Certificate Delivered', 'Certificate Type']
-    certificate_whitelist = CertificateWhitelist.objects.filter(course_id=course_id, whitelist=True)
-    whitelisted_user_ids = [entry.user_id for entry in certificate_whitelist]
-
-    # Loop over all our students and build our CSV lists in memory
-    rows = []
-    err_rows = [["id", "username", "error_msg"]]
-    current_step = {'step': 'Calculating Grades'}
-    student_counter = 0
-    TASK_LOG.info(
-        u'%s, Task type: %s, Current step: %s, Starting grade calculation for total students: %s',
-        task_info_string,
-        action_name,
-        current_step,
-        total_enrolled_students,
-    )
-
-    graded_assignments = _graded_assignments(course_id)
-    grade_header = []
-    for assignment_info in graded_assignments.itervalues():
-        if assignment_info['use_subsection_headers']:
-            grade_header.extend(assignment_info['subsection_headers'].itervalues())
-        grade_header.append(assignment_info['average_header'])
-
-    rows.append(
-        ["Student ID", "Email", "Username", "Grade"] +
-        grade_header +
-        cohorts_header +
-        group_configs_header +
-        teams_header +
-        ['Enrollment Track', 'Verification Status'] +
-        certificate_info_header
-    )
-
-    for student, course_grade, err_msg in CourseGradeFactory().iter(course, enrolled_students):
-        # Periodically update task status (this is a cache write)
-        if task_progress.attempted % status_interval == 0:
-            task_progress.update_task_state(extra_meta=current_step)
-        task_progress.attempted += 1
-
-        # Now add a log entry after each student is graded to get a sense
-        # of the task's progress
-        student_counter += 1
-        TASK_LOG.info(
-            u'%s, Task type: %s, Current step: %s, Grade calculation in-progress for students: %s/%s',
-            task_info_string,
-            action_name,
-            current_step,
-            student_counter,
-            total_enrolled_students
-        )
-
-        if not course_grade:
-            # An empty gradeset means we failed to grade a student.
-            task_progress.failed += 1
-            err_rows.append([student.id, student.username, err_msg])
-            continue
-
-        # We were able to successfully grade this student for this course.
-        task_progress.succeeded += 1
-
-        cohorts_group_name = []
-        if course_is_cohorted:
-            group = get_cohort(student, course_id, assign=False)
-            cohorts_group_name.append(group.name if group else '')
-
-        group_configs_group_names = []
-        for partition in experiment_partitions:
-            group = PartitionService(course_id).get_group(student, partition, assign=False)
-            group_configs_group_names.append(group.name if group else '')
-
-        team_name = []
-        if teams_enabled:
-            try:
-                membership = CourseTeamMembership.objects.get(user=student, team__course_id=course_id)
-                team_name.append(membership.team.name)
-            except CourseTeamMembership.DoesNotExist:
-                team_name.append('')
-
-        enrollment_mode = CourseEnrollment.enrollment_mode_for_user(student, course_id)[0]
-        verification_status = SoftwareSecurePhotoVerification.verification_status_for_user(
-            student,
-            course_id,
-            enrollment_mode
-        )
-        certificate_info = certificate_info_for_user(
-            student,
-            course_id,
-            course_grade.letter_grade,
-            student.id in whitelisted_user_ids
-        )
-
-        TASK_LOG.info(
-            u'Student certificate eligibility: %s '
-            u'(user=%s, course_id=%s, grade_percent=%s letter_grade=%s gradecutoffs=%s, allow_certificate=%s, '
-            u'is_whitelisted=%s)',
-            certificate_info[0],
-            student,
-            course_id,
-            course_grade.percent,
-            course_grade.letter_grade,
-            course.grade_cutoffs,
-            student.profile.allow_certificate,
-            student.id in whitelisted_user_ids
-        )
-
-        grade_results = []
-        for assignment_type, assignment_info in graded_assignments.iteritems():
-            for subsection_location in assignment_info['subsection_headers']:
-                try:
-                    subsection_grade = course_grade.graded_subsections_by_format[assignment_type][subsection_location]
-                except KeyError:
-                    grade_results.append([u'Not Available'])
-                else:
-                    if subsection_grade.graded_total.first_attempted is not None:
-                        grade_results.append(
-                            [subsection_grade.graded_total.earned / subsection_grade.graded_total.possible]
-                        )
-                    else:
-                        grade_results.append([u'Not Attempted'])
-            if assignment_info['use_subsection_headers']:
-                assignment_average = course_grade.grader_result['grade_breakdown'].get(assignment_type, {}).get(
-                    'percent'
-                )
-                grade_results.append([assignment_average])
-
-        grade_results = list(chain.from_iterable(grade_results))
-
-        rows.append(
-            [student.id, student.email, student.username, course_grade.percent] +
-            grade_results + cohorts_group_name + group_configs_group_names + team_name +
-            [enrollment_mode] + [verification_status] + certificate_info
-        )
-
-    TASK_LOG.info(
-        u'%s, Task type: %s, Current step: %s, Grade calculation completed for students: %s/%s',
-        task_info_string,
-        action_name,
-        current_step,
-        student_counter,
-        total_enrolled_students
-    )
-
-    # By this point, we've got the rows we're going to stuff into our CSV files.
-    current_step = {'step': 'Uploading CSVs'}
-    task_progress.update_task_state(extra_meta=current_step)
-    TASK_LOG.info(u'%s, Task type: %s, Current step: %s', task_info_string, action_name, current_step)
-
-    # Perform the actual upload
-    upload_csv_to_report_store(rows, 'grade_report', course_id, start_date)
-
-    # If there are any error rows (don't count the header), write them out as well
-    if len(err_rows) > 1:
-        upload_csv_to_report_store(err_rows, 'grade_report_err', course_id, start_date)
-
-    # One last update before we close out...
-    TASK_LOG.info(u'%s, Task type: %s, Finalizing grade task', task_info_string, action_name)
-    return task_progress.update_task_state(extra_meta=current_step)

+class CourseGradeReportContext(object):
+    """
+    Internal class that provides a common context to use for a single grade
+    report.  When a report is parallelized across multiple processes,
+    elements of this context are serialized and parsed across process
+    boundaries.
+    """
+    def __init__(self, _xmodule_instance_args, _entry_id, course_id, _task_input, action_name):
+        self.task_info_string = (
+            u'Task: {task_id}, '
+            u'InstructorTask ID: {entry_id}, '
+            u'Course: {course_id}, '
+            u'Input: {task_input}'
+        ).format(
+            task_id=_xmodule_instance_args.get('task_id') if _xmodule_instance_args is not None else None,
+            entry_id=_entry_id,
+            course_id=course_id,
+            task_input=_task_input,
+        )
+        self.action_name = action_name
+        self.course_id = course_id
+        self.task_progress = TaskProgress(self.action_name, total=None, start_time=time())
+
+    @lazy
+    def course(self):
+        return get_course_by_id(self.course_id)
+
+    @lazy
+    def course_experiments(self):
+        return get_split_user_partitions(self.course.user_partitions)
+
+    @lazy
+    def teams_enabled(self):
+        return self.course.teams_enabled
+
+    @lazy
+    def cohorts_enabled(self):
+        return is_course_cohorted(self.course_id)
+
+    @lazy
+    def graded_assignments(self):
+        """
+        Returns an OrderedDict that maps an assignment type to a dict of
+        subsection-headers and average-header.
+        """
+        grading_context = grading_context_for_course(self.course_id)
+        graded_assignments_map = OrderedDict()
+        for assignment_type_name, subsection_infos in grading_context['all_graded_subsections_by_type'].iteritems():
+            graded_subsections_map = OrderedDict()
+            for subsection_index, subsection_info in enumerate(subsection_infos, start=1):
+                subsection = subsection_info['subsection_block']
+                header_name = u"{assignment_type} {subsection_index}: {subsection_name}".format(
+                    assignment_type=assignment_type_name,
+                    subsection_index=subsection_index,
+                    subsection_name=subsection.display_name,
+                )
+                graded_subsections_map[subsection.location] = header_name
+
+            average_header = u"{assignment_type}".format(assignment_type=assignment_type_name)
+
+            # Use separate subsection and average columns only if
+            # there's more than one subsection.
+            separate_subsection_avg_headers = len(subsection_infos) > 1
+            if separate_subsection_avg_headers:
+                average_header += u" (Avg)"
+
+            graded_assignments_map[assignment_type_name] = {
+                'subsection_headers': graded_subsections_map,
+                'average_header': average_header,
+                'separate_subsection_avg_headers': separate_subsection_avg_headers
+            }
+        return graded_assignments_map
+
+    def update_status(self, message):
+        """
+        Updates the status on the celery task to the given message.
+        Also logs the update.
+        """
+        TASK_LOG.info(u'%s, Task type: %s, %s', self.task_info_string, self.action_name, message)
+        return self.task_progress.update_task_state(extra_meta={'step': message})
+
+
+class CourseGradeReport(object):
+    """
+    Class to encapsulate functionality related to generating Grade Reports.
+    """
+    @classmethod
+    def generate(cls, _xmodule_instance_args, _entry_id, course_id, _task_input, action_name):
+        """
+        Public method to generate a grade report.
+        """
+        context = CourseGradeReportContext(_xmodule_instance_args, _entry_id, course_id, _task_input, action_name)
+        return CourseGradeReport()._generate(context)
+
+    def _generate(self, context):
+        """
+        Internal method for generating a grade report for the given context.
+        """
+        context.update_status(u'Starting grades')
+        success_headers = self._success_headers(context)
+        error_headers = self._error_headers()
+        batched_rows = self._batched_rows(context)
+
+        context.update_status(u'Compiling grades')
+        success_rows, error_rows = self._compile(context, batched_rows)
+
+        context.update_status(u'Uploading grades')
+        self._upload(context, success_headers, success_rows, error_headers, error_rows)
+
+        return context.update_status(u'Completed grades')
+
+    def _success_headers(self, context):
+        """
+        Returns a list of all applicable column headers for this grade report.
+        """
+        return (
+            ["Student ID", "Email", "Username", "Grade"] +
+            self._grades_header(context) +
+            (['Cohort Name'] if context.cohorts_enabled else []) +
+            [u'Experiment Group ({})'.format(partition.name) for partition in context.course_experiments] +
+            (['Team Name'] if context.teams_enabled else []) +
+            ['Enrollment Track', 'Verification Status'] +
+            ['Certificate Eligible', 'Certificate Delivered', 'Certificate Type']
+        )
+
+    def _error_headers(self):
+        """
+        Returns a list of error headers for this grade report.
+        """
+        return ["Student ID", "Username", "Error"]
+
+    def _batched_rows(self, context):
+        """
+        A generator of batches of (success_rows, error_rows) for this report.
+        """
+        for users in self._batch_users(context):
+            yield self._rows_for_users(context, users)
+
+    def _compile(self, context, batched_rows):
+        """
+        Compiles and returns the complete list of (success_rows, error_rows) for
+        the given batched_rows and context.
+        """
+        # partition and chain successes and errors
+        success_rows, error_rows = izip(*batched_rows)
+        success_rows = list(chain(*success_rows))
+        error_rows = list(chain(*error_rows))
+
+        # update metrics on task status
+        context.task_progress.succeeded = len(success_rows)
+        context.task_progress.failed = len(error_rows)
+        context.task_progress.attempted = context.task_progress.succeeded + context.task_progress.failed
+        context.task_progress.total = context.task_progress.attempted
+        return success_rows, error_rows
+
+    def _upload(self, context, success_headers, success_rows, error_headers, error_rows):
+        """
+        Creates and uploads a CSV for the given headers and rows.
+        """
+        date = datetime.now(UTC)
+        upload_csv_to_report_store([success_headers] + success_rows, 'grade_report', context.course_id, date)
+        if len(error_rows) > 0:
+            error_rows = [error_headers] + error_rows
+            upload_csv_to_report_store(error_rows, 'grade_report_err', context.course_id, date)
+
+    def _grades_header(self, context):
+        """
+        Returns the applicable grades-related headers for this report.
+        """
+        graded_assignments = context.graded_assignments
+        grades_header = []
+        for assignment_info in graded_assignments.itervalues():
+            if assignment_info['separate_subsection_avg_headers']:
+                grades_header.extend(assignment_info['subsection_headers'].itervalues())
+            grades_header.append(assignment_info['average_header'])
+        return grades_header
+
+    def _batch_users(self, context):
+        """
+        Returns a generator of batches of users.
+        """
+        def grouper(iterable, chunk_size=1, fillvalue=None):
+            args = [iter(iterable)] * chunk_size
+            return izip_longest(*args, fillvalue=fillvalue)
+        users = CourseEnrollment.objects.users_enrolled_in(context.course_id)
+        return grouper(users)
+
+    def _user_grade_results(self, course_grade, context):
+        """
+        Returns a list of grade results for the given course_grade corresponding
+        to the headers for this report.
+        """
+        grade_results = []
+        for assignment_type, assignment_info in context.graded_assignments.iteritems():
+            for subsection_location in assignment_info['subsection_headers']:
+                try:
+                    subsection_grade = course_grade.graded_subsections_by_format[assignment_type][subsection_location]
+                except KeyError:
+                    grade_result = u'Not Available'
+                else:
+                    if subsection_grade.graded_total.first_attempted is not None:
+                        grade_result = subsection_grade.graded_total.earned / subsection_grade.graded_total.possible
+                    else:
+                        grade_result = u'Not Attempted'
+                grade_results.append([grade_result])
+            if assignment_info['separate_subsection_avg_headers']:
+                assignment_average = course_grade.grader_result['grade_breakdown'].get(assignment_type, {}).get(
+                    'percent'
+                )
+                grade_results.append([assignment_average])
+        return [course_grade.percent] + list(chain.from_iterable(grade_results))
+
+    def _user_cohort_group_names(self, user, context):
+        """
+        Returns a list of names of cohort groups in which the given user
+        belongs.
+        """
+        cohort_group_names = []
+        if context.cohorts_enabled:
+            group = get_cohort(user, context.course_id, assign=False)
+            cohort_group_names.append(group.name if group else '')
+        return cohort_group_names
+
+    def _user_experiment_group_names(self, user, context):
+        """
+        Returns a list of names of course experiments in which the given user
+        belongs.
+        """
+        experiment_group_names = []
+        for partition in context.course_experiments:
+            group = PartitionService(context.course_id).get_group(user, partition, assign=False)
+            experiment_group_names.append(group.name if group else '')
+        return experiment_group_names
+
+    def _user_team_names(self, user, context):
+        """
+        Returns a list of names of teams in which the given user belongs.
+        """
+        team_names = []
+        if context.teams_enabled:
+            try:
+                membership = CourseTeamMembership.objects.get(user=user, team__course_id=context.course_id)
+                team_names.append(membership.team.name)
+            except CourseTeamMembership.DoesNotExist:
+                team_names.append('')
+        return team_names
+
+    def _user_verification_mode(self, user, context):
+        """
+        Returns a list of enrollment-mode and verification-status for the
+        given user.
+        """
+        enrollment_mode = CourseEnrollment.enrollment_mode_for_user(user, context.course_id)[0]
+        verification_status = SoftwareSecurePhotoVerification.verification_status_for_user(
+            user,
+            context.course_id,
+            enrollment_mode
+        )
+        return [enrollment_mode, verification_status]
+
+    def _user_certificate_info(self, user, context, course_grade, whitelisted_user_ids):
+        """
+        Returns the course certification information for the given user.
+        """
+        certificate_info = certificate_info_for_user(
+            user,
+            context.course_id,
+            course_grade.letter_grade,
+            user.id in whitelisted_user_ids
+        )
+        TASK_LOG.info(
+            u'Student certificate eligibility: %s '
+            u'(user=%s, course_id=%s, grade_percent=%s letter_grade=%s gradecutoffs=%s, allow_certificate=%s, '
+            u'is_whitelisted=%s)',
+            certificate_info[0],
+            user,
+            context.course_id,
+            course_grade.percent,
+            course_grade.letter_grade,
+            context.course.grade_cutoffs,
+            user.profile.allow_certificate,
+            user.id in whitelisted_user_ids,
+        )
+        return certificate_info
+
+    def _rows_for_users(self, context, users):
+        """
+        Returns a list of rows for the given users for this report.
+        """
+        certificate_whitelist = CertificateWhitelist.objects.filter(course_id=context.course_id, whitelist=True)
+        whitelisted_user_ids = [entry.user_id for entry in certificate_whitelist]
+        success_rows, error_rows = [], []
+        for user, course_grade, err_msg in CourseGradeFactory().iter(users, course_key=context.course_id):
+            if not course_grade:
+                # An empty gradeset means we failed to grade a student.
+                error_rows.append([user.id, user.username, err_msg])
+            else:
+                success_rows.append(
+                    [user.id, user.email, user.username] +
+                    self._user_grade_results(course_grade, context) +
+                    self._user_cohort_group_names(user, context) +
+                    self._user_experiment_group_names(user, context) +
+                    self._user_team_names(user, context) +
+                    self._user_verification_mode(user, context) +
+                    self._user_certificate_info(user, context, course_grade, whitelisted_user_ids)
+                )
+        return success_rows, error_rows

-def generate_problem_grade_report(_xmodule_instance_args, _entry_id, course_id, _task_input, action_name):
+class ProblemGradeReport(object):
+    @classmethod
+    def generate(cls, _xmodule_instance_args, _entry_id, course_id, _task_input, action_name):
         """
         Generate a CSV containing all students' problem grades within a given
         `course_id`.
@@ -244,7 +357,7 @@ def generate_problem_grade_report(_xmodule_instance_args, _entry_id, course_id,
         # as the keys. It is structured in this way to keep the values related.
         header_row = OrderedDict([('id', 'Student ID'), ('email', 'Email'), ('username', 'Username')])

-        graded_scorable_blocks = _graded_scorable_blocks_to_header(course_id)
+        graded_scorable_blocks = cls._graded_scorable_blocks_to_header(course_id)

         # Just generate the static fields for now.
         rows = [list(header_row.values()) + ['Grade'] + list(chain.from_iterable(graded_scorable_blocks.values()))]
@@ -252,7 +365,7 @@ def generate_problem_grade_report(_xmodule_instance_args, _entry_id, course_id,
         current_step = {'step': 'Calculating Grades'}

         course = get_course_by_id(course_id)
-        for student, course_grade, err_msg in CourseGradeFactory().iter(course, enrolled_students):
+        for student, course_grade, err_msg in CourseGradeFactory().iter(enrolled_students, course):
             student_fields = [getattr(student, field_name) for field_name in header_row]
             task_progress.attempted += 1
@@ -291,8 +404,34 @@ def generate_problem_grade_report(_xmodule_instance_args, _entry_id, course_id,
         return task_progress.update_task_state(extra_meta={'step': 'Uploading CSV'})

+    @classmethod
+    def _graded_scorable_blocks_to_header(cls, course_key):
+        """
+        Returns an OrderedDict that maps a scorable block's id to its
+        headers in the final report.
+        """
+        scorable_blocks_map = OrderedDict()
+        grading_context = grading_context_for_course(course_key)
+        for assignment_type_name, subsection_infos in grading_context['all_graded_subsections_by_type'].iteritems():
+            for subsection_index, subsection_info in enumerate(subsection_infos, start=1):
+                for scorable_block in subsection_info['scored_descendants']:
+                    header_name = (
+                        u"{assignment_type} {subsection_index}: "
+                        u"{subsection_name} - {scorable_block_name}"
+                    ).format(
+                        scorable_block_name=scorable_block.display_name,
+                        assignment_type=assignment_type_name,
+                        subsection_index=subsection_index,
+                        subsection_name=subsection_info['subsection_block'].display_name,
+                    )
+                    scorable_blocks_map[scorable_block.location] = [header_name + " (Earned)",
+                                                                    header_name + " (Possible)"]
+        return scorable_blocks_map

-def upload_problem_responses_csv(_xmodule_instance_args, _entry_id, course_id, task_input, action_name):
+class ProblemResponses(object):
+    @classmethod
+    def generate(cls, _xmodule_instance_args, _entry_id, course_id, task_input, action_name):
         """
         For a given `course_id`, generate a CSV file containing
         all student answers to a given problem, and store using a `ReportStore`.
@@ -324,60 +463,3 @@ def upload_problem_responses_csv(_xmodule_instance_args, _entry_id, course_id, t
         upload_csv_to_report_store(rows, csv_name, course_id, start_date)
         return task_progress.update_task_state(extra_meta=current_step)

-def _graded_assignments(course_key):
-    """
-    Returns an OrderedDict that maps an assignment type to a dict of subsection-headers and average-header.
-    """
-    grading_context = grading_context_for_course(course_key)
-    graded_assignments_map = OrderedDict()
-    for assignment_type_name, subsection_infos in grading_context['all_graded_subsections_by_type'].iteritems():
-        graded_subsections_map = OrderedDict()
-        for subsection_index, subsection_info in enumerate(subsection_infos, start=1):
-            subsection = subsection_info['subsection_block']
-            header_name = u"{assignment_type} {subsection_index}: {subsection_name}".format(
-                assignment_type=assignment_type_name,
-                subsection_index=subsection_index,
-                subsection_name=subsection.display_name,
-            )
-            graded_subsections_map[subsection.location] = header_name
-
-        average_header = u"{assignment_type}".format(assignment_type=assignment_type_name)
-
-        # Use separate subsection and average columns only if
-        # there's more than one subsection.
-        use_subsection_headers = len(subsection_infos) > 1
-        if use_subsection_headers:
-            average_header += u" (Avg)"
-
-        graded_assignments_map[assignment_type_name] = {
-            'subsection_headers': graded_subsections_map,
-            'average_header': average_header,
-            'use_subsection_headers': use_subsection_headers
-        }
-    return graded_assignments_map
-
-
-def _graded_scorable_blocks_to_header(course_key):
-    """
-    Returns an OrderedDict that maps a scorable block's id to its
-    headers in the final report.
-    """
-    scorable_blocks_map = OrderedDict()
-    grading_context = grading_context_for_course(course_key)
-    for assignment_type_name, subsection_infos in grading_context['all_graded_subsections_by_type'].iteritems():
-        for subsection_index, subsection_info in enumerate(subsection_infos, start=1):
-            for scorable_block in subsection_info['scored_descendants']:
-                header_name = (
-                    u"{assignment_type} {subsection_index}: "
-                    u"{subsection_name} - {scorable_block_name}"
-                ).format(
-                    scorable_block_name=scorable_block.display_name,
-                    assignment_type=assignment_type_name,
-                    subsection_index=subsection_index,
-                    subsection_name=subsection_info['subsection_block'].display_name,
-                )
-                scorable_blocks_map[scorable_block.location] = [header_name + " (Earned)", header_name + " (Possible)"]
-    return scorable_blocks_map
...
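
The `_batch_users` helper in the diff above uses the standard itertools `grouper` recipe. With the default `chunk_size=1` every batch is a 1-tuple, but for larger chunk sizes `izip_longest` pads the final batch with `fillvalue`, which a parallelized caller would need to filter out. A standalone sketch of that behavior (Python 2, matching the code above):

    from itertools import izip_longest

    def grouper(iterable, chunk_size=1, fillvalue=None):
        """Collect data into fixed-length chunks, padding the last with fillvalue."""
        args = [iter(iterable)] * chunk_size
        return izip_longest(*args, fillvalue=fillvalue)

    print list(grouper('ABCDE', chunk_size=2))
    # [('A', 'B'), ('C', 'D'), ('E', None)]  <- None padding downstream code must handle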
@@ -32,7 +32,7 @@ from lms.djangoapps.instructor_task.api import (
     submit_delete_problem_state_for_all_students
 )
 from lms.djangoapps.instructor_task.models import InstructorTask
-from lms.djangoapps.instructor_task.tasks_helper.grades import generate_course_grade_report
+from lms.djangoapps.instructor_task.tasks_helper.grades import CourseGradeReport
 from lms.djangoapps.instructor_task.tests.test_base import (
     InstructorTaskModuleTestCase,
     TestReportMixin,
@@ -572,10 +572,10 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
     def verify_csv_task_success(self, task_result):
         """
         Verify that all students were successfully graded by
-        `generate_course_grade_report`.
+        `CourseGradeReport`.

         Arguments:
-            task_result (dict): Return value of `generate_course_grade_report`.
+            task_result (dict): Return value of `CourseGradeReport.generate`.
         """
         self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, task_result)
@@ -636,7 +636,7 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
         self.submit_student_answer(self.student_b.username, problem_b_url, [OPTION_1, OPTION_2])

         with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
-            result = generate_course_grade_report(None, None, self.course.id, None, 'graded')
+            result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.verify_csv_task_success(result)
         self.verify_grades_in_csv(
             [
@@ -669,7 +669,7 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
         self.submit_student_answer(self.student_a.username, problem_a_url, [OPTION_1, OPTION_1])

         with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
-            result = generate_course_grade_report(None, None, self.course.id, None, 'graded')
+            result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.verify_csv_task_success(result)
         self.verify_grades_in_csv(
             [
...
@@ -59,14 +59,13 @@ from lms.djangoapps.instructor_task.tasks_helper.enrollments import (
     upload_students_csv,
 )
 from lms.djangoapps.instructor_task.tasks_helper.grades import (
-    generate_course_grade_report,
-    generate_problem_grade_report,
-    upload_problem_responses_csv,
+    CourseGradeReport,
+    ProblemGradeReport,
+    ProblemResponses,
 )
 from lms.djangoapps.instructor_task.tasks_helper.misc import (
     cohort_students_and_upload,
     upload_course_survey_report,
-    upload_proctored_exam_results_report,
     upload_ora2_data,
 )
 from ..tasks_helper.utils import (
@@ -89,7 +88,7 @@ class InstructorGradeReportTestCase(TestReportMixin, InstructorTaskCourseTestCase):
         Verify cell data in the grades CSV for a particular user.
         """
         with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
-            result = generate_course_grade_report(None, None, course_id, None, 'graded')
+            result = CourseGradeReport.generate(None, None, course_id, None, 'graded')
         self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
         report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
         report_csv_filename = report_store.links_for(course_id)[0][0]
@@ -121,7 +120,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
         self.current_task.update_state = Mock()
         with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task') as mock_current_task:
             mock_current_task.return_value = self.current_task
-            result = generate_course_grade_report(None, None, self.course.id, None, 'graded')
+            result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
         num_students = len(emails)
         self.assertDictContainsSubset({'attempted': num_students, 'succeeded': num_students, 'failed': 0}, result)
@@ -135,7 +134,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
             mock_grades_iter.return_value = [
                 (self.create_student('username', 'student@example.com'), None, 'Cannot grade student')
             ]
-            result = generate_course_grade_report(None, None, self.course.id, None, 'graded')
+            result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset({'attempted': 1, 'succeeded': 0, 'failed': 1}, result)

         report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
@@ -319,7 +318,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
                     '',
                 )
             ]
-            result = generate_course_grade_report(None, None, self.course.id, None, 'graded')
+            result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset({'attempted': 1, 'succeeded': 1, 'failed': 0}, result)
@@ -378,7 +377,7 @@ class TestProblemResponsesReport(TestReportMixin, InstructorTaskCourseTestCase):
                 {'username': 'user1', 'state': u'state1'},
                 {'username': 'user2', 'state': u'state2'},
            ]
-            result = upload_problem_responses_csv(None, None, self.course.id, task_input, 'calculated')
+            result = ProblemResponses.generate(None, None, self.course.id, task_input, 'calculated')
         report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
         links = report_store.links_for(self.course.id)
@@ -609,7 +608,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
        Verify that we see no grade information for a course with no graded
        problems.
        """
-        result = generate_problem_grade_report(None, None, self.course.id, None, 'graded')
+        result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset({'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
         self.verify_rows_in_csv([
             dict(zip(
@@ -633,7 +632,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
         self.define_option_problem(u'Problem1', parent=vertical)

         self.submit_student_answer(self.student_1.username, u'Problem1', ['Option 1'])
-        result = generate_problem_grade_report(None, None, self.course.id, None, 'graded')
+        result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset({'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
         problem_name = u'Homework 1: Subsection - Problem1'
         header_row = self.csv_header_row + [problem_name + ' (Earned)', problem_name + ' (Possible)']
@@ -670,7 +669,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
             mock_grades_iter.return_value = [
                 (student, None, error_message)
             ]
-            result = generate_problem_grade_report(None, None, self.course.id, None, 'graded')
+            result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset({'attempted': 1, 'succeeded': 0, 'failed': 1}, result)

         report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
@@ -720,7 +719,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
         self.submit_student_answer(self.student_b.username, self.problem_b_url, [self.OPTION_1, self.OPTION_2])

         with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
-            result = generate_problem_grade_report(None, None, self.course.id, None, 'graded')
+            result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset(
             {'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result
         )
@@ -812,7 +811,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
             header_row += [problem + ' (Earned)', problem + ' (Possible)']

         with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
-            generate_problem_grade_report(None, None, self.course.id, None, 'graded')
+            ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.assertEquals(self.get_csv_row_with_headers(), header_row)
@@ -868,7 +867,7 @@ class TestProblemReportCohortedContent(TestReportMixin, ContentGroupTestCase, In
         self.submit_student_answer(self.beta_user.username, u'Problem1', ['Option 1', 'Option 2'])

         with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
-            result = generate_problem_grade_report(None, None, self.course.id, None, 'graded')
+            result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset(
             {'action_name': 'graded', 'attempted': 4, 'succeeded': 4, 'failed': 0}, result
         )
@@ -1579,7 +1578,7 @@ class TestGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
         self.submit_student_answer(self.student.username, u'Problem1', ['Option 1'])

         with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
-            result = generate_course_grade_report(None, None, self.course.id, None, 'graded')
+            result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset(
             {'action_name': 'graded', 'attempted': 1, 'succeeded': 1, 'failed': 0},
             result,
@@ -1654,7 +1653,7 @@ class TestGradeReportEnrollmentAndCertificateInfo(TestReportMixin, InstructorTas
         Verify grade report data.
         """
         with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
-            generate_course_grade_report(None, None, self.course.id, None, 'graded')
+            CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
         report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
         report_csv_filename = report_store.links_for(self.course.id)[0][0]
         report_path = report_store.path_to(self.course.id, report_csv_filename)
...