Commit 379a2c18 by Nimisha Asthagiri

Refactor Grade Report in prep for parallelization.

parent cc98111d
...@@ -564,7 +564,7 @@ def ccx_grades_csv(request, course, ccx=None): ...@@ -564,7 +564,7 @@ def ccx_grades_csv(request, course, ccx=None):
courseenrollment__course_id=ccx_key, courseenrollment__course_id=ccx_key,
courseenrollment__is_active=1 courseenrollment__is_active=1
).order_by('username').select_related("profile") ).order_by('username').select_related("profile")
grades = CourseGradeFactory().iter(course, enrolled_students) grades = CourseGradeFactory().iter(enrolled_students, course)
header = None header = None
rows = [] rows = []
......
from lms.djangoapps.course_blocks.api import get_course_blocks from lms.djangoapps.course_blocks.api import get_course_blocks
from openedx.core.djangoapps.content.block_structure.api import get_block_structure_manager
from xmodule.modulestore.django import modulestore from xmodule.modulestore.django import modulestore
from ..transformer import GradesTransformer from ..transformer import GradesTransformer
...@@ -57,6 +58,12 @@ class CourseData(object): ...@@ -57,6 +58,12 @@ class CourseData(object):
return self._structure return self._structure
@property @property
def collected_structure(self):
if not self._collected_block_structure:
self._collected_block_structure = get_block_structure_manager(self.course_key).get_collected()
return self._collected_block_structure
@property
def course(self): def course(self):
if not self._course: if not self._course:
self._course = modulestore().get_course(self.course_key) self._course = modulestore().get_course(self.course_key)
......
...@@ -2,7 +2,6 @@ from collections import namedtuple ...@@ -2,7 +2,6 @@ from collections import namedtuple
import dogstats_wrapper as dog_stats_api import dogstats_wrapper as dog_stats_api
from logging import getLogger from logging import getLogger
from openedx.core.djangoapps.content.block_structure.api import get_block_structure_manager
from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED from openedx.core.djangoapps.signals.signals import COURSE_GRADE_CHANGED
from ..config import assume_zero_if_absent, should_persist_grades from ..config import assume_zero_if_absent, should_persist_grades
...@@ -77,7 +76,15 @@ class CourseGradeFactory(object): ...@@ -77,7 +76,15 @@ class CourseGradeFactory(object):
course_data = CourseData(user, course, collected_block_structure, course_structure, course_key) course_data = CourseData(user, course, collected_block_structure, course_structure, course_key)
return self._update(user, course_data, read_only=False) return self._update(user, course_data, read_only=False)
def iter(self, course, students, force_update=False): def iter(
self,
users,
course=None,
collected_block_structure=None,
course_structure=None,
course_key=None,
force_update=False,
):
""" """
Given a course and an iterable of students (User), yield a GradeResult Given a course and an iterable of students (User), yield a GradeResult
for every student enrolled in the course. GradeResult is a named tuple of: for every student enrolled in the course. GradeResult is a named tuple of:
...@@ -92,25 +99,27 @@ class CourseGradeFactory(object): ...@@ -92,25 +99,27 @@ class CourseGradeFactory(object):
# compute the grade for all students. # compute the grade for all students.
# 2. Optimization: the collected course_structure is not # 2. Optimization: the collected course_structure is not
# retrieved from the data store multiple times. # retrieved from the data store multiple times.
course_data = CourseData(None, course, collected_block_structure, course_structure, course_key)
collected_block_structure = get_block_structure_manager(course.id).get_collected() for user in users:
for student in students: with dog_stats_api.timer(
with dog_stats_api.timer('lms.grades.CourseGradeFactory.iter', tags=[u'action:{}'.format(course.id)]): 'lms.grades.CourseGradeFactory.iter',
tags=[u'action:{}'.format(course_data.course_key)]
):
try: try:
operation = CourseGradeFactory().update if force_update else CourseGradeFactory().create method = CourseGradeFactory().update if force_update else CourseGradeFactory().create
course_grade = operation(student, course, collected_block_structure) course_grade = method(user, course, course_data.collected_structure, course_structure, course_key)
yield self.GradeResult(student, course_grade, "") yield self.GradeResult(user, course_grade, "")
except Exception as exc: # pylint: disable=broad-except except Exception as exc: # pylint: disable=broad-except
# Keep marching on even if this student couldn't be graded for # Keep marching on even if this student couldn't be graded for
# some reason, but log it for future reference. # some reason, but log it for future reference.
log.exception( log.exception(
'Cannot grade student %s in course %s because of exception: %s', 'Cannot grade student %s in course %s because of exception: %s',
student.id, user.id,
course.id, course_data.course_key,
exc.message exc.message
) )
yield self.GradeResult(student, None, exc.message) yield self.GradeResult(user, None, exc.message)
@staticmethod @staticmethod
def _create_zero(user, course_data): def _create_zero(user, course_data):
......
...@@ -96,7 +96,7 @@ def compute_grades_for_course(course_key, offset, batch_size, **kwargs): # pyli ...@@ -96,7 +96,7 @@ def compute_grades_for_course(course_key, offset, batch_size, **kwargs): # pyli
course = courses.get_course_by_id(CourseKey.from_string(course_key)) course = courses.get_course_by_id(CourseKey.from_string(course_key))
enrollments = CourseEnrollment.objects.filter(course_id=course.id).order_by('created') enrollments = CourseEnrollment.objects.filter(course_id=course.id).order_by('created')
student_iter = (enrollment.user for enrollment in enrollments[offset:offset + batch_size]) student_iter = (enrollment.user for enrollment in enrollments[offset:offset + batch_size])
list(CourseGradeFactory().iter(course, students=student_iter, force_update=True)) list(CourseGradeFactory().iter(users=student_iter, course=course, force_update=True))
@task(bind=True, base=_BaseTask, default_retry_delay=30, routing_key=settings.RECALCULATE_GRADES_ROUTING_KEY) @task(bind=True, base=_BaseTask, default_retry_delay=30, routing_key=settings.RECALCULATE_GRADES_ROUTING_KEY)
......
...@@ -60,7 +60,7 @@ class TestGradeIteration(SharedModuleStoreTestCase): ...@@ -60,7 +60,7 @@ class TestGradeIteration(SharedModuleStoreTestCase):
If we don't pass in any students, it should return a zero-length If we don't pass in any students, it should return a zero-length
iterator, but it shouldn't error. iterator, but it shouldn't error.
""" """
grade_results = list(CourseGradeFactory().iter(self.course, [])) grade_results = list(CourseGradeFactory().iter([], self.course))
self.assertEqual(grade_results, []) self.assertEqual(grade_results, [])
def test_all_empty_grades(self): def test_all_empty_grades(self):
...@@ -130,7 +130,7 @@ class TestGradeIteration(SharedModuleStoreTestCase): ...@@ -130,7 +130,7 @@ class TestGradeIteration(SharedModuleStoreTestCase):
students_to_course_grades = {} students_to_course_grades = {}
students_to_errors = {} students_to_errors = {}
for student, course_grade, err_msg in CourseGradeFactory().iter(course, students): for student, course_grade, err_msg in CourseGradeFactory().iter(students, course):
students_to_course_grades[student] = course_grade students_to_course_grades[student] = course_grade
if err_msg: if err_msg:
students_to_errors[student] = err_msg students_to_errors[student] = err_msg
......
...@@ -39,9 +39,9 @@ from lms.djangoapps.instructor_task.tasks_helper.enrollments import ( ...@@ -39,9 +39,9 @@ from lms.djangoapps.instructor_task.tasks_helper.enrollments import (
upload_students_csv, upload_students_csv,
) )
from lms.djangoapps.instructor_task.tasks_helper.grades import ( from lms.djangoapps.instructor_task.tasks_helper.grades import (
generate_course_grade_report, CourseGradeReport,
generate_problem_grade_report, ProblemGradeReport,
upload_problem_responses_csv, ProblemResponses,
) )
from lms.djangoapps.instructor_task.tasks_helper.misc import ( from lms.djangoapps.instructor_task.tasks_helper.misc import (
cohort_students_and_upload, cohort_students_and_upload,
...@@ -160,7 +160,7 @@ def calculate_problem_responses_csv(entry_id, xmodule_instance_args): ...@@ -160,7 +160,7 @@ def calculate_problem_responses_csv(entry_id, xmodule_instance_args):
""" """
# Translators: This is a past-tense verb that is inserted into task progress messages as {action}. # Translators: This is a past-tense verb that is inserted into task progress messages as {action}.
action_name = ugettext_noop('generated') action_name = ugettext_noop('generated')
task_fn = partial(upload_problem_responses_csv, xmodule_instance_args) task_fn = partial(ProblemResponses.generate, xmodule_instance_args)
return run_main_task(entry_id, task_fn, action_name) return run_main_task(entry_id, task_fn, action_name)
...@@ -176,7 +176,7 @@ def calculate_grades_csv(entry_id, xmodule_instance_args): ...@@ -176,7 +176,7 @@ def calculate_grades_csv(entry_id, xmodule_instance_args):
xmodule_instance_args.get('task_id'), entry_id, action_name xmodule_instance_args.get('task_id'), entry_id, action_name
) )
task_fn = partial(generate_course_grade_report, xmodule_instance_args) task_fn = partial(CourseGradeReport.generate, xmodule_instance_args)
return run_main_task(entry_id, task_fn, action_name) return run_main_task(entry_id, task_fn, action_name)
...@@ -193,7 +193,7 @@ def calculate_problem_grade_report(entry_id, xmodule_instance_args): ...@@ -193,7 +193,7 @@ def calculate_problem_grade_report(entry_id, xmodule_instance_args):
xmodule_instance_args.get('task_id'), entry_id, action_name xmodule_instance_args.get('task_id'), entry_id, action_name
) )
task_fn = partial(generate_problem_grade_report, xmodule_instance_args) task_fn = partial(ProblemGradeReport.generate, xmodule_instance_args)
return run_main_task(entry_id, task_fn, action_name) return run_main_task(entry_id, task_fn, action_name)
......
...@@ -3,7 +3,8 @@ Functionality for generating grade reports. ...@@ -3,7 +3,8 @@ Functionality for generating grade reports.
""" """
from collections import OrderedDict from collections import OrderedDict
from datetime import datetime from datetime import datetime
from itertools import chain from itertools import chain, izip_longest, izip
from lazy import lazy
import logging import logging
from pytz import UTC from pytz import UTC
import re import re
...@@ -29,355 +30,436 @@ from .utils import upload_csv_to_report_store ...@@ -29,355 +30,436 @@ from .utils import upload_csv_to_report_store
TASK_LOG = logging.getLogger('edx.celery.task') TASK_LOG = logging.getLogger('edx.celery.task')
def generate_course_grade_report(_xmodule_instance_args, _entry_id, course_id, _task_input, action_name): # pylint: disable=too-many-statements class CourseGradeReportContext(object):
""" """
For a given `course_id`, generate a grades CSV file for all students that Internal class that provides a common context to use for a single grade
are enrolled, and store using a `ReportStore`. Once created, the files can report. When a report is parallelized across multiple processes,
be accessed by instantiating another `ReportStore` (via elements of this context are serialized and parsed across process
`ReportStore.from_config()`) and calling `link_for()` on it. Writes are boundaries.
buffered, so we'll never write part of a CSV file to S3 -- i.e. any files
that are visible in ReportStore will be complete ones.
As we start to add more CSV downloads, it will probably be worthwhile to
make a more general CSVDoc class instead of building out the rows like we
do here.
""" """
start_time = time() def __init__(self, _xmodule_instance_args, _entry_id, course_id, _task_input, action_name):
start_date = datetime.now(UTC) self.task_info_string = (
status_interval = 100 u'Task: {task_id}, '
enrolled_students = CourseEnrollment.objects.users_enrolled_in(course_id) u'InstructorTask ID: {entry_id}, '
total_enrolled_students = enrolled_students.count() u'Course: {course_id}, '
task_progress = TaskProgress(action_name, total_enrolled_students, start_time) u'Input: {task_input}'
).format(
fmt = u'Task: {task_id}, InstructorTask ID: {entry_id}, Course: {course_id}, Input: {task_input}' task_id=_xmodule_instance_args.get('task_id') if _xmodule_instance_args is not None else None,
task_info_string = fmt.format( entry_id=_entry_id,
task_id=_xmodule_instance_args.get('task_id') if _xmodule_instance_args is not None else None, course_id=course_id,
entry_id=_entry_id, task_input=_task_input,
course_id=course_id,
task_input=_task_input
)
TASK_LOG.info(u'%s, Task type: %s, Starting task execution', task_info_string, action_name)
course = get_course_by_id(course_id)
course_is_cohorted = is_course_cohorted(course.id)
teams_enabled = course.teams_enabled
cohorts_header = ['Cohort Name'] if course_is_cohorted else []
teams_header = ['Team Name'] if teams_enabled else []
experiment_partitions = get_split_user_partitions(course.user_partitions)
group_configs_header = [u'Experiment Group ({})'.format(partition.name) for partition in experiment_partitions]
certificate_info_header = ['Certificate Eligible', 'Certificate Delivered', 'Certificate Type']
certificate_whitelist = CertificateWhitelist.objects.filter(course_id=course_id, whitelist=True)
whitelisted_user_ids = [entry.user_id for entry in certificate_whitelist]
# Loop over all our students and build our CSV lists in memory
rows = []
err_rows = [["id", "username", "error_msg"]]
current_step = {'step': 'Calculating Grades'}
student_counter = 0
TASK_LOG.info(
u'%s, Task type: %s, Current step: %s, Starting grade calculation for total students: %s',
task_info_string,
action_name,
current_step,
total_enrolled_students,
)
graded_assignments = _graded_assignments(course_id)
grade_header = []
for assignment_info in graded_assignments.itervalues():
if assignment_info['use_subsection_headers']:
grade_header.extend(assignment_info['subsection_headers'].itervalues())
grade_header.append(assignment_info['average_header'])
rows.append(
["Student ID", "Email", "Username", "Grade"] +
grade_header +
cohorts_header +
group_configs_header +
teams_header +
['Enrollment Track', 'Verification Status'] +
certificate_info_header
)
for student, course_grade, err_msg in CourseGradeFactory().iter(course, enrolled_students):
# Periodically update task status (this is a cache write)
if task_progress.attempted % status_interval == 0:
task_progress.update_task_state(extra_meta=current_step)
task_progress.attempted += 1
# Now add a log entry after each student is graded to get a sense
# of the task's progress
student_counter += 1
TASK_LOG.info(
u'%s, Task type: %s, Current step: %s, Grade calculation in-progress for students: %s/%s',
task_info_string,
action_name,
current_step,
student_counter,
total_enrolled_students
) )
self.action_name = action_name
self.course_id = course_id
self.task_progress = TaskProgress(self.action_name, total=None, start_time=time())
@lazy
def course(self):
return get_course_by_id(self.course_id)
@lazy
def course_experiments(self):
return get_split_user_partitions(self.course.user_partitions)
@lazy
def teams_enabled(self):
return self.course.teams_enabled
@lazy
def cohorts_enabled(self):
return is_course_cohorted(self.course_id)
@lazy
def graded_assignments(self):
"""
Returns an OrderedDict that maps an assignment type to a dict of
subsection-headers and average-header.
"""
grading_context = grading_context_for_course(self.course_id)
graded_assignments_map = OrderedDict()
for assignment_type_name, subsection_infos in grading_context['all_graded_subsections_by_type'].iteritems():
graded_subsections_map = OrderedDict()
for subsection_index, subsection_info in enumerate(subsection_infos, start=1):
subsection = subsection_info['subsection_block']
header_name = u"{assignment_type} {subsection_index}: {subsection_name}".format(
assignment_type=assignment_type_name,
subsection_index=subsection_index,
subsection_name=subsection.display_name,
)
graded_subsections_map[subsection.location] = header_name
average_header = u"{assignment_type}".format(assignment_type=assignment_type_name)
if not course_grade: # Use separate subsection and average columns only if
# An empty gradeset means we failed to grade a student. # there's more than one subsection.
task_progress.failed += 1 separate_subsection_avg_headers = len(subsection_infos) > 1
err_rows.append([student.id, student.username, err_msg]) if separate_subsection_avg_headers:
continue average_header += u" (Avg)"
# We were able to successfully grade this student for this course. graded_assignments_map[assignment_type_name] = {
task_progress.succeeded += 1 'subsection_headers': graded_subsections_map,
'average_header': average_header,
'separate_subsection_avg_headers': separate_subsection_avg_headers
}
return graded_assignments_map
cohorts_group_name = [] def update_status(self, message):
if course_is_cohorted: """
group = get_cohort(student, course_id, assign=False) Updates the status on the celery task to the given message.
cohorts_group_name.append(group.name if group else '') Also logs the update.
"""
TASK_LOG.info(u'%s, Task type: %s, %s', self.task_info_string, self.action_name, message)
return self.task_progress.update_task_state(extra_meta={'step': message})
group_configs_group_names = []
for partition in experiment_partitions:
group = PartitionService(course_id).get_group(student, partition, assign=False)
group_configs_group_names.append(group.name if group else '')
team_name = [] class CourseGradeReport(object):
if teams_enabled: """
Class to encapsulate functionality related to generating Grade Reports.
"""
@classmethod
def generate(cls, _xmodule_instance_args, _entry_id, course_id, _task_input, action_name):
"""
Public method to generate a grade report.
"""
context = CourseGradeReportContext(_xmodule_instance_args, _entry_id, course_id, _task_input, action_name)
return CourseGradeReport()._generate(context)
def _generate(self, context):
"""
Internal method for generating a grade report for the given context.
"""
context.update_status(u'Starting grades')
success_headers = self._success_headers(context)
error_headers = self._error_headers()
batched_rows = self._batched_rows(context)
context.update_status(u'Compiling grades')
success_rows, error_rows = self._compile(context, batched_rows)
context.update_status(u'Uploading grades')
self._upload(context, success_headers, success_rows, error_headers, error_rows)
return context.update_status(u'Completed grades')
def _success_headers(self, context):
"""
Returns a list of all applicable column headers for this grade report.
"""
return (
["Student ID", "Email", "Username", "Grade"] +
self._grades_header(context) +
(['Cohort Name'] if context.cohorts_enabled else []) +
[u'Experiment Group ({})'.format(partition.name) for partition in context.course_experiments] +
(['Team Name'] if context.teams_enabled else []) +
['Enrollment Track', 'Verification Status'] +
['Certificate Eligible', 'Certificate Delivered', 'Certificate Type']
)
def _error_headers(self):
"""
Returns a list of error headers for this grade report.
"""
return ["Student ID", "Username", "Error"]
def _batched_rows(self, context):
"""
A generator of batches of (success_rows, error_rows) for this report.
"""
for users in self._batch_users(context):
yield self._rows_for_users(context, users)
def _compile(self, context, batched_rows):
"""
Compiles and returns the complete list of (success_rows, error_rows) for
the given batched_rows and context.
"""
# partition and chain successes and errors
success_rows, error_rows = izip(*batched_rows)
success_rows = list(chain(*success_rows))
error_rows = list(chain(*error_rows))
# update metrics on task status
context.task_progress.succeeded = len(success_rows)
context.task_progress.failed = len(error_rows)
context.task_progress.attempted = context.task_progress.succeeded + context.task_progress.failed
context.task_progress.total = context.task_progress.attempted
return success_rows, error_rows
def _upload(self, context, success_headers, success_rows, error_headers, error_rows):
"""
Creates and uploads a CSV for the given headers and rows.
"""
date = datetime.now(UTC)
upload_csv_to_report_store([success_headers] + success_rows, 'grade_report', context.course_id, date)
if len(error_rows) > 0:
error_rows = [error_headers] + error_rows
upload_csv_to_report_store(error_rows, 'grade_report_err', context.course_id, date)
def _grades_header(self, context):
"""
Returns the applicable grades-related headers for this report.
"""
graded_assignments = context.graded_assignments
grades_header = []
for assignment_info in graded_assignments.itervalues():
if assignment_info['separate_subsection_avg_headers']:
grades_header.extend(assignment_info['subsection_headers'].itervalues())
grades_header.append(assignment_info['average_header'])
return grades_header
def _batch_users(self, context):
"""
Returns a generator of batches of users.
"""
def grouper(iterable, chunk_size=1, fillvalue=None):
args = [iter(iterable)] * chunk_size
return izip_longest(*args, fillvalue=fillvalue)
users = CourseEnrollment.objects.users_enrolled_in(context.course_id)
return grouper(users)
def _user_grade_results(self, course_grade, context):
"""
Returns a list of grade results for the given course_grade corresponding
to the headers for this report.
"""
grade_results = []
for assignment_type, assignment_info in context.graded_assignments.iteritems():
for subsection_location in assignment_info['subsection_headers']:
try:
subsection_grade = course_grade.graded_subsections_by_format[assignment_type][subsection_location]
except KeyError:
grade_result = u'Not Available'
else:
if subsection_grade.graded_total.first_attempted is not None:
grade_result = subsection_grade.graded_total.earned / subsection_grade.graded_total.possible
else:
grade_result = u'Not Attempted'
grade_results.append([grade_result])
if assignment_info['separate_subsection_avg_headers']:
assignment_average = course_grade.grader_result['grade_breakdown'].get(assignment_type, {}).get(
'percent'
)
grade_results.append([assignment_average])
return [course_grade.percent] + list(chain.from_iterable(grade_results))
def _user_cohort_group_names(self, user, context):
"""
Returns a list of names of cohort groups in which the given user
belongs.
"""
cohort_group_names = []
if context.cohorts_enabled:
group = get_cohort(user, context.course_id, assign=False)
cohort_group_names.append(group.name if group else '')
return cohort_group_names
def _user_experiment_group_names(self, user, context):
"""
Returns a list of names of course experiments in which the given user
belongs.
"""
experiment_group_names = []
for partition in context.course_experiments:
group = PartitionService(context.course_id).get_group(user, partition, assign=False)
experiment_group_names.append(group.name if group else '')
return experiment_group_names
def _user_team_names(self, user, context):
"""
Returns a list of names of teams in which the given user belongs.
"""
team_names = []
if context.teams_enabled:
try: try:
membership = CourseTeamMembership.objects.get(user=student, team__course_id=course_id) membership = CourseTeamMembership.objects.get(user=user, team__course_id=context.course_id)
team_name.append(membership.team.name) team_names.append(membership.team.name)
except CourseTeamMembership.DoesNotExist: except CourseTeamMembership.DoesNotExist:
team_name.append('') team_names.append('')
return team_names
enrollment_mode = CourseEnrollment.enrollment_mode_for_user(student, course_id)[0]
def _user_verification_mode(self, user, context):
"""
Returns a list of enrollment-mode and verification-status for the
given user.
"""
enrollment_mode = CourseEnrollment.enrollment_mode_for_user(user, context.course_id)[0]
verification_status = SoftwareSecurePhotoVerification.verification_status_for_user( verification_status = SoftwareSecurePhotoVerification.verification_status_for_user(
student, user,
course_id, context.course_id,
enrollment_mode enrollment_mode
) )
return [enrollment_mode, verification_status]
def _user_certificate_info(self, user, context, course_grade, whitelisted_user_ids):
"""
Returns the course certification information for the given user.
"""
certificate_info = certificate_info_for_user( certificate_info = certificate_info_for_user(
student, user,
course_id, context.course_id,
course_grade.letter_grade, course_grade.letter_grade,
student.id in whitelisted_user_ids user.id in whitelisted_user_ids
) )
TASK_LOG.info( TASK_LOG.info(
u'Student certificate eligibility: %s ' u'Student certificate eligibility: %s '
u'(user=%s, course_id=%s, grade_percent=%s letter_grade=%s gradecutoffs=%s, allow_certificate=%s, ' u'(user=%s, course_id=%s, grade_percent=%s letter_grade=%s gradecutoffs=%s, allow_certificate=%s, '
u'is_whitelisted=%s)', u'is_whitelisted=%s)',
certificate_info[0], certificate_info[0],
student, user,
course_id, context.course_id,
course_grade.percent, course_grade.percent,
course_grade.letter_grade, course_grade.letter_grade,
course.grade_cutoffs, context.course.grade_cutoffs,
student.profile.allow_certificate, user.profile.allow_certificate,
student.id in whitelisted_user_ids user.id in whitelisted_user_ids,
) )
return certificate_info
grade_results = []
for assignment_type, assignment_info in graded_assignments.iteritems(): def _rows_for_users(self, context, users):
for subsection_location in assignment_info['subsection_headers']: """
Returns a list of rows for the given users for this report.
"""
certificate_whitelist = CertificateWhitelist.objects.filter(course_id=context.course_id, whitelist=True)
whitelisted_user_ids = [entry.user_id for entry in certificate_whitelist]
success_rows, error_rows = [], []
for user, course_grade, err_msg in CourseGradeFactory().iter(users, course_key=context.course_id):
if not course_grade:
# An empty gradeset means we failed to grade a student.
error_rows.append([user.id, user.username, err_msg])
else:
success_rows.append(
[user.id, user.email, user.username] +
self._user_grade_results(course_grade, context) +
self._user_cohort_group_names(user, context) +
self._user_experiment_group_names(user, context) +
self._user_team_names(user, context) +
self._user_verification_mode(user, context) +
self._user_certificate_info(user, context, course_grade, whitelisted_user_ids)
)
return success_rows, error_rows
class ProblemGradeReport(object):
@classmethod
def generate(cls, _xmodule_instance_args, _entry_id, course_id, _task_input, action_name):
"""
Generate a CSV containing all students' problem grades within a given
`course_id`.
"""
start_time = time()
start_date = datetime.now(UTC)
status_interval = 100
enrolled_students = CourseEnrollment.objects.users_enrolled_in(course_id)
task_progress = TaskProgress(action_name, enrolled_students.count(), start_time)
# This struct encapsulates both the display names of each static item in the
# header row as values as well as the django User field names of those items
# as the keys. It is structured in this way to keep the values related.
header_row = OrderedDict([('id', 'Student ID'), ('email', 'Email'), ('username', 'Username')])
graded_scorable_blocks = cls._graded_scorable_blocks_to_header(course_id)
# Just generate the static fields for now.
rows = [list(header_row.values()) + ['Grade'] + list(chain.from_iterable(graded_scorable_blocks.values()))]
error_rows = [list(header_row.values()) + ['error_msg']]
current_step = {'step': 'Calculating Grades'}
course = get_course_by_id(course_id)
for student, course_grade, err_msg in CourseGradeFactory().iter(enrolled_students, course):
student_fields = [getattr(student, field_name) for field_name in header_row]
task_progress.attempted += 1
if not course_grade:
# There was an error grading this student.
if not err_msg:
err_msg = u'Unknown error'
error_rows.append(student_fields + [err_msg])
task_progress.failed += 1
continue
earned_possible_values = []
for block_location in graded_scorable_blocks:
try: try:
subsection_grade = course_grade.graded_subsections_by_format[assignment_type][subsection_location] problem_score = course_grade.problem_scores[block_location]
except KeyError: except KeyError:
grade_results.append([u'Not Available']) earned_possible_values.append([u'Not Available', u'Not Available'])
else: else:
if subsection_grade.graded_total.first_attempted is not None: if problem_score.first_attempted:
grade_results.append( earned_possible_values.append([problem_score.earned, problem_score.possible])
[subsection_grade.graded_total.earned / subsection_grade.graded_total.possible]
)
else: else:
grade_results.append([u'Not Attempted']) earned_possible_values.append([u'Not Attempted', problem_score.possible])
if assignment_info['use_subsection_headers']:
assignment_average = course_grade.grader_result['grade_breakdown'].get(assignment_type, {}).get( rows.append(student_fields + [course_grade.percent] + list(chain.from_iterable(earned_possible_values)))
'percent'
) task_progress.succeeded += 1
grade_results.append([assignment_average]) if task_progress.attempted % status_interval == 0:
task_progress.update_task_state(extra_meta=current_step)
grade_results = list(chain.from_iterable(grade_results))
# Perform the upload if any students have been successfully graded
rows.append( if len(rows) > 1:
[student.id, student.email, student.username, course_grade.percent] + upload_csv_to_report_store(rows, 'problem_grade_report', course_id, start_date)
grade_results + cohorts_group_name + group_configs_group_names + team_name + # If there are any error rows, write them out as well
[enrollment_mode] + [verification_status] + certificate_info if len(error_rows) > 1:
) upload_csv_to_report_store(error_rows, 'problem_grade_report_err', course_id, start_date)
TASK_LOG.info( return task_progress.update_task_state(extra_meta={'step': 'Uploading CSV'})
u'%s, Task type: %s, Current step: %s, Grade calculation completed for students: %s/%s',
task_info_string, @classmethod
action_name, def _graded_scorable_blocks_to_header(cls, course_key):
current_step, """
student_counter, Returns an OrderedDict that maps a scorable block's id to its
total_enrolled_students headers in the final report.
) """
scorable_blocks_map = OrderedDict()
# By this point, we've got the rows we're going to stuff into our CSV files. grading_context = grading_context_for_course(course_key)
current_step = {'step': 'Uploading CSVs'} for assignment_type_name, subsection_infos in grading_context['all_graded_subsections_by_type'].iteritems():
task_progress.update_task_state(extra_meta=current_step) for subsection_index, subsection_info in enumerate(subsection_infos, start=1):
TASK_LOG.info(u'%s, Task type: %s, Current step: %s', task_info_string, action_name, current_step) for scorable_block in subsection_info['scored_descendants']:
header_name = (
# Perform the actual upload u"{assignment_type} {subsection_index}: "
upload_csv_to_report_store(rows, 'grade_report', course_id, start_date) u"{subsection_name} - {scorable_block_name}"
).format(
# If there are any error rows (don't count the header), write them out as well scorable_block_name=scorable_block.display_name,
if len(err_rows) > 1: assignment_type=assignment_type_name,
upload_csv_to_report_store(err_rows, 'grade_report_err', course_id, start_date) subsection_index=subsection_index,
subsection_name=subsection_info['subsection_block'].display_name,
# One last update before we close out... )
TASK_LOG.info(u'%s, Task type: %s, Finalizing grade task', task_info_string, action_name) scorable_blocks_map[scorable_block.location] = [header_name + " (Earned)",
return task_progress.update_task_state(extra_meta=current_step) header_name + " (Possible)"]
return scorable_blocks_map
def generate_problem_grade_report(_xmodule_instance_args, _entry_id, course_id, _task_input, action_name):
    """
    Generate a CSV containing all students' problem grades within a given
    `course_id`, and store it using a `ReportStore`.

    Arguments:
        _xmodule_instance_args: unused, present for task-signature compatibility.
        _entry_id: unused, present for task-signature compatibility.
        course_id (CourseKey): course whose grades are reported.
        _task_input: unused, present for task-signature compatibility.
        action_name (str): name used for progress reporting.

    Returns:
        dict: the final task progress state.
    """
    start_time = time()
    start_date = datetime.now(UTC)
    status_interval = 100
    enrolled_students = CourseEnrollment.objects.users_enrolled_in(course_id)
    task_progress = TaskProgress(action_name, enrolled_students.count(), start_time)

    # This struct encapsulates both the display names of each static item in the
    # header row as values as well as the django User field names of those items
    # as the keys.  It is structured in this way to keep the values related.
    header_row = OrderedDict([('id', 'Student ID'), ('email', 'Email'), ('username', 'Username')])

    graded_scorable_blocks = _graded_scorable_blocks_to_header(course_id)

    # Just generate the static fields for now.
    rows = [list(header_row.values()) + ['Grade'] + list(chain.from_iterable(graded_scorable_blocks.values()))]
    error_rows = [list(header_row.values()) + ['error_msg']]
    current_step = {'step': 'Calculating Grades'}

    course = get_course_by_id(course_id)
    # The refactored CourseGradeFactory.iter API takes (students, course) —
    # keep the argument order consistent with the updated grades API.
    for student, course_grade, err_msg in CourseGradeFactory().iter(enrolled_students, course):
        student_fields = [getattr(student, field_name) for field_name in header_row]
        task_progress.attempted += 1

        if not course_grade:
            # There was an error grading this student.
            if not err_msg:
                err_msg = u'Unknown error'
            error_rows.append(student_fields + [err_msg])
            task_progress.failed += 1
            continue

        earned_possible_values = []
        for block_location in graded_scorable_blocks:
            try:
                problem_score = course_grade.problem_scores[block_location]
            except KeyError:
                # Student never saw this block (e.g. content gating/cohorts).
                earned_possible_values.append([u'Not Available', u'Not Available'])
            else:
                if problem_score.first_attempted:
                    earned_possible_values.append([problem_score.earned, problem_score.possible])
                else:
                    earned_possible_values.append([u'Not Attempted', problem_score.possible])
        rows.append(student_fields + [course_grade.percent] + list(chain.from_iterable(earned_possible_values)))

        task_progress.succeeded += 1
        if task_progress.attempted % status_interval == 0:
            task_progress.update_task_state(extra_meta=current_step)

    # Perform the upload if any students have been successfully graded
    if len(rows) > 1:
        upload_csv_to_report_store(rows, 'problem_grade_report', course_id, start_date)
    # If there are any error rows, write them out as well
    if len(error_rows) > 1:
        upload_csv_to_report_store(error_rows, 'problem_grade_report_err', course_id, start_date)

    return task_progress.update_task_state(extra_meta={'step': 'Uploading CSV'})
def upload_problem_responses_csv(_xmodule_instance_args, _entry_id, course_id, task_input, action_name):
    """
    For a given `course_id`, generate a CSV file containing
    all student answers to a given problem, and store using a `ReportStore`.
    """
    run_started_at = time()
    report_date = datetime.now(UTC)
    # This task produces exactly one report.
    task_progress = TaskProgress(action_name, 1, run_started_at)

    step_meta = {'step': 'Calculating students answers to problem'}
    task_progress.update_task_state(extra_meta=step_meta)

    # Gather every student's state for the requested problem and flatten it
    # into a header plus one row per student response.
    problem_location = task_input.get('problem_location')
    responses = list_problem_responses(course_id, problem_location)
    csv_header, csv_rows = format_dictlist(responses, ['username', 'state'])

    task_progress.attempted = task_progress.succeeded = len(csv_rows)
    task_progress.skipped = task_progress.total - task_progress.attempted

    csv_rows.insert(0, csv_header)

    step_meta = {'step': 'Uploading CSV'}
    task_progress.update_task_state(extra_meta=step_meta)

    # Sanitize the location so it is safe to embed in a report file name.
    problem_location = re.sub(r'[:/]', '_', problem_location)
    csv_name = 'student_state_from_{}'.format(problem_location)
    upload_csv_to_report_store(csv_rows, csv_name, course_id, report_date)

    return task_progress.update_task_state(extra_meta=step_meta)
def _graded_assignments(course_key):
    """
    Returns an OrderedDict that maps an assignment type to a dict of subsection-headers and average-header.
    """
    grading_context = grading_context_for_course(course_key)
    graded_assignments_map = OrderedDict()
    for assignment_type_name, subsection_infos in grading_context['all_graded_subsections_by_type'].iteritems():
        subsection_headers = OrderedDict()
        for index, info in enumerate(subsection_infos, start=1):
            block = info['subsection_block']
            subsection_headers[block.location] = u"{assignment_type} {subsection_index}: {subsection_name}".format(
                assignment_type=assignment_type_name,
                subsection_index=index,
                subsection_name=block.display_name,
            )
        # Separate per-subsection and average columns are only useful when the
        # assignment type actually has more than one graded subsection.
        multiple_subsections = len(subsection_infos) > 1
        average_header = u"{assignment_type}".format(assignment_type=assignment_type_name)
        if multiple_subsections:
            average_header += u" (Avg)"
        graded_assignments_map[assignment_type_name] = {
            'subsection_headers': subsection_headers,
            'average_header': average_header,
            'use_subsection_headers': multiple_subsections,
        }
    return graded_assignments_map
def _graded_scorable_blocks_to_header(course_key):
    """
    Returns an OrderedDict that maps a scorable block's id to its
    headers in the final report.
    """
    grading_context = grading_context_for_course(course_key)
    headers_by_block = OrderedDict()
    for assignment_type, subsection_infos in grading_context['all_graded_subsections_by_type'].iteritems():
        for subsection_index, subsection_info in enumerate(subsection_infos, start=1):
            subsection_name = subsection_info['subsection_block'].display_name
            for block in subsection_info['scored_descendants']:
                # Each scorable block gets an "(Earned)" and a "(Possible)" column.
                base_header = (
                    u"{assignment_type} {subsection_index}: "
                    u"{subsection_name} - {scorable_block_name}"
                ).format(
                    assignment_type=assignment_type,
                    subsection_index=subsection_index,
                    subsection_name=subsection_name,
                    scorable_block_name=block.display_name,
                )
                headers_by_block[block.location] = [
                    base_header + " (Earned)",
                    base_header + " (Possible)",
                ]
    return headers_by_block
...@@ -32,7 +32,7 @@ from lms.djangoapps.instructor_task.api import ( ...@@ -32,7 +32,7 @@ from lms.djangoapps.instructor_task.api import (
submit_delete_problem_state_for_all_students submit_delete_problem_state_for_all_students
) )
from lms.djangoapps.instructor_task.models import InstructorTask from lms.djangoapps.instructor_task.models import InstructorTask
from lms.djangoapps.instructor_task.tasks_helper.grades import generate_course_grade_report from lms.djangoapps.instructor_task.tasks_helper.grades import CourseGradeReport
from lms.djangoapps.instructor_task.tests.test_base import ( from lms.djangoapps.instructor_task.tests.test_base import (
InstructorTaskModuleTestCase, InstructorTaskModuleTestCase,
TestReportMixin, TestReportMixin,
...@@ -572,10 +572,10 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent, ...@@ -572,10 +572,10 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
def verify_csv_task_success(self, task_result): def verify_csv_task_success(self, task_result):
""" """
Verify that all students were successfully graded by Verify that all students were successfully graded by
`generate_course_grade_report`. `CourseGradeReport`.
Arguments: Arguments:
task_result (dict): Return value of `generate_course_grade_report`. task_result (dict): Return value of `CourseGradeReport.generate`.
""" """
self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, task_result) self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, task_result)
...@@ -636,7 +636,7 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent, ...@@ -636,7 +636,7 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
self.submit_student_answer(self.student_b.username, problem_b_url, [OPTION_1, OPTION_2]) self.submit_student_answer(self.student_b.username, problem_b_url, [OPTION_1, OPTION_2])
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'): with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
result = generate_course_grade_report(None, None, self.course.id, None, 'graded') result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
self.verify_csv_task_success(result) self.verify_csv_task_success(result)
self.verify_grades_in_csv( self.verify_grades_in_csv(
[ [
...@@ -669,7 +669,7 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent, ...@@ -669,7 +669,7 @@ class TestGradeReportConditionalContent(TestReportMixin, TestConditionalContent,
self.submit_student_answer(self.student_a.username, problem_a_url, [OPTION_1, OPTION_1]) self.submit_student_answer(self.student_a.username, problem_a_url, [OPTION_1, OPTION_1])
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'): with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
result = generate_course_grade_report(None, None, self.course.id, None, 'graded') result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
self.verify_csv_task_success(result) self.verify_csv_task_success(result)
self.verify_grades_in_csv( self.verify_grades_in_csv(
[ [
......
...@@ -59,14 +59,13 @@ from lms.djangoapps.instructor_task.tasks_helper.enrollments import ( ...@@ -59,14 +59,13 @@ from lms.djangoapps.instructor_task.tasks_helper.enrollments import (
upload_students_csv, upload_students_csv,
) )
from lms.djangoapps.instructor_task.tasks_helper.grades import ( from lms.djangoapps.instructor_task.tasks_helper.grades import (
generate_course_grade_report, CourseGradeReport,
generate_problem_grade_report, ProblemGradeReport,
upload_problem_responses_csv, ProblemResponses,
) )
from lms.djangoapps.instructor_task.tasks_helper.misc import ( from lms.djangoapps.instructor_task.tasks_helper.misc import (
cohort_students_and_upload, cohort_students_and_upload,
upload_course_survey_report, upload_course_survey_report,
upload_proctored_exam_results_report,
upload_ora2_data, upload_ora2_data,
) )
from ..tasks_helper.utils import ( from ..tasks_helper.utils import (
...@@ -89,7 +88,7 @@ class InstructorGradeReportTestCase(TestReportMixin, InstructorTaskCourseTestCas ...@@ -89,7 +88,7 @@ class InstructorGradeReportTestCase(TestReportMixin, InstructorTaskCourseTestCas
Verify cell data in the grades CSV for a particular user. Verify cell data in the grades CSV for a particular user.
""" """
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'): with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
result = generate_course_grade_report(None, None, course_id, None, 'graded') result = CourseGradeReport.generate(None, None, course_id, None, 'graded')
self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, result) self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD') report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
report_csv_filename = report_store.links_for(course_id)[0][0] report_csv_filename = report_store.links_for(course_id)[0][0]
...@@ -121,7 +120,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase): ...@@ -121,7 +120,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
self.current_task.update_state = Mock() self.current_task.update_state = Mock()
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task') as mock_current_task: with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task') as mock_current_task:
mock_current_task.return_value = self.current_task mock_current_task.return_value = self.current_task
result = generate_course_grade_report(None, None, self.course.id, None, 'graded') result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
num_students = len(emails) num_students = len(emails)
self.assertDictContainsSubset({'attempted': num_students, 'succeeded': num_students, 'failed': 0}, result) self.assertDictContainsSubset({'attempted': num_students, 'succeeded': num_students, 'failed': 0}, result)
...@@ -135,7 +134,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase): ...@@ -135,7 +134,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
mock_grades_iter.return_value = [ mock_grades_iter.return_value = [
(self.create_student('username', 'student@example.com'), None, 'Cannot grade student') (self.create_student('username', 'student@example.com'), None, 'Cannot grade student')
] ]
result = generate_course_grade_report(None, None, self.course.id, None, 'graded') result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
self.assertDictContainsSubset({'attempted': 1, 'succeeded': 0, 'failed': 1}, result) self.assertDictContainsSubset({'attempted': 1, 'succeeded': 0, 'failed': 1}, result)
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD') report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
...@@ -319,7 +318,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase): ...@@ -319,7 +318,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
'', '',
) )
] ]
result = generate_course_grade_report(None, None, self.course.id, None, 'graded') result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
self.assertDictContainsSubset({'attempted': 1, 'succeeded': 1, 'failed': 0}, result) self.assertDictContainsSubset({'attempted': 1, 'succeeded': 1, 'failed': 0}, result)
...@@ -378,7 +377,7 @@ class TestProblemResponsesReport(TestReportMixin, InstructorTaskCourseTestCase): ...@@ -378,7 +377,7 @@ class TestProblemResponsesReport(TestReportMixin, InstructorTaskCourseTestCase):
{'username': 'user1', 'state': u'state1'}, {'username': 'user1', 'state': u'state1'},
{'username': 'user2', 'state': u'state2'}, {'username': 'user2', 'state': u'state2'},
] ]
result = upload_problem_responses_csv(None, None, self.course.id, task_input, 'calculated') result = ProblemResponses.generate(None, None, self.course.id, task_input, 'calculated')
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD') report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
links = report_store.links_for(self.course.id) links = report_store.links_for(self.course.id)
...@@ -609,7 +608,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase): ...@@ -609,7 +608,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
Verify that we see no grade information for a course with no graded Verify that we see no grade information for a course with no graded
problems. problems.
""" """
result = generate_problem_grade_report(None, None, self.course.id, None, 'graded') result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
self.assertDictContainsSubset({'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result) self.assertDictContainsSubset({'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
self.verify_rows_in_csv([ self.verify_rows_in_csv([
dict(zip( dict(zip(
...@@ -633,7 +632,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase): ...@@ -633,7 +632,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
self.define_option_problem(u'Problem1', parent=vertical) self.define_option_problem(u'Problem1', parent=vertical)
self.submit_student_answer(self.student_1.username, u'Problem1', ['Option 1']) self.submit_student_answer(self.student_1.username, u'Problem1', ['Option 1'])
result = generate_problem_grade_report(None, None, self.course.id, None, 'graded') result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
self.assertDictContainsSubset({'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result) self.assertDictContainsSubset({'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
problem_name = u'Homework 1: Subsection - Problem1' problem_name = u'Homework 1: Subsection - Problem1'
header_row = self.csv_header_row + [problem_name + ' (Earned)', problem_name + ' (Possible)'] header_row = self.csv_header_row + [problem_name + ' (Earned)', problem_name + ' (Possible)']
...@@ -670,7 +669,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase): ...@@ -670,7 +669,7 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
mock_grades_iter.return_value = [ mock_grades_iter.return_value = [
(student, None, error_message) (student, None, error_message)
] ]
result = generate_problem_grade_report(None, None, self.course.id, None, 'graded') result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
self.assertDictContainsSubset({'attempted': 1, 'succeeded': 0, 'failed': 1}, result) self.assertDictContainsSubset({'attempted': 1, 'succeeded': 0, 'failed': 1}, result)
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD') report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
...@@ -720,7 +719,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent, ...@@ -720,7 +719,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
self.submit_student_answer(self.student_b.username, self.problem_b_url, [self.OPTION_1, self.OPTION_2]) self.submit_student_answer(self.student_b.username, self.problem_b_url, [self.OPTION_1, self.OPTION_2])
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'): with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
result = generate_problem_grade_report(None, None, self.course.id, None, 'graded') result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
self.assertDictContainsSubset( self.assertDictContainsSubset(
{'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result {'action_name': 'graded', 'attempted': 2, 'succeeded': 2, 'failed': 0}, result
) )
...@@ -812,7 +811,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent, ...@@ -812,7 +811,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
header_row += [problem + ' (Earned)', problem + ' (Possible)'] header_row += [problem + ' (Earned)', problem + ' (Possible)']
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'): with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
generate_problem_grade_report(None, None, self.course.id, None, 'graded') ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
self.assertEquals(self.get_csv_row_with_headers(), header_row) self.assertEquals(self.get_csv_row_with_headers(), header_row)
...@@ -868,7 +867,7 @@ class TestProblemReportCohortedContent(TestReportMixin, ContentGroupTestCase, In ...@@ -868,7 +867,7 @@ class TestProblemReportCohortedContent(TestReportMixin, ContentGroupTestCase, In
self.submit_student_answer(self.beta_user.username, u'Problem1', ['Option 1', 'Option 2']) self.submit_student_answer(self.beta_user.username, u'Problem1', ['Option 1', 'Option 2'])
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'): with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
result = generate_problem_grade_report(None, None, self.course.id, None, 'graded') result = ProblemGradeReport.generate(None, None, self.course.id, None, 'graded')
self.assertDictContainsSubset( self.assertDictContainsSubset(
{'action_name': 'graded', 'attempted': 4, 'succeeded': 4, 'failed': 0}, result {'action_name': 'graded', 'attempted': 4, 'succeeded': 4, 'failed': 0}, result
) )
...@@ -1579,7 +1578,7 @@ class TestGradeReport(TestReportMixin, InstructorTaskModuleTestCase): ...@@ -1579,7 +1578,7 @@ class TestGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
self.submit_student_answer(self.student.username, u'Problem1', ['Option 1']) self.submit_student_answer(self.student.username, u'Problem1', ['Option 1'])
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'): with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
result = generate_course_grade_report(None, None, self.course.id, None, 'graded') result = CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
self.assertDictContainsSubset( self.assertDictContainsSubset(
{'action_name': 'graded', 'attempted': 1, 'succeeded': 1, 'failed': 0}, {'action_name': 'graded', 'attempted': 1, 'succeeded': 1, 'failed': 0},
result, result,
...@@ -1654,7 +1653,7 @@ class TestGradeReportEnrollmentAndCertificateInfo(TestReportMixin, InstructorTas ...@@ -1654,7 +1653,7 @@ class TestGradeReportEnrollmentAndCertificateInfo(TestReportMixin, InstructorTas
Verify grade report data. Verify grade report data.
""" """
with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'): with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task'):
generate_course_grade_report(None, None, self.course.id, None, 'graded') CourseGradeReport.generate(None, None, self.course.id, None, 'graded')
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD') report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
report_csv_filename = report_store.links_for(self.course.id)[0][0] report_csv_filename = report_store.links_for(self.course.id)[0][0]
report_path = report_store.path_to(self.course.id, report_csv_filename) report_path = report_store.path_to(self.course.id, report_csv_filename)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment