Commit 02e69252 by Nimisha Asthagiri

Update grades to use Block Structures

parent d20e5455
......@@ -11,12 +11,10 @@ from django.utils.timezone import UTC
from lazy import lazy
from lxml import etree
from path import Path as path
from xblock.core import XBlock
from xblock.fields import Scope, List, String, Dict, Boolean, Integer, Float
from xmodule import course_metadata_utils
from xmodule.course_metadata_utils import DEFAULT_START_DATE
from xmodule.exceptions import UndefinedContext
from xmodule.graders import grader_from_conf
from xmodule.mixin import LicenseMixin
from xmodule.seq_module import SequenceDescriptor, SequenceModule
......@@ -1183,83 +1181,6 @@ class CourseDescriptor(CourseFields, SequenceDescriptor, LicenseMixin):
"""
return course_metadata_utils.sorting_score(self.start, self.advertised_start, self.announcement)
@lazy
def grading_context(self):
"""
This returns a dictionary with keys necessary for quickly grading
a student. They are used by grades.grade()
The grading context has two keys:
graded_sections - This contains the sections that are graded, as
well as all possible children modules that can affect the
grading. This allows some sections to be skipped if the student
hasn't seen any part of it.
The format is a dictionary keyed by section-type. The values are
arrays of dictionaries containing
"section_descriptor" : The section descriptor
"xmoduledescriptors" : An array of xmoduledescriptors that
could possibly be in the section, for any student
all_descriptors - This contains a list of all xmodules that can
affect grading a student. This is used to efficiently fetch
all the xmodule state for a FieldDataCache without walking
the descriptor tree again.
"""
# If this descriptor has been bound to a student, return the corresponding
# XModule. If not, just use the descriptor itself
try:
module = getattr(self, '_xmodule', None)
if not module:
module = self
except UndefinedContext:
module = self
def possibly_scored(usage_key):
"""Can this XBlock type have a score or children?"""
return usage_key.block_type in self.block_types_affecting_grading
all_descriptors = []
graded_sections = {}
def yield_descriptor_descendents(module_descriptor):
for child in module_descriptor.get_children(usage_key_filter=possibly_scored):
yield child
for module_descriptor in yield_descriptor_descendents(child):
yield module_descriptor
for chapter in self.get_children():
for section in chapter.get_children():
if section.graded:
xmoduledescriptors = list(yield_descriptor_descendents(section))
xmoduledescriptors.append(section)
# The xmoduledescriptors included here are only the ones that have scores.
section_description = {
'section_descriptor': section,
'xmoduledescriptors': [child for child in xmoduledescriptors if child.has_score]
}
section_format = section.format if section.format is not None else ''
graded_sections[section_format] = graded_sections.get(section_format, []) + [section_description]
all_descriptors.extend(xmoduledescriptors)
all_descriptors.append(section)
return {'graded_sections': graded_sections,
'all_descriptors': all_descriptors, }
@lazy
def block_types_affecting_grading(self):
"""Return all block types that could impact grading (i.e. scored, or having children)."""
return frozenset(
cat for (cat, xblock_class) in XBlock.load_classes() if (
getattr(xblock_class, 'has_score', False) or getattr(xblock_class, 'has_children', False)
)
)
@staticmethod
def make_id(org, course, url_name):
return '/'.join([org, course, url_name])
......
......@@ -51,7 +51,7 @@ class Command(BaseCommand):
for cert in ungraded:
# grade the student
grade = grades.grade(cert.user, request, course)
grade = grades.grade(cert.user, course)
print "grading {0} - {1}".format(cert.user, grade['percent'])
cert.grade = grade['percent']
if not options['noop']:
......
......@@ -257,7 +257,7 @@ class XQueueCertInterface(object):
self.request.session = {}
is_whitelisted = self.whitelist.filter(user=student, course_id=course_id, whitelist=True).exists()
grade = grades.grade(student, self.request, course)
grade = grades.grade(student, course)
enrollment_mode, __ = CourseEnrollment.enrollment_mode_for_user(student, course_id)
mode_is_verified = enrollment_mode in GeneratedCertificate.VERIFIED_CERTS_MODES
user_is_verified = SoftwareSecurePhotoVerification.user_is_verified(student)
......
......@@ -5,30 +5,33 @@ import json
import logging
import random
from collections import defaultdict
from functools import partial
import dogstats_wrapper as dog_stats_api
from course_blocks.api import get_course_blocks
from courseware import courses
from django.conf import settings
from django.core.cache import cache
from django.test.client import RequestFactory
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import BlockUsageLocator
from openedx.core.djangoapps.content.block_structure.api import get_course_in_cache
from openedx.core.lib.cache_utils import memoized
from openedx.core.lib.gating import api as gating_api
from courseware import courses
from courseware.access import has_access
from courseware.model_data import FieldDataCache, ScoresClient
from openedx.core.djangoapps.signals.signals import GRADES_UPDATED
from student.models import anonymous_id_for_user
from util.db import outer_atomic
from util.module_utils import yield_dynamic_descriptor_descendants
from xmodule import graders
from xblock.core import XBlock
from xmodule import graders, block_metadata_utils
from xmodule.graders import Score
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from .models import StudentModule
from .module_render import get_module_for_descriptor
from .transformers.grades import GradesTransformer
log = logging.getLogger("edx.courseware")
......@@ -141,7 +144,7 @@ class ProgressSummary(object):
weighted_scores: a dictionary mapping module locations to weighted Score
objects.
locations_to_children: a dictionary mapping module locations to their
locations_to_children: a function mapping locations to their
direct descendants.
"""
def __init__(self, chapters, weighted_scores, locations_to_children):
......@@ -172,34 +175,93 @@ class ProgressSummary(object):
return earned, possible
def descriptor_affects_grading(block_types_affecting_grading, descriptor):
@memoized
def block_types_with_scores():
"""
Returns True if the descriptor could have any impact on grading, else False.
Returns the block types that could have a score.
Something might be a scored item if it is capable of storing a score
(has_score=True). We also have to include anything that can have children,
since those children might have scores. We can avoid things like Videos,
which have state but cannot ever impact someone's grade.
"""
return descriptor.location.block_type in block_types_affecting_grading
return frozenset(
cat for (cat, xblock_class) in XBlock.load_classes() if (
getattr(xblock_class, 'has_score', False) or getattr(xblock_class, 'has_children', False)
)
)
def field_data_cache_for_grading(course, user):
def possibly_scored(usage_key):
"""
Returns whether the given block could impact grading (i.e. scored, or has children).
"""
Given a CourseDescriptor and User, create the FieldDataCache for grading.
return usage_key.block_type in block_types_with_scores()
This will generate a FieldDataCache that only loads state for those things
that might possibly affect the grading process, and will ignore things like
Videos.
def grading_context_for_course(course):
"""
descriptor_filter = partial(descriptor_affects_grading, course.block_types_affecting_grading)
return FieldDataCache.cache_for_descriptor_descendents(
course.id,
user,
course,
depth=None,
descriptor_filter=descriptor_filter
)
Same as grading_context, but takes in a course object.
"""
course_structure = get_course_in_cache(course.id)
return grading_context(course_structure)
def grading_context(course_structure):
"""
This returns a dictionary with keys necessary for quickly grading
a student. They are used by grades.grade()
The grading context has two keys:
graded_sections - This contains the sections that are graded, as
well as all possible children modules that can affect the
grading. This allows some sections to be skipped if the student
hasn't seen any part of it.
The format is a dictionary keyed by section-type. The values are
arrays of dictionaries containing
"section_block" : The section block
"scored_descendant_keys" : An array of usage keys for blocks that
could possibly be in the section, for any student
all_graded_blocks - This contains a list of all blocks that can
affect grading a student. This is used to efficiently fetch
all the xmodule state for a FieldDataCache without walking
the descriptor tree again.
"""
all_graded_blocks = []
all_graded_sections = defaultdict(list)
for chapter_key in course_structure.get_children(course_structure.root_block_usage_key):
for section_key in course_structure.get_children(chapter_key):
section = course_structure[section_key]
scored_descendants_of_section = [section]
if section.graded:
for descendant_key in course_structure.post_order_traversal(
filter_func=possibly_scored,
start_node=section_key,
):
scored_descendants_of_section.append(
course_structure[descendant_key],
)
# include only those blocks that have scores, not if they are just a parent
section_info = {
'section_block': section,
'scored_descendants': [
child for child in scored_descendants_of_section
if getattr(child, 'has_score', None)
]
}
section_format = getattr(section, 'format', '')
all_graded_sections[section_format].append(section_info)
all_graded_blocks.extend(scored_descendants_of_section)
return {
'all_graded_sections': all_graded_sections,
'all_graded_blocks': all_graded_blocks,
}
def answer_distributions(course_key):
......@@ -213,7 +275,7 @@ def answer_distributions(course_key):
entries for a given course with type="problem" and a grade that is not null.
This means that we only count LoncapaProblems that people have submitted.
Other types of items like ORA or sequences will not be collected. Empty
Loncapa problem state that gets created from runnig the progress page is
Loncapa problem state that gets created from running the progress page is
also not counted.
This method accesses the StudentModule table directly instead of using the
......@@ -295,13 +357,13 @@ def answer_distributions(course_key):
return answer_counts
def grade(student, request, course, keep_raw_scores=False, field_data_cache=None, scores_client=None):
def grade(student, course, keep_raw_scores=False):
"""
Returns the grade of the student.
Also sends a signal to update the minimum grade requirement status.
"""
grade_summary = _grade(student, request, course, keep_raw_scores, field_data_cache, scores_client)
grade_summary = _grade(student, course, keep_raw_scores)
responses = GRADES_UPDATED.send_robust(
sender=None,
username=student.username,
......@@ -316,7 +378,7 @@ def grade(student, request, course, keep_raw_scores=False, field_data_cache=None
return grade_summary
def _grade(student, request, course, keep_raw_scores, field_data_cache, scores_client):
def _grade(student, course, keep_raw_scores):
"""
Unwrapped version of "grade"
......@@ -324,24 +386,18 @@ def _grade(student, request, course, keep_raw_scores, field_data_cache, scores_c
output from the course grader, augmented with the final letter
grade. The keys in the output are:
course: a CourseDescriptor
- grade : A final letter grade.
- percent : The final percent for the class (rounded up).
- section_breakdown : A breakdown of each section that makes
up the grade. (For display)
- grade_breakdown : A breakdown of the major components that
make up the final grade. (For display)
- course: a CourseDescriptor
- keep_raw_scores : if True, then value for key 'raw_scores' contains scores
for every graded module
More information on the format is in the docstring for CourseGrader.
"""
course_structure = get_course_blocks(student, course.location)
grading_context_result = grading_context(course_structure)
scorable_locations = [block.location for block in grading_context_result['all_graded_blocks']]
with outer_atomic():
if field_data_cache is None:
field_data_cache = field_data_cache_for_grading(course, student)
if scores_client is None:
scores_client = ScoresClient.from_field_data_cache(field_data_cache)
scores_client = ScoresClient.create_for_locations(course.id, student.id, scorable_locations)
# Dict of item_ids -> (earned, possible) point tuples. This *only* grabs
# scores that were registered with the submissions API, which for the moment
......@@ -358,47 +414,69 @@ def _grade(student, request, course, keep_raw_scores, field_data_cache, scores_c
)
max_scores_cache = MaxScoresCache.create_for_course(course)
# For the moment, we have to get scorable_locations from field_data_cache
# and not from scores_client, because scores_client is ignorant of things
# For the moment, scores_client is ignorant of scorable_locations
# in the submissions API. As a further refactoring step, submissions should
# be hidden behind the ScoresClient.
max_scores_cache.fetch_from_remote(field_data_cache.scorable_locations)
max_scores_cache.fetch_from_remote(scorable_locations)
grading_context = course.grading_context
raw_scores = []
totaled_scores, raw_scores = _calculate_totaled_scores(
student, grading_context_result, max_scores_cache, submissions_scores, scores_client, keep_raw_scores
)
with outer_atomic():
# Grading policy might be overriden by a CCX, need to reset it
course.set_grading_policy(course.grading_policy)
grade_summary = course.grader.grade(totaled_scores, generate_random_scores=settings.GENERATE_PROFILE_SCORES)
# We round the grade here, to make sure that the grade is a whole percentage and
# doesn't get displayed differently than it gets grades
grade_summary['percent'] = round(grade_summary['percent'] * 100 + 0.05) / 100
letter_grade = grade_for_percentage(course.grade_cutoffs, grade_summary['percent'])
grade_summary['grade'] = letter_grade
grade_summary['totaled_scores'] = totaled_scores # make this available, eg for instructor download & debugging
if keep_raw_scores:
# way to get all RAW scores out to instructor
# so grader can be double-checked
grade_summary['raw_scores'] = raw_scores
max_scores_cache.push_to_remote()
return grade_summary
def _calculate_totaled_scores(
student,
grading_context_result,
max_scores_cache,
submissions_scores,
scores_client,
keep_raw_scores,
):
"""
Returns the totaled scores, which can be passed to the grader.
"""
raw_scores = []
totaled_scores = {}
# This next complicated loop is just to collect the totaled_scores, which is
# passed to the grader
for section_format, sections in grading_context['graded_sections'].iteritems():
for section_format, sections in grading_context_result['all_graded_sections'].iteritems():
format_scores = []
for section in sections:
section_descriptor = section['section_descriptor']
section_name = section_descriptor.display_name_with_default_escaped
for section_info in sections:
section = section_info['section_block']
section_name = block_metadata_utils.display_name_with_default(section)
with outer_atomic():
# some problems have state that is updated independently of interaction
# with the LMS, so they need to always be scored. (E.g. combinedopenended ORA1)
# TODO This block is causing extra savepoints to be fired that are empty because no queries are executed
# during the loop. When refactoring this code please keep this outer_atomic call in mind and ensure we
# are not making unnecessary database queries.
should_grade_section = any(
descriptor.always_recalculate_grades for descriptor in section['xmoduledescriptors']
)
# If there are no problems that always have to be regraded, check to
# Check to
# see if any of our locations are in the scores from the submissions
# API. If scores exist, we have to calculate grades for this section.
if not should_grade_section:
should_grade_section = any(
descriptor.location.to_deprecated_string() in submissions_scores
for descriptor in section['xmoduledescriptors']
)
should_grade_section = any(
unicode(descendant.location) in submissions_scores
for descendant in section_info['scored_descendants']
)
if not should_grade_section:
should_grade_section = any(
descriptor.location in scores_client
for descriptor in section['xmoduledescriptors']
descendant.location in scores_client
for descendant in section_info['scored_descendants']
)
# If we haven't seen a single problem in the section, we don't have
......@@ -406,26 +484,11 @@ def _grade(student, request, course, keep_raw_scores, field_data_cache, scores_c
if should_grade_section:
scores = []
def create_module(descriptor):
'''creates an XModule instance given a descriptor'''
# TODO: We need the request to pass into here. If we could forego that, our arguments
# would be simpler
return get_module_for_descriptor(
student, request, descriptor, field_data_cache, course.id, course=course
)
descendants = yield_dynamic_descriptor_descendants(section_descriptor, student.id, create_module)
for module_descriptor in descendants:
user_access = has_access(
student, 'load', module_descriptor, module_descriptor.location.course_key
)
if not user_access:
continue
for descendant in section_info['scored_descendants']:
(correct, total) = get_score(
student,
module_descriptor,
create_module,
descendant,
scores_client,
submissions_scores,
max_scores_cache,
......@@ -433,13 +496,13 @@ def _grade(student, request, course, keep_raw_scores, field_data_cache, scores_c
if correct is None and total is None:
continue
if settings.GENERATE_PROFILE_SCORES: # for debugging!
if settings.GENERATE_PROFILE_SCORES: # for debugging!
if total > 1:
correct = random.randrange(max(total - 2, 1), total + 1)
else:
correct = total
graded = module_descriptor.graded
graded = descendant.graded
if not total > 0:
# We simply cannot grade a problem that is 12/0, because we might need it as a percentage
graded = False
......@@ -449,8 +512,8 @@ def _grade(student, request, course, keep_raw_scores, field_data_cache, scores_c
correct,
total,
graded,
module_descriptor.display_name_with_default_escaped,
module_descriptor.location
block_metadata_utils.display_name_with_default_escaped(descendant),
descendant.location
)
)
......@@ -460,37 +523,18 @@ def _grade(student, request, course, keep_raw_scores, field_data_cache, scores_c
else:
graded_total = Score(0.0, 1.0, True, section_name, None)
#Add the graded total to totaled_scores
# Add the graded total to totaled_scores
if graded_total.possible > 0:
format_scores.append(graded_total)
else:
log.info(
"Unable to grade a section with a total possible score of zero. " +
str(section_descriptor.location)
str(section.location)
)
totaled_scores[section_format] = format_scores
with outer_atomic():
# Grading policy might be overriden by a CCX, need to reset it
course.set_grading_policy(course.grading_policy)
grade_summary = course.grader.grade(totaled_scores, generate_random_scores=settings.GENERATE_PROFILE_SCORES)
# We round the grade here, to make sure that the grade is a whole percentage and
# doesn't get displayed differently than it gets grades
grade_summary['percent'] = round(grade_summary['percent'] * 100 + 0.05) / 100
letter_grade = grade_for_percentage(course.grade_cutoffs, grade_summary['percent'])
grade_summary['grade'] = letter_grade
grade_summary['totaled_scores'] = totaled_scores # make this available, eg for instructor download & debugging
if keep_raw_scores:
# way to get all RAW scores out to instructor
# so grader can be double-checked
grade_summary['raw_scores'] = raw_scores
max_scores_cache.push_to_remote()
return grade_summary
return totaled_scores, raw_scores
def grade_for_percentage(grade_cutoffs, percentage):
......@@ -515,30 +559,27 @@ def grade_for_percentage(grade_cutoffs, percentage):
return letter_grade
def progress_summary(student, request, course, field_data_cache=None, scores_client=None):
def progress_summary(student, course):
"""
Returns progress summary for all chapters in the course.
"""
progress = _progress_summary(student, request, course, field_data_cache, scores_client)
progress = _progress_summary(student, course)
if progress:
return progress.chapters
else:
return None
def get_weighted_scores(student, course, field_data_cache=None, scores_client=None):
def get_weighted_scores(student, course):
"""
Uses the _progress_summary method to return a ProgressSummmary object
Uses the _progress_summary method to return a ProgressSummary object
containing details of a student's weighted scores for the course.
"""
request = _get_mock_request(student)
return _progress_summary(student, request, course, field_data_cache, scores_client)
return _progress_summary(student, course)
# TODO: This method is not very good. It was written in the old course style and
# then converted over and performance is not good. Once the progress page is redesigned
# to not have the progress summary this method should be deleted (so it won't be copied).
def _progress_summary(student, request, course, field_data_cache=None, scores_client=None):
def _progress_summary(student, course):
"""
Unwrapped version of "progress_summary".
......@@ -550,28 +591,20 @@ def _progress_summary(student, request, course, field_data_cache=None, scores_cl
each containing an array of scores. This contains information for graded and
ungraded problems, and is good for displaying a course summary with due dates,
etc.
- None if the student does not have access to load the course module.
Arguments:
student: A User object for the student to grade
course: A Descriptor containing the course to grade
If the student does not have access to load the course module, this function
will return None.
"""
with outer_atomic():
if field_data_cache is None:
field_data_cache = field_data_cache_for_grading(course, student)
if scores_client is None:
scores_client = ScoresClient.from_field_data_cache(field_data_cache)
course_module = get_module_for_descriptor(
student, request, course, field_data_cache, course.id, course=course
)
if not course_module:
return None
course_structure = get_course_blocks(student, course.location)
if not len(course_structure):
return None
scorable_locations = [block_key for block_key in course_structure if possibly_scored(block_key)]
course_module = getattr(course_module, '_x_module', course_module)
with outer_atomic():
scores_client = ScoresClient.create_for_locations(course.id, student.id, scorable_locations)
# We need to import this here to avoid a circular dependency of the form:
# XBlock --> submissions --> Django Rest Framework error strings -->
......@@ -579,93 +612,83 @@ def _progress_summary(student, request, course, field_data_cache=None, scores_cl
from submissions import api as sub_api # installed from the edx-submissions repository
with outer_atomic():
submissions_scores = sub_api.get_scores(
course.id.to_deprecated_string(), anonymous_id_for_user(student, course.id)
unicode(course.id), anonymous_id_for_user(student, course.id)
)
max_scores_cache = MaxScoresCache.create_for_course(course)
# For the moment, we have to get scorable_locations from field_data_cache
# and not from scores_client, because scores_client is ignorant of things
# For the moment, scores_client is ignorant of scorable_locations
# in the submissions API. As a further refactoring step, submissions should
# be hidden behind the ScoresClient.
max_scores_cache.fetch_from_remote(field_data_cache.scorable_locations)
max_scores_cache.fetch_from_remote(scorable_locations)
# Check for gated content
gated_content = gating_api.get_gated_content(course, student)
chapters = []
locations_to_children = defaultdict(list)
locations_to_weighted_scores = {}
# Don't include chapters that aren't displayable (e.g. due to error)
for chapter_module in course_module.get_display_items():
# Skip if the chapter is hidden
if chapter_module.hide_from_toc:
continue
for chapter_key in course_structure.get_children(course_structure.root_block_usage_key):
chapter = course_structure[chapter_key]
sections = []
for section_module in chapter_module.get_display_items():
# Skip if the section is hidden
with outer_atomic():
if section_module.hide_from_toc or unicode(section_module.location) in gated_content:
for section_key in course_structure.get_children(chapter_key):
if unicode(section_key) in gated_content:
continue
section = course_structure[section_key]
graded = getattr(section, 'graded', False)
scores = []
for descendant_key in course_structure.post_order_traversal(
filter_func=possibly_scored,
start_node=section_key,
):
descendant = course_structure[descendant_key]
(correct, total) = get_score(
student,
descendant,
scores_client,
submissions_scores,
max_scores_cache,
)
if correct is None and total is None:
continue
graded = section_module.graded
scores = []
module_creator = section_module.xmodule_runtime.get_module
for module_descriptor in yield_dynamic_descriptor_descendants(
section_module, student.id, module_creator
):
location_parent = module_descriptor.parent.replace(version=None, branch=None)
location_to_save = module_descriptor.location.replace(version=None, branch=None)
locations_to_children[location_parent].append(location_to_save)
(correct, total) = get_score(
student,
module_descriptor,
module_creator,
scores_client,
submissions_scores,
max_scores_cache,
)
if correct is None and total is None:
continue
weighted_location_score = Score(
correct,
total,
graded,
module_descriptor.display_name_with_default_escaped,
module_descriptor.location
)
weighted_location_score = Score(
correct,
total,
graded,
block_metadata_utils.display_name_with_default_escaped(descendant),
descendant.location
)
scores.append(weighted_location_score)
locations_to_weighted_scores[location_to_save] = weighted_location_score
scores.append(weighted_location_score)
locations_to_weighted_scores[descendant.location] = weighted_location_score
scores.reverse()
section_total, _ = graders.aggregate_scores(
scores, section_module.display_name_with_default_escaped)
escaped_section_name = block_metadata_utils.display_name_with_default_escaped(section)
section_total, _ = graders.aggregate_scores(scores, escaped_section_name)
module_format = section_module.format if section_module.format is not None else ''
sections.append({
'display_name': section_module.display_name_with_default_escaped,
'url_name': section_module.url_name,
'scores': scores,
'section_total': section_total,
'format': module_format,
'due': section_module.due,
'graded': graded,
})
sections.append({
'display_name': escaped_section_name,
'url_name': block_metadata_utils.url_name_for_block(section),
'scores': scores,
'section_total': section_total,
'format': getattr(section, 'format', ''),
'due': getattr(section, 'due', None),
'graded': graded,
})
chapters.append({
'course': course.display_name_with_default_escaped,
'display_name': chapter_module.display_name_with_default_escaped,
'url_name': chapter_module.url_name,
'display_name': block_metadata_utils.display_name_with_default_escaped(chapter),
'url_name': block_metadata_utils.url_name_for_block(chapter),
'sections': sections
})
max_scores_cache.push_to_remote()
return ProgressSummary(chapters, locations_to_weighted_scores, locations_to_children)
return ProgressSummary(chapters, locations_to_weighted_scores, course_structure.get_children)
def weighted_score(raw_correct, raw_total, weight):
......@@ -676,7 +699,7 @@ def weighted_score(raw_correct, raw_total, weight):
return (float(raw_correct) * weight / raw_total, float(weight))
def get_score(user, problem_descriptor, module_creator, scores_client, submissions_scores_cache, max_scores_cache):
def get_score(user, block, scores_client, submissions_scores_cache, max_scores_cache):
"""
Return the score for a user on a problem, as a tuple (correct, total).
e.g. (5,7) if you got 5 out of 7 points.
......@@ -685,10 +708,8 @@ def get_score(user, problem_descriptor, module_creator, scores_client, submissio
None).
user: a Student object
problem_descriptor: an XModuleDescriptor
block: a BlockStructure's BlockData object
scores_client: an initialized ScoresClient
module_creator: a function that takes a descriptor, and returns the corresponding XModule for this user.
Can return None if user doesn't have access, or if something else went wrong.
submissions_scores_cache: A dict of location names to (earned, possible) point tuples.
If an entry is found in this cache, it takes precedence.
max_scores_cache: a MaxScoresCache
......@@ -698,23 +719,11 @@ def get_score(user, problem_descriptor, module_creator, scores_client, submissio
if not user.is_authenticated():
return (None, None)
location_url = problem_descriptor.location.to_deprecated_string()
location_url = unicode(block.location)
if location_url in submissions_scores_cache:
return submissions_scores_cache[location_url]
# some problems have state that is updated independently of interaction
# with the LMS, so they need to always be scored. (E.g. combinedopenended ORA1.)
if problem_descriptor.always_recalculate_grades:
problem = module_creator(problem_descriptor)
if problem is None:
return (None, None)
score = problem.get_score()
if score is not None:
return (score['score'], score['total'])
else:
return (None, None)
if not problem_descriptor.has_score:
if not getattr(block, 'has_score', False):
# These are not problems, and do not have a score
return (None, None)
......@@ -723,8 +732,8 @@ def get_score(user, problem_descriptor, module_creator, scores_client, submissio
# value. This is important for cases where a student might have seen an
# older version of the problem -- they're still graded on what was possible
# when they tried the problem, not what it's worth now.
score = scores_client.get(problem_descriptor.location)
cached_max_score = max_scores_cache.get(problem_descriptor.location)
score = scores_client.get(block.location)
cached_max_score = max_scores_cache.get(block.location)
if score and score.total is not None:
# We have a valid score, just use it.
correct = score.correct if score.correct is not None else 0.0
......@@ -736,25 +745,18 @@ def get_score(user, problem_descriptor, module_creator, scores_client, submissio
total = cached_max_score
else:
# This means we don't have a valid score entry and we don't have a
# cached_max_score on hand. We know they've earned 0.0 points on this,
# but we need to instantiate the module (i.e. load student state) in
# order to find out how much it was worth.
problem = module_creator(problem_descriptor)
if problem is None:
return (None, None)
# cached_max_score on hand. We know they've earned 0.0 points on this.
correct = 0.0
total = problem.max_score()
total = block.transformer_data[GradesTransformer].max_score
# Problem may be an error module (if something in the problem builder failed)
# In which case total might be None
if total is None:
return (None, None)
else:
# add location to the max score cache
max_scores_cache.set(problem_descriptor.location, total)
max_scores_cache.set(block.location, total)
return weighted_score(correct, total, problem_descriptor.weight)
return weighted_score(correct, total, block.weight)
def iterate_grades_for(course_or_id, students, keep_raw_scores=False):
......@@ -783,13 +785,7 @@ def iterate_grades_for(course_or_id, students, keep_raw_scores=False):
for student in students:
with dog_stats_api.timer('lms.grades.iterate_grades_for', tags=[u'action:{}'.format(course.id)]):
try:
request = _get_mock_request(student)
# Grading calls problem rendering, which calls masquerading,
# which checks session vars -- thus the empty session dict below.
# It's not pretty, but untangling that is currently beyond the
# scope of this feature.
request.session = {}
gradeset = grade(student, request, course, keep_raw_scores)
gradeset = grade(student, course, keep_raw_scores)
yield student, gradeset, ""
except Exception as exc: # pylint: disable=broad-except
# Keep marching on even if this student couldn't be graded for
......
......@@ -940,7 +940,6 @@ class ScoresClient(object):
Score = namedtuple('Score', 'correct total')
def __init__(self, course_key, user_id):
"""Basic constructor. from_field_data_cache() is more appopriate for most uses."""
self.course_key = course_key
self.user_id = user_id
self._locations_to_scores = {}
......@@ -983,10 +982,10 @@ class ScoresClient(object):
return self._locations_to_scores.get(location.replace(version=None, branch=None))
@classmethod
def from_field_data_cache(cls, fd_cache):
"""Create a ScoresClient from a populated FieldDataCache."""
client = cls(fd_cache.course_id, fd_cache.user.id)
client.fetch_scores(fd_cache.scorable_locations)
def create_for_locations(cls, course_id, user_id, scorable_locations):
"""Create a ScoresClient with pre-fetched data for the given locations."""
client = cls(course_id, user_id)
client.fetch_scores(scorable_locations)
return client
......
......@@ -11,7 +11,6 @@ from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator, BlockUsageLocator
from courseware.grades import (
field_data_cache_for_grading,
grade,
iterate_grades_for,
MaxScoresCache,
......@@ -31,7 +30,7 @@ from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
def _grade_with_errors(student, request, course, keep_raw_scores=False):
def _grade_with_errors(student, course, keep_raw_scores=False):
"""This fake grade method will throw exceptions for student3 and
student4, but allow any other students to go through normal grading.
......@@ -42,7 +41,7 @@ def _grade_with_errors(student, request, course, keep_raw_scores=False):
if student.username in ['student3', 'student4']:
raise Exception("I don't like {}".format(student.username))
return grade(student, request, course, keep_raw_scores=keep_raw_scores)
return grade(student, course, keep_raw_scores=keep_raw_scores)
@attr('shard_1')
......@@ -217,15 +216,6 @@ class TestFieldDataCacheScorableLocations(SharedModuleStoreTestCase):
CourseEnrollment.enroll(self.student, self.course.id)
def test_field_data_cache_scorable_locations(self):
"""Only scorable locations should be in FieldDataCache.scorable_locations."""
fd_cache = field_data_cache_for_grading(self.course, self.student)
block_types = set(loc.block_type for loc in fd_cache.scorable_locations)
self.assertNotIn('video', block_types)
self.assertNotIn('html', block_types)
self.assertNotIn('discussion', block_types)
self.assertIn('problem', block_types)
class TestProgressSummary(TestCase):
"""
......
......@@ -256,13 +256,7 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
- grade_breakdown : A breakdown of the major components that
make up the final grade. (For display)
"""
fake_request = self.factory.get(
reverse('progress', kwargs={'course_id': self.course.id.to_deprecated_string()})
)
fake_request.user = self.student_user
return grades.grade(self.student_user, fake_request, self.course)
return grades.grade(self.student_user, self.course)
def get_progress_summary(self):
"""
......@@ -275,15 +269,7 @@ class TestSubmittingProblems(ModuleStoreTestCase, LoginEnrollmentTestCase, Probl
ungraded problems, and is good for displaying a course summary with due dates,
etc.
"""
fake_request = self.factory.get(
reverse('progress', kwargs={'course_id': self.course.id.to_deprecated_string()})
)
progress_summary = grades.progress_summary(
self.student_user, fake_request, self.course
)
return progress_summary
return grades.progress_summary(self.student_user, self.course)
def check_grade_percent(self, percent):
"""
......
......@@ -482,23 +482,7 @@ class TestGetHtmlMethod(BaseTestXmodule):
# it'll just fall back to the values in the VideoDescriptor.
self.assertIn("example_source.mp4", self.item_descriptor.render(STUDENT_VIEW).content)
@patch('edxval.api.get_video_info')
def test_get_html_with_mocked_edx_video_id(self, mock_get_video_info):
mock_get_video_info.return_value = {
'url': '/edxval/video/example',
'edx_video_id': u'example',
'duration': 111.0,
'client_video_id': u'The example video',
'encoded_videos': [
{
'url': u'http://www.meowmix.com',
'file_size': 25556,
'bitrate': 9600,
'profile': u'desktop_mp4'
}
]
}
def test_get_html_with_mocked_edx_video_id(self):
SOURCE_XML = """
<video show_captions="true"
display_name="A Name"
......@@ -558,7 +542,23 @@ class TestGetHtmlMethod(BaseTestXmodule):
edx_video_id=data['edx_video_id']
)
self.initialize_module(data=DATA)
context = self.item_descriptor.render(STUDENT_VIEW).content
with patch('edxval.api.get_video_info') as mock_get_video_info:
mock_get_video_info.return_value = {
'url': '/edxval/video/example',
'edx_video_id': u'example',
'duration': 111.0,
'client_video_id': u'The example video',
'encoded_videos': [
{
'url': u'http://www.meowmix.com',
'file_size': 25556,
'bitrate': 9600,
'profile': u'desktop_mp4'
}
]
}
context = self.item_descriptor.render(STUDENT_VIEW).content
expected_context = dict(initial_context)
expected_context['metadata'].update({
......
......@@ -38,6 +38,7 @@ from instructor.views.api import require_global_staff
import shoppingcart
import survey.utils
import survey.views
from lms.djangoapps.ccx.utils import prep_course_for_grading
from certificates import api as certs_api
from openedx.core.djangoapps.models.course_details import CourseDetails
from commerce.utils import EcommerceService
......@@ -681,6 +682,7 @@ def _progress(request, course_key, student_id):
raise Http404
course = get_course_with_access(request.user, 'load', course_key, depth=None, check_if_enrolled=True)
prep_course_for_grading(course, request)
# check to see if there is a required survey that must be taken before
# the user can access the course.
......@@ -714,16 +716,8 @@ def _progress(request, course_key, student_id):
# additional DB lookup (this kills the Progress page in particular).
student = User.objects.prefetch_related("groups").get(id=student.id)
with outer_atomic():
field_data_cache = grades.field_data_cache_for_grading(course, student)
scores_client = ScoresClient.from_field_data_cache(field_data_cache)
courseware_summary = grades.progress_summary(
student, request, course, field_data_cache=field_data_cache, scores_client=scores_client
)
grade_summary = grades.grade(
student, request, course, field_data_cache=field_data_cache, scores_client=scores_client
)
courseware_summary = grades.progress_summary(student, course)
grade_summary = grades.grade(student, course)
studio_url = get_studio_url(course, 'settings/grading')
if courseware_summary is None:
......@@ -1056,7 +1050,7 @@ def is_course_passed(course, grade_summary=None, student=None, request=None):
success_cutoff = min(nonzero_cutoffs) if nonzero_cutoffs else None
if grade_summary is None:
grade_summary = grades.grade(student, request, course)
grade_summary = grades.grade(student, course)
return success_cutoff and grade_summary['percent'] >= success_cutoff
......
......@@ -63,7 +63,7 @@ Graded sections:
Listing grading context for course {}
graded sections:
[]
all descriptors:
all graded blocks:
length=0""".format(world.course_key)
assert_in(expected_config, world.css_text('#data-grade-config-text'))
......
......@@ -50,7 +50,7 @@ def offline_grade_calculation(course_key):
request.user = student
request.session = {}
gradeset = grades.grade(student, request, course, keep_raw_scores=True)
gradeset = grades.grade(student, course, keep_raw_scores=True)
# Convert Score namedtuples to dicts:
totaled_scores = gradeset['totaled_scores']
for section in totaled_scores:
......@@ -89,7 +89,7 @@ def student_grades(student, request, course, keep_raw_scores=False, use_offline=
as use_offline. If use_offline is True then this will look for an offline computed gradeset in the DB.
'''
if not use_offline:
return grades.grade(student, request, course, keep_raw_scores=keep_raw_scores)
return grades.grade(student, course, keep_raw_scores=keep_raw_scores)
try:
ocg = models.OfflineComputedGrade.objects.get(user=student, course_id=course.id)
......
......@@ -16,7 +16,7 @@ from xmodule.modulestore.tests.factories import CourseFactory
from ..offline_gradecalc import offline_grade_calculation, student_grades
def mock_grade(_student, _request, course, **_kwargs):
def mock_grade(_student, course, **_kwargs):
""" Return some fake grade data to mock grades.grade() """
return {
'grade': u'Pass',
......@@ -104,4 +104,4 @@ class TestOfflineGradeCalc(ModuleStoreTestCase):
offline_grade_calculation(self.course.id)
with patch('courseware.grades.grade', side_effect=AssertionError('Should not re-grade')):
result = student_grades(self.user, None, self.course, use_offline=True)
self.assertEqual(result, mock_grade(self.user, None, self.course))
self.assertEqual(result, mock_grade(self.user, self.course))
......@@ -24,6 +24,7 @@ from courseware.models import StudentModule
from certificates.models import GeneratedCertificate
from django.db.models import Count
from certificates.models import CertificateStatuses
from courseware.grades import grading_context_for_course
STUDENT_FEATURES = ('id', 'username', 'first_name', 'last_name', 'is_staff', 'email')
......@@ -490,14 +491,14 @@ def dump_grading_context(course):
msg += hbar
msg += "Listing grading context for course %s\n" % course.id.to_deprecated_string()
gcontext = course.grading_context
gcontext = grading_context_for_course(course)
msg += "graded sections:\n"
msg += '%s\n' % gcontext['graded_sections'].keys()
for (gsomething, gsvals) in gcontext['graded_sections'].items():
msg += '%s\n' % gcontext['all_graded_sections'].keys()
for (gsomething, gsvals) in gcontext['all_graded_sections'].items():
msg += "--> Section %s:\n" % (gsomething)
for sec in gsvals:
sdesc = sec['section_descriptor']
sdesc = sec['section_block']
frmat = getattr(sdesc, 'format', None)
aname = ''
if frmat in graders:
......@@ -512,7 +513,7 @@ def dump_grading_context(course):
notes = ', score by attempt!'
msg += " %s (format=%s, Assignment=%s%s)\n"\
% (sdesc.display_name, frmat, aname, notes)
msg += "all descriptors:\n"
msg += "length=%d\n" % len(gcontext['all_descriptors'])
msg += "all graded blocks:\n"
msg += "length=%d\n" % len(gcontext['all_graded_blocks'])
msg = '<pre>%s</pre>' % msg.replace('<', '&lt;')
return msg
......@@ -285,7 +285,7 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
user_b.username,
course.id,
cohort_name_header,
''
u'Default Group',
)
@patch('instructor_task.tasks_helper._get_current_task')
......@@ -685,7 +685,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
def test_problem_grade_report(self):
"""
Test that we generate the correct the correct grade report when dealing with A/B tests.
Test that we generate the correct grade report when dealing with A/B tests.
In order to verify that the behavior of the grade report is correct, we submit answers for problems
that the student won't have access to. A/B tests won't restrict access to the problems, but it should
......
"""
Utilities related to caching.
"""
import collections
import cPickle as pickle
import functools
import zlib
......@@ -40,6 +41,48 @@ def memoize_in_request_cache(request_cache_attr_name=None):
return _decorator
class memoized(object):  # pylint: disable=invalid-name
    """
    Decorator. Caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned
    (not reevaluated).

    https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize

    WARNING: Only use this memoized decorator for caching data that
    is constant throughout the lifetime of a gunicorn worker process,
    is costly to compute, and is required often.  Otherwise, it can lead to
    unwanted memory leakage.
    """
    def __init__(self, func):
        # The wrapped function and its per-decorator cache, keyed by the
        # positional-argument tuple.  (Keyword arguments are not supported.)
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        # EAFP: a single cache lookup both tests membership and detects
        # unhashable arguments.  The previous guard,
        # isinstance(args, collections.Hashable), was ineffective because
        # args is always a tuple -- which is an instance of Hashable even
        # when it contains unhashable elements such as lists -- so the
        # lookup still raised TypeError for those calls.
        try:
            return self.cache[args]
        except KeyError:
            value = self.func(*args)
            self.cache[args] = value
            return value
        except TypeError:
            # Uncacheable arguments (e.g. a list); better to not cache
            # than to blow up.
            return self.func(*args)

    def __repr__(self):
        """
        Return the function's docstring, falling back to a generic
        description when the function has none (__repr__ must return a
        string; the docstring of an undocumented function is None).
        """
        return self.func.__doc__ or 'memoized({!r})'.format(self.func)

    def __get__(self, obj, objtype):
        """
        Support instance methods by binding the instance as the first
        positional argument (descriptor protocol).
        """
        return functools.partial(self.__call__, obj)
def hashvalue(arg):
"""
If arg is an xblock, use its location. otherwise just turn it into a string
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment