Commit d9f3c70e authored by Adam, committed by GitHub

Merge pull request #13626 from edx/efischer/grades_rc_fix

Grades RC cleanup
parents 0ea8e1b9 bcaa873b
...@@ -10,13 +10,67 @@ import logging
import random
import sys
from collections import namedtuple
log = logging.getLogger("edx.courseware")
# This is a tuple for holding scores, either from problems or sections.
# Section either indicates the name of the problem or the name of the section
Score = namedtuple("Score", "earned possible graded section module_id")
class ScoreBase(object):
"""
Abstract base class encapsulating the fields of a score value.
Fields common to all scores include:
display_name (string) - the display name of the module
module_id (UsageKey) - the location of the module
graded (boolean) - whether or not this module is graded
"""
__metaclass__ = abc.ABCMeta
def __init__(self, graded, display_name, module_id):
self.graded = graded
self.display_name = display_name
self.module_id = module_id
def __eq__(self, other):
if type(other) is type(self):
return self.__dict__ == other.__dict__
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return u"{class_name}({fields})".format(class_name=self.__class__.__name__, fields=self.__dict__)
class ProblemScore(ScoreBase):
"""
Encapsulates the fields of a Problem's score.
In addition to the fields in ScoreBase, also includes:
raw_earned (float) - raw points earned on this problem
raw_possible (float) - raw points possible to earn on this problem
weighted_earned = earned (float) - weighted value of the points earned
weighted_possible = possible (float) - weighted possible points on this problem
weight (float) - weight of this problem
"""
def __init__(self, raw_earned, raw_possible, weighted_earned, weighted_possible, weight, *args, **kwargs):
super(ProblemScore, self).__init__(*args, **kwargs)
self.raw_earned = raw_earned
self.raw_possible = raw_possible
self.earned = weighted_earned
self.possible = weighted_possible
self.weight = weight
class AggregatedScore(ScoreBase):
"""
Encapsulates the fields of a Subsection's score.
In addition to the fields in ScoreBase, also includes:
tw_earned = earned - total aggregated sum of all weighted earned values
tw_possible = possible - total aggregated sum of all weighted possible values
"""
def __init__(self, tw_earned, tw_possible, *args, **kwargs):
super(AggregatedScore, self).__init__(*args, **kwargs)
self.earned = tw_earned
self.possible = tw_possible
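As a quick illustration of the value classes introduced above, a minimal sketch (module_id is shown as None instead of a real UsageKey, purely for illustration):

```python
# Sketch: ProblemScore keeps both raw and weighted values; AggregatedScore only
# keeps the weighted totals. Equality compares type and __dict__, per ScoreBase.
prob = ProblemScore(
    raw_earned=1.0, raw_possible=2.0,            # unweighted points
    weighted_earned=2.5, weighted_possible=5.0,  # after applying weight=5.0
    weight=5.0,
    graded=True, display_name=u"Problem 1", module_id=None,
)
agg = AggregatedScore(tw_earned=2.5, tw_possible=5.0,
                      graded=True, display_name=u"Subsection", module_id=None)

assert (prob.earned, prob.possible) == (2.5, 5.0)
assert agg == AggregatedScore(2.5, 5.0, True, u"Subsection", None)
```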
def float_sum(iterable):
...@@ -26,13 +80,14 @@ def float_sum(iterable):
return float(sum(iterable))
def aggregate_scores(scores, section_name="summary", location=None):
def aggregate_scores(scores, display_name="summary", location=None):
"""
scores: A list of Score objects
scores: A list of ScoreBase objects
display_name: The display name for the score object
location: The location under which all objects in scores are located
returns: A tuple (all_total, graded_total).
all_total: A Score representing the total score summed over all input scores
all_total: A ScoreBase representing the total score summed over all input scores
graded_total: A Score representing the score summed over all graded input scores
graded_total: A ScoreBase representing the score summed over all graded input scores
"""
total_correct_graded = float_sum(score.earned for score in scores if score.graded)
total_possible_graded = float_sum(score.possible for score in scores if score.graded)
...@@ -41,10 +96,10 @@ def aggregate_scores(scores, display_name="summary", location=None):
total_possible = float_sum(score.possible for score in scores)
#regardless of whether it is graded
all_total = Score(total_correct, total_possible, False, section_name, location)
all_total = AggregatedScore(total_correct, total_possible, False, display_name, location)
#selecting only graded things
graded_total = Score(total_correct_graded, total_possible_graded, True, section_name, location)
graded_total = AggregatedScore(total_correct_graded, total_possible_graded, True, display_name, location)
return all_total, graded_total
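A hedged sketch of how the renamed aggregate_scores behaves for a mix of graded and ungraded scores (ProblemScore arguments follow the class defined above; location is left at its default):

```python
# Sketch: two problems, only the first graded; totals are sums of the weighted values.
scores = [
    ProblemScore(1, 1, 1.0, 1.0, weight=None, graded=True,
                 display_name=u"p1", module_id=None),
    ProblemScore(0, 1, 0.0, 1.0, weight=None, graded=False,
                 display_name=u"p2", module_id=None),
]
all_total, graded_total = aggregate_scores(scores, display_name=u"summary")
assert (all_total.earned, all_total.possible) == (1.0, 2.0)        # every score
assert (graded_total.earned, graded_total.possible) == (1.0, 1.0)  # graded scores only
```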
...@@ -220,7 +275,7 @@ class SingleSectionGrader(CourseGrader):
found_score = None
if self.type in grade_sheet:
for score in grade_sheet[self.type]:
if score.section == self.name:
if score.display_name == self.name:
found_score = score
break
...@@ -342,7 +397,7 @@ class AssignmentFormatGrader(CourseGrader):
else:
earned = scores[i].earned
possible = scores[i].possible
section_name = scores[i].section
section_name = scores[i].display_name
percentage = earned / possible
summary_format = u"{section_type} {index} - {name} - {percent:.0%} ({earned:.3n}/{possible:.3n})"
......
...@@ -95,8 +95,8 @@ class StaffDebugPage(PageObject):
This delete's a student's state for the problem
"""
if user:
self.q(css='input[id^=sd_fu_]').fill(user)
self.q(css='input[id^=sd_fu_]').first.fill(user)
self.q(css='.staff-modal .staff-debug-sdelete').click()
self.q(css='.staff-modal .staff-debug-sdelete').first.click()
def rescore(self, user=None):
"""
......
...@@ -15,6 +15,7 @@ from ...pages.lms.courseware import CoursewarePage
from ...pages.lms.instructor_dashboard import InstructorDashboardPage
from ...pages.lms.problem import ProblemPage
from ...pages.lms.progress import ProgressPage
from ...pages.lms.staff_view import StaffPage, StaffDebugPage
from ...pages.studio.component_editor import ComponentEditorView
from ...pages.studio.utils import type_in_codemirror
from ...pages.studio.overview import CourseOutlinePage
...@@ -192,6 +193,22 @@ class PersistentGradesTest(ProgressPageBaseTest):
type_in_codemirror(self, 0, modified_content)
modal.q(css='.action-save').click()
def _delete_student_state_for_problem(self):
"""
As staff, clicks the "delete student state" button,
deleting the student user's state for the problem.
"""
with self._logged_in_session(staff=True):
self.courseware_page.visit()
staff_page = StaffPage(self.browser, self.course_id)
self.assertEqual(staff_page.staff_view_mode, "Staff")
staff_page.q(css='a.instructor-info-action').nth(1).click()
staff_debug_page = StaffDebugPage(self.browser)
staff_debug_page.wait_for_page()
staff_debug_page.delete_state(self.USERNAME)
msg = staff_debug_page.idash_msg[0]
self.assertEqual(u'Successfully deleted student state for user {0}'.format(self.USERNAME), msg)
@ddt.data( @ddt.data(
_edit_problem_content, _edit_problem_content,
_change_subsection_structure, _change_subsection_structure,
...@@ -223,6 +240,13 @@ class PersistentGradesTest(ProgressPageBaseTest): ...@@ -223,6 +240,13 @@ class PersistentGradesTest(ProgressPageBaseTest):
self.assertEqual(self._get_problem_scores(), [(1, 1), (0, 1)]) self.assertEqual(self._get_problem_scores(), [(1, 1), (0, 1)])
self.assertEqual(self._get_section_score(), (1, 2)) self.assertEqual(self._get_section_score(), (1, 2))
def test_progress_page_updates_when_student_state_deleted(self):
self._check_progress_page_with_scored_problem()
self._delete_student_state_for_problem()
with self._logged_in_session():
self.assertEqual(self._get_problem_scores(), [(0, 1), (0, 1)])
self.assertEqual(self._get_section_score(), (0, 2))
class SubsectionGradingPolicyTest(ProgressPageBaseTest): class SubsectionGradingPolicyTest(ProgressPageBaseTest):
""" """
......
<problem url_name="capa-optionresponse">
<optionresponse>
<optioninput options="('Correct', 'Incorrect')" correct="Correct"></optioninput>
<optioninput options="('Correct', 'Incorrect')" correct="Correct"></optioninput>
</optionresponse>
</problem>
<problem display_name="Exercise: apply to each 3" markdown="null" weight="5.0">
<text>
<p>
<b>ESTIMATED TIME TO COMPLETE: 4 minutes</b>
</p>
<pre>
&gt;&gt;&gt; print testList
[1, 16, 64, 81]
</pre>
</text>
<coderesponse queuename="Watcher-MITx-6.00x">
<textbox rows="10" cols="80" mode="python" tabsize="4"/>
<codeparam>
<initial_display>
# Your Code Here
</initial_display>
<answer_display>
def square(a):
return a * a
applyToEach(testList, square)
</answer_display>
<grader_payload>{"grader": "finger_exercises/L6/applyToEach3/grade_ate3.py"}</grader_payload>
</codeparam>
</coderesponse>
</problem>
<library_content display_name="Final Exam" has_score="true" max_count="25" source_library_id="library-v1:MSX+msx_cld213xfinalexam" source_library_version="577b5aca45064f068278faa0">
<problem/>
<problem/>
</library_content>
<lti launch_url="http://www.imsglobal.org/developers/LTI/test/v1p1/tool.php" lti_id="ims"/>
<openassessment url_name="0e2bbf6cc89e45d98b028fa4e2d46314" allow_file_upload="False">
<title></title>
<assessments>
<assessment name="peer-assessment" must_grade="1" must_be_graded_by="1"/>
<assessment name="self-assessment"/>
</assessments>
<rubric>
<prompt>
Censorship in the Libraries
'All of us can think of a book that we hope none of our children or any
other children have taken off the shelf. But if I have the right to remove
that book from the shelf -- that work I abhor -- then you also have exactly
the same right and so does everyone else. And then we have no books left on
the shelf for any of us.' --Katherine Paterson, Author
Write a persuasive essay to a newspaper reflecting your views on censorship
in libraries. Do you believe that certain materials, such as books, music,
movies, magazines, etc., should be removed from the shelves if they are
found offensive? Support your position with convincing arguments from your
own experience, observations, and/or reading.
Read for conciseness, clarity of thought, and form.
</prompt>
<criterion>
<name>Ideas</name>
<prompt>Determine if there is a unifying theme or main idea.</prompt>
<option points="0">
<name>Poor</name>
<explanation>
Difficult for the reader to discern the main idea.
Too brief or too repetitive to establish or maintain a focus.
</explanation>
</option>
<option points="3">
<name>Fair</name>
<explanation>
Presents a unifying theme or main idea, but may
include minor tangents. Stays somewhat focused on topic and
task.
</explanation>
</option>
<option points="5">
<name>Good</name>
<explanation>
Presents a unifying theme or main idea without going
off on tangents. Stays completely focused on topic and task.
</explanation>
</option>
</criterion>
<criterion>
<name>Content</name>
<prompt>Assess the content of the submission</prompt>
<option points="0">
<name>Poor</name>
<explanation>
Includes little information with few or no details or
unrelated details. Unsuccessful in attempts to explore any
facets of the topic.
</explanation>
</option>
<option points="1">
<name>Fair</name>
<explanation>
Includes little information and few or no details.
Explores only one or two facets of the topic.
</explanation>
</option>
<option points="3">
<name>Good</name>
<explanation>
Includes sufficient information and supporting
details. (Details may not be fully developed; ideas may be
listed.) Explores some facets of the topic.
</explanation>
</option>
<option points="3">
<name>Excellent</name>
<explanation>
Includes in-depth information and exceptional
supporting details that are fully developed. Explores all
facets of the topic.
</explanation>
</option>
</criterion>
</rubric>
</openassessment>
...@@ -166,8 +166,8 @@ class TestGatedContent(GatingTestCase, MilestonesTestCaseMixin):
"""
course_grade = CourseGradeFactory(user).create(self.course)
for prob in [self.gating_prob1, self.gated_prob2, self.prob3]:
self.assertIn(prob.location, course_grade.locations_to_weighted_scores)
self.assertIn(prob.location, course_grade.locations_to_scores)
self.assertNotIn(self.orphan.location, course_grade.locations_to_weighted_scores)
self.assertNotIn(self.orphan.location, course_grade.locations_to_scores)
self.assertEquals(course_grade.percent, expected_percent)
......
...@@ -27,7 +27,7 @@ BLOCK_RECORD_LIST_VERSION = 1
# Used to serialize information about a block at the time it was used in
# grade calculation.
BlockRecord = namedtuple('BlockRecord', ['locator', 'weight', 'max_score'])
BlockRecord = namedtuple('BlockRecord', ['locator', 'weight', 'raw_possible', 'graded'])
class BlockRecordList(tuple):
...@@ -98,7 +98,8 @@ class BlockRecordList(tuple):
BlockRecord(
locator=UsageKey.from_string(block["locator"]).replace(course_key=course_key),
weight=block["weight"],
max_score=block["max_score"],
raw_possible=block["raw_possible"],
graded=block["graded"],
)
for block in block_dicts
)
......
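For reference, a hedged sketch of how the widened BlockRecord serializes after this change (a plain string stands in for a real UsageKey, as the model tests below also allow):

```python
from collections import namedtuple

# Same shape as the BlockRecord defined above.
BlockRecord = namedtuple('BlockRecord', ['locator', 'weight', 'raw_possible', 'graded'])

record = BlockRecord(locator=u'some-block-key', weight=1, raw_possible=10, graded=True)
assert record._asdict() == {
    'locator': u'some-block-key',
    'weight': 1,
    'raw_possible': 10,
    'graded': True,
}
```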
...@@ -43,15 +43,15 @@ class CourseGrade(object):
return subsections_by_format
@lazy
def locations_to_weighted_scores(self):
def locations_to_scores(self):
"""
Returns a dict of problem scores keyed by their locations.
"""
locations_to_weighted_scores = {}
locations_to_scores = {}
for chapter in self.chapter_grades:
for subsection_grade in chapter['sections']:
locations_to_weighted_scores.update(subsection_grade.locations_to_weighted_scores)
locations_to_scores.update(subsection_grade.locations_to_scores)
return locations_to_weighted_scores
return locations_to_scores
@lazy
def grade_value(self):
...@@ -113,7 +113,7 @@ class CourseGrade(object):
grade_summary['percent'] = self.percent
grade_summary['grade'] = self.letter_grade
grade_summary['totaled_scores'] = self.subsection_grade_totals_by_format
grade_summary['raw_scores'] = list(self.locations_to_weighted_scores.itervalues())
grade_summary['raw_scores'] = list(self.locations_to_scores.itervalues())
return grade_summary
...@@ -141,7 +141,7 @@ class CourseGrade(object):
subsections_total = sum(len(x) for x in self.subsection_grade_totals_by_format.itervalues())
subsections_read = len(subsection_grade_factory._unsaved_subsection_grades)  # pylint: disable=protected-access
subsections_created = subsections_total - subsections_read
blocks_total = len(self.locations_to_weighted_scores)
blocks_total = len(self.locations_to_scores)
if not read_only:
subsection_grade_factory.bulk_create_unsaved()
...@@ -166,8 +166,8 @@ class CourseGrade(object):
composite module (a vertical or section ) the scores will be the sums of
all scored problems that are children of the chosen location.
"""
if location in self.locations_to_weighted_scores:
if location in self.locations_to_scores:
score, _ = self.locations_to_weighted_scores[location]
score = self.locations_to_scores[location]
return score.earned, score.possible
children = self.course_structure.get_children(location)
earned = 0.0
......
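The recursive fallback in score_for_module is truncated in the hunk above; a minimal standalone sketch of the aggregation it describes, with the score mapping and child lookup passed in explicitly as assumptions:

```python
def score_for_module_sketch(location, locations_to_scores, get_children):
    """
    Sketch: leaf blocks return their own (earned, possible); composite blocks
    (verticals, sections) sum the scores of their children recursively.
    """
    if location in locations_to_scores:
        score = locations_to_scores[location]
        return score.earned, score.possible
    earned, possible = 0.0, 0.0
    for child in get_children(location):
        child_earned, child_possible = score_for_module_sketch(
            child, locations_to_scores, get_children
        )
        earned += child_earned
        possible += child_possible
    return earned, possible
```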
...@@ -11,12 +11,11 @@ from courseware.model_data import ScoresClient
from lms.djangoapps.grades.scores import get_score, possibly_scored
from lms.djangoapps.grades.models import BlockRecord, PersistentSubsectionGrade
from lms.djangoapps.grades.config.models import PersistentGradesEnabledFlag
from lms.djangoapps.grades.transformer import GradesTransformer
from student.models import anonymous_id_for_user, User
from submissions import api as submissions_api
from traceback import format_exc
from xmodule import block_metadata_utils, graders
from xmodule.graders import Score
from xmodule.graders import AggregatedScore
log = getLogger(__name__)
...@@ -54,62 +53,47 @@ class SubsectionGrade(object):
self.graded_total = None  # aggregated grade for all graded problems
self.all_total = None  # aggregated grade for all problems, regardless of whether they are graded
self.locations_to_weighted_scores = OrderedDict()  # dict of problem locations to (Score, weight) tuples
self.locations_to_scores = OrderedDict()  # dict of problem locations to ProblemScore
self._scores = None
@property
def scores(self):
"""
List of all problem scores in the subsection.
"""
if self._scores is None:
self._scores = [score for score, _ in self.locations_to_weighted_scores.itervalues()]
return self._scores
return self.locations_to_scores.values()
def init_from_structure(self, student, course_structure, scores_client, submissions_scores):
def init_from_structure(self, student, course_structure, submissions_scores, csm_scores):
"""
Compute the grade of this subsection for the given student and course.
"""
assert self._scores is None
for descendant_key in course_structure.post_order_traversal(
filter_func=possibly_scored,
start_node=self.location,
):
self._compute_block_score(
student, descendant_key, course_structure, scores_client, submissions_scores, persisted_values={},
)
self._compute_block_score(descendant_key, course_structure, submissions_scores, csm_scores)
self.all_total, self.graded_total = graders.aggregate_scores(self.scores, self.display_name, self.location)
self._log_event(log.debug, u"init_from_structure", student)
def init_from_model(self, student, model, course_structure, scores_client, submissions_scores):
def init_from_model(self, student, model, course_structure, submissions_scores, csm_scores):
"""
Load the subsection grade from the persisted model.
"""
assert self._scores is None
for block in model.visible_blocks.blocks:
persisted_values = {'weight': block.weight, 'possible': block.max_score}
self._compute_block_score(
student,
block.locator,
course_structure,
scores_client,
submissions_scores,
persisted_values
)
self._compute_block_score(block.locator, course_structure, submissions_scores, csm_scores, block)
self.graded_total = Score(
earned=model.earned_graded,
possible=model.possible_graded,
graded=True,
section=self.display_name,
module_id=self.location,
)
self.graded_total = AggregatedScore(
tw_earned=model.earned_graded,
tw_possible=model.possible_graded,
graded=True,
display_name=self.display_name,
module_id=self.location,
)
self.all_total = Score(
earned=model.earned_all,
possible=model.possible_all,
graded=False,
section=self.display_name,
module_id=self.location,
)
self.all_total = AggregatedScore(
tw_earned=model.earned_all,
tw_possible=model.possible_all,
graded=False,
display_name=self.display_name,
module_id=self.location,
)
self._log_event(log.debug, u"init_from_model", student)
...@@ -128,24 +112,23 @@ class SubsectionGrade(object):
"""
Saves the subsection grade in a persisted model.
"""
self._log_event(log.info, u"create_model", student)
self._log_event(log.debug, u"create_model", student)
return PersistentSubsectionGrade.create_grade(**self._persisted_model_params(student))
def update_or_create_model(self, student):
"""
Saves or updates the subsection grade in a persisted model.
"""
self._log_event(log.info, u"update_or_create_model", student)
self._log_event(log.debug, u"update_or_create_model", student)
return PersistentSubsectionGrade.update_or_create_grade(**self._persisted_model_params(student))
def _compute_block_score(
self,
student,
block_key,
course_structure,
scores_client,
submissions_scores,
persisted_values,
csm_scores,
persisted_block=None,
):
"""
Compute score for the given block. If persisted_values
...@@ -154,54 +137,14 @@ class SubsectionGrade(object):
block = course_structure[block_key]
if getattr(block, 'has_score', False):
possible = persisted_values.get('possible', None)
weight = persisted_values.get('weight', getattr(block, 'weight', None))
(earned, possible) = get_score(
student,
block,
scores_client,
submissions_scores,
weight,
possible,
)
if earned is not None or possible is not None:
# There's a chance that the value of graded is not the same
# value when the problem was scored. Since we get the value
# from the block_structure.
#
# Cannot grade a problem with a denominator of 0.
# TODO: None > 0 is not python 3 compatible.
block_graded = self._get_explicit_graded(block, course_structure) if possible > 0 else False
self.locations_to_weighted_scores[block.location] = (
Score(
earned,
possible,
block_graded,
block_metadata_utils.display_name_with_default_escaped(block),
block.location,
),
weight,
)
def _get_explicit_graded(self, block, course_structure):
"""
Returns the explicit graded field value for the given block
"""
field_value = course_structure.get_transformer_block_field(
block.location,
GradesTransformer,
GradesTransformer.EXPLICIT_GRADED_FIELD_NAME
)
# Set to True if grading is not explicitly disabled for
# this block. This allows us to include the block's score
# in the aggregated self.graded_total, regardless of the
# inherited graded value from the subsection. (TNL-5560)
return True if field_value is None else field_value
problem_score = get_score(
submissions_scores,
csm_scores,
persisted_block,
block,
)
if problem_score:
self.locations_to_scores[block_key] = problem_score
def _persisted_model_params(self, student):
"""
...@@ -226,9 +169,9 @@ class SubsectionGrade(object):
Returns the list of visible blocks.
"""
return [
BlockRecord(location, weight, score.possible)
for location, (score, weight) in
self.locations_to_weighted_scores.iteritems()
BlockRecord(location, score.weight, score.raw_possible, score.graded)
for location, score in
self.locations_to_scores.iteritems()
]
def _log_event(self, log_func, log_statement, student):
...@@ -283,7 +226,7 @@ class SubsectionGradeFactory(object):
if not subsection_grade:
subsection_grade = SubsectionGrade(subsection, self.course)
subsection_grade.init_from_structure(
self.student, block_structure, self._scores_client, self._submissions_scores
self.student, block_structure, self._submissions_scores, self._csm_scores,
)
if PersistentGradesEnabledFlag.feature_enabled(self.course.id):
if read_only:
...@@ -313,7 +256,7 @@ class SubsectionGradeFactory(object):
block_structure = self._get_block_structure(block_structure)
subsection_grade = SubsectionGrade(subsection, self.course)
subsection_grade.init_from_structure(
self.student, block_structure, self._scores_client, self._submissions_scores
self.student, block_structure, self._submissions_scores, self._csm_scores
)
if PersistentGradesEnabledFlag.feature_enabled(self.course.id):
...@@ -323,7 +266,7 @@ class SubsectionGradeFactory(object):
return subsection_grade
@lazy
def _scores_client(self):
def _csm_scores(self):
"""
Lazily queries and returns all the scores stored in the user
state (in CSM) for the course, while caching the result.
...@@ -351,7 +294,7 @@ class SubsectionGradeFactory(object):
if saved_subsection_grade:
subsection_grade = SubsectionGrade(subsection, self.course)
subsection_grade.init_from_model(
self.student, saved_subsection_grade, block_structure, self._scores_client, self._submissions_scores
self.student, saved_subsection_grade, block_structure, self._submissions_scores, self._csm_scores,
)
return subsection_grade
......
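Taken together, a hedged sketch of the calling convention after this refactor: the factory now pulls CSM and submissions scores lazily itself, so callers only supply the student, course, and block structure (variable names here mirror the tests below and are assumed to exist):

```python
subsection_grade_factory = SubsectionGradeFactory(student, course, course_structure)

grade = subsection_grade_factory.create(subsection)  # read-through; may persist
fresh = subsection_grade_factory.update(subsection)  # recompute and save

print(grade.all_total.earned, grade.all_total.possible)
print(fresh.graded_total.earned, fresh.graded_total.possible)
```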
...@@ -2,14 +2,21 @@ ...@@ -2,14 +2,21 @@
Test grade calculation. Test grade calculation.
""" """
import ddt
from django.http import Http404 from django.http import Http404
import itertools
from mock import patch from mock import patch
from nose.plugins.attrib import attr from nose.plugins.attrib import attr
from opaque_keys.edx.locations import SlashSeparatedCourseKey from opaque_keys.edx.locations import SlashSeparatedCourseKey
from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory
from courseware.tests.helpers import get_request_for_user from courseware.tests.helpers import get_request_for_user
from lms.djangoapps.course_blocks.api import get_course_blocks
from student.tests.factories import UserFactory from student.tests.factories import UserFactory
from student.models import CourseEnrollment from student.models import CourseEnrollment
from xmodule.block_metadata_utils import display_name_with_default_escaped
from xmodule.graders import ProblemScore
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
...@@ -17,6 +24,7 @@ from .utils import answer_problem ...@@ -17,6 +24,7 @@ from .utils import answer_problem
from .. import course_grades from .. import course_grades
from ..course_grades import summary as grades_summary from ..course_grades import summary as grades_summary
from ..new.course_grade import CourseGradeFactory from ..new.course_grade import CourseGradeFactory
from ..new.subsection_grade import SubsectionGradeFactory
def _grade_with_errors(student, course): def _grade_with_errors(student, course):
...@@ -33,6 +41,17 @@ def _grade_with_errors(student, course): ...@@ -33,6 +41,17 @@ def _grade_with_errors(student, course):
return grades_summary(student, course) return grades_summary(student, course)
def _create_problem_xml():
"""
Creates and returns XML for a multiple choice response problem
"""
return MultipleChoiceResponseXMLFactory().build_xml(
question_text='The correct answer is Choice 3',
choices=[False, False, True, False],
choice_names=['choice_0', 'choice_1', 'choice_2', 'choice_3']
)
@attr(shard=1) @attr(shard=1)
class TestGradeIteration(SharedModuleStoreTestCase): class TestGradeIteration(SharedModuleStoreTestCase):
""" """
...@@ -134,6 +153,101 @@ class TestGradeIteration(SharedModuleStoreTestCase): ...@@ -134,6 +153,101 @@ class TestGradeIteration(SharedModuleStoreTestCase):
return students_to_gradesets, students_to_errors return students_to_gradesets, students_to_errors
@ddt.ddt
class TestWeightedProblems(SharedModuleStoreTestCase):
"""
Test scores and grades with various problem weight values.
"""
@classmethod
def setUpClass(cls):
super(TestWeightedProblems, cls).setUpClass()
cls.course = CourseFactory.create()
cls.chapter = ItemFactory.create(parent=cls.course, category="chapter", display_name="chapter")
cls.sequential = ItemFactory.create(parent=cls.chapter, category="sequential", display_name="sequential")
cls.vertical = ItemFactory.create(parent=cls.sequential, category="vertical", display_name="vertical1")
problem_xml = _create_problem_xml()
cls.problems = []
for i in range(2):
cls.problems.append(
ItemFactory.create(
parent=cls.vertical,
category="problem",
display_name="problem_{}".format(i),
data=problem_xml,
)
)
def setUp(self):
super(TestWeightedProblems, self).setUp()
self.user = UserFactory()
self.request = get_request_for_user(self.user)
def _verify_grades(self, raw_earned, raw_possible, weight, expected_score):
"""
Verifies the computed grades are as expected.
"""
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
# pylint: disable=no-member
for problem in self.problems:
problem.weight = weight
self.store.update_item(problem, self.user.id)
self.store.publish(self.course.location, self.user.id)
course_structure = get_course_blocks(self.request.user, self.course.location)
# answer all problems
for problem in self.problems:
answer_problem(self.course, self.request, problem, score=raw_earned, max_value=raw_possible)
# get grade
subsection_grade = SubsectionGradeFactory(
self.request.user, self.course, course_structure
).update(self.sequential)
# verify all problem grades
for problem in self.problems:
problem_score = subsection_grade.locations_to_scores[problem.location]
expected_score.display_name = display_name_with_default_escaped(problem)
expected_score.module_id = problem.location
self.assertEquals(problem_score, expected_score)
# verify subsection grades
self.assertEquals(subsection_grade.all_total.earned, expected_score.earned * len(self.problems))
self.assertEquals(subsection_grade.all_total.possible, expected_score.possible * len(self.problems))
@ddt.data(
*itertools.product(
(0.0, 0.5, 1.0, 2.0), # raw_earned
(-2.0, -1.0, 0.0, 0.5, 1.0, 2.0), # raw_possible
(-2.0, -1.0, -0.5, 0.0, 0.5, 1.0, 2.0, 50.0, None), # weight
)
)
@ddt.unpack
def test_problem_weight(self, raw_earned, raw_possible, weight):
use_weight = weight is not None and raw_possible != 0
if use_weight:
expected_w_earned = raw_earned / raw_possible * weight
expected_w_possible = weight
else:
expected_w_earned = raw_earned
expected_w_possible = raw_possible
expected_graded = expected_w_possible > 0
expected_score = ProblemScore(
raw_earned=raw_earned,
raw_possible=raw_possible,
weighted_earned=expected_w_earned,
weighted_possible=expected_w_possible,
weight=weight,
graded=expected_graded,
display_name=None, # problem-specific, filled in by _verify_grades
module_id=None, # problem-specific, filled in by _verify_grades
)
self._verify_grades(raw_earned, raw_possible, weight, expected_score)
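The expected values above come from the usual weight scaling; a minimal sketch of that arithmetic, under the same assumption that weight only applies when it is not None and raw_possible is nonzero:

```python
def expected_weighted(raw_earned, raw_possible, weight):
    """Sketch of the weighting rule exercised by test_problem_weight."""
    if weight is None or raw_possible == 0:
        return raw_earned, raw_possible  # weight is not applied
    return raw_earned / raw_possible * weight, weight

# e.g. 1 of 2 raw points on a problem weighted 5.0 -> 2.5 of 5.0 weighted points
assert expected_weighted(1.0, 2.0, 5.0) == (2.5, 5.0)
```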
class TestScoreForModule(SharedModuleStoreTestCase): class TestScoreForModule(SharedModuleStoreTestCase):
""" """
Test the method that calculates the score for a given block based on the Test the method that calculates the score for a given block based on the
......
...@@ -71,8 +71,8 @@ class GradesModelTestCase(TestCase):
block_type='problem',
block_id='block_id_b'
)
self.record_a = BlockRecord(locator=self.locator_a, weight=1, max_score=10)
self.record_a = BlockRecord(locator=self.locator_a, weight=1, raw_possible=10, graded=False)
self.record_b = BlockRecord(locator=self.locator_b, weight=1, max_score=10)
self.record_b = BlockRecord(locator=self.locator_b, weight=1, raw_possible=10, graded=True)
@ddt.ddt
...@@ -88,29 +88,31 @@ class BlockRecordTest(GradesModelTestCase):
Tests creation of a BlockRecord.
"""
weight = 1
max_score = 10
raw_possible = 10
record = BlockRecord(
self.locator_a,
weight,
max_score,
raw_possible,
graded=False,
)
self.assertEqual(record.locator, self.locator_a)
@ddt.data(
(0, 0, "0123456789abcdef"),
(0, 0, "0123456789abcdef", True),
(1, 10, 'totally_a_real_block_key'),
(1, 10, 'totally_a_real_block_key', False),
("BlockRecord is", "a dumb data store", "with no validation"),
("BlockRecord is", "a dumb data store", "with no validation", None),
)
@ddt.unpack
def test_serialization(self, weight, max_score, block_key):
def test_serialization(self, weight, raw_possible, block_key, graded):
"""
Tests serialization of a BlockRecord using the _asdict() method.
"""
record = BlockRecord(block_key, weight, max_score)
record = BlockRecord(block_key, weight, raw_possible, graded)
expected = OrderedDict([
("locator", block_key),
("weight", weight),
("max_score", max_score),
("raw_possible", raw_possible),
("graded", graded),
])
self.assertEqual(expected, record._asdict())
...@@ -134,7 +136,12 @@ class VisibleBlocksTest(GradesModelTestCase):
for block_dict in list_of_block_dicts:
block_dict['locator'] = unicode(block_dict['locator'])  # BlockUsageLocator is not json-serializable
expected_data = {
'blocks': [{'locator': unicode(self.record_a.locator), 'max_score': 10, 'weight': 1}],
'blocks': [{
'locator': unicode(self.record_a.locator),
'raw_possible': 10,
'weight': 1,
'graded': self.record_a.graded,
}],
'course_key': unicode(self.record_a.locator.course_key),
'version': BLOCK_RECORD_LIST_VERSION,
}
......
""" """
Test saved subsection grade functionality. Test saved subsection grade functionality.
""" """
# pylint: disable=protected-access
import datetime
import ddt import ddt
from django.conf import settings from django.conf import settings
from django.db.utils import DatabaseError from django.db.utils import DatabaseError
from mock import patch from mock import patch
import pytz
from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory
from courseware.tests.helpers import get_request_for_user from courseware.tests.helpers import get_request_for_user
from courseware.tests.test_submitting_problems import ProblemSubmissionTestMixin
from lms.djangoapps.course_blocks.api import get_course_blocks from lms.djangoapps.course_blocks.api import get_course_blocks
from lms.djangoapps.grades.config.tests.utils import persistent_grades_feature_flags from lms.djangoapps.grades.config.tests.utils import persistent_grades_feature_flags
from openedx.core.lib.xblock_utils.test_utils import add_xml_block_from_file
from student.models import CourseEnrollment from student.models import CourseEnrollment
from student.tests.factories import UserFactory from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from ..models import PersistentSubsectionGrade from ..models import PersistentSubsectionGrade
from ..new.course_grade import CourseGradeFactory from ..new.course_grade import CourseGradeFactory
from ..new.subsection_grade import SubsectionGrade, SubsectionGradeFactory from ..new.subsection_grade import SubsectionGrade, SubsectionGradeFactory
from lms.djangoapps.grades.tests.utils import mock_get_score from .utils import mock_get_score
class GradeTestBase(SharedModuleStoreTestCase): class GradeTestBase(SharedModuleStoreTestCase):
...@@ -116,7 +122,7 @@ class SubsectionGradeFactoryTest(GradeTestBase): ...@@ -116,7 +122,7 @@ class SubsectionGradeFactoryTest(GradeTestBase):
) as mock_create_grade: ) as mock_create_grade:
with patch( with patch(
'lms.djangoapps.grades.new.subsection_grade.SubsectionGradeFactory._get_saved_grade', 'lms.djangoapps.grades.new.subsection_grade.SubsectionGradeFactory._get_saved_grade',
wraps=self.subsection_grade_factory._get_saved_grade # pylint: disable=protected-access wraps=self.subsection_grade_factory._get_saved_grade
) as mock_get_saved_grade: ) as mock_get_saved_grade:
with self.assertNumQueries(14): with self.assertNumQueries(14):
grade_a = self.subsection_grade_factory.create(self.sequence) grade_a = self.subsection_grade_factory.create(self.sequence)
...@@ -205,8 +211,8 @@ class SubsectionGradeTest(GradeTestBase): ...@@ -205,8 +211,8 @@ class SubsectionGradeTest(GradeTestBase):
input_grade.init_from_structure( input_grade.init_from_structure(
self.request.user, self.request.user,
self.course_structure, self.course_structure,
self.subsection_grade_factory._scores_client, # pylint: disable=protected-access self.subsection_grade_factory._submissions_scores,
self.subsection_grade_factory._submissions_scores, # pylint: disable=protected-access self.subsection_grade_factory._csm_scores,
) )
self.assertEqual(PersistentSubsectionGrade.objects.count(), 0) self.assertEqual(PersistentSubsectionGrade.objects.count(), 0)
...@@ -224,9 +230,154 @@ class SubsectionGradeTest(GradeTestBase): ...@@ -224,9 +230,154 @@ class SubsectionGradeTest(GradeTestBase):
self.request.user, self.request.user,
saved_model, saved_model,
self.course_structure, self.course_structure,
self.subsection_grade_factory._scores_client, # pylint: disable=protected-access self.subsection_grade_factory._submissions_scores,
self.subsection_grade_factory._submissions_scores, # pylint: disable=protected-access self.subsection_grade_factory._csm_scores,
) )
self.assertEqual(input_grade.url_name, loaded_grade.url_name) self.assertEqual(input_grade.url_name, loaded_grade.url_name)
self.assertEqual(input_grade.all_total, loaded_grade.all_total) self.assertEqual(input_grade.all_total, loaded_grade.all_total)
@ddt.ddt
class TestMultipleProblemTypesSubsectionScores(ModuleStoreTestCase, ProblemSubmissionTestMixin):
"""
Test grading of different problem types.
"""
default_problem_metadata = {
u'graded': True,
u'weight': 2.5,
u'max_score': 7.0,
u'due': datetime.datetime(2099, 3, 15, 12, 30, 0, tzinfo=pytz.utc),
}
COURSE_NAME = u'Problem Type Test Course'
COURSE_NUM = u'probtype'
def setUp(self):
super(TestMultipleProblemTypesSubsectionScores, self).setUp()
password = u'test'
self.student = UserFactory.create(is_staff=False, username=u'test_student', password=password)
self.client.login(username=self.student.username, password=password)
self.request = get_request_for_user(self.student)
self.course = CourseFactory.create(
display_name=self.COURSE_NAME,
number=self.COURSE_NUM
)
self.chapter = ItemFactory.create(
parent=self.course,
category=u'chapter',
display_name=u'Test Chapter'
)
self.seq1 = ItemFactory.create(
parent=self.chapter,
category=u'sequential',
display_name=u'Test Sequential 1',
graded=True
)
self.vert1 = ItemFactory.create(
parent=self.seq1,
category=u'vertical',
display_name=u'Test Vertical 1'
)
def _get_fresh_subsection_score(self, course_structure, subsection):
"""
Return a Score object for the specified subsection.
Ensures that a stale cached value is not returned.
"""
subsection_factory = SubsectionGradeFactory(
self.student,
course_structure=course_structure,
course=self.course,
)
return subsection_factory.update(subsection)
def _get_altered_metadata(self, alterations):
"""
Returns a copy of the default_problem_metadata dict updated with the
specified alterations.
"""
metadata = self.default_problem_metadata.copy()
metadata.update(alterations)
return metadata
def _get_score_with_alterations(self, alterations):
"""
Given a dict of alterations to the default_problem_metadata, return
the score when one correct problem (out of two) is submitted.
"""
metadata = self._get_altered_metadata(alterations)
add_xml_block_from_file(u'problem', u'capa.xml', parent=self.vert1, metadata=metadata)
course_structure = get_course_blocks(self.student, self.course.location)
self.submit_question_answer(u'problem', {u'2_1': u'Correct'})
return self._get_fresh_subsection_score(course_structure, self.seq1)
def test_score_submission_for_capa_problems(self):
add_xml_block_from_file(u'problem', u'capa.xml', parent=self.vert1, metadata=self.default_problem_metadata)
course_structure = get_course_blocks(self.student, self.course.location)
score = self._get_fresh_subsection_score(course_structure, self.seq1)
self.assertEqual(score.all_total.earned, 0.0)
self.assertEqual(score.all_total.possible, 2.5)
self.submit_question_answer(u'problem', {u'2_1': u'Correct'})
score = self._get_fresh_subsection_score(course_structure, self.seq1)
self.assertEqual(score.all_total.earned, 1.25)
self.assertEqual(score.all_total.possible, 2.5)
@ddt.data(
(u'openassessment', u'openassessment.xml'),
(u'coderesponse', u'coderesponse.xml'),
(u'lti', u'lti.xml'),
(u'library_content', u'library_content.xml'),
)
@ddt.unpack
def test_loading_different_problem_types(self, block_type, filename):
"""
Test that transformation works for various block types
"""
metadata = self.default_problem_metadata.copy()
if block_type == u'library_content':
# Library content does not have a weight
del metadata[u'weight']
add_xml_block_from_file(block_type, filename, parent=self.vert1, metadata=metadata)
@ddt.data(
({}, 1.25, 2.5),
({u'weight': 27}, 13.5, 27),
({u'weight': 1.0}, 0.5, 1.0),
({u'weight': 0.0}, 0.0, 0.0),
({u'weight': None}, 1.0, 2.0),
)
@ddt.unpack
def test_weight_metadata_alterations(self, alterations, expected_earned, expected_possible):
score = self._get_score_with_alterations(alterations)
self.assertEqual(score.all_total.earned, expected_earned)
self.assertEqual(score.all_total.possible, expected_possible)
@ddt.data(
({u'graded': True}, 1.25, 2.5),
({u'graded': False}, 0.0, 0.0),
)
@ddt.unpack
def test_graded_metadata_alterations(self, alterations, expected_earned, expected_possible):
score = self._get_score_with_alterations(alterations)
self.assertEqual(score.graded_total.earned, expected_earned)
self.assertEqual(score.graded_total.possible, expected_possible)
@ddt.data(
{u'max_score': 99.3},
{u'max_score': 1.0},
{u'max_score': 0.0},
{u'max_score': None},
)
def test_max_score_does_not_change_results(self, alterations):
expected_earned = 1.25
expected_possible = 2.5
score = self._get_score_with_alterations(alterations)
self.assertEqual(score.all_total.earned, expected_earned)
self.assertEqual(score.all_total.possible, expected_possible)
...@@ -5,6 +5,7 @@ from contextlib import contextmanager
from mock import patch
from courseware.module_render import get_module
from courseware.model_data import FieldDataCache
from xmodule.graders import ProblemScore
@contextmanager
...@@ -23,7 +24,7 @@ def mock_get_score(earned=0, possible=1):
Mocks the get_score function to return a valid grade.
"""
with patch('lms.djangoapps.grades.new.subsection_grade.get_score') as mock_score:
mock_score.return_value = (earned, possible)
mock_score.return_value = ProblemScore(earned, possible, earned, possible, 1, True, None, None)
yield mock_score
......
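A hedged usage sketch of this helper in a test; `subsection_grade_factory` and `sequence` are assumed to come from a GradeTestBase-style setup:

```python
# Sketch: inside the context manager every scored block reports 3 earned / 7
# possible, so a subsection containing a single problem aggregates to 3/7.
with mock_get_score(3, 7):
    grade = subsection_grade_factory.update(sequence)
    print(grade.all_total.earned, grade.all_total.possible)
```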
...@@ -3,6 +3,7 @@ Grades Transformer
"""
from django.test.client import RequestFactory
from functools import reduce as functools_reduce
from logging import getLogger
from courseware.model_data import FieldDataCache
from courseware.module_render import get_module_for_descriptor
...@@ -11,6 +12,9 @@ from openedx.core.lib.block_structure.transformer import BlockStructureTransform
from openedx.core.djangoapps.util.user_utils import SystemUser
log = getLogger(__name__)
class GradesTransformer(BlockStructureTransformer):
"""
The GradesTransformer collects grading information and stores it on
...@@ -119,8 +123,10 @@ class GradesTransformer(BlockStructureTransformer):
Collect the `max_score` from the given module, storing it as a
`transformer_block_field` associated with the `GradesTransformer`.
"""
score = module.max_score()
max_score = module.max_score()
block_structure.set_transformer_block_field(module.location, cls, 'max_score', score)
block_structure.set_transformer_block_field(module.location, cls, 'max_score', max_score)
if max_score is None:
log.warning("GradesTransformer: max_score is None for {}".format(module.location))
@staticmethod
def _iter_scorable_xmodules(block_structure):
......
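A hedged sketch of how the collected field is read back elsewhere; the getter mirrors the set_transformer_block_field call above, and `block_structure` and `location` are assumed to be in scope:

```python
# Sketch: read the max_score stored by collect_max_scores for a given block.
max_score = block_structure.get_transformer_block_field(
    location, GradesTransformer, 'max_score'
)
if max_score is None:
    log.warning(u"max_score was not collected for %s", location)
```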
...@@ -4,6 +4,7 @@ Enrollment operations for use by instructor APIs.
Does not include any access control, be sure to check access before calling.
"""
import crum
import json
import logging
from django.contrib.auth.models import User
...@@ -13,11 +14,14 @@ from django.core.mail import send_mail
from django.utils.translation import override as override_language
from course_modes.models import CourseMode
from student.models import CourseEnrollment, CourseEnrollmentAllowed
from courseware.model_data import FieldDataCache
from courseware.module_render import get_module_for_descriptor
from courseware.models import StudentModule
from edxmako.shortcuts import render_to_string
from lms.djangoapps.grades.scores import weighted_score
from lms.djangoapps.grades.signals.signals import SCORE_CHANGED
from lang_pref import LANGUAGE_KEY
from student.models import CourseEnrollment, CourseEnrollmentAllowed
from submissions import api as sub_api  # installed from the edx-submissions repository
from student.models import anonymous_id_for_user
from openedx.core.djangoapps.user_api.models import UserPreference
...@@ -245,6 +249,7 @@ def reset_student_attempts(course_id, student, module_state_key, requesting_user
)
submission_cleared = True
except ItemNotFoundError:
block = None
log.warning("Could not find %s in modulestore when attempting to reset attempts.", module_state_key)
# Reset the student's score in the submissions API, if xblock.clear_student_state has not done so already.
...@@ -267,6 +272,7 @@ def reset_student_attempts(course_id, student, module_state_key, requesting_user
if delete_module:
module_to_reset.delete()
_fire_score_changed_for_block(course_id, student, block, module_state_key)
else:
_reset_module_attempts(module_to_reset)
...@@ -287,6 +293,43 @@ def _reset_module_attempts(studentmodule):
studentmodule.save()
def _fire_score_changed_for_block(course_id, student, block, module_state_key):
"""
Fires a SCORE_CHANGED event for the given module. The earned points are
always zero. We must retrieve the possible points from the XModule, as
noted below.
"""
if block and block.has_score:
cache = FieldDataCache.cache_for_descriptor_descendents(
course_id=course_id,
user=student,
descriptor=block,
depth=0
)
# For implementation reasons, we need to pull the max_score from the XModule,
# even though the data is not user-specific. Here we bind the data to the
# current user.
request = crum.get_current_request()
module = get_module_for_descriptor(
user=student,
request=request,
descriptor=block,
field_data_cache=cache,
course_key=course_id
)
points_earned, points_possible = weighted_score(0, module.max_score(), getattr(module, 'weight', None))
else:
points_earned, points_possible = 0, 0
SCORE_CHANGED.send(
sender=None,
points_possible=points_possible,
points_earned=points_earned,
user=student,
course_id=unicode(course_id),
usage_id=unicode(module_state_key)
)
def get_email_params(course, auto_enroll, secure=True, course_key=None, display_name=None): def get_email_params(course, auto_enroll, secure=True, course_key=None, display_name=None):
""" """
Generate parameters used when parsing email templates. Generate parameters used when parsing email templates.
......
...@@ -3190,7 +3190,8 @@ class TestInstructorAPIRegradeTask(SharedModuleStoreTestCase, LoginEnrollmentTes
})
self.assertEqual(response.status_code, 400)
def test_reset_student_attempts_delete(self):
@patch('courseware.module_render.SCORE_CHANGED.send')
def test_reset_student_attempts_delete(self, _mock_signal):
""" Test delete single student state. """
url = reverse('reset_student_attempts', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {
......
...@@ -8,6 +8,7 @@ import mock
from mock import patch
from abc import ABCMeta
from courseware.models import StudentModule
from courseware.tests.helpers import get_request_for_user
from django.conf import settings
from django.utils.translation import get_language
from django.utils.translation import override as override_language
...@@ -16,7 +17,11 @@ from ccx_keys.locator import CCXLocator
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory
from grades.new.subsection_grade import SubsectionGradeFactory
from grades.tests.utils import answer_problem
from lms.djangoapps.ccx.tests.factories import CcxFactory
from lms.djangoapps.course_blocks.api import get_course_blocks
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase
from student.models import CourseEnrollment, CourseEnrollmentAllowed
from student.roles import CourseCcxCoachRole
...@@ -373,7 +378,8 @@ class TestInstructorEnrollmentStudentModule(SharedModuleStoreTestCase):
reset_student_attempts(self.course_key, self.user, msk, requesting_user=self.user)
self.assertEqual(json.loads(module().state)['attempts'], 0)
def test_delete_student_attempts(self):
@mock.patch('courseware.module_render.SCORE_CHANGED.send')
def test_delete_student_attempts(self, _mock_signal):
msk = self.course_key.make_usage_key('dummy', 'module')
original_state = json.dumps({'attempts': 32, 'otherstuff': 'alsorobots'})
StudentModule.objects.create(
...@@ -399,7 +405,7 @@ class TestInstructorEnrollmentStudentModule(SharedModuleStoreTestCase):
# Disable the score change signal to prevent other components from being
# pulled into tests.
@mock.patch('courseware.module_render.SCORE_CHANGED.send')
def test_delete_submission_scores(self, _lti_mock):
def test_delete_submission_scores(self, _mock_signal):
user = UserFactory()
problem_location = self.course_key.make_usage_key('dummy', 'module')
...@@ -494,6 +500,80 @@ class TestInstructorEnrollmentStudentModule(SharedModuleStoreTestCase):
self.assertEqual(unrelated_state['brains'], 'zombie')
class TestStudentModuleGrading(SharedModuleStoreTestCase):
"""
Tests the effects of student module manipulations
on student grades.
"""
@classmethod
def setUpClass(cls):
super(TestStudentModuleGrading, cls).setUpClass()
cls.course = CourseFactory.create()
cls.chapter = ItemFactory.create(
parent=cls.course,
category="chapter",
display_name="Test Chapter"
)
cls.sequence = ItemFactory.create(
parent=cls.chapter,
category='sequential',
display_name="Test Sequential 1",
graded=True
)
cls.vertical = ItemFactory.create(
parent=cls.sequence,
category='vertical',
display_name='Test Vertical 1'
)
problem_xml = MultipleChoiceResponseXMLFactory().build_xml(
question_text='The correct answer is Choice 3',
choices=[False, False, True, False],
choice_names=['choice_0', 'choice_1', 'choice_2', 'choice_3']
)
cls.problem = ItemFactory.create(
parent=cls.vertical,
category="problem",
display_name="Test Problem",
data=problem_xml
)
cls.request = get_request_for_user(UserFactory())
cls.user = cls.request.user
def _get_subsection_grade_and_verify(self, all_earned, all_possible, graded_earned, graded_possible):
"""
Retrieves the subsection grade and verifies that
its scores match those expected.
"""
subsection_grade_factory = SubsectionGradeFactory(
self.user,
self.course,
get_course_blocks(self.user, self.course.location)
)
grade = subsection_grade_factory.create(self.sequence)
self.assertEqual(grade.all_total.earned, all_earned)
self.assertEqual(grade.graded_total.earned, graded_earned)
self.assertEqual(grade.all_total.possible, all_possible)
self.assertEqual(grade.graded_total.possible, graded_possible)
@patch('crum.get_current_request')
def test_delete_student_state(self, _crum_mock):
problem_location = self.problem.location
self._get_subsection_grade_and_verify(0, 1, 0, 1)
answer_problem(course=self.course, request=self.request, problem=self.problem, score=1, max_value=1)
self._get_subsection_grade_and_verify(1, 1, 1, 1)
# Delete student state using the instructor dash
reset_student_attempts(
self.course.id,
self.user,
problem_location,
requesting_user=self.user,
delete_module=True,
)
# Verify that the student's grades are reset
self._get_subsection_grade_and_verify(0, 1, 0, 1)
class EnrollmentObjects(object): class EnrollmentObjects(object):
""" """
Container for enrollment objects. Container for enrollment objects.
......
...@@ -49,7 +49,8 @@ class InstructorServiceTests(SharedModuleStoreTestCase):
state=json.dumps({'attempts': 2}),
)
def test_reset_student_attempts_delete(self):
@mock.patch('courseware.module_render.SCORE_CHANGED.send')
def test_reset_student_attempts_delete(self, _mock_signal):
"""
Test delete student state.
"""
......
...@@ -948,7 +948,7 @@ def upload_problem_grade_report(_xmodule_instance_args, _entry_id, course_id, _t
final_grade = gradeset['percent']
# Only consider graded problems
problem_scores = {unicode(score.module_id): score for score, _ in gradeset['raw_scores'] if score.graded}
problem_scores = {unicode(score.module_id): score for score in gradeset['raw_scores'] if score.graded}
earned_possible_values = list()
for problem_id in problems:
try:
......
"""
Utilities for testing xblocks
"""
from django.conf import settings
from xmodule.modulestore.tests.factories import ItemFactory
TEST_DATA_DIR = settings.COMMON_ROOT / u'test/data'
def add_xml_block_from_file(block_type, filename, parent, metadata):
"""
Create a block of the specified type with content included from the
specified XML file.
XML filenames are relative to common/test/data/blocks.
"""
with open(TEST_DATA_DIR / u'blocks' / filename) as datafile:
return ItemFactory.create(
parent=parent,
category=block_type,
data=datafile.read().decode('utf-8'),
metadata=metadata,
display_name=u'problem'
)
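A hedged usage sketch, mirroring the calls in the subsection grade tests above (`self.vert1` and the metadata dict come from that test class):

```python
# Sketch: attach the capa problem defined in common/test/data/blocks/capa.xml
# to an existing vertical, graded and weighted at 2.5.
block = add_xml_block_from_file(
    block_type=u'problem',
    filename=u'capa.xml',
    parent=self.vert1,
    metadata={u'graded': True, u'weight': 2.5},
)
```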