Commit 03510481 by Matt Drayer

mattdrayer/rebase-20140926-cherrypicks: Added recent commits to rebase branch

parent 168efb8a
......@@ -147,62 +147,3 @@ def get_course_child_content(request, user, course_key, child_descriptor):
        field_data_cache,
        course_key)
    return child_content


def calculate_proforma_grade(grade_summary, grading_policy):
    """
    Calculates a projected (proforma) final grade based on the current state
    of grades using the provided grading policy. Categories equate to grading policy
    'types' and have values such as 'Homework', 'Lab', 'MidtermExam', and 'FinalExam'.
    We invert the concepts here and use the category weights as the possible scores by
    assuming that the weights total 100 percent. So, if a Homework category is worth 15
    percent of your overall grade, and you have currently scored 70 percent for that
    category, the normalized score for the Homework category is 0.105. Note that
    we do not take into account dropped assignments/scores, such as the lowest two homeworks.
    After all scored categories are processed we apply the average category score to any
    unscored categories, using that value as a projection of the user's performance in each category.

    Example:
        - Scored Category: Homework, Weight: 15%, Totaled Score: 70%, Normalized Score: 0.105
        - Scored Category: MidtermExam, Weight: 30%, Totaled Score: 80%, Normalized Score: 0.240
        - Scored Category: FinalExam, Weight: 40%, Totaled Score: 95%, Normalized Score: 0.380
        - Average Category Score: (70 + 80 + 95) / 3 = 81.7
        - Unscored Category: Lab, Weight: 15%, Totaled Score: 81.7%, Normalized Score: 0.123
        - Proforma Grade = 0.105 + 0.240 + 0.380 + 0.123 = 0.8475 (84.8%)
    """
    grade_breakdown = grade_summary['grade_breakdown']
    proforma_grade = 0.00
    totaled_scores = grade_summary['totaled_scores']
    category_averages = []
    categories_to_estimate = []
    for grade_category in grade_breakdown:
        category = grade_category['category']
        item_scores = totaled_scores.get(category)
        if item_scores is not None and len(item_scores):
            total_item_score = 0.00
            items_considered = 0
            for item_score in item_scores:
                if item_score.earned or (item_score.due and item_score.due < timezone.now()):
                    normalized_item_score = item_score.earned / item_score.possible
                    total_item_score += normalized_item_score
                    items_considered += 1
            if total_item_score:
                category_average_score = total_item_score / items_considered
                category_averages.append(category_average_score)
                category_policy = next((policy for policy in grading_policy['GRADER'] if policy['type'] == category), None)
                category_weight = category_policy['weight']
                category_grade = category_average_score * category_weight
                proforma_grade += category_grade
            else:
                categories_to_estimate.append(category)
        else:
            categories_to_estimate.append(category)
    assumed_category_average = sum(category_averages) / len(category_averages) if len(category_averages) > 0 else 0
    for category in categories_to_estimate:
        category_policy = next((policy for policy in grading_policy['GRADER'] if policy['type'] == category), None)
        category_weight = category_policy['weight']
        category_grade = assumed_category_average * category_weight
        proforma_grade += category_grade
    return proforma_grade
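As a quick check on the docstring's worked example, the same numbers fall out of a few lines of plain arithmetic. The dictionary and variable names below are ad hoc and purely illustrative; they just restate the weights and totaled scores listed in the example:

# Illustration of the docstring example only -- weights and totaled scores as listed above.
scored = {'Homework': (0.15, 0.70), 'MidtermExam': (0.30, 0.80), 'FinalExam': (0.40, 0.95)}
contributions = sum(weight * score for weight, score in scored.values())  # 0.105 + 0.240 + 0.380 = 0.725
average_category_score = sum(score for _, score in scored.values()) / 3   # (0.70 + 0.80 + 0.95) / 3 ~= 0.8167
proforma = contributions + 0.15 * average_category_score                  # 0.725 + 0.1225 = 0.8475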
......@@ -40,7 +40,7 @@ from xmodule.modulestore import InvalidLocationError
from xmodule.modulestore.django import modulestore
from api_manager.courses.serializers import CourseModuleCompletionSerializer
from api_manager.courseware_access import get_course, get_course_child, get_course_child_content, get_course_key, course_exists, calculate_proforma_grade
from api_manager.courseware_access import get_course, get_course_child, get_course_key, course_exists
from api_manager.permissions import SecureAPIView, SecureListAPIView, IdsInFilterBackend, HasOrgsFilterBackend
from api_manager.models import GroupProfile, APIUser as User
from api_manager.organizations.serializers import OrganizationSerializer
......@@ -932,7 +932,7 @@ class UsersCoursesGradesDetail(SecureAPIView):
        )
        if len(queryset):
            current_grade = queryset[0].grade
        proforma_grade = calculate_proforma_grade(grade_summary, grading_policy)
        proforma_grade = grades.calculate_proforma_grade(grade_summary, grading_policy)
        response_data = {
            'courseware_summary': progress_summary,
......
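The hunk above is truncated right after the response dictionary opens; for orientation only, the payload presumably carries both the persisted grade and the freshly computed projection. Key names other than 'courseware_summary' are assumptions, not taken from the diff:

response_data = {
    'courseware_summary': progress_summary,
    'current_grade': current_grade,    # last persisted StudentGradebook value (assumed key name)
    'proforma_grade': proforma_grade,  # projection computed on the fly (assumed key name)
}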
......@@ -9,6 +9,7 @@ from contextlib import contextmanager
from django.conf import settings
from django.db import transaction
from django.test.client import RequestFactory
from django.utils import timezone
from dogapi import dog_stats_api
......@@ -546,3 +547,62 @@ def iterate_grades_for(course_id, students):
                exc.message
            )
            yield student, {}, exc.message


def calculate_proforma_grade(grade_summary, grading_policy):
    """
    Calculates a projected (proforma) final grade based on the current state
    of grades using the provided grading policy. Categories equate to grading policy
    'types' and have values such as 'Homework', 'Lab', 'MidtermExam', and 'FinalExam'.
    We invert the concepts here and use the category weights as the possible scores by
    assuming that the weights total 100 percent. So, if a Homework category is worth 15
    percent of your overall grade, and you have currently scored 70 percent for that
    category, the normalized score for the Homework category is 0.105. Note that
    we do not take into account dropped assignments/scores, such as the lowest two homeworks.
    After all scored categories are processed we apply the average category score to any
    unscored categories, using that value as a projection of the user's performance in each category.

    Example:
        - Scored Category: Homework, Weight: 15%, Totaled Score: 70%, Normalized Score: 0.105
        - Scored Category: MidtermExam, Weight: 30%, Totaled Score: 80%, Normalized Score: 0.240
        - Scored Category: FinalExam, Weight: 40%, Totaled Score: 95%, Normalized Score: 0.380
        - Average Category Score: (70 + 80 + 95) / 3 = 81.7
        - Unscored Category: Lab, Weight: 15%, Totaled Score: 81.7%, Normalized Score: 0.123
        - Proforma Grade = 0.105 + 0.240 + 0.380 + 0.123 = 0.8475 (84.8%)
    """
    grade_breakdown = grade_summary['grade_breakdown']
    proforma_grade = 0.00
    totaled_scores = grade_summary['totaled_scores']
    category_averages = []
    categories_to_estimate = []
    for grade_category in grade_breakdown:
        category = grade_category['category']
        item_scores = totaled_scores.get(category)
        if item_scores is not None and len(item_scores):
            total_item_score = 0.00
            items_considered = 0
            for item_score in item_scores:
                if item_score.earned or (item_score.due and item_score.due < timezone.now()):
                    normalized_item_score = item_score.earned / item_score.possible
                    total_item_score += normalized_item_score
                    items_considered += 1
            if total_item_score:
                category_average_score = total_item_score / items_considered
                category_averages.append(category_average_score)
                category_policy = next((policy for policy in grading_policy['GRADER'] if policy['type'] == category), None)
                category_weight = category_policy['weight']
                category_grade = category_average_score * category_weight
                proforma_grade += category_grade
            else:
                categories_to_estimate.append(category)
        else:
            categories_to_estimate.append(category)
    assumed_category_average = sum(category_averages) / len(category_averages) if len(category_averages) > 0 else 0
    for category in categories_to_estimate:
        category_policy = next((policy for policy in grading_policy['GRADER'] if policy['type'] == category), None)
        category_weight = category_policy['weight']
        category_grade = assumed_category_average * category_weight
        proforma_grade += category_grade
    return proforma_grade
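A minimal sketch of how the relocated helper can be exercised in isolation. The Score namedtuple and the literal dictionaries are stand-ins for the grade_summary returned by grades.grade() and for course.grading_policy, modelling only the attributes and keys this function actually reads; the exact shapes are assumptions:

# Sketch only: mocked inputs in the shapes calculate_proforma_grade expects.
from collections import namedtuple

Score = namedtuple('Score', ['earned', 'possible', 'due'])

grading_policy = {'GRADER': [
    {'type': 'Homework', 'weight': 0.15},
    {'type': 'MidtermExam', 'weight': 0.30},
    {'type': 'FinalExam', 'weight': 0.40},
    {'type': 'Lab', 'weight': 0.15},
]}
grade_summary = {
    'grade_breakdown': [{'category': grader['type']} for grader in grading_policy['GRADER']],
    'totaled_scores': {
        'Homework': [Score(earned=7.0, possible=10.0, due=None)],
        'MidtermExam': [Score(earned=8.0, possible=10.0, due=None)],
        'FinalExam': [Score(earned=9.5, possible=10.0, due=None)],
        # No 'Lab' scores yet, so that category gets estimated from the average of the others.
    },
}
print calculate_proforma_grade(grade_summary, grading_policy)  # ~0.8475

Because 'Lab' is absent from totaled_scores, the function treats it as unscored and projects it at the average of the three scored categories, matching the docstring example.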
......@@ -69,13 +69,15 @@ class Command(BaseCommand):
            request.user = user
            grade_data = grades.grade(user, request, course)
            grade = grade_data['percent']
            proforma_grade = grades.calculate_proforma_grade(grade_data, course.grading_policy)
            try:
                gradebook_entry = StudentGradebook.objects.get(user=user, course_id=course.id)
                if gradebook_entry.grade != grade:
                    gradebook_entry.grade = grade
                    gradebook_entry.proforma_grade = proforma_grade
                    gradebook_entry.save()
            except StudentGradebook.DoesNotExist:
                StudentGradebook.objects.create(user=user, course_id=course.id, grade=grade)
                StudentGradebook.objects.create(user=user, course_id=course.id, grade=grade, proforma_grade=proforma_grade)
            log_msg = 'Gradebook entry created -- Course: {}, User: {} (grade: {})'.format(course.id, user.id, grade)
            print log_msg
            log.info(log_msg)
......@@ -22,6 +22,7 @@ class StudentGradebook(TimeStampedModel):
    user = models.ForeignKey(User, db_index=True)
    course_id = CourseKeyField(db_index=True, max_length=255, blank=True)
    grade = models.FloatField()
    proforma_grade = models.FloatField()

    class Meta:
        """
......@@ -121,6 +122,7 @@ class StudentGradebookHistory(TimeStampedModel):
    user = models.ForeignKey(User, db_index=True)
    course_id = CourseKeyField(db_index=True, max_length=255, blank=True)
    grade = models.FloatField()
    proforma_grade = models.FloatField()

    @receiver(post_save, sender=StudentGradebook)
    def save_history(sender, instance, **kwargs):  # pylint: disable=no-self-argument, unused-argument
......@@ -130,6 +132,7 @@ class StudentGradebookHistory(TimeStampedModel):
        history_entry = StudentGradebookHistory(
            user=instance.user,
            course_id=instance.course_id,
            grade=instance.grade
            grade=instance.grade,
            proforma_grade=instance.proforma_grade
        )
        history_entry.save()
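Because every StudentGradebook save now snapshots both fields into StudentGradebookHistory, a learner's grade trajectory for a course can be read back in order. A small sketch, assuming TimeStampedModel supplies the usual created timestamp column:

# Sketch: list a learner's gradebook snapshots for one course, oldest first.
history = StudentGradebookHistory.objects.filter(
    user=user,
    course_id=course_key,
).order_by('created')  # 'created' is assumed to come from TimeStampedModel
for entry in history:
    print '{}: grade={}, proforma_grade={}'.format(entry.created, entry.grade, entry.proforma_grade)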
......@@ -24,10 +24,12 @@ def on_score_changed(sender, **kwargs):
    request.user = user
    grade_data = grades.grade(user, request, course_descriptor)
    grade = grade_data['percent']
    proforma_grade = grades.calculate_proforma_grade(grade_data, course_descriptor.grading_policy)
    try:
        gradebook_entry = StudentGradebook.objects.get(user=user, course_id=course_key)
        if gradebook_entry.grade != grade:
            gradebook_entry.grade = grade
            gradebook_entry.proforma_grade = proforma_grade
            gradebook_entry.save()
    except StudentGradebook.DoesNotExist:
        StudentGradebook.objects.create(user=user, course_id=course_key, grade=grade)
        StudentGradebook.objects.create(user=user, course_id=course_key, grade=grade, proforma_grade=proforma_grade)
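The management command and the on_score_changed receiver now repeat the same create-or-update sequence around StudentGradebook. A hypothetical helper, not part of this commit, that both call sites could delegate to; it uses only the ORM calls already present above and also compares proforma_grade so an update is not skipped when only the projection moves (for example, when a due date passes):

def _upsert_gradebook_entry(user, course_id, grade, proforma_grade):
    """
    Hypothetical helper (not in this commit): create or update a learner's
    StudentGradebook row, saving only when something actually changed.
    """
    try:
        entry = StudentGradebook.objects.get(user=user, course_id=course_id)
        if entry.grade != grade or entry.proforma_grade != proforma_grade:
            entry.grade = grade
            entry.proforma_grade = proforma_grade
            entry.save()
    except StudentGradebook.DoesNotExist:
        StudentGradebook.objects.create(
            user=user, course_id=course_id, grade=grade, proforma_grade=proforma_grade
        )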