Commit 646f4be1 by Matt Drayer Committed by Jonathan Piacenti

mattdrayer/api-proficiency-redux: New StudentGradebook

parent ad259fdc
...@@ -2,9 +2,9 @@ ...@@ -2,9 +2,9 @@
Management command to generate a list of grades for Management command to generate a list of grades for
all students that are enrolled in a course. all students that are enrolled in a course.
""" """
from util.request import RequestMock
from courseware import grades, courses from courseware import grades, courses
from certificates.models import GeneratedCertificate from certificates.models import GeneratedCertificate
from django.test.client import RequestFactory
from django.core.management.base import BaseCommand, CommandError from django.core.management.base import BaseCommand, CommandError
import os import os
from opaque_keys import InvalidKeyError from opaque_keys import InvalidKeyError
...@@ -13,23 +13,9 @@ from opaque_keys.edx.locations import SlashSeparatedCourseKey ...@@ -13,23 +13,9 @@ from opaque_keys.edx.locations import SlashSeparatedCourseKey
from django.contrib.auth.models import User from django.contrib.auth.models import User
from optparse import make_option from optparse import make_option
import datetime import datetime
from django.core.handlers.base import BaseHandler
import csv import csv
class RequestMock(RequestFactory):
    """Request factory that also runs the configured request middleware chain."""

    def request(self, **request):
        "Construct a generic request object."
        mock_request = RequestFactory.request(self, **request)
        handler = BaseHandler()
        handler.load_middleware()
        # Run each request-phase middleware; none of them may short-circuit
        # with a response, otherwise the mock request is unusable
        for middleware_method in handler._request_middleware:
            response = middleware_method(mock_request)
            if response:
                raise Exception("Couldn't create request mock object - "
                                "request middleware returned a response")
        return mock_request
class Command(BaseCommand): class Command(BaseCommand):
help = """ help = """
......
""" Utility functions related to HTTP requests """ """ Utility functions related to HTTP requests """
from django.core.handlers.base import BaseHandler
from django.test import RequestFactory
import re import re
import logging import logging
from django.conf import settings from django.conf import settings
from django.core.handlers.base import BaseHandler
from django.test.client import RequestFactory
from microsite_configuration import microsite from microsite_configuration import microsite
from opaque_keys import InvalidKeyError from opaque_keys import InvalidKeyError
......
...@@ -1326,7 +1326,7 @@ class CourseDescriptor(CourseFields, SequenceDescriptor, LicenseMixin): ...@@ -1326,7 +1326,7 @@ class CourseDescriptor(CourseFields, SequenceDescriptor, LicenseMixin):
for chapter in self.get_children(): for chapter in self.get_children():
for section in chapter.get_children(): for section in chapter.get_children():
if section.graded: if hasattr(section, 'graded') and section.graded:
xmoduledescriptors = list(yield_descriptor_descendents(section)) xmoduledescriptors = list(yield_descriptor_descendents(section))
xmoduledescriptors.append(section) xmoduledescriptors.append(section)
......
...@@ -26,7 +26,7 @@ def aggregate_scores(scores, section_name="summary"): ...@@ -26,7 +26,7 @@ def aggregate_scores(scores, section_name="summary"):
total_correct = sum(score.earned for score in scores) total_correct = sum(score.earned for score in scores)
total_possible = sum(score.possible for score in scores) total_possible = sum(score.possible for score in scores)
#regardless of whether or not it is graded # regardless of whether or not it is graded
all_total = Score( all_total = Score(
total_correct, total_correct,
total_possible, total_possible,
...@@ -34,7 +34,7 @@ def aggregate_scores(scores, section_name="summary"): ...@@ -34,7 +34,7 @@ def aggregate_scores(scores, section_name="summary"):
section_name, section_name,
None None
) )
#selecting only graded things # selecting only graded things
graded_total = Score( graded_total = Score(
total_correct_graded, total_correct_graded,
total_possible_graded, total_possible_graded,
......
...@@ -23,18 +23,12 @@ class GradeSerializer(serializers.Serializer): ...@@ -23,18 +23,12 @@ class GradeSerializer(serializers.Serializer):
class CourseLeadersSerializer(serializers.Serializer): class CourseLeadersSerializer(serializers.Serializer):
""" Serializer for course leaderboard """ """ Serializer for course leaderboard """
id = serializers.IntegerField(source='student__id') id = serializers.IntegerField(source='user__id')
username = serializers.CharField(source='student__username') username = serializers.CharField(source='user__username')
title = serializers.CharField(source='student__profile__title') title = serializers.CharField(source='user__profile__title')
avatar_url = serializers.CharField(source='student__profile__avatar_url') avatar_url = serializers.CharField(source='user__profile__avatar_url')
points_scored = serializers.SerializerMethodField('get_points_scored') # Percentage grade (versus letter grade)
grade = serializers.FloatField(source='grade')
    def get_points_scored(self, obj):
        """
        Return the row's ``points_scored`` rounded to the nearest whole number.

        A missing/None/falsy stored value is treated as 0.
        """
        points_scored = obj['points_scored'] or 0
        return int(round(points_scored))
class CourseCompletionsLeadersSerializer(serializers.Serializer): class CourseCompletionsLeadersSerializer(serializers.Serializer):
......
...@@ -16,7 +16,6 @@ urlpatterns = patterns( ...@@ -16,7 +16,6 @@ urlpatterns = patterns(
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/content/(?P<content_id>[a-zA-Z0-9_+\/:-]+)/users/*$', courses_views.CourseContentUsersList.as_view()), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/content/(?P<content_id>[a-zA-Z0-9_+\/:-]+)/users/*$', courses_views.CourseContentUsersList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/content/(?P<content_id>[a-zA-Z0-9_+\/:-]+)$', courses_views.CourseContentDetail.as_view()), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/content/(?P<content_id>[a-zA-Z0-9_+\/:-]+)$', courses_views.CourseContentDetail.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/content/*$', courses_views.CourseContentList.as_view()), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/content/*$', courses_views.CourseContentList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/grades/*$', courses_views.CoursesGradesList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/groups/(?P<group_id>[0-9]+)$', courses_views.CoursesGroupsDetail.as_view()), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/groups/(?P<group_id>[0-9]+)$', courses_views.CoursesGroupsDetail.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/groups/*$', courses_views.CoursesGroupsList.as_view()), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/groups/*$', courses_views.CoursesGroupsList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/overview/*$', courses_views.CoursesOverview.as_view()), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/overview/*$', courses_views.CoursesOverview.as_view()),
...@@ -24,11 +23,12 @@ urlpatterns = patterns( ...@@ -24,11 +23,12 @@ urlpatterns = patterns(
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/static_tabs/*$', courses_views.CoursesStaticTabsList.as_view()), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/static_tabs/*$', courses_views.CoursesStaticTabsList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/completions/*$', courses_views.CourseModuleCompletionList.as_view(), name='completion-list'), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/completions/*$', courses_views.CourseModuleCompletionList.as_view(), name='completion-list'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/projects/*$', courses_views.CoursesProjectList.as_view(), name='courseproject-list'), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/projects/*$', courses_views.CoursesProjectList.as_view(), name='courseproject-list'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/*$', courses_views.CourseMetrics.as_view(), name='course-metrics'), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/*$', courses_views.CoursesMetrics.as_view(), name='course-metrics'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/cities/$', courses_views.CoursesCitiesMetrics.as_view(), name='courses-cities-metrics'), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/cities/$', courses_views.CoursesMetricsCities.as_view(), name='courses-cities-metrics'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/completions/leaders/*$', courses_views.CoursesCompletionsLeadersList.as_view(), name='course-metrics-completions-leaders'), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/completions/leaders/*$', courses_views.CoursesMetricsCompletionsLeadersList.as_view(), name='course-metrics-completions-leaders'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/proficiency/leaders/*$', courses_views.CoursesLeadersList.as_view(), name='course-metrics-proficiency-leaders'), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/grades/*$', courses_views.CoursesMetricsGradesList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/social/$', courses_views.CoursesSocialMetrics.as_view(), name='courses-social-metrics'), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/grades/leaders/*$', courses_views.CoursesMetricsGradesLeadersList.as_view(), name='course-metrics-grades-leaders'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/social/$', courses_views.CoursesMetricsSocial.as_view(), name='courses-social-metrics'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/roles/(?P<role>[a-z_]+)/users/(?P<user_id>[0-9]+)*$', courses_views.CoursesRolesUsersDetail.as_view(), name='courses-roles-users-detail'), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/roles/(?P<role>[a-z_]+)/users/(?P<user_id>[0-9]+)*$', courses_views.CoursesRolesUsersDetail.as_view(), name='courses-roles-users-detail'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/roles/*$', courses_views.CoursesRolesList.as_view(), name='courses-roles-list'), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/roles/*$', courses_views.CoursesRolesList.as_view(), name='courses-roles-list'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/updates/*$', courses_views.CoursesUpdates.as_view()), url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/updates/*$', courses_views.CoursesUpdates.as_view()),
......
""" Centralized access to LMS courseware app """ """ Centralized access to LMS courseware app """
from django.utils import timezone
from courseware import courses, module_render from courseware import courses, module_render
from courseware.model_data import FieldDataCache from courseware.model_data import FieldDataCache
...@@ -133,3 +134,49 @@ def get_course_child_content(request, user, course_key, child_descriptor): ...@@ -133,3 +134,49 @@ def get_course_child_content(request, user, course_key, child_descriptor):
field_data_cache, field_data_cache,
course_key) course_key)
return child_content return child_content
def calculate_proforma_grade(grade_summary, grading_policy):
    """
    Calculates a projected (proforma) final grade based on the current state
    of grades using the provided grading policy. Sections equate to grading policy
    'types' and have values such as 'Homework', 'Lab', 'MidtermExam', and 'FinalExam'
    We invert the concepts here and use the section weights as the possible scores by
    assuming that the section weights total 100 percent. So, if a Homework section
    is worth 15 percent of your overall grade, and you have currently scored 70 percent
    for that section, the normalized score for the Homework section is 0.105. Note that
    we do not take into account dropped assignments/scores, such as lowest-two homeworks.
    After all scored sections are processed we take the remaining weight at its full
    value as a projection of the user obtaining 100 percent of the section potential.

    Example:
        - Section: Homework, Weight: 15%, Totaled Score: 70%, Normalized Score: 0.105
        - Section: MidtermExam, Weight: 30%, Totaled Score: 80%, Normalized Score: 0.240
        - Section: Final Exam, Weight: 40%, Totaled Score: 95%, Normalized Score: 0.380
        - Remaining Weight: 0.15 (unscored Lab section), assume 100%, of 15% => 0.150
        - Proforma Grade = 0.105 + 0.240 + 0.380 + 0.150 = 0.875 (87.5%)
    """
    remaining_weight = 1.00
    proforma_grade = 0.00
    totaled_scores = grade_summary['totaled_scores']
    for section in totaled_scores:
        points_earned = 0.00
        points_possible = 0.00
        # totaled_scores is a collection of currently-recorded scores for a given section
        # we need to iterate through and combine the scores to create an overall score for the section
        # This loop does not take into account dropped assignments (eg, homeworks)
        for score in totaled_scores[section]:
            # Only count grades where points have been scored, or where the due date has passed
            if score.earned or (score.due and score.due < timezone.now()):
                points_earned = points_earned + score.earned
                points_possible = points_possible + score.possible
        if not points_possible:
            # No qualifying scores for this section yet -- treat it as unscored so
            # its weight is credited at full value via remaining_weight below.
            # (This also guards against a ZeroDivisionError.)
            continue
        grade = points_earned / points_possible
        section_policy = next((policy for policy in grading_policy['GRADER'] if policy['type'] == section), None)
        if section_policy is not None:
            section_weight = section_policy['weight']
            proforma_grade = proforma_grade + (section_weight * grade)
            remaining_weight = remaining_weight - section_weight
    # Assume 100 percent achievement for any weight not yet scored
    proforma_grade = proforma_grade + remaining_weight
    return proforma_grade
...@@ -6,7 +6,7 @@ from requests.exceptions import ConnectionError ...@@ -6,7 +6,7 @@ from requests.exceptions import ConnectionError
from django.contrib.auth.models import Group from django.contrib.auth.models import Group
from django.core.exceptions import ObjectDoesNotExist from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError from django.db import IntegrityError
from django.db.models import Count, Q, Sum from django.db.models import Count, Q
from django.core.validators import validate_email, validate_slug, ValidationError from django.core.validators import validate_email, validate_slug, ValidationError
from django.conf import settings from django.conf import settings
from django.http import Http404 from django.http import Http404
...@@ -17,8 +17,8 @@ from rest_framework.response import Response ...@@ -17,8 +17,8 @@ from rest_framework.response import Response
from courseware import grades, module_render from courseware import grades, module_render
from courseware.model_data import FieldDataCache from courseware.model_data import FieldDataCache
from courseware.models import StudentModule
from django_comment_common.models import Role, FORUM_ROLE_MODERATOR from django_comment_common.models import Role, FORUM_ROLE_MODERATOR
from gradebook.models import StudentGradebook
from instructor.access import revoke_access, update_forum_role from instructor.access import revoke_access, update_forum_role
from lang_pref import LANGUAGE_KEY from lang_pref import LANGUAGE_KEY
from lms.lib.comment_client.user import User as CommentUser from lms.lib.comment_client.user import User as CommentUser
...@@ -34,7 +34,7 @@ from util.password_policy_validators import ( ...@@ -34,7 +34,7 @@ from util.password_policy_validators import (
) )
from api_manager.courses.serializers import CourseModuleCompletionSerializer from api_manager.courses.serializers import CourseModuleCompletionSerializer
from api_manager.courseware_access import get_course, get_course_child, get_course_total_score, get_course_key, course_exists from api_manager.courseware_access import get_course, get_course_child, get_course_key, course_exists, calculate_proforma_grade
from api_manager.permissions import SecureAPIView, SecureListAPIView, IdsInFilterBackend, HasOrgsFilterBackend from api_manager.permissions import SecureAPIView, SecureListAPIView, IdsInFilterBackend, HasOrgsFilterBackend
from api_manager.models import GroupProfile, APIUser as User from api_manager.models import GroupProfile, APIUser as User
from api_manager.organizations.serializers import OrganizationSerializer from api_manager.organizations.serializers import OrganizationSerializer
...@@ -917,36 +917,27 @@ class UsersCoursesGradesDetail(SecureAPIView): ...@@ -917,36 +917,27 @@ class UsersCoursesGradesDetail(SecureAPIView):
if not course_descriptor: if not course_descriptor:
return Response({}, status=status.HTTP_404_NOT_FOUND) return Response({}, status=status.HTTP_404_NOT_FOUND)
courseware_summary = grades.progress_summary(student, request, course_descriptor) # pylint: disable=W0612 progress_summary = grades.progress_summary(student, request, course_descriptor) # pylint: disable=W0612
grade_summary = grades.grade(student, request, course_descriptor) grade_summary = grades.grade(student, request, course_descriptor)
grading_policy = course_descriptor.grading_policy grading_policy = course_descriptor.grading_policy
current_grade = 0
proforma_grade = 0
queryset = StudentModule.objects.filter( queryset = StudentGradebook.objects.filter(
user=student,
course_id__exact=course_key, course_id__exact=course_key,
max_grade__isnull=False,
max_grade__gt=0
) )
if len(queryset):
current_grade = queryset[0].grade
proforma_grade = calculate_proforma_grade(grade_summary, grading_policy)
total_score = get_course_total_score(courseware_summary)
user_queryset = queryset.filter(grade__isnull=False, student=student)
comp_modules = user_queryset.aggregate(Sum('grade'))
score_of_comp_module = comp_modules['grade__sum'] or 0
max_possible_score = user_queryset.aggregate(Sum('max_grade'))
current_grade = 0
pro_forma_grade = 0
if total_score:
current_grade = score_of_comp_module / float(total_score) * 100
if max_possible_score['max_grade__sum']:
pro_forma_grade = score_of_comp_module / float(max_possible_score['max_grade__sum']) * 100
response_data = { response_data = {
'courseware_summary': courseware_summary, 'courseware_summary': progress_summary,
'grade_summary': grade_summary, 'grade_summary': grade_summary,
'grading_policy': grading_policy, 'grading_policy': grading_policy,
'current_grade': current_grade, 'current_grade': current_grade,
'pro_forma_grade': pro_forma_grade 'proforma_grade': proforma_grade
} }
return Response(response_data) return Response(response_data)
......
...@@ -184,6 +184,7 @@ def _grade(student, request, course, keep_raw_scores): ...@@ -184,6 +184,7 @@ def _grade(student, request, course, keep_raw_scores):
for section in sections: for section in sections:
section_descriptor = section['section_descriptor'] section_descriptor = section['section_descriptor']
section_name = section_descriptor.display_name_with_default section_name = section_descriptor.display_name_with_default
section_due = section_descriptor.due
# some problems have state that is updated independently of interaction # some problems have state that is updated independently of interaction
# with the LMS, so they need to always be scored. (E.g. foldit., # with the LMS, so they need to always be scored. (E.g. foldit.,
...@@ -252,7 +253,7 @@ def _grade(student, request, course, keep_raw_scores): ...@@ -252,7 +253,7 @@ def _grade(student, request, course, keep_raw_scores):
total, total,
graded, graded,
module_descriptor.display_name_with_default, module_descriptor.display_name_with_default,
module_descriptor.location section_due
) )
) )
...@@ -260,7 +261,7 @@ def _grade(student, request, course, keep_raw_scores): ...@@ -260,7 +261,7 @@ def _grade(student, request, course, keep_raw_scores):
if keep_raw_scores: if keep_raw_scores:
raw_scores += scores raw_scores += scores
else: else:
graded_total = Score(0.0, 1.0, True, section_name, None) graded_total = Score(0.0, 1.0, True, section_name, section_due)
#Add the graded total to totaled_scores #Add the graded total to totaled_scores
if graded_total.possible > 0: if graded_total.possible > 0:
...@@ -380,6 +381,7 @@ def _progress_summary(student, request, course): ...@@ -380,6 +381,7 @@ def _progress_summary(student, request, course):
continue continue
graded = section_module.graded graded = section_module.graded
due = section_module.due
scores = [] scores = []
module_creator = section_module.xmodule_runtime.get_module module_creator = section_module.xmodule_runtime.get_module
...@@ -400,7 +402,7 @@ def _progress_summary(student, request, course): ...@@ -400,7 +402,7 @@ def _progress_summary(student, request, course):
total, total,
graded, graded,
module_descriptor.display_name_with_default, module_descriptor.display_name_with_default,
module_descriptor.location due
) )
) )
......
...@@ -67,6 +67,7 @@ class ChunkingManager(models.Manager): ...@@ -67,6 +67,7 @@ class ChunkingManager(models.Manager):
) )
return res return res
from courseware.signals import score_changed
class StudentModule(models.Model): class StudentModule(models.Model):
""" """
...@@ -142,6 +143,28 @@ class StudentModule(models.Model): ...@@ -142,6 +143,28 @@ class StudentModule(models.Model):
return unicode(repr(self)) return unicode(repr(self))
@receiver(post_save, sender=StudentModule)
def send_score_changed_signal(sender, instance, **kwargs):
    """
    Broadcast the recorded score to connected receivers
    """
    # Gated on the SIGNAL_ON_SCORE_CHANGED feature flag, and only for saves
    # that actually carry a grade
    if settings.FEATURES.get('SIGNAL_ON_SCORE_CHANGED', False) and instance.grade is not None:
        # Graded history rows for this module, newest first; the row matching
        # this save's modified timestamp and state is excluded -- presumably to
        # skip the history entry written for this very save (TODO confirm)
        previous_entries = StudentModuleHistory.objects.filter(student_module=instance)\
            .exclude(grade=None)\
            .exclude(created=instance.modified, state=instance.state)\
            .order_by('-id')
        # Emit only on the first grade ever, or when grade/max_grade differ
        # from the most recent prior entry (avoids signalling no-op re-saves)
        if not len(previous_entries) or\
                (instance.grade != previous_entries[0].grade) or\
                (instance.max_grade != previous_entries[0].max_grade):
            score_changed.send(
                sender=sender,
                user=instance.student,
                course_key=instance.course_id,
                score=instance.grade,
                problem=instance.module_state_key
            )
class StudentModuleHistory(models.Model): class StudentModuleHistory(models.Model):
"""Keeps a complete history of state changes for a given XModule for a given """Keeps a complete history of state changes for a given XModule for a given
Student. Right now, we restrict this to problems so that the table doesn't Student. Right now, we restrict this to problems so that the table doesn't
......
"""
https://docs.djangoproject.com/en/dev/topics/signals/
"""
import django.dispatch
score_changed = django.dispatch.Signal(providing_args=["user", "course", "score", "problem"])
...@@ -1251,6 +1251,7 @@ class TestModuleTrackingContext(ModuleStoreTestCase): ...@@ -1251,6 +1251,7 @@ class TestModuleTrackingContext(ModuleStoreTestCase):
) )
def test_context_contains_display_name(self, mock_tracker): def test_context_contains_display_name(self, mock_tracker):
mock_tracker.reset_mock()
problem_display_name = u'Option Response Problem' problem_display_name = u'Option Response Problem'
module_info = self.handle_callback_and_get_module_info(mock_tracker, problem_display_name) module_info = self.handle_callback_and_get_module_info(mock_tracker, problem_display_name)
self.assertEquals(problem_display_name, module_info['display_name']) self.assertEquals(problem_display_name, module_info['display_name'])
...@@ -1277,12 +1278,14 @@ class TestModuleTrackingContext(ModuleStoreTestCase): ...@@ -1277,12 +1278,14 @@ class TestModuleTrackingContext(ModuleStoreTestCase):
'problem_check', 'problem_check',
) )
self.assertEquals(len(mock_tracker.send.mock_calls), 1) mock_calls = mock_tracker.send.mock_calls
mock_call = mock_tracker.send.mock_calls[0] for call in mock_calls:
event = mock_call[1][0] call_data = call[1][0]
event_type = call_data.get('event_type')
if event_type == 'problem_check':
break
self.assertEquals(event['event_type'], 'problem_check') return call_data['context']['module']['display_name']
return event['context']['module']
def test_missing_display_name(self, mock_tracker): def test_missing_display_name(self, mock_tracker):
actual_display_name = self.handle_callback_and_get_module_info(mock_tracker)['display_name'] actual_display_name = self.handle_callback_and_get_module_info(mock_tracker)['display_name']
......
...@@ -1045,18 +1045,21 @@ class TestAnswerDistributions(TestSubmittingProblems): ...@@ -1045,18 +1045,21 @@ class TestAnswerDistributions(TestSubmittingProblems):
# We'll submit one problem, and then muck with the student_answers # We'll submit one problem, and then muck with the student_answers
# dict inside its state to try different data types (str, int, float, # dict inside its state to try different data types (str, int, float,
# none) # none)
self.submit_question_answer('p1', {'2_1': u'Correct'}) problem_name = 'p1'
self.submit_question_answer(problem_name, {'2_1': u'Correct'})
# Now fetch the state entry for that problem. # Now fetch the state entry for that problem.
student_module = StudentModule.objects.get( student_modules = StudentModule.objects.filter(
course_id=self.course.id, course_id=self.course.id,
student=self.student_user student=self.student_user
) )
for val in ('Correct', True, False, 0, 0.0, 1, 1.0, None): for student_module in student_modules:
state = json.loads(student_module.state) if student_module.module_state_key.name == problem_name:
state["student_answers"]['{}_2_1'.format(self.p1_html_id)] = val for val in ('Correct', True, False, 0, 0.0, 1, 1.0, None):
student_module.state = json.dumps(state) state = json.loads(student_module.state)
student_module.save() state["student_answers"]['{}_2_1'.format(self.p1_html_id)] = val
student_module.state = json.dumps(state)
student_module.save()
self.assertEqual( self.assertEqual(
grades.answer_distributions(self.course.id), grades.answer_distributions(self.course.id),
...@@ -1066,40 +1069,64 @@ class TestAnswerDistributions(TestSubmittingProblems): ...@@ -1066,40 +1069,64 @@ class TestAnswerDistributions(TestSubmittingProblems):
}, },
} }
) )
for student_module in student_modules:
if student_module.module_state_key.name == problem_name:
for val in ('Correct', True, False, 0, 0.0, 1, 1.0, None):
state = json.loads(student_module.state)
state["student_answers"]['i4x-MITx-100-problem-p1_2_1'] = val
student_module.state = json.dumps(state)
student_module.save()
self.assertEqual(
grades.answer_distributions(self.course.id),
{
('p1', 'p1', 'i4x-MITx-100-problem-p1_2_1'): {
str(val): 1
},
}
)
def test_missing_content(self): def test_missing_content(self):
# If there's a StudentModule entry for content that no longer exists, # If there's a StudentModule entry for content that no longer exists,
# we just quietly ignore it (because we can't display a meaningful url # we just quietly ignore it (because we can't display a meaningful url
# or name for it). # or name for it).
self.submit_question_answer('p1', {'2_1': 'Incorrect'}) problem_name = 'p1'
self.submit_question_answer(problem_name, {'2_1': 'Incorrect'})
# Now fetch the state entry for that problem and alter it so it points # Now fetch the state entry for that problem and alter it so it points
# to a non-existent problem. # to a non-existent problem.
student_module = StudentModule.objects.get( student_modules = StudentModule.objects.filter(
course_id=self.course.id, course_id=self.course.id,
student=self.student_user student=self.student_user
) )
student_module.module_state_key = student_module.module_state_key.replace( for student_module in student_modules:
name=student_module.module_state_key.name + "_fake" if student_module.module_state_key.name == problem_name:
) student_module.module_state_key = student_module.module_state_key.replace(
student_module.save() name=student_module.module_state_key.name + "_fake"
)
student_module.save()
# It should be empty (ignored) # It should be empty (ignored)
empty_distribution = grades.answer_distributions(self.course.id) empty_distribution = grades.answer_distributions(self.course.id)
self.assertFalse(empty_distribution) # should be empty self.assertFalse(empty_distribution) # should be empty
def test_broken_state(self): def test_broken_state(self):
# Missing or broken state for a problem should be skipped without # Missing or broken state for a problem should be skipped without
# causing the whole answer_distribution call to explode. # causing the whole answer_distribution call to explode.
# Submit p1 # Submit p1
self.submit_question_answer('p1', {'2_1': u'Correct'}) prb1_name = 'p1'
self.submit_question_answer(prb1_name, {'2_1': u'Correct'})
# Now fetch the StudentModule entry for p1 so we can corrupt its state # Now fetch the StudentModule entry for p1 so we can corrupt its state
prb1 = StudentModule.objects.get( student_modules = StudentModule.objects.filter(
course_id=self.course.id, course_id=self.course.id,
student=self.student_user student=self.student_user
) )
for student_module in student_modules:
if student_module.module_state_key.name == prb1_name:
prb1 = student_module
break
# Submit p2 # Submit p2
self.submit_question_answer('p2', {'2_1': u'Incorrect'}) self.submit_question_answer('p2', {'2_1': u'Incorrect'})
......
"""
Initialization module for gradebook djangoapp
"""
import gradebook.receivers
"""
One-time data migration script -- shouldn't need to run it again
"""
import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from courseware import grades
from gradebook.models import StudentGradebook
from student.models import CourseEnrollment
from xmodule.modulestore.django import modulestore
from util.request import RequestMock
log = logging.getLogger(__name__)
class Command(BaseCommand):
"""
Creates (or updates) gradebook entries for the specified course(s) and/or user(s)
"""
def handle(self, *args, **options):
help = "Command to creaete or update gradebook entries"
option_list = BaseCommand.option_list + (
make_option(
"-c",
"--course_ids",
dest="course_ids",
help="List of courses for which to generate grades",
metavar="slashes:first+course+id,slashes:second+course+id"
),
make_option(
"-u",
"--user_ids",
dest="user_ids",
help="List of users for which to generate grades",
metavar="1234,2468,3579"
),
)
course_ids = options.get('course_ids')
user_ids = options.get('user_ids')
# Get the list of courses from the system
courses = modulestore().get_courses()
# If one or more courses were specified by the caller, just use those ones...
if course_ids is not None:
filtered_courses = []
for course in courses:
if unicode(course.id) in course_ids.split(','):
filtered_courses.append(course)
courses = filtered_courses
for course in courses:
users = CourseEnrollment.users_enrolled_in(course.id)
# If one or more users were specified by the caller, just use those ones...
if user_ids is not None:
filtered_users = []
for user in users:
if str(user.id) in user_ids.split(','):
filtered_users.append(user)
users = filtered_users
# For each user...
for user in users:
request = RequestMock().get('/')
request.user = user
grade_data = grades.grade(user, request, course)
print grade_data
grade = grade_data['percent']
try:
gradebook_entry = StudentGradebook.objects.get(user=user, course_id=course.id)
if gradebook_entry.grade != grade:
gradebook_entry.grade = grade
gradebook_entry.save()
except StudentGradebook.DoesNotExist:
StudentGradebook.objects.create(user=user, course_id=course.id, grade=grade)
log_msg = 'Gradebook entry created -- Course: {}, User: {} (grade: {})'.format(course.id, user.id, grade)
print log_msg
log.info(log_msg)
"""
Run these tests @ Devstack:
rake fasttest_lms[common/djangoapps/api_manager/management/commands/tests/test_migrate_orgdata.py]
"""
from datetime import datetime
from mock import MagicMock
import uuid
from django.conf import settings
from capa.tests.response_xml_factory import StringResponseXMLFactory
from courseware import module_render
from courseware.model_data import FieldDataCache
from gradebook.management.commands import generate_gradebook_entries
from gradebook.models import StudentGradebook, StudentGradebookHistory
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
class GenerateGradebookEntriesTests(ModuleStoreTestCase):
    """
    Test suite for grade generation script
    """
    def setUp(self):
        # Turn off the signalling mechanism temporarily
        # NOTE(review): mutating settings._wrapped.default_settings directly is
        # fragile -- override_settings would be safer; TODO confirm why it is
        # done this way (presumably override_settings does not reach FEATURES).
        # NOTE(review): setUp does not call super(...).setUp() -- verify
        # ModuleStoreTestCase does not require it in this codebase.
        settings._wrapped.default_settings.FEATURES['SIGNAL_ON_SCORE_CHANGED'] = False

        # Create a couple courses to work with
        self.course = CourseFactory.create(
            start=datetime(2014, 6, 16, 14, 30),
            end=datetime(2015, 1, 16)
        )
        self.test_data = '<html>{}</html>'.format(str(uuid.uuid4()))

        # Two chapters; all graded problems hang off these locations.
        chapter1 = ItemFactory.create(
            category="chapter",
            parent_location=self.course.location,
            data=self.test_data,
            due=datetime(2014, 5, 16, 14, 30),
            display_name="Overview"
        )
        chapter2 = ItemFactory.create(
            category="chapter",
            parent_location=self.course.location,
            data=self.test_data,
            due=datetime(2014, 5, 16, 14, 30),
            display_name="Overview"
        )
        # One graded problem per assignment format so the course grader
        # produces a non-trivial weighted percentage.
        self.problem = ItemFactory.create(
            parent_location=chapter1.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="homework problem 1",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
        )
        self.problem2 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="homework problem 2",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
        )
        self.problem3 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="lab problem 1",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Lab"}
        )
        self.problem4 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="midterm problem 2",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Midterm Exam"}
        )
        self.problem5 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="final problem 2",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Final Exam"}
        )

        # Create some users and enroll them
        # Each user publishes a score on every problem, scaled by user.id, so
        # the three users end up with three distinct overall grades.
        self.users = [UserFactory.create(username="testuser" + str(__), profile='test') for __ in xrange(3)]
        for user in self.users:
            CourseEnrollmentFactory.create(user=user, course_id=self.course.id)
            grade = 0.15 * user.id
            module = self.get_module_for_user(user, self.course, self.problem)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)

            grade = 0.20 * user.id
            module = self.get_module_for_user(user, self.course, self.problem2)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)

            grade = 0.25 * user.id
            module = self.get_module_for_user(user, self.course, self.problem3)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)

            grade = 0.30 * user.id
            module = self.get_module_for_user(user, self.course, self.problem4)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)

            grade = 0.33 * user.id
            module = self.get_module_for_user(user, self.course, self.problem5)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)

    def get_module_for_user(self, user, course, problem):
        """Helper function to get useful module at self.location in self.course_id for user"""
        mock_request = MagicMock()
        mock_request.user = user
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course.id, user, course, depth=2)
        return module_render.get_module(  # pylint: disable=protected-access
            user,
            mock_request,
            problem.location,
            field_data_cache,
            course.id
        )._xmodule

    def test_generate_gradebook_entries(self):
        """
        Test the gradebook entry generator
        """
        # Set up the command context
        # Bogus course id exercises the command's invalid-course handling.
        course_ids = '{},slashes:bogus+course+id'.format(self.course.id)
        user_ids = '{}'.format(self.users[0].id)

        # Gradebook starts empty -- the SIGNAL_ON_SCORE_CHANGED feature was
        # disabled in setUp, so no entries were written during fixture setup.
        current_entries = StudentGradebook.objects.all()
        self.assertEqual(len(current_entries), 0)
        current_entries = StudentGradebookHistory.objects.all()
        self.assertEqual(len(current_entries), 0)

        # Run the command just for one user
        generate_gradebook_entries.Command().handle(user_ids=user_ids)

        # Confirm the gradebook has been properly updated
        current_entries = StudentGradebook.objects.all()
        self.assertEqual(len(current_entries), 1)
        current_entries = StudentGradebookHistory.objects.all()
        self.assertEqual(len(current_entries), 1)
        user0_entry = StudentGradebook.objects.get(user=self.users[0])
        # 0.24 is the weighted course grade for user 0's published scores
        # (depends on the default grading policy) -- TODO confirm if policy changes.
        self.assertEqual(user0_entry.grade, 0.24)

        # Enable the signalling mechanism
        settings._wrapped.default_settings.FEATURES['SIGNAL_ON_SCORE_CHANGED'] = True

        # Change the score of the final exam for that user
        grade = 0.99
        module = self.get_module_for_user(self.users[0], self.course, self.problem5)
        grade_dict = {'value': grade, 'max_value': 1, 'user_id': self.users[0].id}
        module.system.publish(module, 'grade', grade_dict)

        # Confirm the gradebook has been properly updated
        # The signal handler updates the entry in place and appends one
        # history row per change.
        current_entries = StudentGradebook.objects.all()
        self.assertEqual(len(current_entries), 1)
        current_entries = StudentGradebookHistory.objects.all()
        self.assertEqual(len(current_entries), 2)
        user0_entry = StudentGradebook.objects.get(user=self.users[0])
        self.assertEqual(user0_entry.grade, 0.50)

        # Run the command across all users, but just for the specified course
        generate_gradebook_entries.Command().handle(course_ids=course_ids)

        # Confirm that the gradebook has been properly updated
        # user0's grade is unchanged, so only the two new users add entries.
        current_entries = StudentGradebook.objects.all()
        self.assertEqual(len(current_entries), 3)
        current_entries = StudentGradebookHistory.objects.all()
        self.assertEqual(len(current_entries), 4)
        user0_entry = StudentGradebook.objects.get(user=self.users[0])
        self.assertEqual(user0_entry.grade, 0.50)
        user1_entry = StudentGradebook.objects.get(user=self.users[1])
        self.assertEqual(user1_entry.grade, 0.48)
        user2_entry = StudentGradebook.objects.get(user=self.users[2])
        self.assertEqual(user2_entry.grade, 0.72)
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # South-generated schema migration for the gradebook app.
    # NOTE: edits should normally be made by regenerating the migration
    # (./manage.py schemamigration) rather than by hand.

    def forwards(self, orm):
        """Create the StudentGradebook / StudentGradebookHistory tables, the
        (user, course_id) uniqueness constraint, and the leaderboard index."""
        # Adding model 'StudentGradebook'
        db.create_table('gradebook_studentgradebook', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now)),
            ('modified', self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('course_id', self.gf('xmodule_django.models.CourseKeyField')(db_index=True, max_length=255, blank=True)),
            ('grade', self.gf('django.db.models.fields.FloatField')()),
        ))
        db.send_create_signal('gradebook', ['StudentGradebook'])

        # Adding unique constraint on 'StudentGradebook', fields ['user', 'course_id']
        # (one cached grade per user per course)
        db.create_unique('gradebook_studentgradebook', ['user_id', 'course_id'])

        # Adding model 'StudentGradebookHistory'
        db.create_table('gradebook_studentgradebookhistory', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now)),
            ('modified', self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('course_id', self.gf('xmodule_django.models.CourseKeyField')(db_index=True, max_length=255, blank=True)),
            ('grade', self.gf('django.db.models.fields.FloatField')()),
        ))
        db.send_create_signal('gradebook', ['StudentGradebookHistory'])

        # Composite index supporting the leaderboard ordering
        # (ORDER BY -grade, created); dropped implicitly with the table.
        db.create_index('gradebook_studentgradebook', ['grade', 'created'], unique=False, db_tablespace='')

    def backwards(self, orm):
        """Drop both gradebook tables (constraint/index go with them)."""
        # Removing unique constraint on 'StudentGradebook', fields ['user', 'course_id']
        db.delete_unique('gradebook_studentgradebook', ['user_id', 'course_id'])

        # Deleting model 'StudentGradebook'
        db.delete_table('gradebook_studentgradebook')

        # Deleting model 'StudentGradebookHistory'
        db.delete_table('gradebook_studentgradebookhistory')

    # Frozen ORM state captured by South at generation time -- do not edit.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'gradebook.studentgradebook': {
            'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'StudentGradebook'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'grade': ('django.db.models.fields.FloatField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'gradebook.studentgradebookhistory': {
            'Meta': {'object_name': 'StudentGradebookHistory'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'grade': ('django.db.models.fields.FloatField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }

    complete_apps = ['gradebook']
"""
Django database models supporting the gradebook app
"""
from django.utils import timezone
from django.contrib.auth.models import User
from django.db import models
from django.db.models import Avg
from django.db.models.signals import post_save
from django.dispatch import receiver
from model_utils.models import TimeStampedModel
from xmodule_django.models import CourseKeyField
class StudentGradebook(TimeStampedModel):
    """
    StudentGradebook is essentially a container used to cache calculated
    grades (see courseware.grades.grade), which can be an expensive operation.
    """
    user = models.ForeignKey(User, db_index=True)
    course_id = CourseKeyField(db_index=True, max_length=255, blank=True)
    grade = models.FloatField()

    class Meta:
        """
        Meta information for this Django model
        """
        # One cached grade per user per course.
        unique_together = (('user', 'course_id'),)

    @classmethod
    def generate_leaderboard(cls, course_key, user_id=None, count=3, exclude_users=None):
        """
        Assembles a data set representing the Top N users, by grade, for a given course.

        Optionally provide a user_id to include user-specific info.  For example, you
        may want to view the Top 5 users, but also need the data for the logged-in user
        who may actually be currently located in position #10.

        data = {
            'course_avg': 0.873,
            'queryset': [
                {'id': 123, 'username': 'testuser1', 'title': 'Engineer', 'avatar_url': 'http://gravatar.com/123/', 'grade': 0.92, 'created': '2014-01-15 06:27:54'},
                {'id': 983, 'username': 'testuser2', 'title': 'Analyst', 'avatar_url': 'http://gravatar.com/983/', 'grade': 0.91, 'created': '2014-06-27 01:15:54'},
                {'id': 246, 'username': 'testuser3', 'title': 'Product Owner', 'avatar_url': 'http://gravatar.com/246/', 'grade': 0.90, 'created': '2014-03-19 04:54:54'},
                {'id': 357, 'username': 'testuser4', 'title': 'Director', 'avatar_url': 'http://gravatar.com/357/', 'grade': 0.89, 'created': '2014-12-01 08:38:54'},
            ]
            ### IF USER ID SPECIFIED (in this case user_id=246) ###
            'user_position': 4,
            'user_grade': 0.89
        }
        """
        data = {}
        # Only active users participate in the leaderboard and the average.
        queryset = StudentGradebook.objects.select_related('user')\
            .filter(course_id__exact=course_key, user__is_active=True)
        if exclude_users:
            queryset = queryset.exclude(user__in=exclude_users)

        # Course average is computed over the same (possibly filtered) population.
        data['course_avg'] = queryset.aggregate(Avg('grade'))['grade__avg']

        # Construct the leaderboard as a queryset: highest grade first,
        # ties broken by who reached the grade earliest.
        data['queryset'] = queryset.values(
            'user__id',
            'user__username',
            'user__profile__title',
            'user__profile__avatar_url',
            'grade',
            'created')\
            .order_by('-grade', 'created')[:count]

        # If a user_id value was provided, we need to provide some additional user-specific data to the caller
        if user_id:
            # Defaults for a user with no gradebook entry yet.
            user_grade = 0
            user_time_scored = timezone.now()
            try:
                user_entry = StudentGradebook.objects.get(course_id__exact=course_key, user__id=user_id)
                user_grade = user_entry.grade
                user_time_scored = user_entry.created
            except StudentGradebook.DoesNotExist:
                pass
            # Position = 1 + number of users ranked ahead of this user.
            # NOTE(review): the tie-break excludes equal-grade entries created
            # *earlier* than this user's, yet the leaderboard ordering above
            # ranks earlier entries higher -- confirm whether this should be
            # created__gt instead.
            users_above = queryset.filter(grade__gte=user_grade)\
                .exclude(user__id=user_id)\
                .exclude(grade=user_grade, created__lt=user_time_scored)
            # count() issues a SQL COUNT instead of fetching every row.
            data['user_position'] = users_above.count() + 1
            data['user_grade'] = user_grade
        return data
class StudentGradebookHistory(TimeStampedModel):
    """
    A running audit trail for the StudentGradebook model.  Listens for
    post_save events and creates/stores copies of gradebook entries.
    """
    user = models.ForeignKey(User, db_index=True)
    course_id = CourseKeyField(db_index=True, max_length=255, blank=True)
    grade = models.FloatField()

    @receiver(post_save, sender=StudentGradebook)
    def save_history(sender, instance, **kwargs):  # pylint: disable=no-self-argument, unused-argument
        """
        Snapshot every saved StudentGradebook row into the history table,
        preserving the user, course and grade at the time of the save.
        """
        StudentGradebookHistory.objects.create(
            user=instance.user,
            course_id=instance.course_id,
            grade=instance.grade
        )
"""
Signal handlers supporting various gradebook use cases
"""
from django.dispatch import receiver
from courseware import grades
from courseware.signals import score_changed
from util.request import RequestMock
from gradebook.models import StudentGradebook
@receiver(score_changed)
def on_score_changed(sender, **kwargs):
    """
    Listens for a 'score_changed' signal and when observed
    recalculates the specified user's gradebook entry.

    Expects 'user' and 'course_key' in the signal kwargs.
    """
    # Imported at call time, presumably to avoid a circular import -- TODO confirm.
    from courseware.views import get_course
    user = kwargs['user']
    course_key = kwargs['course_key']
    course_descriptor = get_course(course_key, depth=None)
    # Grading requires a request object; fabricate one on behalf of the user.
    request = RequestMock().get('/')
    request.user = user
    grade_data = grades.grade(user, request, course_descriptor)
    grade = grade_data['percent']
    # get_or_create closes the race between concurrent score events that the
    # previous try/get/except-create pattern left open (the unique constraint
    # on (user, course_id) would have raised an IntegrityError).
    gradebook_entry, created = StudentGradebook.objects.get_or_create(
        user=user, course_id=course_key, defaults={'grade': grade})
    if not created and gradebook_entry.grade != grade:
        # Saving triggers the post_save receiver that records history.
        gradebook_entry.grade = grade
        gradebook_entry.save()
# pylint: disable=E1101
"""
Run these tests @ Devstack:
paver test_system -s lms --test_id=lms/djangoapps/gradebook/tests.py
"""
from mock import MagicMock
import uuid
from django.test.utils import override_settings
from capa.tests.response_xml_factory import StringResponseXMLFactory
from courseware import module_render
from courseware.model_data import FieldDataCache
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from gradebook.models import StudentGradebook, StudentGradebookHistory
@override_settings(STUDENT_GRADEBOOK=True)
class GradebookTests(ModuleStoreTestCase):
    """ Test suite for Student Gradebook """

    def setUp(self):
        self.test_server_prefix = 'https://testserver'
        self.user = UserFactory()
        self.score = 0.75

        self.course = CourseFactory.create()
        self.course.always_recalculate_grades = True
        payload = '<html>{}</html>'.format(str(uuid.uuid4()))

        # Two chapters: the first holds one homework problem, the second
        # holds everything else.
        first_chapter = ItemFactory.create(
            category="chapter",
            parent_location=self.course.location,
            data=payload,
            display_name="Chapter 1"
        )
        second_chapter = ItemFactory.create(
            category="chapter",
            parent_location=self.course.location,
            data=payload,
            display_name="Chapter 2"
        )
        ItemFactory.create(
            category="sequential",
            parent_location=first_chapter.location,
            data=payload,
            display_name="Sequence 1",
        )
        ItemFactory.create(
            category="sequential",
            parent_location=second_chapter.location,
            data=payload,
            display_name="Sequence 2",
        )
        # An ungraded problem that should never influence the gradebook.
        ItemFactory.create(
            parent_location=second_chapter.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='foo'),
            metadata={'rerandomize': 'always'},
            display_name="test problem 1",
            max_grade=45
        )

        def _graded_problem(parent, title, assignment_format):
            """Create one graded string-response problem under *parent*."""
            return ItemFactory.create(
                parent_location=parent.location,
                category='problem',
                data=StringResponseXMLFactory().build_xml(answer='bar'),
                display_name=title,
                metadata={'rerandomize': 'always', 'graded': True, 'format': assignment_format}
            )

        # One graded problem per assignment format.
        self.problem = _graded_problem(first_chapter, "homework problem 1", "Homework")
        self.problem2 = _graded_problem(second_chapter, "homework problem 2", "Homework")
        self.problem3 = _graded_problem(second_chapter, "lab problem 1", "Lab")
        self.problem4 = _graded_problem(second_chapter, "midterm problem 2", "Midterm Exam")
        self.problem5 = _graded_problem(second_chapter, "final problem 2", "Final Exam")

    def get_module_for_user(self, user, course, problem):
        """Helper function to get useful module at self.location in self.course_id for user"""
        fake_request = MagicMock()
        fake_request.user = user
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course.id, user, course, depth=2)
        module = module_render.get_module(  # pylint: disable=protected-access
            user,
            fake_request,
            problem.location,
            field_data_cache,
            course.id
        )
        return module._xmodule

    def test_receiver_on_score_changed(self):
        # Publish five scores in sequence; each publish fires score_changed,
        # which should keep exactly one gradebook entry up to date while
        # appending one history row per recalculation.
        scored_problems = [
            (self.problem, 0.75),
            (self.problem2, 0.95),
            (self.problem3, 0.86),
            (self.problem4, 0.92),
            (self.problem5, 0.87),
        ]
        for problem, value in scored_problems:
            module = self.get_module_for_user(self.user, self.course, problem)
            grade_dict = {'value': value, 'max_value': 1, 'user_id': self.user.id}
            module.system.publish(module, 'grade', grade_dict)

        gradebook = StudentGradebook.objects.all()
        self.assertEqual(len(gradebook), 1)
        history = StudentGradebookHistory.objects.all()
        self.assertEqual(len(history), 5)
...@@ -576,7 +576,7 @@ class WorkgroupsApiTests(ModuleStoreTestCase): ...@@ -576,7 +576,7 @@ class WorkgroupsApiTests(ModuleStoreTestCase):
self.assertEqual(response.status_code, 201) self.assertEqual(response.status_code, 201)
# Confirm the grades for the users # Confirm the grades for the users
course_grades_uri = '{}/{}/grades'.format(self.test_courses_uri, self.test_course_id) course_grades_uri = '{}/{}/metrics/grades/'.format(self.test_courses_uri, self.test_course_id)
response = self.do_get(course_grades_uri) response = self.do_get(course_grades_uri)
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
self.assertGreater(len(response.data['grades']), 0) self.assertGreater(len(response.data['grades']), 0)
......
...@@ -262,3 +262,13 @@ except ImportError: ...@@ -262,3 +262,13 @@ except ImportError:
MODULESTORE = convert_module_store_setting_if_needed(MODULESTORE) MODULESTORE = convert_module_store_setting_if_needed(MODULESTORE)
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd' SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
############# Student Module #################
FEATURES['SIGNAL_ON_SCORE_CHANGED'] = True
############# Student Gradebook #################
FEATURES['STUDENT_GRADEBOOK'] = True
if FEATURES.get('STUDENT_GRADEBOOK', False):
INSTALLED_APPS += ('gradebook',)
...@@ -501,20 +501,6 @@ FEATURES['ENABLE_LTI_PROVIDER'] = True ...@@ -501,20 +501,6 @@ FEATURES['ENABLE_LTI_PROVIDER'] = True
INSTALLED_APPS += ('lti_provider',) INSTALLED_APPS += ('lti_provider',)
AUTHENTICATION_BACKENDS += ('lti_provider.users.LtiBackend',) AUTHENTICATION_BACKENDS += ('lti_provider.users.LtiBackend',)
########################## SECURITY #######################
FEATURES['ENFORCE_PASSWORD_POLICY'] = False
FEATURES['ENABLE_MAX_FAILED_LOGIN_ATTEMPTS'] = False
FEATURES['SQUELCH_PII_IN_LOGS'] = False
FEATURES['PREVENT_CONCURRENT_LOGINS'] = False
FEATURES['ADVANCED_SECURITY'] = False
<<<<<<< HEAD
=======
PASSWORD_MIN_LENGTH = None
PASSWORD_COMPLEXITY = {}
############# Performance Profiler ################# ############# Performance Profiler #################
# Note: We've added profiler support to this configuration in order # Note: We've added profiler support to this configuration in order
# to enable analysis when running unit tests. (outputs to console) # to enable analysis when running unit tests. (outputs to console)
...@@ -525,4 +511,13 @@ if FEATURES.get('PROFILER'): ...@@ -525,4 +511,13 @@ if FEATURES.get('PROFILER'):
'profiler.middleware.HotshotProfilerMiddleware', 'profiler.middleware.HotshotProfilerMiddleware',
'profiler.middleware.CProfileProfilerMiddleware', 'profiler.middleware.CProfileProfilerMiddleware',
) )
>>>>>>> a00f851... mattdrayer/api-profiler-middleware: Hotshot/CProfile support
############# Student Module #################
FEATURES['SIGNAL_ON_SCORE_CHANGED'] = True
############# Student Gradebook #################
FEATURES['STUDENT_GRADEBOOK'] = True
if FEATURES.get('STUDENT_GRADEBOOK', False):
INSTALLED_APPS += ('gradebook',)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment