Commit 646f4be1 by Matt Drayer Committed by Jonathan Piacenti

mattdrayer/api-proficiency-redux: New StudentGradebook

parent ad259fdc
......@@ -2,9 +2,9 @@
Management command to generate a list of grades for
all students that are enrolled in a course.
"""
from util.request import RequestMock
from courseware import grades, courses
from certificates.models import GeneratedCertificate
from django.test.client import RequestFactory
from django.core.management.base import BaseCommand, CommandError
import os
from opaque_keys import InvalidKeyError
......@@ -13,23 +13,9 @@ from opaque_keys.edx.locations import SlashSeparatedCourseKey
from django.contrib.auth.models import User
from optparse import make_option
import datetime
from django.core.handlers.base import BaseHandler
import csv
class RequestMock(RequestFactory):
    """Test/utility request factory whose requests pass through middleware.

    Unlike a plain ``RequestFactory`` request, the request returned here has
    been run through the full request-middleware chain loaded by Django's
    ``BaseHandler``, so middleware-populated attributes are present.
    """
    def request(self, **request):
        """Construct a generic request object and apply request middleware."""
        # Build the bare request first, then feed it through middleware.
        request = RequestFactory.request(self, **request)
        handler = BaseHandler()
        handler.load_middleware()
        # NOTE(review): relies on the private ``_request_middleware`` list —
        # this is the pre-Django-1.10 middleware API; verify against the
        # Django version in use.
        for middleware_method in handler._request_middleware:
            # A middleware returning a response means the request was
            # short-circuited; that request is unusable as a mock, so fail.
            if middleware_method(request):
                raise Exception("Couldn't create request mock object - "
                                "request middleware returned a response")
        return request
class Command(BaseCommand):
help = """
......
""" Utility functions related to HTTP requests """
from django.core.handlers.base import BaseHandler
from django.test import RequestFactory
import re
import logging
from django.conf import settings
from django.core.handlers.base import BaseHandler
from django.test.client import RequestFactory
from microsite_configuration import microsite
from opaque_keys import InvalidKeyError
......
......@@ -1326,7 +1326,7 @@ class CourseDescriptor(CourseFields, SequenceDescriptor, LicenseMixin):
for chapter in self.get_children():
for section in chapter.get_children():
if section.graded:
if hasattr(section, 'graded') and section.graded:
xmoduledescriptors = list(yield_descriptor_descendents(section))
xmoduledescriptors.append(section)
......
......@@ -26,7 +26,7 @@ def aggregate_scores(scores, section_name="summary"):
total_correct = sum(score.earned for score in scores)
total_possible = sum(score.possible for score in scores)
#regardless of whether or not it is graded
# regardless of whether or not it is graded
all_total = Score(
total_correct,
total_possible,
......@@ -34,7 +34,7 @@ def aggregate_scores(scores, section_name="summary"):
section_name,
None
)
#selecting only graded things
# selecting only graded things
graded_total = Score(
total_correct_graded,
total_possible_graded,
......
......@@ -23,18 +23,12 @@ class GradeSerializer(serializers.Serializer):
class CourseLeadersSerializer(serializers.Serializer):
""" Serializer for course leaderboard """
id = serializers.IntegerField(source='student__id')
username = serializers.CharField(source='student__username')
title = serializers.CharField(source='student__profile__title')
avatar_url = serializers.CharField(source='student__profile__avatar_url')
points_scored = serializers.SerializerMethodField('get_points_scored')
def get_points_scored(self, obj):
"""
formats points_scored to two decimal points
"""
points_scored = obj['points_scored'] or 0
return int(round(points_scored))
id = serializers.IntegerField(source='user__id')
username = serializers.CharField(source='user__username')
title = serializers.CharField(source='user__profile__title')
avatar_url = serializers.CharField(source='user__profile__avatar_url')
# Percentage grade (versus letter grade)
grade = serializers.FloatField(source='grade')
class CourseCompletionsLeadersSerializer(serializers.Serializer):
......
......@@ -15,12 +15,17 @@ from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.test import TestCase, Client
from django.test.utils import override_settings
from django.utils import timezone
from capa.tests.response_xml_factory import StringResponseXMLFactory
from courseware import module_render
from courseware.tests.factories import StudentModuleFactory
from courseware.model_data import FieldDataCache
from courseware.models import StudentModule
from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE
from django_comment_common.models import Role, FORUM_ROLE_MODERATOR
from gradebook.models import StudentGradebook
from instructor.access import allow_access
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
......@@ -29,7 +34,7 @@ from .content import TEST_COURSE_OVERVIEW_CONTENT, TEST_COURSE_UPDATES_CONTENT,
from .content import TEST_STATIC_TAB1_CONTENT, TEST_STATIC_TAB2_CONTENT
TEST_API_KEY = str(uuid.uuid4())
USER_COUNT = 5
USER_COUNT = 6
SAMPLE_GRADE_DATA_COUNT = 4
......@@ -56,6 +61,22 @@ def _fake_get_get_course_social_stats(course_id):
class CoursesApiTests(TestCase):
""" Test suite for Courses API views """
    def get_module_for_user(self, user, course, problem):
        """Return the runtime xmodule for ``problem`` as seen by ``user``.

        Builds a mocked request carrying the user, primes a FieldDataCache
        over the course descriptor tree, and asks ``module_render`` for the
        module at the problem's location. Callers in this suite use the
        returned module to publish 'grade' events via ``module.system``.
        """
        # A real HttpRequest is not needed; only .user is populated here —
        # assumes get_module reads nothing else off the request (TODO confirm).
        mock_request = mock.MagicMock()
        mock_request.user = user
        # depth=2 pre-caches field data for the course's descendants.
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course.id, user, course, depth=2)
        module = module_render.get_module( # pylint: disable=protected-access
            user,
            mock_request,
            problem.location,
            field_data_cache,
            course.id
        )
        return module
def setUp(self):
self.test_server_prefix = 'https://testserver'
self.base_courses_uri = '/api/server/courses'
......@@ -169,38 +190,23 @@ class CoursesApiTests(TestCase):
user_profile.save()
for i in xrange(SAMPLE_GRADE_DATA_COUNT - 1):
category = 'mentoring'
module_type = 'mentoring'
section = 'Midterm Exam'
if i % 2 is 0:
category = 'group-project'
module_type = 'group-project'
section = "Final Exam"
self.item = ItemFactory.create(
parent_location=self.unit.location,
category=category,
data=StringResponseXMLFactory().build_xml(answer='foo'),
metadata={'rerandomize': 'always'},
display_name=u"test problem" + str(i)
parent_location=self.chapter.location,
category='problem',
data=StringResponseXMLFactory().build_xml(answer='bar'),
display_name='Problem {}'.format(i),
metadata={'rerandomize': 'always', 'graded': True, 'format': section}
)
for j, user in enumerate(self.users):
the_grade = j * 0.75
StudentModuleFactory.create(
grade=the_grade,
max_grade=1 if i < j else 0.5,
student=user,
course_id=self.course.id,
module_state_key=self.item.location,
state=json.dumps({'attempts': self.attempts}),
module_type=module_type
)
for j, user in enumerate(self.users):
StudentModuleFactory.create(
course_id=self.course.id,
module_type='sequential',
module_state_key=self.item.location,
)
points_scored = (j + 1) * 20
points_possible = 100
module = self.get_module_for_user(user, self.course, self.item)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
self.test_course_id = unicode(self.course.id)
self.test_bogus_course_id = 'i4x://foo/bar/baz'
......@@ -330,7 +336,7 @@ class CoursesApiTests(TestCase):
chapter = response.data['content'][0]
self.assertEqual(chapter['category'], 'chapter')
self.assertEqual(chapter['name'], 'Overview')
self.assertEqual(len(chapter['children']), 2)
self.assertEqual(len(chapter['children']), 5)
sequence = chapter['children'][0]
self.assertEqual(sequence['category'], 'videosequence')
......@@ -1521,7 +1527,7 @@ class CoursesApiTests(TestCase):
response = self.do_get(completion_uri)
self.assertEqual(response.status_code, 404)
def test_social_metrics(self):
def test_courses_metrics_social_get(self):
test_uri = '{}/{}/metrics/social/'.format(self.base_courses_uri, self.test_course_id)
response = self.do_get(test_uri)
self.assertEqual(response.status_code, 200)
......@@ -1541,91 +1547,69 @@ class CoursesApiTests(TestCase):
self.assertFalse(users.get('1'))
self.assertTrue(users.get('2'))
def test_courses_leaders_list_get(self):
def test_courses_metrics_grades_leaders_list_get(self):
# make the last user an observer to asset that its content is being filtered out from
# the aggregates
expected_course_average = 0.398
allow_access(self.course, self.users[USER_COUNT-1], 'observer')
# create another module completion to two users with same points
unit = ItemFactory.create(
parent_location=self.sub_section.location,
category="vertical",
metadata={'graded': True, 'format': 'Homework'},
display_name=u"test unit",
)
item = ItemFactory.create(
parent_location=unit.location,
parent_location=self.chapter.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='foo'),
metadata={'rerandomize': 'always'},
display_name=u"test problem same points"
display_name=u"test problem smae points",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Midterm Exam"}
)
StudentModuleFactory.create(
grade=2.25,
max_grade=4,
student=self.users[USER_COUNT-3],
course_id=self.course.id,
module_state_key=item.location,
state=json.dumps({'attempts': self.attempts}),
module_type='mentoring'
)
StudentModule.objects.filter(student=self.users[USER_COUNT-3]).update(created=datetime.now()-timedelta(days=1))
points_scored = 2.25
points_possible = 4
user = self.users[USER_COUNT - 3]
module = self.get_module_for_user(user, self.course, item)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
points_scored = 2.25
points_possible = 4
user = self.users[USER_COUNT - 2]
module = self.get_module_for_user(user, self.course, item)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
test_uri = '{}/{}/metrics/proficiency/leaders/'.format(self.base_courses_uri, self.test_course_id)
StudentGradebook.objects.filter(user=user).update(created=timezone.now() - timedelta(days=1))
test_uri = '{}/{}/metrics/grades/leaders/'.format(self.base_courses_uri, self.test_course_id)
response = self.do_get(test_uri)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['leaders']), 3)
self.assertEqual(response.data['leaders'][0]['username'], 'testuser2')
self.assertEqual(response.data['course_avg'], 3.9)
self.assertEqual(response.data['leaders'][0]['username'], 'testuser4')
self.assertEqual(response.data['course_avg'], expected_course_average)
test_uri = '{}/{}/metrics/proficiency/leaders/?{}'.format(self.base_courses_uri, self.test_course_id, 'count=4')
response = self.do_get(test_uri)
count_filter_test_uri = '{}?{}'.format(test_uri, 'count=4')
response = self.do_get(count_filter_test_uri)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['leaders']), 4)
# Filter by content_id
content_id = {'content_id': self.item.scope_ids.usage_id}
content_filter_uri = '{}/{}/metrics/proficiency/leaders/?{}'\
.format(self.base_courses_uri, self.test_course_id, urlencode(content_id))
response = self.do_get(content_filter_uri)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['leaders']), 3)
self.assertEqual(response.data['course_avg'], 1.1)
# Filter by user_id
user_filter_uri = '{}/{}/metrics/proficiency/leaders/?user_id={}'\
.format(self.base_courses_uri, self.test_course_id, self.users[2].id)
user_filter_uri = '{}?user_id={}'.format(test_uri, self.users[1].id)
response = self.do_get(user_filter_uri)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['leaders']), 3)
self.assertEqual(response.data['course_avg'], 3.9)
self.assertEqual(response.data['position'], 1)
self.assertEqual(response.data['points'], 7)
self.assertEqual(response.data['course_avg'], expected_course_average)
self.assertEqual(response.data['user_position'], 4)
self.assertEqual(response.data['user_grade'], 0.28)
# Filter by user who has never accessed a course module
test_user = UserFactory.create(username="testusernocoursemod")
user_filter_uri = '{}/{}/metrics/proficiency/leaders/?user_id={}'\
.format(self.base_courses_uri, self.test_course_id, test_user.id)
user_filter_uri = '{}?user_id={}'.format(test_uri, test_user.id)
response = self.do_get(user_filter_uri)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['leaders']), 3)
self.assertEqual(response.data['course_avg'], 3.9)
self.assertEqual(response.data['position'], 5)
self.assertEqual(response.data['points'], 0)
self.assertEqual(response.data['user_grade'], 0)
self.assertEqual(response.data['user_position'], 6)
# test with bogus course
test_uri = '{}/{}/metrics/proficiency/leaders/'.format(self.base_courses_uri, self.test_bogus_course_id)
response = self.do_get(test_uri)
bogus_test_uri = '{}/{}/metrics/grades/leaders/'.format(self.base_courses_uri, self.test_bogus_course_id)
response = self.do_get(bogus_test_uri)
self.assertEqual(response.status_code, 404)
# test with bogus content filter
content_id = {'content_id': self.test_bogus_content_id}
content_filter_uri = '{}/{}/metrics/proficiency/leaders/?{}'\
.format(self.base_courses_uri, self.test_course_id, urlencode(content_id))
response = self.do_get(content_filter_uri)
self.assertEqual(response.status_code, 400)
def test_courses_completions_leaders_list_get(self):
completion_uri = '{}/{}/completions/'.format(self.base_courses_uri, unicode(self.course.id))
# Make last user as observer to make sure that data is being filtered out
......@@ -1663,7 +1647,7 @@ class CoursesApiTests(TestCase):
response = self.do_get(test_uri)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['leaders']), 4)
self.assertEqual(response.data['course_avg'], 24)
self.assertEqual(response.data['course_avg'], 20)
# without count filter and user_id
test_uri = '{}/{}/metrics/completions/leaders/?user_id={}'.format(self.base_courses_uri, self.test_course_id,
......@@ -1672,57 +1656,52 @@ class CoursesApiTests(TestCase):
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data['leaders']), 3)
self.assertEqual(response.data['position'], 2)
self.assertEqual(response.data['completions'], 26)
self.assertEqual(response.data['completions'], 28)
# test with bogus course
test_uri = '{}/{}/metrics/completions/leaders/'.format(self.base_courses_uri, self.test_bogus_course_id)
response = self.do_get(test_uri)
self.assertEqual(response.status_code, 404)
def test_courses_grades_list_get(self):
def test_courses_metrics_grades_list_get(self):
# Retrieve the list of grades for this course
# All the course/item/user scaffolding was handled in Setup
test_uri = '{}/{}/grades'.format(self.base_courses_uri, self.test_course_id)
test_uri = '{}/{}/metrics/grades'.format(self.base_courses_uri, self.test_course_id)
response = self.do_get(test_uri)
self.assertEqual(response.status_code, 200)
self.assertGreater(response.data['average_grade'], 0)
self.assertGreater(response.data['points_scored'], 0)
self.assertGreater(response.data['points_possible'], 0)
self.assertGreater(response.data['course_average_grade'], 0)
self.assertGreater(response.data['course_points_scored'], 0)
self.assertGreater(response.data['course_points_possible'], 0)
self.assertGreater(len(response.data['grades']), 0)
self.assertGreater(response.data['grade_average'], 0)
self.assertGreater(response.data['grade_maximum'], 0)
self.assertGreater(response.data['grade_minimum'], 0)
self.assertEqual(response.data['grade_count'], USER_COUNT)
self.assertGreater(response.data['course_grade_average'], 0)
self.assertGreater(response.data['course_grade_maximum'], 0)
self.assertGreater(response.data['course_grade_minimum'], 0)
self.assertEqual(response.data['course_grade_count'], USER_COUNT)
self.assertEqual(len(response.data['grades']), USER_COUNT)
# Filter by user_id
user_filter_uri = '{}?user_id=1,3'.format(test_uri)
response = self.do_get(user_filter_uri)
self.assertEqual(response.status_code, 200)
self.assertGreater(response.data['average_grade'], 0)
self.assertGreater(response.data['points_scored'], 0)
self.assertGreater(response.data['points_possible'], 0)
self.assertGreater(response.data['course_average_grade'], 0)
self.assertGreater(response.data['course_points_scored'], 0)
self.assertGreater(response.data['course_points_possible'], 0)
self.assertGreater(len(response.data['grades']), 0)
# Filter by content_id
content_id = {'content_id': self.item.scope_ids.usage_id}
content_filter_uri = '{}?{}'.format(test_uri, urlencode(content_id))
response = self.do_get(content_filter_uri)
self.assertGreater(response.data['grade_average'], 0)
self.assertGreater(response.data['grade_maximum'], 0)
self.assertGreater(response.data['grade_minimum'], 0)
self.assertEqual(response.data['grade_count'], 2)
self.assertGreater(response.data['course_grade_average'], 0)
self.assertGreater(response.data['course_grade_maximum'], 0)
self.assertGreater(response.data['course_grade_minimum'], 0)
self.assertEqual(response.data['course_grade_count'], USER_COUNT)
self.assertEqual(len(response.data['grades']), 2)
# make the last user an observer to asset that its content is being filtered out from
# the aggregates
user_index = USER_COUNT - 1
allow_access(self.course, self.users[user_index], 'observer')
test_uri = '{}/{}/metrics/grades'.format(self.base_courses_uri, self.test_course_id)
response = self.do_get(test_uri)
self.assertEqual(response.status_code, 200)
self.assertGreater(response.data['average_grade'], 0)
self.assertGreater(response.data['points_scored'], 0)
self.assertGreater(response.data['points_possible'], 0)
self.assertGreater(response.data['course_average_grade'], 0)
self.assertGreater(response.data['course_points_scored'], 0)
self.assertGreater(response.data['course_points_possible'], 0)
self.assertGreater(len(response.data['grades']), 0)
# Filter by invalid content_id
content_id = {'content_id': self.test_bogus_content_id}
content_filter_uri = '{}?{}'.format(test_uri, urlencode(content_id))
response = self.do_get(content_filter_uri)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(response.data['grades']), user_index)
def test_courses_grades_list_get_invalid_course(self):
# Retrieve the list of grades for this course
......@@ -1880,6 +1859,10 @@ class CoursesApiTests(TestCase):
self.assertEqual(response.data['results'][0]['city'], 'Denver')
self.assertEqual(response.data['results'][0]['count'], 5)
# Do a get with a bogus course to hit the 404 case
response = self.do_get('{}/{}/metrics/cities/'.format(self.base_courses_uri, self.test_bogus_course_id))
self.assertEqual(response.status_code, 404)
def test_courses_roles_list_get(self):
allow_access(self.course, self.users[0], 'staff')
allow_access(self.course, self.users[1], 'instructor')
......
......@@ -16,7 +16,6 @@ urlpatterns = patterns(
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/content/(?P<content_id>[a-zA-Z0-9_+\/:-]+)/users/*$', courses_views.CourseContentUsersList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/content/(?P<content_id>[a-zA-Z0-9_+\/:-]+)$', courses_views.CourseContentDetail.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/content/*$', courses_views.CourseContentList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/grades/*$', courses_views.CoursesGradesList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/groups/(?P<group_id>[0-9]+)$', courses_views.CoursesGroupsDetail.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/groups/*$', courses_views.CoursesGroupsList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/overview/*$', courses_views.CoursesOverview.as_view()),
......@@ -24,11 +23,12 @@ urlpatterns = patterns(
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/static_tabs/*$', courses_views.CoursesStaticTabsList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/completions/*$', courses_views.CourseModuleCompletionList.as_view(), name='completion-list'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/projects/*$', courses_views.CoursesProjectList.as_view(), name='courseproject-list'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/*$', courses_views.CourseMetrics.as_view(), name='course-metrics'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/cities/$', courses_views.CoursesCitiesMetrics.as_view(), name='courses-cities-metrics'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/completions/leaders/*$', courses_views.CoursesCompletionsLeadersList.as_view(), name='course-metrics-completions-leaders'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/proficiency/leaders/*$', courses_views.CoursesLeadersList.as_view(), name='course-metrics-proficiency-leaders'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/social/$', courses_views.CoursesSocialMetrics.as_view(), name='courses-social-metrics'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/*$', courses_views.CoursesMetrics.as_view(), name='course-metrics'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/cities/$', courses_views.CoursesMetricsCities.as_view(), name='courses-cities-metrics'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/completions/leaders/*$', courses_views.CoursesMetricsCompletionsLeadersList.as_view(), name='course-metrics-completions-leaders'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/grades/*$', courses_views.CoursesMetricsGradesList.as_view()),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/grades/leaders/*$', courses_views.CoursesMetricsGradesLeadersList.as_view(), name='course-metrics-grades-leaders'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/metrics/social/$', courses_views.CoursesMetricsSocial.as_view(), name='courses-social-metrics'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/roles/(?P<role>[a-z_]+)/users/(?P<user_id>[0-9]+)*$', courses_views.CoursesRolesUsersDetail.as_view(), name='courses-roles-users-detail'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/roles/*$', courses_views.CoursesRolesList.as_view(), name='courses-roles-list'),
url(r'^(?P<course_id>[a-zA-Z0-9_+\/:-]+)/updates/*$', courses_views.CoursesUpdates.as_view()),
......
......@@ -5,13 +5,13 @@ import logging
import itertools
from lxml import etree
from StringIO import StringIO
from datetime import datetime
from django.conf import settings
from django.contrib.auth.models import Group, User
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Avg, Sum, Count, Max
from django.db.models import Avg, Count, Max, Min
from django.http import Http404
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.db.models import Q
......@@ -22,6 +22,7 @@ from courseware.courses import get_course_about_section, get_course_info_section
from courseware.models import StudentModule
from courseware.views import get_static_tab_contents
from django_comment_common.models import FORUM_ROLE_MODERATOR
from gradebook.models import StudentGradebook
from instructor.access import revoke_access, update_forum_role
from student.models import CourseEnrollment, CourseEnrollmentAllowed
from student.roles import CourseRole, CourseAccessRole, CourseInstructorRole, CourseStaffRole, CourseObserverRole, UserBasedRole
......@@ -1400,9 +1401,9 @@ class CourseModuleCompletionList(SecureListAPIView):
return Response({'message': _('Resource already exists')}, status=status.HTTP_409_CONFLICT)
class CoursesGradesList(SecureListAPIView):
class CoursesMetricsGradesList(SecureListAPIView):
"""
### The CoursesGradesList view allows clients to retrieve a list of grades for the specified Course
### The CoursesMetricsGradesList view allows clients to retrieve a list of grades for the specified Course
- URI: ```/api/courses/{course_id}/grades/```
- GET: Returns a JSON representation (array) of the set of grade objects
### Use Cases/Notes:
......@@ -1411,54 +1412,44 @@ class CoursesGradesList(SecureListAPIView):
def get(self, request, course_id): # pylint: disable=W0221
"""
GET /api/courses/{course_id}/grades?user_ids=1,2&content_ids=i4x://1/2/3,i4x://a/b/c
GET /api/courses/{course_id}/metrics/grades?user_ids=1,2
"""
if not course_exists(request, request.user, course_id):
return Response({}, status=status.HTTP_404_NOT_FOUND)
course_key = get_course_key(course_id)
queryset = StudentModule.objects.filter(
course_id__exact=course_key,
grade__isnull=False,
max_grade__isnull=False,
max_grade__gt=0
)
exclude_users = _get_aggregate_exclusion_user_ids(course_key)
queryset = StudentGradebook.objects.filter(course_id__exact=course_key).exclude(user__in=exclude_users)
upper_bound = getattr(settings, 'API_LOOKUP_UPPER_BOUND', 100)
upper_bound = getattr(settings, 'API_LOOKUP_UPPER_BOUND', 200)
user_ids = self.request.QUERY_PARAMS.get('user_id', None)
if user_ids:
user_ids = map(int, user_ids.split(','))[:upper_bound]
queryset = queryset.filter(student__in=user_ids)
content_id = self.request.QUERY_PARAMS.get('content_id', None)
if content_id:
content_descriptor, content_key, existing_content = get_course_child(request, request.user, course_key, content_id) # pylint: disable=W0612
if not content_descriptor:
return Response({}, status=status.HTTP_400_BAD_REQUEST)
queryset = queryset.filter(module_state_key=content_key)
queryset = queryset.filter(user__in=user_ids)
queryset_grade_avg = queryset.aggregate(Avg('grade'))
queryset_grade_sum = queryset.aggregate(Sum('grade'))
queryset_maxgrade_sum = queryset.aggregate(Sum('max_grade'))
course_queryset = StudentModule.objects.filter(
course_id__exact=course_key,
grade__isnull=False,
max_grade__isnull=False,
max_grade__gt=0
)
queryset_grade_max = queryset.aggregate(Max('grade'))
queryset_grade_min = queryset.aggregate(Min('grade'))
queryset_grade_count = queryset.aggregate(Count('grade'))
course_queryset = StudentGradebook.objects.filter(course_id__exact=course_key).exclude(user__in=exclude_users)
course_queryset_grade_avg = course_queryset.aggregate(Avg('grade'))
course_queryset_grade_sum = course_queryset.aggregate(Sum('grade'))
course_queryset_maxgrade_sum = course_queryset.aggregate(Sum('max_grade'))
course_queryset_grade_max = course_queryset.aggregate(Max('grade'))
course_queryset_grade_min = course_queryset.aggregate(Min('grade'))
course_queryset_grade_count = course_queryset.aggregate(Count('grade'))
response_data = {}
base_uri = generate_base_uri(request)
response_data['uri'] = base_uri
response_data['average_grade'] = queryset_grade_avg['grade__avg']
response_data['points_scored'] = queryset_grade_sum['grade__sum']
response_data['points_possible'] = queryset_maxgrade_sum['max_grade__sum']
response_data['course_average_grade'] = course_queryset_grade_avg['grade__avg']
response_data['course_points_scored'] = course_queryset_grade_sum['grade__sum']
response_data['course_points_possible'] = course_queryset_maxgrade_sum['max_grade__sum']
response_data['grade_average'] = queryset_grade_avg['grade__avg']
response_data['grade_maximum'] = queryset_grade_max['grade__max']
response_data['grade_minimum'] = queryset_grade_min['grade__min']
response_data['grade_count'] = queryset_grade_count['grade__count']
response_data['course_grade_average'] = course_queryset_grade_avg['grade__avg']
response_data['course_grade_maximum'] = course_queryset_grade_max['grade__max']
response_data['course_grade_minimum'] = course_queryset_grade_min['grade__min']
response_data['course_grade_count'] = course_queryset_grade_count['grade__count']
response_data['grades'] = []
for row in queryset:
......@@ -1482,9 +1473,9 @@ class CoursesProjectList(SecureListAPIView):
return Project.objects.filter(course_id=course_key)
class CourseMetrics(SecureAPIView):
class CoursesMetrics(SecureAPIView):
"""
### The CourseMetrics view allows clients to retrieve a list of Metrics for the specified Course
### The CoursesMetrics view allows clients to retrieve a list of Metrics for the specified Course
- URI: ```/api/courses/{course_id}/metrics/```
- GET: Returns a JSON representation (array) of the set of course metrics
### Use Cases/Notes:
......@@ -1505,85 +1496,46 @@ class CourseMetrics(SecureAPIView):
return Response(data, status=status.HTTP_200_OK)
class CoursesLeadersList(SecureListAPIView):
class CoursesMetricsGradesLeadersList(SecureListAPIView):
"""
### The CoursesLeadersList view allows clients to retrieve top 3 users who are leading
in terms of points_scored and course average for the specified Course. If user_id parameter is given
### The CoursesMetricsGradesLeadersList view allows clients to retrieve top 3 users who are leading
in terms of grade and course average for the specified Course. If user_id parameter is given
it would return user's position
- URI: ```/api/courses/{course_id}/metrics/proficiency/leaders/?user_id={user_id}```
- GET: Returns a JSON representation (array) of the users with points scored
Filters can also be applied
```/api/courses/{course_id}/metrics/proficiency/leaders/?content_id={content_id}```
- URI: ```/api/courses/{course_id}/metrics/grades/leaders/?user_id={user_id}```
- GET: Returns a JSON representation (array) of the users with grades
To get more than 3 users use count parameter
```/api/courses/{course_id}/metrics/proficiency/leaders/?count=3```
``` /api/courses/{course_id}/metrics/grades/leaders/?count=3```
### Use Cases/Notes:
* Example: Display proficiency leaderboard of a given course
* Example: Display position of a users in a course in terms of proficiency points and course avg
* Example: Display grades leaderboard of a given course
* Example: Display position of a users in a course in terms of grade and course avg
"""
def get(self, request, course_id): # pylint: disable=W0613,W0221
"""
GET /api/courses/{course_id}/metrics/proficiency/leaders/
GET /api/courses/{course_id}/grades/leaders/
"""
user_id = self.request.QUERY_PARAMS.get('user_id', None)
content_id = self.request.QUERY_PARAMS.get('content_id', None)
count = self.request.QUERY_PARAMS.get('count', 3)
data = {}
course_avg = 0
if not course_exists(request, request.user, course_id):
return Response({}, status=status.HTTP_404_NOT_FOUND)
course_key = get_course_key(course_id)
# Users having certain roles (such as an Observer) are excluded from aggregations
exclude_users = _get_aggregate_exclusion_user_ids(course_key)
queryset = StudentModule.objects.filter(
course_id__exact=course_key,
grade__isnull=False,
max_grade__isnull=False,
max_grade__gt=0,
student__is_active=True
).exclude(student__in=exclude_users)
if content_id:
content_descriptor, content_key, existing_content = get_course_child(request, request.user, course_key, content_id) # pylint: disable=W0612
if not content_descriptor:
return Response({}, status=status.HTTP_400_BAD_REQUEST)
queryset = queryset.filter(module_state_key=content_key)
if user_id:
user_queryset = StudentModule.objects.filter(course_id__exact=course_key, grade__isnull=False,
max_grade__isnull=False,
max_grade__gt=0,
student__id=user_id)
user_points = user_queryset.aggregate(points=Sum('grade'))
user_points = user_points['points'] or 0
user_points = round(user_points, 2)
user_time_scored = user_queryset.aggregate(time_scored=Max('created'))
user_time_scored = user_time_scored['time_scored'] or datetime.now()
users_above = queryset.values('student__id').annotate(points=Sum('grade'))\
.annotate(time_scored=Max('created')).\
filter(Q(points__gt=user_points) | Q(points__gte=user_points, time_scored__lt=user_time_scored))\
.exclude(student__id=user_id).count() # excluding user to overcome
# float comparison bug
data['position'] = users_above + 1
data['points'] = int(round(user_points))
points = queryset.aggregate(total=Sum('grade'))
if points and points['total'] is not None:
users_total = CourseEnrollment.users_enrolled_in(course_key).exclude(id__in=exclude_users).count()
if users_total:
course_avg = round(points['total'] / float(users_total), 1)
data['course_avg'] = course_avg
queryset = queryset.filter(student__is_active=True).values('student__id', 'student__username',
'student__profile__title',
'student__profile__avatar_url')\
.annotate(points_scored=Sum('grade')).annotate(time_scored=Max('created'))\
.order_by('-points_scored', 'time_scored')[:count]
serializer = CourseLeadersSerializer(queryset, many=True)
leaderboard_data = StudentGradebook.generate_leaderboard(course_key, user_id=user_id, count=count, exclude_users=exclude_users)
serializer = CourseLeadersSerializer(leaderboard_data['queryset'], many=True)
data['leaders'] = serializer.data # pylint: disable=E1101
data['course_avg'] = leaderboard_data['course_avg']
if 'user_position' in leaderboard_data:
data['user_position'] = leaderboard_data['user_position']
if 'user_grade' in leaderboard_data:
data['user_grade'] = leaderboard_data['user_grade']
return Response(data, status=status.HTTP_200_OK)
class CoursesCompletionsLeadersList(SecureAPIView):
class CoursesMetricsCompletionsLeadersList(SecureAPIView):
"""
### The CoursesCompletionsLeadersList view allows clients to retrieve top 3 users who are leading
in terms of course module completions and course average for the specified Course, if user_id parameter is given
......@@ -1624,7 +1576,7 @@ class CoursesCompletionsLeadersList(SecureAPIView):
.exclude(cat_list)
user_completions = user_queryset.count()
user_time_completed = user_queryset.aggregate(time_completed=Max('created'))
user_time_completed = user_time_completed['time_completed'] or datetime.now()
user_time_completed = user_time_completed['time_completed'] or timezone.now()
completions_above_user = queryset.filter(user__is_active=True).values('user__id')\
.annotate(completions=Count('content_id')).annotate(time_completed=Max('created'))\
.filter(Q(completions__gt=user_completions) | Q(completions=user_completions,
......@@ -1670,9 +1622,9 @@ class CoursesWorkgroupsList(SecureListAPIView):
return queryset
class CoursesSocialMetrics(SecureListAPIView):
class CoursesMetricsSocial(SecureListAPIView):
"""
### The CoursesSocialMetrics view allows clients to query about the activity of all users in the
### The CoursesMetricsSocial view allows clients to query about the activity of all users in the
forums
- URI: ```/api/users/{course_id}/metrics/social/```
- GET: Returns a list of social metrics for users in the specified course
......@@ -1712,9 +1664,9 @@ class CoursesSocialMetrics(SecureListAPIView):
return Response(data, http_status)
class CoursesCitiesMetrics(SecureListAPIView):
class CoursesMetricsCities(SecureListAPIView):
"""
### The CoursesCitiesMetrics view allows clients to retrieve ordered list of user
### The CoursesMetricsCities view allows clients to retrieve ordered list of user
count by city in a particular course
- URI: ```/api/courses/{course_id}/metrics/cities/```
- GET: Provides paginated list of user count by cities
......
""" Centralized access to LMS courseware app """
from django.utils import timezone
from courseware import courses, module_render
from courseware.model_data import FieldDataCache
......@@ -133,3 +134,49 @@ def get_course_child_content(request, user, course_key, child_descriptor):
field_data_cache,
course_key)
return child_content
def calculate_proforma_grade(grade_summary, grading_policy):
    """
    Calculates a projected (proforma) final grade based on the current state
    of grades using the provided grading policy. Sections equate to grading policy
    'types' and have values such as 'Homework', 'Lab', 'MidtermExam', and 'FinalExam'

    We invert the concepts here and use the section weights as the possible scores by
    assuming that the section weights total 100 percent. So, if a Homework section
    is worth 15 percent of your overall grade, and you have currently scored 70 percent
    for that section, the normalized score for the Homework section is 0.105. Note that
    we do not take into account dropped assignments/scores, such as lowest-two homeworks.

    After all scored sections are processed we take the remaining weight at its full
    value as a projection of the user obtaining 100 percent of the section potential.

    Example:
    - Section: Homework, Weight: 15%, Totaled Score: 70%, Normalized Score: 0.105
    - Section: MidtermExam, Weight: 30%, Totaled Score: 80%, Normalized Score: 0.240
    - Section: Final Exam, Weight: 40%, Totaled Score: 95%, Normalized Score: 0.380
    - Remaining Weight: 0.15 (unscored Lab section), assume 100%, of 15% => 0.150
    - Proforma Grade = 0.105 + 0.240 + 0.380 + 0.150 = 0.875 (87.5%)

    Args:
        grade_summary (dict): must contain 'totaled_scores', a mapping of section
            type -> iterable of score objects exposing `earned`, `possible`, `due`.
        grading_policy (dict): must contain 'GRADER', a list of dicts each with
            'type' and 'weight' keys.

    Returns:
        float: projected final grade in the range [0.0, 1.0] (assuming weights sum to 1).
    """
    remaining_weight = 1.00
    proforma_grade = 0.00
    totaled_scores = grade_summary['totaled_scores']
    for section in totaled_scores:
        points_earned = 0.00
        points_possible = 0.00
        # totaled_scores is a collection of currently-recorded scores for a given section;
        # we iterate through and combine the scores to create an overall score for the section.
        # This loop does not take into account dropped assignments (eg, homeworks).
        for score in totaled_scores[section]:
            # Only count grades where points have been scored, or where the due date has passed
            if score.earned or (score.due and score.due < timezone.now()):
                points_earned = points_earned + score.earned
                points_possible = points_possible + score.possible
        if not points_possible:
            # No countable scores for this section (empty section, or nothing earned
            # and nothing past due).  Treat it as unscored: leave its weight in
            # remaining_weight (projected at 100 percent) instead of dividing by zero.
            continue
        grade = points_earned / points_possible
        # Match the section to its grading-policy entry by type (eg, 'Homework')
        section_policy = next((policy for policy in grading_policy['GRADER'] if policy['type'] == section), None)
        if section_policy is not None:
            section_weight = section_policy['weight']
            proforma_grade = proforma_grade + (section_weight * grade)
            remaining_weight = remaining_weight - section_weight
    # Project 100 percent for every section that has not yet been scored
    proforma_grade = proforma_grade + remaining_weight
    return proforma_grade
......@@ -10,22 +10,27 @@ from random import randint
import json
import uuid
from urllib import urlencode
from mock import patch
import mock
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.test import Client
from django.test.utils import override_settings
from django.utils import timezone
from django.utils.translation import ugettext as _
from capa.tests.response_xml_factory import StringResponseXMLFactory
from courseware.tests.factories import StudentModuleFactory
from courseware import module_render
from courseware.model_data import FieldDataCache
from django_comment_common.models import Role, FORUM_ROLE_MODERATOR
from instructor.access import allow_access
from notification_prefs import NOTIFICATION_PREF_KEY
from projects.models import Project, Workgroup
from student.tests.factories import UserFactory
from student.models import anonymous_id_for_user
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from openedx.core.djangoapps.user_api.models import UserPreference
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
TEST_API_KEY = str(uuid.uuid4())
......@@ -44,11 +49,25 @@ class SecureClient(Client):
@override_settings(EDX_API_KEY=TEST_API_KEY)
@override_settings(PASSWORD_MIN_LENGTH=4)
@override_settings(API_PAGE_SIZE=10)
@patch.dict("django.conf.settings.FEATURES", {'ENFORCE_PASSWORD_POLICY': True})
@mock.patch.dict("django.conf.settings.FEATURES", {'ENFORCE_PASSWORD_POLICY': True})
class UsersApiTests(ModuleStoreTestCase):
""" Test suite for Users API views """
def get_module_for_user(self, user, course, problem):
    """Return the runtime module for `problem` in `course` as seen by `user`."""
    # Fake out an HTTP request carrying the target user.
    fake_request = mock.MagicMock()
    fake_request.user = user
    cache = FieldDataCache.cache_for_descriptor_descendents(
        course.id, user, course, depth=2
    )
    return module_render.get_module(  # pylint: disable=protected-access
        user, fake_request, problem.location, cache, course.id
    )
def setUp(self):
self.test_server_prefix = 'https://testserver'
self.test_username = str(uuid.uuid4())
......@@ -300,6 +319,20 @@ class UsersApiTests(ModuleStoreTestCase):
self.assertGreater(response.data['message'], 0)
self.assertEqual(response.data['field_conflict'], 'username or email')
@mock.patch.dict("student.models.settings.FEATURES", {"ENABLE_DISCUSSION_EMAIL_DIGEST": True})
def test_user_list_post_discussion_digest_email(self):
    """
    Creating a user while ENABLE_DISCUSSION_EMAIL_DIGEST is on should seed a
    notification preference for the new user.
    """
    test_uri = self.users_base_uri
    local_username = self.test_username + str(randint(11, 99))
    data = {
        'email': self.test_email,
        'username': local_username,
        'password': self.test_password,
        'first_name': self.test_first_name,
        'last_name': self.test_last_name,
    }
    response = self.do_post(test_uri, data)
    self.assertEqual(response.status_code, 201)
    self.assertGreater(response.data['id'], 0)
    # Removed unused `confirm_uri` local -- it was computed but never asserted against.
    user = User.objects.get(id=response.data['id'])
    # The digest feature flag should have created a notification preference entry
    self.assertIsNotNone(UserPreference.get_preference(user, NOTIFICATION_PREF_KEY))
def test_user_detail_get(self):
test_uri = self.users_base_uri
local_username = self.test_username + str(randint(11, 99))
......@@ -1135,7 +1168,7 @@ class UsersApiTests(ModuleStoreTestCase):
response = self.do_delete(test_uri)
self.assertEqual(response.status_code, 404)
def test_course_grades(self):
def test_user_courses_grades_list_get(self):
user_id = self.user.id
course = CourseFactory.create()
......@@ -1182,15 +1215,129 @@ class UsersApiTests(ModuleStoreTestCase):
display_name="test problem 2"
)
StudentModuleFactory.create(
grade=1,
max_grade=1,
student=self.user,
course_id=course.id,
module_state_key=problem.location,
state=json.dumps({'attempts': 3}),
module_type='mentoring'
item = ItemFactory.create(
parent_location=chapter2.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='foo'),
display_name=u"test mentoring midterm",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Midterm Exam"}
)
points_scored = 1
points_possible = 1
user = self.user
module = self.get_module_for_user(user, course, item)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
item2 = ItemFactory.create(
parent_location=chapter2.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='bar'),
display_name=u"test mentoring final",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Final Exam"}
)
points_scored = 95
points_possible = 100
user = self.user
module = self.get_module_for_user(user, course, item2)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
item3 = ItemFactory.create(
parent_location=chapter2.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='bar'),
display_name=u"test mentoring homework",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
)
points_scored = 7
points_possible = 10
user = self.user
module = self.get_module_for_user(user, course, item3)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
item4 = ItemFactory.create(
parent_location=chapter2.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='bar'),
display_name=u"test mentoring homework 2",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
)
points_scored = 9
points_possible = 10
user = self.user
module = self.get_module_for_user(user, course, item4)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
item5 = ItemFactory.create(
parent_location=chapter2.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='bar'),
display_name=u"test mentoring homework 3",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"},
due=datetime(2015, 1, 16, 14, 30).replace(tzinfo=timezone.utc)
)
points_scored = 1
points_possible = 1
user = self.user
module = self.get_module_for_user(user, course, item)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
item2 = ItemFactory.create(
parent_location=chapter2.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='bar'),
display_name=u"test mentoring final",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Final Exam"}
)
points_scored = 95
points_possible = 100
user = self.user
module = self.get_module_for_user(user, course, item2)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
item3 = ItemFactory.create(
parent_location=chapter2.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='bar'),
display_name=u"test mentoring homework",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
)
points_scored = 7
points_possible = 10
user = self.user
module = self.get_module_for_user(user, course, item3)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
item4 = ItemFactory.create(
parent_location=chapter2.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='bar'),
display_name=u"test mentoring homework 2",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
)
points_scored = 9
points_possible = 10
user = self.user
module = self.get_module_for_user(user, course, item4)
grade_dict = {'value': points_scored, 'max_value': points_possible, 'user_id': user.id}
module.system.publish(module, 'grade', grade_dict)
item5 = ItemFactory.create(
parent_location=chapter2.location,
category='mentoring',
data=StringResponseXMLFactory().build_xml(answer='bar'),
display_name=u"test mentoring homework 3",
metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"},
due=datetime(2015, 1, 16, 14, 30).replace(tzinfo=timezone.utc)
)
test_uri = '{}/{}/courses/{}/grades'.format(self.users_base_uri, user_id, unicode(course.id))
......@@ -1208,7 +1355,7 @@ class UsersApiTests(ModuleStoreTestCase):
self.assertEqual(sections[0]['graded'], False)
sections = courseware_summary[1]['sections']
self.assertEqual(len(sections), 3)
self.assertEqual(len(sections), 8)
self.assertEqual(sections[0]['display_name'], 'Sequence 2')
self.assertEqual(sections[0]['graded'], False)
......@@ -1218,8 +1365,8 @@ class UsersApiTests(ModuleStoreTestCase):
self.assertGreater(len(grading_policy['GRADER']), 0)
self.assertIsNotNone(grading_policy['GRADE_CUTOFFS'])
self.assertEqual(response.data['current_grade'], 50)
self.assertEqual(response.data['pro_forma_grade'], 100)
self.assertEqual(response.data['current_grade'], 0.7)
self.assertEqual(response.data['proforma_grade'], 0.95)
def is_user_profile_created_updated(self, response, data):
"""This function compare response with user profile data """
......
......@@ -6,7 +6,7 @@ from requests.exceptions import ConnectionError
from django.contrib.auth.models import Group
from django.core.exceptions import ObjectDoesNotExist
from django.db import IntegrityError
from django.db.models import Count, Q, Sum
from django.db.models import Count, Q
from django.core.validators import validate_email, validate_slug, ValidationError
from django.conf import settings
from django.http import Http404
......@@ -17,8 +17,8 @@ from rest_framework.response import Response
from courseware import grades, module_render
from courseware.model_data import FieldDataCache
from courseware.models import StudentModule
from django_comment_common.models import Role, FORUM_ROLE_MODERATOR
from gradebook.models import StudentGradebook
from instructor.access import revoke_access, update_forum_role
from lang_pref import LANGUAGE_KEY
from lms.lib.comment_client.user import User as CommentUser
......@@ -34,7 +34,7 @@ from util.password_policy_validators import (
)
from api_manager.courses.serializers import CourseModuleCompletionSerializer
from api_manager.courseware_access import get_course, get_course_child, get_course_total_score, get_course_key, course_exists
from api_manager.courseware_access import get_course, get_course_child, get_course_key, course_exists, calculate_proforma_grade
from api_manager.permissions import SecureAPIView, SecureListAPIView, IdsInFilterBackend, HasOrgsFilterBackend
from api_manager.models import GroupProfile, APIUser as User
from api_manager.organizations.serializers import OrganizationSerializer
......@@ -917,36 +917,27 @@ class UsersCoursesGradesDetail(SecureAPIView):
if not course_descriptor:
return Response({}, status=status.HTTP_404_NOT_FOUND)
courseware_summary = grades.progress_summary(student, request, course_descriptor) # pylint: disable=W0612
progress_summary = grades.progress_summary(student, request, course_descriptor) # pylint: disable=W0612
grade_summary = grades.grade(student, request, course_descriptor)
grading_policy = course_descriptor.grading_policy
current_grade = 0
proforma_grade = 0
queryset = StudentModule.objects.filter(
queryset = StudentGradebook.objects.filter(
user=student,
course_id__exact=course_key,
max_grade__isnull=False,
max_grade__gt=0
)
if len(queryset):
current_grade = queryset[0].grade
proforma_grade = calculate_proforma_grade(grade_summary, grading_policy)
total_score = get_course_total_score(courseware_summary)
user_queryset = queryset.filter(grade__isnull=False, student=student)
comp_modules = user_queryset.aggregate(Sum('grade'))
score_of_comp_module = comp_modules['grade__sum'] or 0
max_possible_score = user_queryset.aggregate(Sum('max_grade'))
current_grade = 0
pro_forma_grade = 0
if total_score:
current_grade = score_of_comp_module / float(total_score) * 100
if max_possible_score['max_grade__sum']:
pro_forma_grade = score_of_comp_module / float(max_possible_score['max_grade__sum']) * 100
response_data = {
'courseware_summary': courseware_summary,
'courseware_summary': progress_summary,
'grade_summary': grade_summary,
'grading_policy': grading_policy,
'current_grade': current_grade,
'pro_forma_grade': pro_forma_grade
'proforma_grade': proforma_grade
}
return Response(response_data)
......
......@@ -184,6 +184,7 @@ def _grade(student, request, course, keep_raw_scores):
for section in sections:
section_descriptor = section['section_descriptor']
section_name = section_descriptor.display_name_with_default
section_due = section_descriptor.due
# some problems have state that is updated independently of interaction
# with the LMS, so they need to always be scored. (E.g. foldit.,
......@@ -252,7 +253,7 @@ def _grade(student, request, course, keep_raw_scores):
total,
graded,
module_descriptor.display_name_with_default,
module_descriptor.location
section_due
)
)
......@@ -260,7 +261,7 @@ def _grade(student, request, course, keep_raw_scores):
if keep_raw_scores:
raw_scores += scores
else:
graded_total = Score(0.0, 1.0, True, section_name, None)
graded_total = Score(0.0, 1.0, True, section_name, section_due)
#Add the graded total to totaled_scores
if graded_total.possible > 0:
......@@ -380,6 +381,7 @@ def _progress_summary(student, request, course):
continue
graded = section_module.graded
due = section_module.due
scores = []
module_creator = section_module.xmodule_runtime.get_module
......@@ -400,7 +402,7 @@ def _progress_summary(student, request, course):
total,
graded,
module_descriptor.display_name_with_default,
module_descriptor.location
due
)
)
......
......@@ -67,6 +67,7 @@ class ChunkingManager(models.Manager):
)
return res
from courseware.signals import score_changed
class StudentModule(models.Model):
"""
......@@ -142,6 +143,28 @@ class StudentModule(models.Model):
return unicode(repr(self))
@receiver(post_save, sender=StudentModule)
def send_score_changed_signal(sender, instance, **kwargs):
    """
    Post-save receiver for StudentModule: broadcast the `score_changed` signal
    to connected receivers when a recorded score appears to have changed.

    Gated on the SIGNAL_ON_SCORE_CHANGED feature flag (default off) and only
    considered when the saved instance actually carries a grade.
    """
    if settings.FEATURES.get('SIGNAL_ON_SCORE_CHANGED', False) and instance.grade is not None:
        # Prior graded history rows for this module, newest first.  The row that
        # mirrors the current save is excluded by matching created/state.
        # NOTE(review): `created=instance.modified` assumes the history row's
        # `created` timestamp equals the module's `modified` time -- confirm
        # against how StudentModuleHistory rows are written.
        previous_entries = StudentModuleHistory.objects.filter(student_module=instance)\
            .exclude(grade=None)\
            .exclude(created=instance.modified, state=instance.state)\
            .order_by('-id')
        # Fire when there is no prior graded entry, or when grade/max_grade differ
        # from the most recent one.
        if not len(previous_entries) or\
                (instance.grade != previous_entries[0].grade) or\
                (instance.max_grade != previous_entries[0].max_grade):
            score_changed.send(
                sender=sender,
                user=instance.student,
                course_key=instance.course_id,
                score=instance.grade,
                problem=instance.module_state_key
            )
class StudentModuleHistory(models.Model):
"""Keeps a complete history of state changes for a given XModule for a given
Student. Right now, we restrict this to problems so that the table doesn't
......
"""
https://docs.djangoproject.com/en/dev/topics/signals/
"""
import django.dispatch
score_changed = django.dispatch.Signal(providing_args=["user", "course", "score", "problem"])
......@@ -1251,6 +1251,7 @@ class TestModuleTrackingContext(ModuleStoreTestCase):
)
def test_context_contains_display_name(self, mock_tracker):
    """Verify the emitted tracking event carries the problem's display name."""
    mock_tracker.reset_mock()
    problem_display_name = u'Option Response Problem'
    module_info = self.handle_callback_and_get_module_info(mock_tracker, problem_display_name)
    # NOTE(review): handle_callback_and_get_module_info now appears to return the
    # display-name string itself (its return is `call_data['context']['module']['display_name']`);
    # indexing that result with ['display_name'] would fail on a plain string --
    # confirm the helper's contract before relying on this assertion.
    self.assertEquals(problem_display_name, module_info['display_name'])
......@@ -1277,12 +1278,14 @@ class TestModuleTrackingContext(ModuleStoreTestCase):
'problem_check',
)
self.assertEquals(len(mock_tracker.send.mock_calls), 1)
mock_call = mock_tracker.send.mock_calls[0]
event = mock_call[1][0]
mock_calls = mock_tracker.send.mock_calls
for call in mock_calls:
call_data = call[1][0]
event_type = call_data.get('event_type')
if event_type == 'problem_check':
break
self.assertEquals(event['event_type'], 'problem_check')
return event['context']['module']
return call_data['context']['module']['display_name']
def test_missing_display_name(self, mock_tracker):
actual_display_name = self.handle_callback_and_get_module_info(mock_tracker)['display_name']
......
......@@ -1045,18 +1045,21 @@ class TestAnswerDistributions(TestSubmittingProblems):
# We'll submit one problem, and then muck with the student_answers
# dict inside its state to try different data types (str, int, float,
# none)
self.submit_question_answer('p1', {'2_1': u'Correct'})
problem_name = 'p1'
self.submit_question_answer(problem_name, {'2_1': u'Correct'})
# Now fetch the state entry for that problem.
student_module = StudentModule.objects.get(
student_modules = StudentModule.objects.filter(
course_id=self.course.id,
student=self.student_user
)
for val in ('Correct', True, False, 0, 0.0, 1, 1.0, None):
state = json.loads(student_module.state)
state["student_answers"]['{}_2_1'.format(self.p1_html_id)] = val
student_module.state = json.dumps(state)
student_module.save()
for student_module in student_modules:
if student_module.module_state_key.name == problem_name:
for val in ('Correct', True, False, 0, 0.0, 1, 1.0, None):
state = json.loads(student_module.state)
state["student_answers"]['{}_2_1'.format(self.p1_html_id)] = val
student_module.state = json.dumps(state)
student_module.save()
self.assertEqual(
grades.answer_distributions(self.course.id),
......@@ -1066,40 +1069,64 @@ class TestAnswerDistributions(TestSubmittingProblems):
},
}
)
for student_module in student_modules:
if student_module.module_state_key.name == problem_name:
for val in ('Correct', True, False, 0, 0.0, 1, 1.0, None):
state = json.loads(student_module.state)
state["student_answers"]['i4x-MITx-100-problem-p1_2_1'] = val
student_module.state = json.dumps(state)
student_module.save()
self.assertEqual(
grades.answer_distributions(self.course.id),
{
('p1', 'p1', 'i4x-MITx-100-problem-p1_2_1'): {
str(val): 1
},
}
)
def test_missing_content(self):
# If there's a StudentModule entry for content that no longer exists,
# we just quietly ignore it (because we can't display a meaningful url
# or name for it).
self.submit_question_answer('p1', {'2_1': 'Incorrect'})
problem_name = 'p1'
self.submit_question_answer(problem_name, {'2_1': 'Incorrect'})
# Now fetch the state entry for that problem and alter it so it points
# to a non-existent problem.
student_module = StudentModule.objects.get(
student_modules = StudentModule.objects.filter(
course_id=self.course.id,
student=self.student_user
)
student_module.module_state_key = student_module.module_state_key.replace(
name=student_module.module_state_key.name + "_fake"
)
student_module.save()
for student_module in student_modules:
if student_module.module_state_key.name == problem_name:
student_module.module_state_key = student_module.module_state_key.replace(
name=student_module.module_state_key.name + "_fake"
)
student_module.save()
# It should be empty (ignored)
empty_distribution = grades.answer_distributions(self.course.id)
self.assertFalse(empty_distribution) # should be empty
# It should be empty (ignored)
empty_distribution = grades.answer_distributions(self.course.id)
self.assertFalse(empty_distribution) # should be empty
def test_broken_state(self):
# Missing or broken state for a problem should be skipped without
# causing the whole answer_distribution call to explode.
# Submit p1
self.submit_question_answer('p1', {'2_1': u'Correct'})
prb1_name = 'p1'
self.submit_question_answer(prb1_name, {'2_1': u'Correct'})
# Now fetch the StudentModule entry for p1 so we can corrupt its state
prb1 = StudentModule.objects.get(
student_modules = StudentModule.objects.filter(
course_id=self.course.id,
student=self.student_user
)
for student_module in student_modules:
if student_module.module_state_key.name == prb1_name:
prb1 = student_module
break
# Submit p2
self.submit_question_answer('p2', {'2_1': u'Incorrect'})
......
"""
Initialization module for gradebook djangoapp
"""
import gradebook.receivers
"""
One-time data migration script -- shouldn't need to run it again
"""
import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from courseware import grades
from gradebook.models import StudentGradebook
from student.models import CourseEnrollment
from xmodule.modulestore.django import modulestore
from util.request import RequestMock
log = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    Creates (or updates) gradebook entries for the specified course(s) and/or user(s)
    """
    # `help` and `option_list` must be class attributes for Django/optparse to
    # see them; previously they were defined inside handle(), so the -c/-u
    # options were never registered with the option parser.
    help = "Command to create or update gradebook entries"
    option_list = BaseCommand.option_list + (
        make_option(
            "-c",
            "--course_ids",
            dest="course_ids",
            help="List of courses for which to generate grades",
            metavar="slashes:first+course+id,slashes:second+course+id"
        ),
        make_option(
            "-u",
            "--user_ids",
            dest="user_ids",
            help="List of users for which to generate grades",
            metavar="1234,2468,3579"
        ),
    )

    def handle(self, *args, **options):
        """
        Generate or refresh a StudentGradebook row for every selected
        (course, user) pair.  Defaults to all courses and all enrolled users;
        narrow the set with --course_ids and/or --user_ids.
        """
        course_ids = options.get('course_ids')
        user_ids = options.get('user_ids')

        # Get the list of courses from the system
        courses = modulestore().get_courses()

        # If one or more courses were specified by the caller, just use those ones...
        if course_ids is not None:
            filtered_courses = []
            for course in courses:
                if unicode(course.id) in course_ids.split(','):
                    filtered_courses.append(course)
            courses = filtered_courses

        for course in courses:
            users = CourseEnrollment.users_enrolled_in(course.id)
            # If one or more users were specified by the caller, just use those ones...
            if user_ids is not None:
                filtered_users = []
                for user in users:
                    if str(user.id) in user_ids.split(','):
                        filtered_users.append(user)
                users = filtered_users

            # For each user, regrade via a mocked request and persist the result
            for user in users:
                request = RequestMock().get('/')
                request.user = user
                # (removed stray debug `print grade_data`)
                grade_data = grades.grade(user, request, course)
                grade = grade_data['percent']
                try:
                    gradebook_entry = StudentGradebook.objects.get(user=user, course_id=course.id)
                    # Only write when the grade actually changed
                    if gradebook_entry.grade != grade:
                        gradebook_entry.grade = grade
                        gradebook_entry.save()
                except StudentGradebook.DoesNotExist:
                    StudentGradebook.objects.create(user=user, course_id=course.id, grade=grade)
                log_msg = 'Gradebook entry created -- Course: {}, User: {} (grade: {})'.format(course.id, user.id, grade)
                # Use the management-command stdout wrapper instead of bare print
                self.stdout.write(log_msg)
                log.info(log_msg)
"""
Run these tests @ Devstack:
rake fasttest_lms[common/djangoapps/api_manager/management/commands/tests/test_migrate_orgdata.py]
"""
from datetime import datetime
from mock import MagicMock
import uuid
from django.conf import settings
from capa.tests.response_xml_factory import StringResponseXMLFactory
from courseware import module_render
from courseware.model_data import FieldDataCache
from gradebook.management.commands import generate_gradebook_entries
from gradebook.models import StudentGradebook, StudentGradebookHistory
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
class GenerateGradebookEntriesTests(ModuleStoreTestCase):
    """
    Test suite for grade generation script
    """
    def setUp(self):
        """Build a graded course, three enrolled users, and seed scores for each."""
        # Turn off the signalling mechanism temporarily
        # NOTE(review): poking settings._wrapped.default_settings is a private
        # Django internal -- override_settings would be the supported route.
        settings._wrapped.default_settings.FEATURES['SIGNAL_ON_SCORE_CHANGED'] = False

        # Create a couple courses to work with
        self.course = CourseFactory.create(
            start=datetime(2014, 6, 16, 14, 30),
            end=datetime(2015, 1, 16)
        )
        self.test_data = '<html>{}</html>'.format(str(uuid.uuid4()))

        chapter1 = ItemFactory.create(
            category="chapter",
            parent_location=self.course.location,
            data=self.test_data,
            due=datetime(2014, 5, 16, 14, 30),
            display_name="Overview"
        )
        chapter2 = ItemFactory.create(
            category="chapter",
            parent_location=self.course.location,
            data=self.test_data,
            due=datetime(2014, 5, 16, 14, 30),
            display_name="Overview"
        )
        # Five graded problems spanning the grading-policy types
        # (Homework x2, Lab, Midterm Exam, Final Exam)
        self.problem = ItemFactory.create(
            parent_location=chapter1.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="homework problem 1",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
        )
        self.problem2 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="homework problem 2",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
        )
        self.problem3 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="lab problem 1",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Lab"}
        )
        self.problem4 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="midterm problem 2",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Midterm Exam"}
        )
        self.problem5 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="final problem 2",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Final Exam"}
        )

        # Create some users and enroll them
        self.users = [UserFactory.create(username="testuser" + str(__), profile='test') for __ in xrange(3)]
        for user in self.users:
            CourseEnrollmentFactory.create(user=user, course_id=self.course.id)
            # Publish a score on each problem, scaled by user.id so every user
            # lands on a distinct grade.
            # NOTE(review): this couples expected grades below to factory-assigned
            # user ids -- confirm ids are deterministic in this test run.
            grade = 0.15 * user.id
            module = self.get_module_for_user(user, self.course, self.problem)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)
            grade = 0.20 * user.id
            module = self.get_module_for_user(user, self.course, self.problem2)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)
            grade = 0.25 * user.id
            module = self.get_module_for_user(user, self.course, self.problem3)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)
            grade = 0.30 * user.id
            module = self.get_module_for_user(user, self.course, self.problem4)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)
            grade = 0.33 * user.id
            module = self.get_module_for_user(user, self.course, self.problem5)
            grade_dict = {'value': grade, 'max_value': 1, 'user_id': user.id}
            module.system.publish(module, 'grade', grade_dict)

    def get_module_for_user(self, user, course, problem):
        """Helper function to get useful module at self.location in self.course_id for user"""
        mock_request = MagicMock()
        mock_request.user = user
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course.id, user, course, depth=2)
        return module_render.get_module(  # pylint: disable=protected-access
            user,
            mock_request,
            problem.location,
            field_data_cache,
            course.id
        )._xmodule

    def test_generate_gradebook_entries(self):
        """
        Test the gradebook entry generator
        """
        # Set up the command context: one valid course id plus a bogus one
        # (the bogus id should simply be ignored), and a single target user.
        course_ids = '{},slashes:bogus+course+id'.format(self.course.id)
        user_ids = '{}'.format(self.users[0].id)
        # Sanity check: no gradebook or history rows exist yet
        current_entries = StudentGradebook.objects.all()
        self.assertEqual(len(current_entries), 0)
        current_entries = StudentGradebookHistory.objects.all()
        self.assertEqual(len(current_entries), 0)

        # Run the command just for one user
        generate_gradebook_entries.Command().handle(user_ids=user_ids)

        # Confirm the gradebook has been properly updated
        current_entries = StudentGradebook.objects.all()
        self.assertEqual(len(current_entries), 1)
        current_entries = StudentGradebookHistory.objects.all()
        self.assertEqual(len(current_entries), 1)
        user0_entry = StudentGradebook.objects.get(user=self.users[0])
        # Expected grade derived from the per-user scores seeded in setUp
        # combined under the course grading policy -- TODO confirm weights.
        self.assertEqual(user0_entry.grade, 0.24)

        # Enable the signalling mechanism
        settings._wrapped.default_settings.FEATURES['SIGNAL_ON_SCORE_CHANGED'] = True

        # Change the score of the final exam for that user; the score_changed
        # signal should now update the gradebook without running the command.
        grade = 0.99
        module = self.get_module_for_user(self.users[0], self.course, self.problem5)
        grade_dict = {'value': grade, 'max_value': 1, 'user_id': self.users[0].id}
        module.system.publish(module, 'grade', grade_dict)

        # Confirm the gradebook has been properly updated
        current_entries = StudentGradebook.objects.all()
        self.assertEqual(len(current_entries), 1)
        current_entries = StudentGradebookHistory.objects.all()
        self.assertEqual(len(current_entries), 2)
        user0_entry = StudentGradebook.objects.get(user=self.users[0])
        self.assertEqual(user0_entry.grade, 0.50)

        # Run the command across all users, but just for the specified course
        generate_gradebook_entries.Command().handle(course_ids=course_ids)

        # Confirm that the gradebook has been properly updated
        current_entries = StudentGradebook.objects.all()
        self.assertEqual(len(current_entries), 3)
        current_entries = StudentGradebookHistory.objects.all()
        self.assertEqual(len(current_entries), 4)
        user0_entry = StudentGradebook.objects.get(user=self.users[0])
        self.assertEqual(user0_entry.grade, 0.50)
        user1_entry = StudentGradebook.objects.get(user=self.users[1])
        self.assertEqual(user1_entry.grade, 0.48)
        user2_entry = StudentGradebook.objects.get(user=self.users[2])
        self.assertEqual(user2_entry.grade, 0.72)
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """
    South schema migration: creates the `StudentGradebook` and
    `StudentGradebookHistory` tables for the gradebook app.
    """

    def forwards(self, orm):
        """Apply the migration: create both tables, the unique constraint, and the leaderboard index."""
        # Adding model 'StudentGradebook'
        db.create_table('gradebook_studentgradebook', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now)),
            ('modified', self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('course_id', self.gf('xmodule_django.models.CourseKeyField')(db_index=True, max_length=255, blank=True)),
            ('grade', self.gf('django.db.models.fields.FloatField')()),
        ))
        db.send_create_signal('gradebook', ['StudentGradebook'])

        # Adding unique constraint on 'StudentGradebook', fields ['user', 'course_id']
        # (one cached grade per user per course)
        db.create_unique('gradebook_studentgradebook', ['user_id', 'course_id'])

        # Adding model 'StudentGradebookHistory'
        db.create_table('gradebook_studentgradebookhistory', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('model_utils.fields.AutoCreatedField')(default=datetime.datetime.now)),
            ('modified', self.gf('model_utils.fields.AutoLastModifiedField')(default=datetime.datetime.now)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
            ('course_id', self.gf('xmodule_django.models.CourseKeyField')(db_index=True, max_length=255, blank=True)),
            ('grade', self.gf('django.db.models.fields.FloatField')()),
        ))
        db.send_create_signal('gradebook', ['StudentGradebookHistory'])

        # Composite index on (grade, created) — presumably to speed up the
        # leaderboard's ORDER BY '-grade', 'created'; confirm against models.py.
        db.create_index('gradebook_studentgradebook', ['grade', 'created'], unique=False, db_tablespace='')

    def backwards(self, orm):
        """Revert the migration: drop the constraint first, then both tables."""
        # Removing unique constraint on 'StudentGradebook', fields ['user', 'course_id']
        db.delete_unique('gradebook_studentgradebook', ['user_id', 'course_id'])

        # Deleting model 'StudentGradebook'
        db.delete_table('gradebook_studentgradebook')

        # Deleting model 'StudentGradebookHistory'
        db.delete_table('gradebook_studentgradebookhistory')

    # South ORM freeze: snapshot of every model this migration touches.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'gradebook.studentgradebook': {
            'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'StudentGradebook'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'grade': ('django.db.models.fields.FloatField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'gradebook.studentgradebookhistory': {
            'Meta': {'object_name': 'StudentGradebookHistory'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'grade': ('django.db.models.fields.FloatField', [], {}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }

    complete_apps = ['gradebook']
"""
Django database models supporting the gradebook app
"""
from django.utils import timezone
from django.contrib.auth.models import User
from django.db import models
from django.db.models import Avg
from django.db.models.signals import post_save
from django.dispatch import receiver
from model_utils.models import TimeStampedModel
from xmodule_django.models import CourseKeyField
class StudentGradebook(TimeStampedModel):
    """
    StudentGradebook is essentially a container used to cache calculated
    grades (see courseware.grades.grade), which can be an expensive operation.
    """
    user = models.ForeignKey(User, db_index=True)
    course_id = CourseKeyField(db_index=True, max_length=255, blank=True)
    grade = models.FloatField()

    class Meta:
        """
        Meta information for this Django model
        """
        unique_together = (('user', 'course_id'),)

    @classmethod
    def generate_leaderboard(cls, course_key, user_id=None, count=3, exclude_users=None):
        """
        Assembles a data set representing the Top N users, by grade, for a given course.
        Optionally provide a user_id to include user-specific info. For example, you
        may want to view the Top 5 users, but also need the data for the logged-in user
        who may actually be currently located in position #10.

        data = {
            'course_avg': 0.873,
            'queryset': [
                {'id': 123, 'username': 'testuser1', 'title': 'Engineer', 'avatar_url': 'http://gravatar.com/123/', 'grade': 0.92, 'created': '2014-01-15 06:27:54'},
                {'id': 983, 'username': 'testuser2', 'title': 'Analyst', 'avatar_url': 'http://gravatar.com/983/', 'grade': 0.91, 'created': '2014-06-27 01:15:54'},
                {'id': 246, 'username': 'testuser3', 'title': 'Product Owner', 'avatar_url': 'http://gravatar.com/246/', 'grade': 0.90, 'created': '2014-03-19 04:54:54'},
                {'id': 357, 'username': 'testuser4', 'title': 'Director', 'avatar_url': 'http://gravatar.com/357/', 'grade': 0.89, 'created': '2014-12-01 08:38:54'},
            ]
            ### IF USER ID SPECIFIED (in this case user_id=246) ###
            'user_position': 4,
            'user_grade': 0.89
        }
        """
        data = {}
        # Base population: active users with a cached grade in this course,
        # minus any explicitly excluded users.
        queryset = cls.objects.select_related('user')\
            .filter(course_id__exact=course_key, user__is_active=True)
        if exclude_users:
            queryset = queryset.exclude(user__in=exclude_users)

        # Course-wide average grade, then the Top N rows (ties broken by
        # earliest 'created', i.e. whoever reached the grade first).
        data['course_avg'] = queryset.aggregate(Avg('grade'))['grade__avg']
        data['queryset'] = queryset.values(
            'user__id',
            'user__username',
            'user__profile__title',
            'user__profile__avatar_url',
            'grade',
            'created')\
            .order_by('-grade', 'created')[:count]

        # If a user_id value was provided, we need to provide some additional user-specific data to the caller
        if user_id:
            user_grade = 0
            user_time_scored = timezone.now()
            try:
                user_entry = cls.objects.get(course_id__exact=course_key, user__id=user_id)
                user_grade = user_entry.grade
                user_time_scored = user_entry.created
            except cls.DoesNotExist:
                # No cached grade: rank the user as if they scored 0 just now.
                pass

            users_above = queryset.filter(grade__gte=user_grade)\
                .exclude(user__id=user_id)\
                .exclude(grade=user_grade, created__lt=user_time_scored)
            # NOTE(review): equal-grade users who scored *earlier* are excluded
            # here, which appears to conflict with the '-grade, created'
            # ordering above (where earlier 'created' ranks higher on ties) —
            # confirm the intended tie-breaking before relying on positions.

            # COUNT(*) on the database side instead of len(), which would
            # materialize the entire queryset just to count it.
            data['user_position'] = users_above.count() + 1
            data['user_grade'] = user_grade
        return data
class StudentGradebookHistory(TimeStampedModel):
    """
    Running audit trail for StudentGradebook: a post_save listener
    snapshots every saved gradebook row into this table.
    """
    user = models.ForeignKey(User, db_index=True)
    course_id = CourseKeyField(db_index=True, max_length=255, blank=True)
    grade = models.FloatField()

    @receiver(post_save, sender=StudentGradebook)
    def save_history(sender, instance, **kwargs):  # pylint: disable=no-self-argument, unused-argument
        """
        post_save hook: copy the just-saved StudentGradebook row into history.
        """
        StudentGradebookHistory(
            user=instance.user,
            course_id=instance.course_id,
            grade=instance.grade,
        ).save()
"""
Signal handlers supporting various gradebook use cases
"""
from django.dispatch import receiver
from courseware import grades
from courseware.signals import score_changed
from util.request import RequestMock
from gradebook.models import StudentGradebook
@receiver(score_changed)
def on_score_changed(sender, **kwargs):
    """
    Listens for a 'score_changed' signal and, when observed, recalculates
    and persists the gradebook entry for the affected user/course pair.
    """
    # Imported here rather than at module level — presumably to avoid a
    # circular import with courseware.views.
    from courseware.views import get_course

    user = kwargs['user']
    course_key = kwargs['course_key']
    course_descriptor = get_course(course_key, depth=None)

    # grades.grade needs a request object; fabricate one via RequestMock.
    mock_request = RequestMock().get('/')
    mock_request.user = user
    percent = grades.grade(user, mock_request, course_descriptor)['percent']

    try:
        entry = StudentGradebook.objects.get(user=user, course_id=course_key)
    except StudentGradebook.DoesNotExist:
        StudentGradebook.objects.create(user=user, course_id=course_key, grade=percent)
    else:
        # Only write (and thereby append a history row) when the grade moved.
        if entry.grade != percent:
            entry.grade = percent
            entry.save()
# pylint: disable=E1101
"""
Run these tests @ Devstack:
paver test_system -s lms --test_id=lms/djangoapps/gradebook/tests.py
"""
from mock import MagicMock
import uuid
from django.test.utils import override_settings
from capa.tests.response_xml_factory import StringResponseXMLFactory
from courseware import module_render
from courseware.model_data import FieldDataCache
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from gradebook.models import StudentGradebook, StudentGradebookHistory
@override_settings(STUDENT_GRADEBOOK=True)
class GradebookTests(ModuleStoreTestCase):
    """ Test suite for Student Gradebook """

    def get_module_for_user(self, user, course, problem):
        """Helper function to get useful module at self.location in self.course_id for user"""
        mock_request = MagicMock()
        mock_request.user = user
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course.id, user, course, depth=2)
        return module_render.get_module(  # pylint: disable=protected-access
            user,
            mock_request,
            problem.location,
            field_data_cache,
            course.id
        )._xmodule

    def setUp(self):
        # NOTE(review): ModuleStoreTestCase.setUp is never invoked (no super()
        # call) — confirm this is intentional before extending this suite.
        self.test_server_prefix = 'https://testserver'
        self.user = UserFactory()
        self.score = 0.75
        self.course = CourseFactory.create()
        self.course.always_recalculate_grades = True
        test_data = '<html>{}</html>'.format(str(uuid.uuid4()))

        # Course skeleton: two chapters, each with one sequential.
        chapter1 = ItemFactory.create(
            category="chapter",
            parent_location=self.course.location,
            data=test_data,
            display_name="Chapter 1"
        )
        chapter2 = ItemFactory.create(
            category="chapter",
            parent_location=self.course.location,
            data=test_data,
            display_name="Chapter 2"
        )
        ItemFactory.create(
            category="sequential",
            parent_location=chapter1.location,
            data=test_data,
            display_name="Sequence 1",
        )
        ItemFactory.create(
            category="sequential",
            parent_location=chapter2.location,
            data=test_data,
            display_name="Sequence 2",
        )

        # One ungraded problem (no 'graded' metadata, not kept as an attribute)...
        ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='foo'),
            metadata={'rerandomize': 'always'},
            display_name="test problem 1",
            max_grade=45
        )
        # ...plus five graded problems covering distinct assignment formats
        # (Homework x2, Lab, Midterm Exam, Final Exam).
        self.problem = ItemFactory.create(
            parent_location=chapter1.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="homework problem 1",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
        )
        self.problem2 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="homework problem 2",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Homework"}
        )
        self.problem3 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="lab problem 1",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Lab"}
        )
        self.problem4 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="midterm problem 2",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Midterm Exam"}
        )
        self.problem5 = ItemFactory.create(
            parent_location=chapter2.location,
            category='problem',
            data=StringResponseXMLFactory().build_xml(answer='bar'),
            display_name="final problem 2",
            metadata={'rerandomize': 'always', 'graded': True, 'format': "Final Exam"}
        )

    def test_receiver_on_score_changed(self):
        # Publish a grade for each of the five graded problems; each publish is
        # expected to fire score_changed and update the single gradebook row.
        module = self.get_module_for_user(self.user, self.course, self.problem)
        grade_dict = {'value': 0.75, 'max_value': 1, 'user_id': self.user.id}
        module.system.publish(module, 'grade', grade_dict)
        module = self.get_module_for_user(self.user, self.course, self.problem2)
        grade_dict = {'value': 0.95, 'max_value': 1, 'user_id': self.user.id}
        module.system.publish(module, 'grade', grade_dict)
        module = self.get_module_for_user(self.user, self.course, self.problem3)
        grade_dict = {'value': 0.86, 'max_value': 1, 'user_id': self.user.id}
        module.system.publish(module, 'grade', grade_dict)
        module = self.get_module_for_user(self.user, self.course, self.problem4)
        grade_dict = {'value': 0.92, 'max_value': 1, 'user_id': self.user.id}
        module.system.publish(module, 'grade', grade_dict)
        module = self.get_module_for_user(self.user, self.course, self.problem5)
        grade_dict = {'value': 0.87, 'max_value': 1, 'user_id': self.user.id}
        module.system.publish(module, 'grade', grade_dict)

        # One gradebook row (updated in place), one history row per publish.
        gradebook = StudentGradebook.objects.all()
        self.assertEqual(len(gradebook), 1)
        history = StudentGradebookHistory.objects.all()
        self.assertEqual(len(history), 5)
......@@ -576,7 +576,7 @@ class WorkgroupsApiTests(ModuleStoreTestCase):
self.assertEqual(response.status_code, 201)
# Confirm the grades for the users
course_grades_uri = '{}/{}/grades'.format(self.test_courses_uri, self.test_course_id)
course_grades_uri = '{}/{}/metrics/grades/'.format(self.test_courses_uri, self.test_course_id)
response = self.do_get(course_grades_uri)
self.assertEqual(response.status_code, 200)
self.assertGreater(len(response.data['grades']), 0)
......
......@@ -262,3 +262,13 @@ except ImportError:
MODULESTORE = convert_module_store_setting_if_needed(MODULESTORE)
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
############# Student Module #################
# Emit a signal whenever a student's score changes, so listeners
# (e.g. the gradebook app) can recalculate derived data.
FEATURES['SIGNAL_ON_SCORE_CHANGED'] = True

############# Student Gradebook #################
# Enable the StudentGradebook caching app and register it with Django.
FEATURES['STUDENT_GRADEBOOK'] = True
if FEATURES.get('STUDENT_GRADEBOOK', False):
    INSTALLED_APPS += ('gradebook',)
......@@ -501,20 +501,6 @@ FEATURES['ENABLE_LTI_PROVIDER'] = True
INSTALLED_APPS += ('lti_provider',)
AUTHENTICATION_BACKENDS += ('lti_provider.users.LtiBackend',)
########################## SECURITY #######################
FEATURES['ENFORCE_PASSWORD_POLICY'] = False
FEATURES['ENABLE_MAX_FAILED_LOGIN_ATTEMPTS'] = False
FEATURES['SQUELCH_PII_IN_LOGS'] = False
FEATURES['PREVENT_CONCURRENT_LOGINS'] = False
FEATURES['ADVANCED_SECURITY'] = False
<<<<<<< HEAD
=======
PASSWORD_MIN_LENGTH = None
PASSWORD_COMPLEXITY = {}
############# Performance Profiler #################
# Note: We've added profiler support to this configuration in order
# to enable analysis when running unit tests. (outputs to console)
......@@ -525,4 +511,13 @@ if FEATURES.get('PROFILER'):
'profiler.middleware.HotshotProfilerMiddleware',
'profiler.middleware.CProfileProfilerMiddleware',
)
>>>>>>> a00f851... mattdrayer/api-profiler-middleware: Hotshot/CProfile support
############# Student Module #################
FEATURES['SIGNAL_ON_SCORE_CHANGED'] = True
############# Student Gradebook #################
FEATURES['STUDENT_GRADEBOOK'] = True
if FEATURES.get('STUDENT_GRADEBOOK', False):
INSTALLED_APPS += ('gradebook',)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment