Commit 772196a5 by muhammad-ammar

Merge remote-tracking branch 'origin/master' into django-upgrade/1.8

Conflicts:
	setup.py
	submissions/models.py
parents d6fc3578 3d3ba2a4
......@@ -3,3 +3,4 @@ Will Daly <will@edx.org>
David Ormsbee <dave@edx.org>
Stephen Sanchez <steve@edx.org>
Phil McGachey <phil_mcgachey@harvard.edu>
Diana Huang <dkh@edx.org>
......@@ -10,13 +10,13 @@ import json
from django.conf import settings
from django.core.cache import cache
from django.db import IntegrityError, DatabaseError
from django.db import IntegrityError, DatabaseError, transaction
from dogapi import dog_stats_api
from submissions.serializers import (
SubmissionSerializer, StudentItemSerializer, ScoreSerializer
)
from submissions.models import Submission, StudentItem, Score, ScoreSummary, score_set, score_reset
from submissions.models import Submission, StudentItem, Score, ScoreSummary, ScoreAnnotation, score_set, score_reset
logger = logging.getLogger("submissions.api")
......@@ -698,7 +698,8 @@ def reset_score(student_id, course_id, item_id):
logger.info(msg)
def set_score(submission_uuid, points_earned, points_possible):
def set_score(submission_uuid, points_earned, points_possible,
annotation_creator=None, annotation_type=None, annotation_reason=None):
"""Set a score for a particular submission.
Sets the score for a particular submission. This score is calculated
......@@ -709,6 +710,11 @@ def set_score(submission_uuid, points_earned, points_possible):
points_earned (int): The earned points for this submission.
points_possible (int): The total points possible for this particular student item.
annotation_creator (str): An optional field for recording who gave this particular score
annotation_type (str): An optional field for recording what type of annotation should be created,
e.g. "staff_override".
annotation_reason (str): An optional field for recording why this score was set to its value.
Returns:
None
......@@ -761,9 +767,19 @@ def set_score(submission_uuid, points_earned, points_possible):
# even though we cannot retrieve it.
# In this case, we assume that someone else has already created
# a score summary and ignore the error.
# TODO: once we're using Django 1.8, use transactions to ensure that these
# two models are saved at the same time.
try:
score_model = score.save()
_log_score(score_model)
if annotation_creator is not None:
score_annotation = ScoreAnnotation(
score=score_model,
creator=annotation_creator,
annotation_type=annotation_type,
reason=annotation_reason
)
score_annotation.save()
# Send a signal out to any listeners who are waiting for scoring events.
score_set.send(
sender=None,
......
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import submissions.models
class Migration(migrations.Migration):
    """Create the ``ScoreAnnotation`` table and convert
    ``StudentItem.student_id`` to ``AnonymizedUserIDField``.
    """

    dependencies = [
        # Must be applied after the app's initial schema migration.
        ('submissions', '0001_initial'),
    ]

    operations = [
        # New audit table: one row per annotated Score (e.g. staff overrides).
        migrations.CreateModel(
            name='ScoreAnnotation',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('annotation_type', models.CharField(max_length=255, db_index=True)),
                ('creator', submissions.models.AnonymizedUserIDField()),
                ('reason', models.TextField()),
                ('score', models.ForeignKey(to='submissions.Score')),
            ],
        ),
        # AnonymizedUserIDField forces max_length=255 / db_index=True, the
        # same options the previous CharField declared, so this should be a
        # no-op at the database level — confirm against the generated SQL.
        migrations.AlterField(
            model_name='studentitem',
            name='student_id',
            field=submissions.models.AnonymizedUserIDField(),
        ),
    ]
......@@ -34,6 +34,22 @@ score_reset = Signal(
)
class AnonymizedUserIDField(models.CharField):
    """CharField specialization for anonymized (XBlock-visible) user IDs.

    Forces ``max_length=255`` and ``db_index=True`` so that every column
    declared with this field type is configured identically.
    """

    description = "The anonymized User ID that the XBlock sees"

    def __init__(self, *args, **kwargs):
        # Always override these options, regardless of what the caller passed.
        kwargs.update(max_length=255, db_index=True)
        super(AnonymizedUserIDField, self).__init__(*args, **kwargs)

    def deconstruct(self):
        """Drop the forced kwargs so migrations don't record them redundantly."""
        name, path, args, kwargs = super(AnonymizedUserIDField, self).deconstruct()
        for forced_option in ("max_length", "db_index"):
            del kwargs[forced_option]
        return name, path, args, kwargs
class StudentItem(models.Model):
"""Represents a single item for a single course for a single user.
......@@ -42,7 +58,7 @@ class StudentItem(models.Model):
"""
# The anonymized Student ID that the XBlock sees, not their real ID.
student_id = models.CharField(max_length=255, blank=False, db_index=True)
student_id = AnonymizedUserIDField()
# Not sure yet whether these are legacy course_ids or new course_ids
course_id = models.CharField(max_length=255, blank=False, db_index=True)
......@@ -281,3 +297,15 @@ class ScoreSummary(models.Model):
u"Error while updating score summary for student item {}"
.format(score.student_item)
)
class ScoreAnnotation(models.Model):
    """ Annotate individual scores with extra information if necessary. """
    # The Score this annotation is attached to.
    score = models.ForeignKey(Score)

    # A string that will represent the 'type' of annotation,
    # e.g. staff_override, etc.
    annotation_type = models.CharField(max_length=255, blank=False, db_index=True)

    # Anonymized ID of whoever created this annotation (XBlock-visible ID,
    # not a real user ID).
    creator = AnonymizedUserIDField()

    # Free-text explanation of why the score was given its value.
    reason = models.TextField()
\ No newline at end of file
# -*- coding: utf-8 -*-
import datetime
import copy
......@@ -10,7 +12,7 @@ from mock import patch
import pytz
from submissions import api as api
from submissions.models import ScoreSummary, Submission, StudentItem, score_set
from submissions.models import ScoreSummary, ScoreAnnotation, Submission, StudentItem, score_set
from submissions.serializers import StudentItemSerializer
STUDENT_ITEM = dict(
......@@ -252,6 +254,7 @@ class TestSubmissionsApi(TestCase):
api.set_score(submission["uuid"], 11, 12)
score = api.get_latest_score_for_submission(submission["uuid"])
self._assert_score(score, 11, 12)
self.assertFalse(ScoreAnnotation.objects.all().exists())
@patch.object(score_set, 'send')
def test_set_score_signal(self, send_mock):
......@@ -268,6 +271,28 @@ class TestSubmissionsApi(TestCase):
item_id=STUDENT_ITEM['item_id']
)
@ddt.data(u"First score was incorrect", u"☃")
def test_set_score_with_annotation(self, reason):
    """Setting a score with annotation args records a ScoreAnnotation row."""
    submission = api.create_submission(STUDENT_ITEM, ANSWER_ONE)
    creator_uuid = "Bob"
    annotation_type = "staff_override"
    api.set_score(submission["uuid"], 11, 12, creator_uuid, annotation_type, reason)

    # The score itself must be recorded exactly as without annotations.
    latest_score = api.get_latest_score_for_submission(submission["uuid"])
    self._assert_score(latest_score, 11, 12)

    # We need to do this to verify that one score annotation exists and was
    # created for this score. We do not have an api point for retrieving
    # annotations, and it doesn't make sense to expose them, since they're
    # for auditing purposes.
    annotations = ScoreAnnotation.objects.all()
    self.assertGreater(len(annotations), 0)
    first_annotation = annotations[0]
    self.assertEqual(first_annotation.score.points_earned, 11)
    self.assertEqual(first_annotation.score.points_possible, 12)
    self.assertEqual(first_annotation.annotation_type, annotation_type)
    self.assertEqual(first_annotation.creator, creator_uuid)
    self.assertEqual(first_annotation.reason, reason)
def test_get_score(self):
submission = api.create_submission(STUDENT_ITEM, ANSWER_ONE)
api.set_score(submission["uuid"], 11, 12)
......@@ -595,4 +620,3 @@ class TestSubmissionsApi(TestCase):
self.assertIsNotNone(score)
self.assertEqual(score["points_earned"], expected_points_earned)
self.assertEqual(score["points_possible"], expected_points_possible)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment