Commit 9605ed9c by Stephen Sanchez

Removing the peer uuid from the assessment api

Changing the Peer API signatures to use the submission UUID instead of student items. This will greatly simplify the use of our APIs and our queries.
parent ff52a5c4
...@@ -90,7 +90,7 @@ class Command(BaseCommand): ...@@ -90,7 +90,7 @@ class Command(BaseCommand):
# Retrieve the submission we want to score # Retrieve the submission we want to score
# Note that we are NOT using the priority queue here, since we know # Note that we are NOT using the priority queue here, since we know
# exactly which submission we want to score. # exactly which submission we want to score.
peer_api.create_peer_workflow_item(scorer_id, submission_uuid) peer_api.create_peer_workflow_item(scorer_submission_uuid, submission_uuid)
# Create the peer assessment # Create the peer assessment
assessment = { assessment = {
......
...@@ -4,9 +4,6 @@ ...@@ -4,9 +4,6 @@
<li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment ui-toggle-visibility"> <li id="openassessment__peer-assessment" class="openassessment__steps__step step--peer-assessment ui-toggle-visibility">
{% endblock %} {% endblock %}
{% spaceless %} {% spaceless %}
<span class="system__element" id="peer_submission_uuid">
{{ peer_submission.uuid }}
</span>
<header class="step__header ui-toggle-visibility__control"> <header class="step__header ui-toggle-visibility__control">
<h2 class="step__title"> <h2 class="step__title">
......
...@@ -4,9 +4,6 @@ ...@@ -4,9 +4,6 @@
{% block list_item %} {% block list_item %}
<li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment ui-toggle-visibility"> <li id="openassessment__self-assessment" class="openassessment__steps__step step--self-assessment ui-toggle-visibility">
{% endblock %} {% endblock %}
<span class="system__element" id="self_submission_uuid">
{{ self_submission.uuid }}
</span>
<header class="step__header ui-toggle-visibility__control"> <header class="step__header ui-toggle-visibility__control">
<h2 class="step__title"> <h2 class="step__title">
......
...@@ -161,7 +161,7 @@ class PeerAssessmentMixin(object): ...@@ -161,7 +161,7 @@ class PeerAssessmentMixin(object):
if assessment: if assessment:
context_dict["must_grade"] = assessment["must_grade"] context_dict["must_grade"] = assessment["must_grade"]
finished, count = peer_api.has_finished_required_evaluating( finished, count = peer_api.has_finished_required_evaluating(
student_item, self.submission_uuid,
assessment["must_grade"] assessment["must_grade"]
) )
context_dict["graded"] = count context_dict["graded"] = count
...@@ -212,7 +212,7 @@ class PeerAssessmentMixin(object): ...@@ -212,7 +212,7 @@ class PeerAssessmentMixin(object):
peer_submission = False peer_submission = False
try: try:
peer_submission = peer_api.get_submission_to_assess( peer_submission = peer_api.get_submission_to_assess(
student_item_dict, self.submission_uuid,
assessment["must_be_graded_by"], assessment["must_be_graded_by"],
True True
) )
......
...@@ -99,8 +99,6 @@ class SelfAssessmentMixin(object): ...@@ -99,8 +99,6 @@ class SelfAssessmentMixin(object):
Dict with keys "success" (bool) indicating success/failure Dict with keys "success" (bool) indicating success/failure
and "msg" (unicode) containing additional information if an error occurs. and "msg" (unicode) containing additional information if an error occurs.
""" """
if 'submission_uuid' not in data:
return {'success': False, 'msg': _(u"Missing submission_uuid key in request")}
if 'options_selected' not in data: if 'options_selected' not in data:
return {'success': False, 'msg': _(u"Missing options_selected key in request")} return {'success': False, 'msg': _(u"Missing options_selected key in request")}
......
...@@ -15,11 +15,11 @@ describe("OpenAssessment.BaseView", function() { ...@@ -15,11 +15,11 @@ describe("OpenAssessment.BaseView", function() {
grade: readFixtures("oa_grade_complete.html") grade: readFixtures("oa_grade_complete.html")
}; };
this.peerAssess = function(submissionId, optionsSelected, feedback) { this.peerAssess = function(optionsSelected, feedback) {
return $.Deferred(function(defer) { defer.resolve(); }).promise(); return $.Deferred(function(defer) { defer.resolve(); }).promise();
}; };
this.selfAssess = function(submissionId, optionsSelected) { this.selfAssess = function(optionsSelected) {
return $.Deferred(function(defer) { defer.resolve(); }).promise(); return $.Deferred(function(defer) { defer.resolve(); }).promise();
}; };
...@@ -90,7 +90,7 @@ describe("OpenAssessment.BaseView", function() { ...@@ -90,7 +90,7 @@ describe("OpenAssessment.BaseView", function() {
var testError = 'Test failure contacting server message'; var testError = 'Test failure contacting server message';
loadSubviews(function() { loadSubviews(function() {
/* stub our selfAssess to fail */ /* stub our selfAssess to fail */
spyOn(server, 'selfAssess').andCallFake(function(submissionId, optionsSelected) { spyOn(server, 'selfAssess').andCallFake(function(optionsSelected) {
return $.Deferred(function(defer) { defer.rejectWith(server, [testError]); }).promise(); return $.Deferred(function(defer) { defer.rejectWith(server, [testError]); }).promise();
}); });
view.selfAssess(); view.selfAssess();
......
...@@ -97,7 +97,7 @@ describe("OpenAssessment.Server", function() { ...@@ -97,7 +97,7 @@ describe("OpenAssessment.Server", function() {
var success = false; var success = false;
var options = {clarity: "Very clear", precision: "Somewhat precise"}; var options = {clarity: "Very clear", precision: "Somewhat precise"};
server.peerAssess("abc1234", options, "Excellent job!").done(function() { server.peerAssess(options, "Excellent job!").done(function() {
success = true; success = true;
}); });
...@@ -106,7 +106,6 @@ describe("OpenAssessment.Server", function() { ...@@ -106,7 +106,6 @@ describe("OpenAssessment.Server", function() {
url: '/peer_assess', url: '/peer_assess',
type: "POST", type: "POST",
data: JSON.stringify({ data: JSON.stringify({
submission_uuid: "abc1234",
options_selected: options, options_selected: options,
feedback: "Excellent job!" feedback: "Excellent job!"
}) })
...@@ -306,7 +305,7 @@ describe("OpenAssessment.Server", function() { ...@@ -306,7 +305,7 @@ describe("OpenAssessment.Server", function() {
var options = {clarity: "Very clear", precision: "Somewhat precise"}; var options = {clarity: "Very clear", precision: "Somewhat precise"};
var receivedErrorMsg = ""; var receivedErrorMsg = "";
var testString = getHugeTestString(); var testString = getHugeTestString();
server.peerAssess("abc1234", options, testString).fail( server.peerAssess(options, testString).fail(
function(errorMsg) { function(errorMsg) {
receivedErrorMsg = errorMsg; receivedErrorMsg = errorMsg;
} }
...@@ -319,7 +318,7 @@ describe("OpenAssessment.Server", function() { ...@@ -319,7 +318,7 @@ describe("OpenAssessment.Server", function() {
var receivedMsg = null; var receivedMsg = null;
var options = {clarity: "Very clear", precision: "Somewhat precise"}; var options = {clarity: "Very clear", precision: "Somewhat precise"};
server.peerAssess("abc1234", options, "Excellent job!").fail(function(msg) { server.peerAssess(options, "Excellent job!").fail(function(msg) {
receivedMsg = msg; receivedMsg = msg;
}); });
...@@ -331,7 +330,7 @@ describe("OpenAssessment.Server", function() { ...@@ -331,7 +330,7 @@ describe("OpenAssessment.Server", function() {
var receivedMsg = null; var receivedMsg = null;
var options = {clarity: "Very clear", precision: "Somewhat precise"}; var options = {clarity: "Very clear", precision: "Somewhat precise"};
server.peerAssess("abc1234", options, "Excellent job!").fail(function(msg) { server.peerAssess(options, "Excellent job!").fail(function(msg) {
receivedMsg = msg; receivedMsg = msg;
}); });
......
...@@ -245,7 +245,6 @@ OpenAssessment.BaseView.prototype = { ...@@ -245,7 +245,6 @@ OpenAssessment.BaseView.prototype = {
*/ */
peerAssessRequest: function(successFunction) { peerAssessRequest: function(successFunction) {
// Retrieve assessment info from the DOM // Retrieve assessment info from the DOM
var submissionId = $("#peer_submission_uuid", this.element)[0].innerHTML.trim();
var optionsSelected = {}; var optionsSelected = {};
$("#peer-assessment--001__assessment input[type=radio]:checked", this.element).each( $("#peer-assessment--001__assessment input[type=radio]:checked", this.element).each(
function(index, sel) { function(index, sel) {
...@@ -257,7 +256,7 @@ OpenAssessment.BaseView.prototype = { ...@@ -257,7 +256,7 @@ OpenAssessment.BaseView.prototype = {
// Send the assessment to the server // Send the assessment to the server
var view = this; var view = this;
this.toggleActionError('peer', null); this.toggleActionError('peer', null);
this.server.peerAssess(submissionId, optionsSelected, feedback).done( this.server.peerAssess(optionsSelected, feedback).done(
successFunction successFunction
).fail(function(errMsg) { ).fail(function(errMsg) {
view.toggleActionError('peer', errMsg); view.toggleActionError('peer', errMsg);
...@@ -269,7 +268,6 @@ OpenAssessment.BaseView.prototype = { ...@@ -269,7 +268,6 @@ OpenAssessment.BaseView.prototype = {
**/ **/
selfAssess: function() { selfAssess: function() {
// Retrieve self-assessment info from the DOM // Retrieve self-assessment info from the DOM
var submissionId = $("#self_submission_uuid", this.element)[0].innerHTML.trim();
var optionsSelected = {}; var optionsSelected = {};
$("#self-assessment--001__assessment input[type=radio]:checked", this.element).each( $("#self-assessment--001__assessment input[type=radio]:checked", this.element).each(
function(index, sel) { function(index, sel) {
...@@ -280,7 +278,7 @@ OpenAssessment.BaseView.prototype = { ...@@ -280,7 +278,7 @@ OpenAssessment.BaseView.prototype = {
// Send the assessment to the server // Send the assessment to the server
var view = this; var view = this;
this.toggleActionError('self', null); this.toggleActionError('self', null);
this.server.selfAssess(submissionId, optionsSelected).done( this.server.selfAssess(optionsSelected).done(
function() { function() {
view.renderPeerAssessmentStep(); view.renderPeerAssessmentStep();
view.renderSelfAssessmentStep(); view.renderSelfAssessmentStep();
......
...@@ -221,7 +221,6 @@ OpenAssessment.Server.prototype = { ...@@ -221,7 +221,6 @@ OpenAssessment.Server.prototype = {
/** /**
Send a peer assessment to the XBlock. Send a peer assessment to the XBlock.
Args: Args:
submissionId (string): The UUID of the submission.
optionsSelected (object literal): Keys are criteria names, optionsSelected (object literal): Keys are criteria names,
values are the option text the user selected for the criterion. values are the option text the user selected for the criterion.
feedback (string): Written feedback on the submission. feedback (string): Written feedback on the submission.
...@@ -233,13 +232,13 @@ OpenAssessment.Server.prototype = { ...@@ -233,13 +232,13 @@ OpenAssessment.Server.prototype = {
Example: Example:
var options = { clarity: "Very clear", precision: "Somewhat precise" }; var options = { clarity: "Very clear", precision: "Somewhat precise" };
var feedback = "Good job!"; var feedback = "Good job!";
server.peerAssess("abc123", options, feedback).done( server.peerAssess(options, feedback).done(
function() { console.log("Success!"); } function() { console.log("Success!"); }
).fail( ).fail(
function(errorMsg) { console.log(errorMsg); } function(errorMsg) { console.log(errorMsg); }
); );
**/ **/
peerAssess: function(submissionId, optionsSelected, feedback) { peerAssess: function(optionsSelected, feedback) {
var url = this.url('peer_assess'); var url = this.url('peer_assess');
if (feedback.length > this.maxInputSize) { if (feedback.length > this.maxInputSize) {
return $.Deferred(function(defer) { return $.Deferred(function(defer) {
...@@ -247,7 +246,6 @@ OpenAssessment.Server.prototype = { ...@@ -247,7 +246,6 @@ OpenAssessment.Server.prototype = {
}).promise(); }).promise();
} }
var payload = JSON.stringify({ var payload = JSON.stringify({
submission_uuid: submissionId,
options_selected: optionsSelected, options_selected: optionsSelected,
feedback: feedback feedback: feedback
}); });
...@@ -271,7 +269,6 @@ OpenAssessment.Server.prototype = { ...@@ -271,7 +269,6 @@ OpenAssessment.Server.prototype = {
Send a self-assessment to the XBlock. Send a self-assessment to the XBlock.
Args: Args:
submissionId (string): The UUID of the submission.
optionsSelected (object literal): Keys are criteria names, optionsSelected (object literal): Keys are criteria names,
values are the option text the user selected for the criterion. values are the option text the user selected for the criterion.
...@@ -281,16 +278,15 @@ OpenAssessment.Server.prototype = { ...@@ -281,16 +278,15 @@ OpenAssessment.Server.prototype = {
Example: Example:
var options = { clarity: "Very clear", precision: "Somewhat precise" }; var options = { clarity: "Very clear", precision: "Somewhat precise" };
server.selfAssess("abc123", options).done( server.selfAssess(options).done(
function() { console.log("Success!"); } function() { console.log("Success!"); }
).fail( ).fail(
function(errorMsg) { console.log(errorMsg); } function(errorMsg) { console.log(errorMsg); }
); );
**/ **/
selfAssess: function(submissionId, optionsSelected) { selfAssess: function(optionsSelected) {
var url = this.url('self_assess'); var url = this.url('self_assess');
var payload = JSON.stringify({ var payload = JSON.stringify({
submission_uuid: submissionId,
options_selected: optionsSelected options_selected: optionsSelected
}); });
return $.Deferred(function(defer) { return $.Deferred(function(defer) {
......
...@@ -188,7 +188,7 @@ class TestGrade(XBlockHandlerTestCase): ...@@ -188,7 +188,7 @@ class TestGrade(XBlockHandlerTestCase):
scorer_sub = sub_api.create_submission(scorer, {'text': submission_text}) scorer_sub = sub_api.create_submission(scorer, {'text': submission_text})
workflow_api.create_workflow(scorer_sub['uuid']) workflow_api.create_workflow(scorer_sub['uuid'])
submission = peer_api.get_submission_to_assess(scorer, len(peers)) submission = peer_api.get_submission_to_assess(scorer_sub['uuid'], len(peers))
# Store the scorer's submission so our user can assess it later # Store the scorer's submission so our user can assess it later
scorer_submissions.append(scorer_sub) scorer_submissions.append(scorer_sub)
...@@ -203,7 +203,7 @@ class TestGrade(XBlockHandlerTestCase): ...@@ -203,7 +203,7 @@ class TestGrade(XBlockHandlerTestCase):
# Have our user make assessments (so she can get a score) # Have our user make assessments (so she can get a score)
for asmnt in peer_assessments: for asmnt in peer_assessments:
new_submission = peer_api.get_submission_to_assess(student_item, len(peers)) new_submission = peer_api.get_submission_to_assess(submission['uuid'], len(peers))
peer_api.create_assessment( peer_api.create_assessment(
submission['uuid'], student_id, asmnt, {'criteria': xblock.rubric_criteria}, submission['uuid'], student_id, asmnt, {'criteria': xblock.rubric_criteria},
xblock.get_assessment_module('peer-assessment')['must_be_graded_by'] xblock.get_assessment_module('peer-assessment')['must_be_graded_by']
......
...@@ -6,9 +6,6 @@ from collections import namedtuple ...@@ -6,9 +6,6 @@ from collections import namedtuple
import copy import copy
import json import json
import mock
import submissions.api as sub_api
from openassessment.workflow import api as workflow_api
from openassessment.assessment import peer_api from openassessment.assessment import peer_api
from .base import XBlockHandlerTestCase, scenario from .base import XBlockHandlerTestCase, scenario
...@@ -37,7 +34,7 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -37,7 +34,7 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Now Hal will assess Sally. # Now Hal will assess Sally.
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
sub = peer_api.get_submission_to_assess(hal_student_item, 1) peer_api.get_submission_to_assess(hal_submission['uuid'], 1)
peer_api.create_assessment( peer_api.create_assessment(
hal_submission['uuid'], hal_submission['uuid'],
hal_student_item['student_id'], hal_student_item['student_id'],
...@@ -48,7 +45,7 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -48,7 +45,7 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Now Sally will assess Hal. # Now Sally will assess Hal.
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
sub = peer_api.get_submission_to_assess(sally_student_item, 1) peer_api.get_submission_to_assess(sally_submission['uuid'], 1)
peer_api.create_assessment( peer_api.create_assessment(
sally_submission['uuid'], sally_submission['uuid'],
sally_student_item['student_id'], sally_student_item['student_id'],
...@@ -86,8 +83,8 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -86,8 +83,8 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Create a submission for the scorer (required before assessing another student) # Create a submission for the scorer (required before assessing another student)
another_student = copy.deepcopy(student_item) another_student = copy.deepcopy(student_item)
another_student['student_id'] = "Bob" another_student['student_id'] = "Bob"
xblock.create_submission(another_student, self.SUBMISSION) another_submission = xblock.create_submission(another_student, self.SUBMISSION)
peer_api.get_submission_to_assess(another_student, 3) peer_api.get_submission_to_assess(another_submission['uuid'], 3)
# Submit an assessment and expect a successful response # Submit an assessment and expect a successful response
...@@ -126,8 +123,8 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -126,8 +123,8 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Create a submission for the scorer (required before assessing another student) # Create a submission for the scorer (required before assessing another student)
another_student = copy.deepcopy(student_item) another_student = copy.deepcopy(student_item)
another_student['student_id'] = "Bob" another_student['student_id'] = "Bob"
xblock.create_submission(another_student, self.SUBMISSION) another_sub = xblock.create_submission(another_student, self.SUBMISSION)
peer_api.get_submission_to_assess(another_student, 3) peer_api.get_submission_to_assess(another_sub['uuid'], 3)
# Submit an assessment and expect a successful response # Submit an assessment and expect a successful response
...@@ -160,7 +157,7 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -160,7 +157,7 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Create a submission for the scorer (required before assessing another student) # Create a submission for the scorer (required before assessing another student)
another_student = copy.deepcopy(student_item) another_student = copy.deepcopy(student_item)
another_student['student_id'] = "Bob" another_student['student_id'] = "Bob"
another_submission = xblock.create_submission(another_student, self.SUBMISSION) xblock.create_submission(another_student, self.SUBMISSION)
# Submit an assessment, but mutate the options selected so they do NOT match the rubric # Submit an assessment, but mutate the options selected so they do NOT match the rubric
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
...@@ -211,7 +208,8 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -211,7 +208,8 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Now Hal will assess Sally. # Now Hal will assess Sally.
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
sally_sub = peer_api.get_submission_to_assess(hal_student_item, 1) sally_sub = peer_api.get_submission_to_assess(hal_submission['uuid'], 1)
assessment['submission_uuid'] = sally_sub['uuid']
peer_api.create_assessment( peer_api.create_assessment(
hal_submission['uuid'], hal_submission['uuid'],
hal_student_item['student_id'], hal_student_item['student_id'],
...@@ -222,7 +220,8 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -222,7 +220,8 @@ class TestPeerAssessment(XBlockHandlerTestCase):
# Now Sally will assess Hal. # Now Sally will assess Hal.
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
hal_sub = peer_api.get_submission_to_assess(sally_student_item, 1) hal_sub = peer_api.get_submission_to_assess(sally_submission['uuid'], 1)
assessment['submission_uuid'] = hal_sub['uuid']
peer_api.create_assessment( peer_api.create_assessment(
sally_submission['uuid'], sally_submission['uuid'],
sally_student_item['student_id'], sally_student_item['student_id'],
...@@ -243,9 +242,6 @@ class TestPeerAssessment(XBlockHandlerTestCase): ...@@ -243,9 +242,6 @@ class TestPeerAssessment(XBlockHandlerTestCase):
self.assertIsNotNone(peer_response) self.assertIsNotNone(peer_response)
self.assertNotIn(submission["answer"]["text"].encode('utf-8'), peer_response.body) self.assertNotIn(submission["answer"]["text"].encode('utf-8'), peer_response.body)
hal_response = "Hal".encode('utf-8') in peer_response.body
sally_response = "Sally".encode('utf-8') in peer_response.body
peer_api.create_assessment( peer_api.create_assessment(
submission['uuid'], submission['uuid'],
student_item['student_id'], student_item['student_id'],
......
...@@ -95,13 +95,6 @@ class TestSelfAssessment(XBlockHandlerTestCase): ...@@ -95,13 +95,6 @@ class TestSelfAssessment(XBlockHandlerTestCase):
@scenario('data/self_assessment_scenario.xml', user_id='Bob') @scenario('data/self_assessment_scenario.xml', user_id='Bob')
def test_self_assess_handler_missing_keys(self, xblock): def test_self_assess_handler_missing_keys(self, xblock):
# Missing submission_uuid
assessment = copy.deepcopy(self.ASSESSMENT)
del assessment['submission_uuid']
resp = self.request(xblock, 'self_assess', json.dumps(assessment), response_format='json')
self.assertFalse(resp['success'])
self.assertIn('submission_uuid', resp['msg'])
# Missing options_selected # Missing options_selected
assessment = copy.deepcopy(self.ASSESSMENT) assessment = copy.deepcopy(self.ASSESSMENT)
del assessment['options_selected'] del assessment['options_selected']
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment