Commit ba7b9fac by Stephen Sanchez

Merge pull request #121 from edx/sanchez/turbo-mode-peer-grading

Add turbo-mode to the peer assessment rendering.
parents e0ffc198 2fc631ea
......@@ -30,6 +30,9 @@
{% block body %}
<div class="ui-toggle-visibility__content">
<div class="wrapper--step__content">
{% block message %}
{% endblock %}
<div class="step__instruction">
<p>Please read and assess the following response from one of your peers.</p>
</div>
......@@ -61,7 +64,7 @@
<ol class="list list--fields assessment__rubric">
{% for criterion in rubric_criteria %}
<li class="field field--radio is--required assessment__rubric__question ui-toggle-visibility is--collapsed" id="assessment__rubric__question--{{ criterion.name }}">
<li class="field field--radio is--required assessment__rubric__question ui-toggle-visibility" id="assessment__rubric__question--{{ criterion.name }}">
<h4 class="question__title ui-toggle-visibility__control">
<i class="ico icon-caret-right"></i>
<span class="ui-toggle-visibility__control__copy question__title__copy">{{ criterion.prompt }}</span>
......@@ -75,10 +78,10 @@
<div class="wrapper--input">
<input type="radio"
name="{{ criterion.name }}"
id="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}"
id="assessment__rubric__question--{{ criterion.name }}"
class="answer__value"
value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}"
<label for="assessment__rubric__question--{{ criterion.name }}"
class="answer__label"
>{{ option.name }}</label>
</div>
......
......@@ -10,8 +10,7 @@
<span class="step__status__value">
<i class="ico icon-ok"></i>
<span class="copy">
<span class="step__status__value--completed">{{ graded }}</span> of
<span class="step__status__value--required">{{ must_grade }}</span> completed
<span class="step__status__value--completed">{{ graded }}</span> completed
</span>
</span>
</span>
......
{% extends "openassessmentblock/peer/oa_peer_assessment.html" %}
{% block list_item %}
<li id="openassessment__peer-assessment"class="openassessment__steps__step step--peer-assessment ui-toggle-visibility is--complete">
{% endblock %}
{% block title %}
<span class="step__status">
<span class="step__status__label">This step's status:</span>
<span class="step__status__value">
<span class="copy">Turbo Mode</span>
</span>
</span>
{% endblock %}
{% block message %}
<div class="step__message message message--confirmation message--confirmation-turbo-mode">
<h3 class="message__title">Congratulations!</h3>
<div class="message__content">You have successfully completed all of the peer assessment that you have been asked to do for this step. If you would like to continue providing feedback to your peers you may do so here, but it will not influence your final grade.</div>
</div>
{% endblock %}
......@@ -27,6 +27,9 @@
{% block body %}
<div class="ui-toggle-visibility__content">
<div class="wrapper--step__content">
{% block message %}
{% endblock %}
<div class="step__instruction">
<p>Please provide your response to the following question. You can save your progress and return to complete your response at any time before the due date of <span class="step__deadline"><span class="date">{{ formatted_due_date }}</span></span>. <strong class="emphasis--beta">After you submit your response, you cannot edit it</strong>.</p>
</div>
......
......@@ -28,53 +28,55 @@
{% block body %}
<div class="ui-toggle-visibility__content">
<div class="step__content">
<article class="self-assessment__display" id="self-assessment">
<header class="self-assessment__display__header">
<h3 class="self-assessment__display__title">Your Submitted Response</h3>
</header>
<div class="wrapper--step__content">
{% block message %}
{% endblock %}
<div class="self-assessment__display__response">
{{ self_submission.answer|linebreaks }}
</div>
</article>
<form id="self-assessment--001__assessment" class="self-assessment__assessment" method="post">
<fieldset class="assessment__fields">
<ol class="list list--fields assessment__rubric">
{% for criterion in rubric_criteria %}
<li class="field field--radio is--required assessment__rubric__question" id="assessment__rubric__question--{{ criterion.name }}">
<h4 class="question__title">
<i class="ico icon-caret-right"></i>
<span class="question__title__copy">{{ criterion.prompt }}</span>
<span class="label--required sr">* (Required)</span>
</h4>
<div class="step__content">
<article class="self-assessment__display" id="self-assessment">
<header class="self-assessment__display__header">
<h3 class="self-assessment__display__title">Your Submitted Response</h3>
</header>
<ol class="question__answers">
{% for option in criterion.options %}
<li class="answer">
<div class="wrapper--input">
<input type="radio"
name="{{ criterion.name }}"
id="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}"
class="answer__value"
value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.name }}__{{ option.name }}"
class="answer__label">{{ option.name }}</label>
</div>
<div class="wrapper--metadata">
<span class="answer__tip">{{ option.explanation }}</span>
<span class="answer__points">{{option.points}} <span class="label">points</span></span>
</div>
</li>
{% endfor %}
</ol>
</li>
{% endfor %}
<div class="self-assessment__display__response">
{{ self_submission.answer|linebreaks }}
</div>
</article>
</ol>
</fieldset>
<form id="self-assessment--001__assessment" class="self-assessment__assessment" method="post">
<fieldset class="assessment__fields">
<ol class="list list--fields assessment__rubric">
{% for criterion in rubric_criteria %}
<li class="field field--radio is--required assessment__rubric__question" id="assessment__rubric__question--{{ criterion.name }}">
<h4 class="question__title">
<i class="ico icon-caret-right"></i>
<span class="question__title__copy">{{ criterion.prompt }}</span>
<span class="label--required sr">* (Required)</span>
</h4>
<ol class="question__answers">
{% for option in criterion.options %}
<li class="answer">
<div class="wrapper--input">
<input type="radio"
name="{{ criterion.name }}"
id="assessment__rubric__question--{{ criterion.name }}"
class="answer__value"
value="{{ option.name }}" />
<label for="assessment__rubric__question--{{ criterion.name }}"
class="answer__label">{{ option.name }}</label>
</div>
<div class="wrapper--metadata">
<span class="answer__tip">{{ option.explanation }}</span>
<span class="answer__points">{{option.points}} <span class="label">points</span></span>
</div>
</li>
{% endfor %}
</ol>
</li>
{% endfor %}
</ol>
</fieldset>
<div class="self-assessment__actions">
<div class="message message--error message--error-server">
<h3 class="message__title">We could not submit your assessment</h3>
......
......@@ -2,6 +2,7 @@ import copy
from xblock.core import XBlock
from openassessment.assessment import peer_api
class GradeMixin(object):
"""Grade Mixin introduces all handlers for displaying grades
......
......@@ -3,11 +3,10 @@ from django.utils.translation import ugettext as _
from xblock.core import XBlock
from openassessment.assessment import peer_api
from openassessment.assessment.peer_api import (
PeerAssessmentWorkflowError, PeerAssessmentRequestError,
PeerAssessmentInternalError
PeerAssessmentInternalError, PeerAssessmentRequestError,
PeerAssessmentWorkflowError
)
logger = logging.getLogger(__name__)
......@@ -90,8 +89,6 @@ class PeerAssessmentMixin(object):
else:
return {'success': False, 'msg': _('Could not load peer assessment.')}
@XBlock.handler
def render_peer_assessment(self, data, suffix=''):
"""Renders the Peer Assessment HTML section of the XBlock
......@@ -100,21 +97,33 @@ class PeerAssessmentMixin(object):
Assessment XBlock. See OpenAssessmentBlock.render_assessment() for
more information on rendering XBlock sections.
Args:
data (Request): The request object; its params may include
'continue_grading', which allows a student to continue grading peers
past the required number of assessments (see the example after this file's diff).
"""
student_item = None
workflow = self.get_workflow_info()
path = 'openassessmentblock/peer/oa_peer_unavailable.html'
finished = False
problem_open, date = self.is_open(step="peer")
context_dict = {
"rubric_criteria": self.rubric_criteria,
"estimated_time": "20 minutes" # TODO: Need to configure this.
}
finished = False
workflow = self.get_workflow_info()
if workflow is None:
return self.render_assessment(path, context_dict)
continue_grading = (
data.params.get('continue_grading', False)
and workflow["status_details"]["peer"]["complete"]
)
student_item = self.get_student_item_dict()
assessment = self.get_assessment_module('peer-assessment')
if assessment:
context_dict["must_grade"] = assessment["must_grade"]
student_item = self.get_student_item_dict()
finished, count = peer_api.has_finished_required_evaluating(
student_item,
assessment["must_grade"]
......@@ -122,7 +131,11 @@ class PeerAssessmentMixin(object):
context_dict["graded"] = count
context_dict["review_num"] = count + 1
if assessment["must_grade"] - count == 1:
if continue_grading:
context_dict["submit_button_text"] = (
"Submit your assessment & review another response."
)
elif assessment["must_grade"] - count == 1:
context_dict["submit_button_text"] = (
"Submit your assessment & move onto next step."
)
......@@ -130,25 +143,38 @@ class PeerAssessmentMixin(object):
context_dict["submit_button_text"] = (
"Submit your assessment & move to response #{}"
).format(count + 2)
path = 'openassessmentblock/peer/oa_peer_unavailable.html'
if date == "due" and not problem_open:
path = 'openassessmentblock/peer/oa_peer_closed.html'
elif workflow and workflow["status"] == "peer" and student_item:
elif workflow.get("status") == "peer":
peer_sub = self.get_peer_submission(student_item, assessment)
if peer_sub:
path = 'openassessmentblock/peer/oa_peer_assessment.html'
context_dict["peer_submission"] = peer_sub
elif workflow and workflow["status"] == "done":
else:
path = 'openassessmentblock/peer/oa_peer_waiting.html'
elif continue_grading and student_item:
peer_sub = self.get_peer_submission(student_item, assessment, continue_grading)
if peer_sub:
path = 'openassessmentblock/peer/oa_peer_turbo_mode.html'
context_dict["peer_submission"] = peer_sub
else:
path = 'openassessmentblock/peer/oa_peer_complete.html'
elif workflow.get("status") == "done":
path = "openassessmentblock/peer/oa_peer_complete.html"
elif workflow and finished:
elif finished:
path = 'openassessmentblock/peer/oa_peer_waiting.html'
return self.render_assessment(path, context_dict)
def get_peer_submission(self, student_item_dict, assessment):
def get_peer_submission(
self,
student_item_dict,
assessment,
over_grading=False
):
submissions_open, __ = self.is_open(step="submission")
over_grading = not submissions_open
over_grading = over_grading or not submissions_open
peer_submission = False
try:
peer_submission = peer_api.get_submission_to_assess(
......@@ -159,5 +185,3 @@ class PeerAssessmentMixin(object):
except PeerAssessmentWorkflowError as err:
logger.exception(err)
return peer_submission
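
A minimal sketch of driving the updated handler, mirroring the namedtuple request pattern used by the tests later in this merge request; xblock stands in for a loaded OpenAssessment block and is an assumption of the example, not something defined in this diff.

from collections import namedtuple

# The handler reads parameters from data.params, so the tests build a
# lightweight stand-in for the webob request object.
Request = namedtuple('Request', 'params')

# Required grading: without 'continue_grading' the handler follows the normal
# workflow branch (oa_peer_assessment.html while the peer step is active).
response = xblock.render_peer_assessment(Request(params={}))

# Continued ("turbo mode") grading: the flag is only honored once
# workflow["status_details"]["peer"]["complete"] is True; the handler then
# renders oa_peer_turbo_mode.html when another submission is available and
# passes over_grading=True through get_peer_submission().
turbo_response = xblock.render_peer_assessment(Request(params={'continue_grading': True}))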
(Source diff for one file is not shown because it is too large to display.)
......@@ -25,28 +25,17 @@ OpenAssessment.BaseUI = function(runtime, element, server) {
OpenAssessment.BaseUI.prototype = {
/**
Collapse one of the steps in the workflow.
* collapse/expand UI functionality
*/
toggleExpansion: function(component) {
component.toggleClass('is--collapsed');
Args:
stepName (string): The name of the step to expand/collapse.
expanded (bool): If true, expand the step; otherwise, collapse it.
**/
setExpanded: function(stepName, expanded) {
var el = $("#openassessment__" + stepName, this.element);
if (expanded) {
el.removeClass('is--collapsed');
}
else {
el.addClass('is--collapsed');
}
},
/**
Asynchronously load each sub-view into the DOM.
**/
* Asynchronously load each sub-view into the DOM.
*/
load: function() {
this.renderSubmissionStep(true);
this.renderPeerAssessmentStep(false);
......@@ -65,19 +54,25 @@ OpenAssessment.BaseUI.prototype = {
this.server.render('submission').done(
function(html) {
// Load the HTML
$('#openassessment__response', ui.element).replaceWith(html);
var sel = $('#openassessment__response', ui.element);
sel.replaceWith(html);
// Install a click handler for collapse/expand
sel.find('.step__header', '.ui-toggle-visibility__control').click(
function(eventObject) {
ui.toggleExpansion($('#openassessment__response'));
}
);
// If we have a saved submission, enable the submit button
ui.responseChanged();
// Install change handler for textarea (to enable submission button)
$('#submission__answer__value', ui.element).keyup(
sel.find('#submission__answer__value').keyup(
function(eventData) { ui.responseChanged(); }
);
// Install a click handler for submission
$('#step--response__submit', ui.element).click(
sel.find('#step--response__submit').click(
function(eventObject) {
// Override default form submission
eventObject.preventDefault();
......@@ -87,7 +82,7 @@ OpenAssessment.BaseUI.prototype = {
);
// Install a click handler for the save button
$('#submission__save', ui.element).click(
sel.find('#submission__save').click(
function(eventObject) {
// Override default form submission
eventObject.preventDefault();
......@@ -121,11 +116,28 @@ OpenAssessment.BaseUI.prototype = {
this.server.render('peer_assessment').done(
function(html) {
// Load the HTML
$('#openassessment__peer-assessment', ui.element).replaceWith(html);
var sel = $('#openassessment__peer-assessment', ui.element);
sel.replaceWith(html);
// Install a click handler for collapse/expand
sel.find('.step__header', '.ui-toggle-visibility__control').click(
function(eventObject) {
if (sel.hasClass('is--collapsed')) {
// We're expanded into turbo mode. Get a new peer
ui.renderContinuedPeerAssessmentStep();
} else {
sel.toggleClass('is--collapsed');
}
}
);
sel.find('.assessment__rubric__question', '.ui-toggle-visibility').click(
function(eventObject) {
ui.toggleExpansion($(this));
}
);
// Install a change handler for rubric options to enable/disable the submit button
$("#peer-assessment--001__assessment", ui.element).change(
sel.find("#peer-assessment--001__assessment").change(
function() {
var numChecked = $('input[type=radio]:checked', this).length;
var numAvailable = $('.field--radio.assessment__rubric__question', this).length;
......@@ -136,7 +148,7 @@ OpenAssessment.BaseUI.prototype = {
);
// Install a click handler for assessment
$('#peer-assessment--001__assessment__submit', ui.element).click(
sel.find('#peer-assessment--001__assessment__submit').click(
function(eventObject) {
// Override default form submission
eventObject.preventDefault();
......@@ -152,6 +164,48 @@ OpenAssessment.BaseUI.prototype = {
},
/**
* Render the peer-assessment step for continued grading. Always renders as
* expanded, since this should be called for an explicit continuation of the
* peer grading process.
*/
renderContinuedPeerAssessmentStep: function() {
var ui = this;
this.server.renderContinuedPeer().done(
function(html) {
// Load the HTML
$('#openassessment__peer-assessment', ui.element).replaceWith(html);
var sel = $('#openassessment__peer-assessment', ui.element);
// Install a click handler for collapse/expand
sel.find('.step__header', '.ui-toggle-visibility__control').click(
function(eventObject) {
sel.toggleClass('is--collapsed');
}
);
sel.find('.assessment__rubric__question', '.ui-toggle-visibility').click(
function(eventObject) {
ui.toggleExpansion($(this));
}
);
// Install a click handler for assessment
sel.find('#peer-assessment--001__assessment__submit').click(
function(eventObject) {
// Override default form submission
eventObject.preventDefault();
// Handle the click
ui.continuedPeerAssess();
}
);
}
).fail(function(errMsg) {
// TODO: display to the user
console.log(errMsg);
});
},
/**
Render the self-assessment step.
Args:
......@@ -162,6 +216,19 @@ OpenAssessment.BaseUI.prototype = {
this.server.render('self_assessment').done(
function(html) {
$('#openassessment__self-assessment', ui.element).replaceWith(html);
var sel = $('#openassessment__self-assessment', ui.element);
// Install a click handler for collapse/expand
sel.find('.step__header', '.ui-toggle-visibility__control').click(
function(eventObject) {
ui.toggleExpansion($('#openassessment__self-assessment'));
}
);
sel.find('.assessment__rubric__question', '.ui-toggle-visibility').click(
function(eventObject) {
ui.toggleExpansion($(this));
}
);
// Install a change handler for rubric options to enable/disable the submit button
$("#self-assessment--001__assessment", ui.element).change(
......@@ -175,7 +242,7 @@ OpenAssessment.BaseUI.prototype = {
);
// Install a click handler for the submit button
$('#self-assessment--001__assessment__submit', ui.element).click(
sel.find('#self-assessment--001__assessment__submit').click(
function(eventObject) {
// Override default form submission
eventObject.preventDefault();
......@@ -244,9 +311,43 @@ OpenAssessment.BaseUI.prototype = {
},
/**
Send an assessment to the server and update the UI.
**/
* Send an assessment to the server and update the UI.
*/
peerAssess: function() {
var ui = this;
ui.peerAssessRequest(function() {
// When we have successfully sent the assessment,
// collapse the current step and expand the next step
ui.renderPeerAssessmentStep(false);
ui.renderSelfAssessmentStep(true);
ui.renderGradeStep(false);
});
},
/**
* Send an assessment to the server and update the UI, with the assumption
* that we are continuing peer assessments beyond the required amount.
*/
continuedPeerAssess: function() {
var ui = this;
ui.peerAssessRequest(function() {
// When we have successfully sent the assessment,
// collapse the current step and expand the next step
ui.renderContinuedPeerAssessmentStep();
ui.renderGradeStep(false);
});
},
/**
* Common peer assessment request building, used for all types of peer
* assessments.
*
* Args:
* successFunction (function): The function called if the request is
* successful. This varies based on the type of request to submit
* a peer assessment.
*/
peerAssessRequest: function(successFunction) {
// Retrieve assessment info from the DOM
var submissionId = $("span#peer_submission_uuid", this.element)[0].innerHTML.trim();
var optionsSelected = {};
......@@ -261,16 +362,10 @@ OpenAssessment.BaseUI.prototype = {
var ui = this;
this.toggleActionError('peer', null);
this.server.peerAssess(submissionId, optionsSelected, feedback).done(
function() {
// When we have successfully sent the assessment,
// collapse the current step and expand the next step
ui.renderPeerAssessmentStep(false);
ui.renderSelfAssessmentStep(true);
ui.renderGradeStep(false);
}
).fail(function(errMsg) {
ui.toggleActionError('peer', errMsg);
});
successFunction
).fail(function(errMsg) {
ui.toggleActionError('peer', errMsg);
});
},
/**
......@@ -346,12 +441,6 @@ OpenAssessment.BaseUI.prototype = {
}
};
/* collapse/expand UI functionality */
function $toggleExpansion(e) {
e.preventDefault();
$(e.target).closest('.ui-toggle-visibility').toggleClass('is--collapsed');
}
/* XBlock JavaScript entry point for OpenAssessmentXBlock. */
function OpenAssessmentBlock(runtime, element) {
/**
......@@ -361,8 +450,5 @@ function OpenAssessmentBlock(runtime, element) {
var server = new OpenAssessment.Server(runtime, element);
var ui = new OpenAssessment.BaseUI(runtime, element, server);
ui.load();
/* TODO: refactor this event based on workflow and logic steps */
$(document).on('click', '.ui-toggle-visibility .ui-toggle-visibility__control', $toggleExpansion);
});
}
......@@ -70,6 +70,37 @@ OpenAssessment.Server.prototype = {
},
/**
Render the peer assessment section after the student's workflow is complete,
so that they can continue grading peers.
Returns:
A JQuery promise, which resolves with the HTML of the rendered peer
assessment section or fails with an error message.
Example:
server.renderContinuedPeer().done(
function(html) { console.log(html); }
).fail(
function(err) { console.log(err); }
)
**/
renderContinuedPeer: function() {
var url = this.url('render_peer_assessment');
return $.Deferred(function(defer) {
$.ajax({
url: url,
type: "POST",
dataType: "html",
data: {continue_grading: true}
}).done(function(data) {
defer.resolveWith(this, [data]);
}).fail(function(data) {
defer.rejectWith(this, ['Could not contact server.']);
})
}).promise();
},
/**
Send a submission to the XBlock.
Args:
......
......@@ -196,6 +196,18 @@
margin-top: $baseline-v;
}
// step message
.step__message {
@include row();
margin-bottom: $baseline-v;
border-radius: ($baseline-v/5);
padding: $baseline-v ($baseline-h/2);
&.message--confirmation {
background: tint($color-complete, 90%);
}
}
// step instructions
.step__instruction {
@extend %copy-4;
......@@ -465,6 +477,9 @@
// TYPE: confirmation
.message--confirmation {
.message__title {
color: $color-complete;
}
}
// --------------------
......
"""
Tests the Open Assessment XBlock functionality.
"""
from collections import namedtuple
import datetime as dt
import pytz
from mock import Mock, patch
from openassessment.xblock import openassessmentblock
from openassessment.xblock.submission_mixin import SubmissionMixin
from .base import XBlockHandlerTestCase, scenario
......@@ -32,12 +29,14 @@ class TestOpenAssessment(XBlockHandlerTestCase):
self.assertTrue(submission_response.body.find("openassessment__response"))
# Validate Peer Rendering.
peer_response = xblock.render_peer_assessment({})
request = namedtuple('Request', 'params')
request.params = {}
peer_response = xblock.render_peer_assessment(request)
self.assertIsNotNone(peer_response)
self.assertTrue(peer_response.body.find("openassessment__peer-assessment"))
# Validate Self Rendering.
self_response = xblock.render_self_assessment({})
self_response = xblock.render_self_assessment(request)
self.assertIsNotNone(self_response)
self.assertTrue(self_response.body.find("openassessment__peer-assessment"))
......@@ -70,7 +69,9 @@ class TestOpenAssessment(XBlockHandlerTestCase):
xblock.start = dt.datetime(2014, 4, 1, 1, 1, 1)
xblock.due = dt.datetime(2014, 5, 1)
resp = xblock.render_peer_assessment({})
request = namedtuple('Request', 'params')
request.params = {}
resp = xblock.render_peer_assessment(request)
self.assertTrue(resp.body.find('Tuesday, April 01, 2014'))
self.assertTrue(resp.body.find('Thursday, May 01, 2014'))
......
......@@ -2,6 +2,7 @@
"""
Tests for peer assessment handlers in Open Assessment XBlock.
"""
from collections import namedtuple
import copy
import json
......@@ -63,7 +64,9 @@ class TestPeerAssessment(XBlockHandlerTestCase):
self.assertEqual(workflow_info["status"], u'peer')
# Validate Submission Rendering.
peer_response = xblock.render_peer_assessment({})
request = namedtuple('Request', 'params')
request.params = {}
peer_response = xblock.render_peer_assessment(request)
self.assertIsNotNone(peer_response)
self.assertNotIn(submission["answer"].encode('utf-8'), peer_response.body)
......
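
A hypothetical follow-up test, not part of this diff, sketching how the new parameter could be exercised inside TestPeerAssessment. It assumes the scenario has already walked the learner through the required number of peer assessments; that precondition is assumed, not shown.

# Assumes the required peer assessments are already complete, so that
# workflow["status_details"]["peer"]["complete"] is True.
request = namedtuple('Request', 'params')
request.params = {'continue_grading': True}

turbo_response = xblock.render_peer_assessment(request)
self.assertIsNotNone(turbo_response)
# The turbo-mode template is identifiable by its confirmation message class.
self.assertIn("message--confirmation-turbo-mode", turbo_response.body)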