Commit ac05b5c2 by Jason Bau

Merge pull request #956 from edx/giulio/ice-pr

Implementing NYT's ICE track changes to Open Ended Assessments
parents 33f13846 fe59a8ce
......@@ -234,6 +234,14 @@ class CombinedOpenEndedFields(object):
default=False,
scope=Scope.settings
)
track_changes = Boolean(
display_name="Peer Track Changes",
help=("EXPERIMENTAL FEATURE FOR PEER GRADING ONLY: "
"If set to 'True', peer graders will be able to make changes to the student "
"submission and those changes will be tracked and shown along with the graded feedback."),
default=False,
scope=Scope.settings
)
due = Date(
help="Date that this problem is due by",
scope=Scope.settings
......
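Later in this diff the peer grading template context reads this flag with getattr(module, 'track_changes', False), so problems saved before the field existed fall back to the default instead of raising. A minimal Python sketch of that fallback behavior; both class names here are hypothetical stand-ins:

class LegacyProblem(object):
    """Saved before track_changes existed; has no such attribute."""
    pass

class TrackedProblem(object):
    track_changes = True

# Mirrors the getattr(module, 'track_changes', False) call used when
# building the peer grading template context later in this diff.
for module in (LegacyProblem(), TrackedProblem()):
    print(getattr(module, 'track_changes', False))  # False, then True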
......@@ -40,6 +40,25 @@ div.name{
section.combined-open-ended {
@include clearfix;
.written-feedback {
position: relative;
margin: 0px;
height: 150px;
border: 1px solid lightgray;
padding: 5px;
resize: vertical;
width: 99%;
overflow: auto;
.del {
text-decoration: line-through;
background-color: #ffc3c3;
}
.ins {
background-color: #c3ffc3;
}
}
}
......
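The .del and .ins rules above style the spans that ICE wraps around tracked deletions and insertions, so graders see the familiar track-changes view: deleted text struck through on a red background, inserted text highlighted green.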
This source diff could not be displayed because it is too large.
......@@ -283,6 +283,9 @@ class @PeerGradingProblem
@error_container.hide()
@flag_submission_confirmation.hide()
if @tracking_changes()
@change_tracker = new TrackChanges(@el)
@is_calibrated_check()
# locally scoped jquery.
......@@ -306,13 +309,18 @@ class @PeerGradingProblem
construct_data: () ->
if @tracking_changes()
feedback_content = @feedback_area.html()
else
feedback_content = @feedback_area.val()
data =
rubric_scores: @rub.get_score_list()
score: @rub.get_total_score()
location: @location
submission_id: @essay_id_input.val()
submission_key: @submission_key_input.val()
feedback: @feedback_area.val()
feedback: feedback_content
submission_flagged: @flag_student_checkbox.is(':checked')
answer_unknown: @answer_unknown_checkbox.is(':checked')
return data
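With track changes enabled, the payload built by construct_data therefore carries HTML rather than plain textarea text. A sketch of what the server receives, with keys taken from the CoffeeScript above and all values purely illustrative:

# Illustrative payload only; the ins/del spans match the classes styled
# in the stylesheets elsewhere in this diff.
data = {
    'rubric_scores': [2, 1],
    'score': 3,
    'location': 'i4x://edX/open_ended/combinedopenended/SampleQuestion',
    'submission_id': '42',
    'submission_key': 'abc123',
    'feedback': '<p>The essay <span class="del">are</span>'
                '<span class="ins">is</span> well argued.</p>',
    'submission_flagged': False,
    'answer_unknown': False,
}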
......@@ -388,7 +396,7 @@ class @PeerGradingProblem
@grading_message.fadeIn()
message = "<p>Successfully saved your feedback. Fetching the next essay."
if response.required_done
message = message + " You have completed the required number of gradings."
message = message + " You have done the required number of peer evals but may continue grading if you like."
message = message + "</p>"
@grading_message.html(message)
else
......@@ -464,6 +472,9 @@ class @PeerGradingProblem
else
@render_error("An error occurred while retrieving the next calibration essay")
tracking_changes: () =>
return @grading_wrapper.data('track-changes') == true
# Renders a student submission to be graded
render_submission: (response) =>
if response.success
......@@ -483,8 +494,12 @@ class @PeerGradingProblem
@grading_panel.find(@grading_text_sel).show()
@flag_student_container.show()
@answer_unknown_container.show()
@feedback_area.val("")
if @tracking_changes()
@feedback_area.html(@make_paragraphs(response.student_response))
@change_tracker.rebindTracker()
else
@feedback_area.val("")
@answer_unknown_checkbox.removeAttr("checked")
@flag_student_checkbox.removeAttr("checked")
@submit_button.show()
@submit_button.unbind('click')
......@@ -496,7 +511,6 @@ class @PeerGradingProblem
else
@render_error("An error occured when retrieving the next submission.")
make_paragraphs: (text) ->
paragraph_split = text.split(/\n\s*\n/)
new_text = ''
......@@ -527,13 +541,13 @@ class @PeerGradingProblem
# display correct grade
@calibration_feedback_panel.slideDown()
calibration_wrapper = @$(@calibration_feedback_wrapper_sel)
calibration_wrapper.html("<p>The score you gave was: #{@grade}. The actual score is: #{response.actual_score}</p>")
calibration_wrapper.html("<p>The score you gave was: #{@grade}. The instructor score is: #{response.actual_score}</p>")
score = parseInt(@grade)
actual_score = parseInt(response.actual_score)
if score == actual_score
calibration_wrapper.append("<p>Your score matches the actual score!</p>")
calibration_wrapper.append("<p>Your score matches the instructor score!</p>")
else
calibration_wrapper.append("<p>You may want to review the rubric again.</p>")
......
class @TrackChanges
reset_button_sel: '.reset-changes'
tracked_feedback_sel: '.feedback-area.track-changes'
submit_button_sel: '.submit-button'
tracker: null
constructor: (element) ->
@el = element
@reset_button = @$(@reset_button_sel)
@submit_button = @$(@submit_button_sel)
@tracked_feedback = @$(@tracked_feedback_sel)
@reset_button.click @reset_changes
@submit_button.click @stop_tracking_on_submit
rebindTracker: () =>
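# Tear down any previous ICE instance before binding to freshly rendered content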
if @tracker?
@tracker.stopTracking()
delete @tracker
@tracker = new ice.InlineChangeEditor({
element: @tracked_feedback[0], # return the DOM element from the selector
handleEvents: true,
currentUser: { id: 1, name: 'Peer Feedback' }, # hard-coded current user
# optional plugins
plugins: [
# Track content that is cut and pasted
{
name: 'IceCopyPastePlugin',
settings: {
# List of tags and attributes to preserve when cleaning a paste
preserve: 'p,a[href],span[id,class],em,strong'
}
}
]
})
@tracker.startTracking()
# locally scoped jquery. (scoped to the element)
$: (selector) ->
$(selector, @el)
reset_changes: (event) =>
event.preventDefault()
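# Reject every tracked change, restoring the submission's original text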
@tracker.rejectAll()
stop_tracking_on_submit: () =>
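# Stop ICE tracking before submit so no further edits are recorded as changes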
@tracker.stopTracking()
......@@ -86,9 +86,13 @@ class PeerGradingModule(PeerGradingFields, XModule):
_VERSION = 1
js = {
'js': [
resource_string(__name__, 'js/src/peergrading/ice.min.js'),
],
'coffee': [
resource_string(__name__, 'js/src/peergrading/peer_grading.coffee'),
resource_string(__name__, 'js/src/peergrading/peer_grading_problem.coffee'),
resource_string(__name__, 'js/src/peergrading/track_changes.coffee'),
resource_string(__name__, 'js/src/collapsible.coffee'),
resource_string(__name__, 'js/src/javascript_loader.coffee'),
]
......@@ -495,6 +499,21 @@ class PeerGradingModule(PeerGradingFields, XModule):
})
return html
def _find_corresponding_module_for_location(self, location):
"""
Find the peer grading module that exists at the given location.
"""
try:
return self.descriptor.system.load_item(location)
except ItemNotFoundError:
# The linked problem doesn't exist.
log.error("Problem {0} does not exist in this course.".format(location))
raise
except NoPathToItem:
# The linked problem does not have a path to it (i.e. it is in a draft or other strange state).
log.error("Cannot find a path to problem {0} in this course.".format(location))
raise
def peer_grading(self, _data=None):
'''
Show a peer grading interface
......@@ -528,27 +547,11 @@ class PeerGradingModule(PeerGradingFields, XModule):
log.exception("Could not contact peer grading service.")
success = False
def _find_corresponding_module_for_location(location):
"""
Find the peer grading module that exists at the given location.
"""
try:
return self.descriptor.system.load_item(location)
except ItemNotFoundError:
# The linked problem doesn't exist.
log.error("Problem {0} does not exist in this course.".format(location))
raise
except NoPathToItem:
# The linked problem does not have a path to it (i.e. it is in a draft or other strange state).
log.error("Cannot find a path to problem {0} in this course.".format(location))
raise
good_problem_list = []
for problem in problem_list:
problem_location = problem['location']
try:
descriptor = _find_corresponding_module_for_location(problem_location)
descriptor = self._find_corresponding_module_for_location(problem_location)
except (NoPathToItem, ItemNotFoundError):
continue
if descriptor:
......@@ -599,6 +602,8 @@ class PeerGradingModule(PeerGradingFields, XModule):
elif data.get('location') is not None:
problem_location = data.get('location')
module = self._find_corresponding_module_for_location(problem_location)
ajax_url = self.ajax_url
html = self.system.render_template('peer_grading/peer_grading_problem.html', {
'view_html': '',
......@@ -607,6 +612,7 @@ class PeerGradingModule(PeerGradingFields, XModule):
'ajax_url': ajax_url,
# Checked above
'staff_access': False,
'track_changes': getattr(module, 'track_changes', False),
'use_single_location': self.use_for_single_location,
})
......
......@@ -3,11 +3,11 @@ from xmodule.modulestore import Location
from . import get_test_system
from test_util_open_ended import MockQueryDict, DummyModulestore
from xmodule.open_ended_grading_classes.peer_grading_service import MockPeerGradingService
import json
from mock import Mock
from xmodule.peer_grading_module import PeerGradingModule
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from xmodule.modulestore.exceptions import ItemNotFoundError
import logging
......@@ -24,7 +24,9 @@ class PeerGradingModuleTest(unittest.TestCase, DummyModulestore):
"""
problem_location = Location(["i4x", "edX", "open_ended", "peergrading",
"PeerGradingSample"])
coe_location = Location(["i4x", "edX", "open_ended", "combinedopenended", "SampleQuestion"])
calibrated_dict = {'location': "blah"}
coe_dict = {'location': coe_location.url()}
save_dict = MockQueryDict()
save_dict.update({
'location': "blah",
......@@ -46,6 +48,7 @@ class PeerGradingModuleTest(unittest.TestCase, DummyModulestore):
self.test_system.open_ended_grading_interface = None
self.setup_modulestore(COURSE)
self.peer_grading = self.get_module_from_location(self.problem_location, COURSE)
self.coe = self.get_module_from_location(self.coe_location, COURSE)
def test_module_closed(self):
"""
......@@ -132,9 +135,18 @@ class PeerGradingModuleTest(unittest.TestCase, DummyModulestore):
See if we can render a single problem
@return:
"""
response = self.peer_grading.peer_grading_problem(self.calibrated_dict)
response = self.peer_grading.peer_grading_problem(self.coe_dict)
self.assertEqual(response['success'], True)
def test___find_corresponding_module_for_location_exceptions(self):
"""
Unit test for the exception cases of _find_corresponding_module_for_location
Mainly for diff coverage
@return:
"""
with self.assertRaises(ItemNotFoundError):
self.peer_grading._find_corresponding_module_for_location(Location('i4x', 'a', 'b', 'c', 'd'))
def test_get_instance_state(self):
"""
Get the instance state dict
......@@ -235,3 +247,35 @@ class PeerGradingModuleLinkedTest(unittest.TestCase, DummyModulestore):
# Ensure that it is properly setup.
self.assertTrue(peer_grading.use_for_single_location)
class PeerGradingModuleTrackChangesTest(unittest.TestCase, DummyModulestore):
"""
Test peer grading with the track changes modification
"""
class MockedTrackChangesProblem(object):
track_changes = True
mock_track_changes_problem = Mock(side_effect=[MockedTrackChangesProblem()])
pgm_location = Location(["i4x", "edX", "open_ended", "peergrading", "PeerGradingSample"])
def setUp(self):
"""
Create a peer grading module from a test system
@return:
"""
self.test_system = get_test_system()
self.test_system.open_ended_grading_interface = None
self.setup_modulestore(COURSE)
self.peer_grading = self.get_module_from_location(self.pgm_location, COURSE)
def test_tracking_peer_eval_problem(self):
"""
Tests rendering of peer eval problem with track changes set. With the test_system render_template
this test becomes a bit tautological, but oh well.
@return:
"""
self.peer_grading._find_corresponding_module_for_location = self.mock_track_changes_problem
response = self.peer_grading.peer_grading_problem({'location': 'mocked'})
self.assertEqual(response['success'], True)
self.assertIn("'track_changes': True", response['html'])
......@@ -6,6 +6,32 @@ div.peer-grading{
margin: 0;
height: 75px;
}
div.feedback-area.track-changes {
position: relative;
margin: 0px;
height: 150px;
border: 1px solid lightgray;
padding: 5px;
resize: vertical;
width: 99%;
overflow: auto;
}
div.feedback-area.track-changes, p.legend {
.ice-controls {
float: right;
}
.del {
position: relative;
text-decoration: line-through;
background-color: #ffc3c3;
}
.ins {
position: relative;
background-color: #c3ffc3;
}
}
ul.rubric-list{
margin: 0;
......
......@@ -26,7 +26,7 @@
</section>
</div>
</div>
<section class="grading-wrapper">
<section class="grading-wrapper" data-track-changes="${'true' if track_changes else 'false'}">
<div class="grading-message">
</div>
<h2>${_("Student Response")}</h2>
......@@ -42,9 +42,21 @@
<p class="rubric-selection-container"></p>
<p class="score-selection-container"></p>
<h3>${_("Written Feedback")}</h3>
<p>${_("Please include some written feedback as well.")}</p>
<textarea name="feedback" placeholder="Feedback for student" class="feedback-area" cols="70" ></textarea>
<div class="flag-student-container"> ${_("This submission has explicit or pornographic content : ")}
% if track_changes:
<p>${_("Please edit your peer's submission below.")}</p>
<p class="legend">
<span class="ins">${_("This is an insertion.")}</span>&nbsp;
<span class="del">${_("This is a deletion.")}</span>&nbsp;
<span class="ice-controls">
<a href="#" class="reset-changes"><i class="icon-refresh"></i> Reset Changes</a>
</span>
</p>
<div name="feedback" class="feedback-area track-changes" contenteditable="true"></div>
% else:
<p>${_("Please include some written feedback as well.")}</p>
<textarea name="feedback" placeholder="Feedback for student" class="feedback-area" cols="70" ></textarea>
% endif
<div class="flag-student-container"> ${_("This submission has explicit or offensive content : ")}
<input type="checkbox" class="flag-checkbox" value="student_is_flagged">
</div>
<div class="answer-unknown-container"> ${_("I do not know how to grade this question : ")}
......
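A note on the data-track-changes attribute added above: jQuery's .data() coerces the literal strings 'true' and 'false' in data-* attributes to booleans, which is why the tracking_changes helper in the CoffeeScript can safely compare the value with == true.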