Commit fe59a8ce by Giulio Gratta, committed by Jason Bau

Implement NYT's ICE track changes for Open Ended Assessments

Squashed commits to facilitate cherry-picking. The prefixes below map the
original commit messages to their committers:
- ==> @caesar2164, * ==> @jrbl, ~ ==> @jbau

- Added a boolean to the OE problem CMS settings to turn on ICE track changes
- Added ICE init and tracking start/stop to the HTML
- Added a conditional in the peer grading HTML for the ICE container
- Added CSS styling for the ICE container and insertion/deletion elements
- Added a class to the track changes HTML
- Added ice.min.js and included it in the js requirements for peer grading
- Use track_changes in peer_grading_problem
* Refactor peer_grading_problem to look up the track_changes variable by problem
  location (see the sketch after this list).
* Whitelist ICE insert and delete tags
* Adds ICE multi-user-editor insert and delete tags to the peer grading
  sanitize_html whitelist.
- Add a reset button
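
For orientation, a condensed Python sketch of how the CMS setting reaches the
grading view. LinkedProblem below is only a stand-in for the real combined open
ended descriptor (which the actual code loads from the modulestore via
self.descriptor.system.load_item(location)); the real wiring is in the diff below.

    # Stand-in for the linked combined open ended problem descriptor.
    class LinkedProblem(object):
        track_changes = True  # boolean CMS setting on the OE problem

    def template_context(descriptor):
        # Default to False so problems without the new setting keep the plain textarea.
        return {'track_changes': getattr(descriptor, 'track_changes', False)}

    print(template_context(LinkedProblem()))  # {'track_changes': True}

The template then turns this flag into a data-track-changes attribute on the
grading wrapper, which the CoffeeScript checks before wiring up ICE.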

* Introducing ICE means its inline tags need to be passed through
  to the feedback target, but we don't want malicious peer feedback
  providers introducing <script>, etc., so we use lxml.html.clean to
  scrub peer grading input.
* Adds feedback URL autoheating.
* Allows ICE <insert> and <delete> tags but not others (see the sketch below).
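
A minimal sketch of that scrubbing step, assuming an lxml Cleaner whitelist built
around ICE's insert/delete markup; the exact tags and attributes kept by the real
sanitize_html may differ.

    from lxml.html.clean import Cleaner

    # Assumed whitelist: keep basic formatting plus ICE's ins/del markup; strip
    # <script> elements, javascript: URLs, event handlers, and inline styles.
    _cleaner = Cleaner(
        scripts=True,
        javascript=True,
        style=True,
        allow_tags=['p', 'br', 'span', 'ins', 'del'],
        remove_unknown_tags=False,  # required whenever allow_tags is supplied
    )

    def scrub_peer_feedback(feedback_html):
        """Strip active content from peer feedback while keeping tracked changes."""
        return _cleaner.clean_html(feedback_html)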

~ add help text re: EXPERIMENTAL FEATURES

~ address a bunch of @VikParuchuri review comments on ICE for OEE
  ~ remove extraneous code
  ~ change to new xmodule accessors
  ~ refactor filtering

~ fix broken test test_peer_grading_problem

~ Create track_changes.coffee and remove <script> from
  peer_grading_problem.html
~ Remove all reliance on id in favor of local scoping and classes

~ bring up test coverage somewhat

~ remove superfluous $.scrollTo

~ TODO: Filtering
parent ed1a0738
@@ -234,6 +234,14 @@ class CombinedOpenEndedFields(object):
         default=False,
         scope=Scope.settings
     )
+    track_changes = Boolean(
+        display_name="Peer Track Changes",
+        help=("EXPERIMENTAL FEATURE FOR PEER GRADING ONLY: "
+              "If set to 'True', peer graders will be able to make changes to the student "
+              "submission and those changes will be tracked and shown along with the graded feedback."),
+        default=False,
+        scope=Scope.settings
+    )
     due = Date(
         help="Date that this problem is due by",
         scope=Scope.settings
...
@@ -40,6 +40,25 @@ div.name{
 section.combined-open-ended {
   @include clearfix;
+  .written-feedback {
+    position: relative;
+    margin: 0px;
+    height: 150px;
+    border: 1px solid lightgray;
+    padding: 5px;
+    resize: vertical;
+    width: 99%;
+    overflow: auto;
+    .del {
+      text-decoration: line-through;
+      background-color: #ffc3c3;
+    }
+    .ins {
+      background-color: #c3ffc3;
+    }
+  }
 }
...
This source diff could not be displayed because it is too large.
@@ -283,6 +283,9 @@ class @PeerGradingProblem
     @error_container.hide()
     @flag_submission_confirmation.hide()
+    if @tracking_changes()
+      @change_tracker = new TrackChanges(@el)
+
     @is_calibrated_check()

   # locally scoped jquery.
@@ -306,13 +309,18 @@ class @PeerGradingProblem
   construct_data: () ->
+    if @tracking_changes()
+      feedback_content = @feedback_area.html()
+    else
+      feedback_content = @feedback_area.val()
+
     data =
       rubric_scores: @rub.get_score_list()
       score: @rub.get_total_score()
       location: @location
       submission_id: @essay_id_input.val()
       submission_key: @submission_key_input.val()
-      feedback: @feedback_area.val()
+      feedback: feedback_content
       submission_flagged: @flag_student_checkbox.is(':checked')
       answer_unknown: @answer_unknown_checkbox.is(':checked')
     return data
@@ -388,7 +396,7 @@ class @PeerGradingProblem
       @grading_message.fadeIn()
       message = "<p>Successfully saved your feedback. Fetching the next essay."
       if response.required_done
-        message = message + " You have completed the required number of gradings."
+        message = message + " You have done the required number of peer evals but may continue grading if you like."
       message = message + "</p>"
       @grading_message.html(message)
     else
@@ -464,6 +472,9 @@ class @PeerGradingProblem
     else
       @render_error("An error occurred while retrieving the next calibration essay")

+  tracking_changes: () =>
+    return @grading_wrapper.data('track-changes') == true
+
   # Renders a student submission to be graded
   render_submission: (response) =>
     if response.success
@@ -483,8 +494,12 @@ class @PeerGradingProblem
       @grading_panel.find(@grading_text_sel).show()
       @flag_student_container.show()
       @answer_unknown_container.show()
-      @feedback_area.val("")
+      if @tracking_changes()
+        @feedback_area.html(@make_paragraphs(response.student_response))
+        @change_tracker.rebindTracker()
+      else
+        @feedback_area.val("")
+      @answer_unknown_checkbox.removeAttr("checked")
       @flag_student_checkbox.removeAttr("checked")
       @submit_button.show()
       @submit_button.unbind('click')
@@ -496,7 +511,6 @@ class @PeerGradingProblem
     else
       @render_error("An error occured when retrieving the next submission.")

   make_paragraphs: (text) ->
     paragraph_split = text.split(/\n\s*\n/)
     new_text = ''
@@ -527,13 +541,13 @@ class @PeerGradingProblem
     # display correct grade
     @calibration_feedback_panel.slideDown()
     calibration_wrapper = @$(@calibration_feedback_wrapper_sel)
-    calibration_wrapper.html("<p>The score you gave was: #{@grade}. The actual score is: #{response.actual_score}</p>")
+    calibration_wrapper.html("<p>The score you gave was: #{@grade}. The instructor score is: #{response.actual_score}</p>")
     score = parseInt(@grade)
     actual_score = parseInt(response.actual_score)
     if score == actual_score
-      calibration_wrapper.append("<p>Your score matches the actual score!</p>")
+      calibration_wrapper.append("<p>Your score matches the instructor score!</p>")
     else
       calibration_wrapper.append("<p>You may want to review the rubric again.</p>")
...
class @TrackChanges

  reset_button_sel: '.reset-changes'
  tracked_feedback_sel: '.feedback-area.track-changes'
  submit_button_sel: '.submit-button'
  tracker: null

  constructor: (element) ->
    @el = element
    @reset_button = @$(@reset_button_sel)
    @submit_button = @$(@submit_button_sel)
    @tracked_feedback = @$(@tracked_feedback_sel)
    @reset_button.click @reset_changes
    @submit_button.click @stop_tracking_on_submit

  rebindTracker: () =>
    if @tracker?
      @tracker.stopTracking()
      delete @tracker
    @tracker = new ice.InlineChangeEditor({
      element: @tracked_feedback[0], # return DOM element from selector
      handleEvents: true,
      currentUser: { id: 1, name: 'Peer Feedback' }, # hardcoded current user
      # optional plugins
      plugins: [
        # Track content that is cut and pasted
        {
          name: 'IceCopyPastePlugin',
          settings: {
            # List of tags and attributes to preserve when cleaning a paste
            preserve: 'p,a[href],span[id,class],em,strong'
          }
        }
      ]
    })
    @tracker.startTracking()

  # locally scoped jquery. (scoped to the element)
  $: (selector) ->
    $(selector, @el)

  reset_changes: (event) =>
    event.preventDefault()
    @tracker.rejectAll()

  stop_tracking_on_submit: () =>
    @tracker.stopTracking()
@@ -86,9 +86,13 @@ class PeerGradingModule(PeerGradingFields, XModule):
     _VERSION = 1

     js = {
+        'js': [
+            resource_string(__name__, 'js/src/peergrading/ice.min.js'),
+        ],
         'coffee': [
             resource_string(__name__, 'js/src/peergrading/peer_grading.coffee'),
             resource_string(__name__, 'js/src/peergrading/peer_grading_problem.coffee'),
+            resource_string(__name__, 'js/src/peergrading/track_changes.coffee'),
             resource_string(__name__, 'js/src/collapsible.coffee'),
             resource_string(__name__, 'js/src/javascript_loader.coffee'),
         ]
@@ -495,6 +499,21 @@ class PeerGradingModule(PeerGradingFields, XModule):
         })
         return html

+    def _find_corresponding_module_for_location(self, location):
+        """
+        Find the peer grading module that exists at the given location.
+        """
+        try:
+            return self.descriptor.system.load_item(location)
+        except ItemNotFoundError:
+            # The linked problem doesn't exist.
+            log.error("Problem {0} does not exist in this course.".format(location))
+            raise
+        except NoPathToItem:
+            # The linked problem does not have a path to it (ie is in a draft or other strange state).
+            log.error("Cannot find a path to problem {0} in this course.".format(location))
+            raise
+
     def peer_grading(self, _data=None):
         '''
         Show a peer grading interface
@@ -528,27 +547,11 @@ class PeerGradingModule(PeerGradingFields, XModule):
             log.exception("Could not contact peer grading service.")
             success = False

-        def _find_corresponding_module_for_location(location):
-            """
-            Find the peer grading module that exists at the given location.
-            """
-            try:
-                return self.descriptor.system.load_item(location)
-            except ItemNotFoundError:
-                # The linked problem doesn't exist.
-                log.error("Problem {0} does not exist in this course.".format(location))
-                raise
-            except NoPathToItem:
-                # The linked problem does not have a path to it (ie is in a draft or other strange state).
-                log.error("Cannot find a path to problem {0} in this course.".format(location))
-                raise
-
         good_problem_list = []
         for problem in problem_list:
             problem_location = problem['location']
             try:
-                descriptor = _find_corresponding_module_for_location(problem_location)
+                descriptor = self._find_corresponding_module_for_location(problem_location)
             except (NoPathToItem, ItemNotFoundError):
                 continue
             if descriptor:
@@ -599,6 +602,8 @@ class PeerGradingModule(PeerGradingFields, XModule):
         elif data.get('location') is not None:
             problem_location = data.get('location')

+        module = self._find_corresponding_module_for_location(problem_location)
+
         ajax_url = self.ajax_url
         html = self.system.render_template('peer_grading/peer_grading_problem.html', {
             'view_html': '',
@@ -607,6 +612,7 @@ class PeerGradingModule(PeerGradingFields, XModule):
             'ajax_url': ajax_url,
             # Checked above
             'staff_access': False,
+            'track_changes': getattr(module, 'track_changes', False),
             'use_single_location': self.use_for_single_location,
         })
...
@@ -3,11 +3,11 @@ from xmodule.modulestore import Location
 from .import get_test_system
 from test_util_open_ended import MockQueryDict, DummyModulestore
 from xmodule.open_ended_grading_classes.peer_grading_service import MockPeerGradingService
-import json
 from mock import Mock
 from xmodule.peer_grading_module import PeerGradingModule
 from xblock.field_data import DictFieldData
 from xblock.fields import ScopeIds
+from xmodule.modulestore.exceptions import ItemNotFoundError

 import logging
@@ -24,7 +24,9 @@ class PeerGradingModuleTest(unittest.TestCase, DummyModulestore):
     """
     problem_location = Location(["i4x", "edX", "open_ended", "peergrading",
                                  "PeerGradingSample"])
+    coe_location = Location(["i4x", "edX", "open_ended", "combinedopenended", "SampleQuestion"])
     calibrated_dict = {'location': "blah"}
+    coe_dict = {'location': coe_location.url()}
     save_dict = MockQueryDict()
     save_dict.update({
         'location': "blah",
@@ -46,6 +48,7 @@ class PeerGradingModuleTest(unittest.TestCase, DummyModulestore):
         self.test_system.open_ended_grading_interface = None
         self.setup_modulestore(COURSE)
         self.peer_grading = self.get_module_from_location(self.problem_location, COURSE)
+        self.coe = self.get_module_from_location(self.coe_location, COURSE)

     def test_module_closed(self):
         """
@@ -132,9 +135,18 @@ class PeerGradingModuleTest(unittest.TestCase, DummyModulestore):
         See if we can render a single problem
         @return:
         """
-        response = self.peer_grading.peer_grading_problem(self.calibrated_dict)
+        response = self.peer_grading.peer_grading_problem(self.coe_dict)
         self.assertEqual(response['success'], True)

+    def test___find_corresponding_module_for_location_exceptions(self):
+        """
+        Unit test for the exception cases of __find_corresponding_module_for_location
+        Mainly for diff coverage
+        @return:
+        """
+        with self.assertRaises(ItemNotFoundError):
+            self.peer_grading._find_corresponding_module_for_location(Location('i4x','a','b','c','d'))
+
     def test_get_instance_state(self):
         """
         Get the instance state dict
@@ -235,3 +247,35 @@ class PeerGradingModuleLinkedTest(unittest.TestCase, DummyModulestore):
         # Ensure that it is properly setup.
         self.assertTrue(peer_grading.use_for_single_location)
+
+
+class PeerGradingModuleTrackChangesTest(unittest.TestCase, DummyModulestore):
+    """
+    Test peer grading with the track changes modification
+    """
+    class MockedTrackChangesProblem(object):
+        track_changes = True
+
+    mock_track_changes_problem = Mock(side_effect=[MockedTrackChangesProblem()])
+    pgm_location = Location(["i4x", "edX", "open_ended", "peergrading", "PeerGradingSample"])
+
+    def setUp(self):
+        """
+        Create a peer grading module from a test system
+        @return:
+        """
+        self.test_system = get_test_system()
+        self.test_system.open_ended_grading_interface = None
+        self.setup_modulestore(COURSE)
+        self.peer_grading = self.get_module_from_location(self.pgm_location, COURSE)
+
+    def test_tracking_peer_eval_problem(self):
+        """
+        Tests rendering of peer eval problem with track changes set. With the test_system render_template
+        this test becomes a bit tautological, but oh well.
+        @return:
+        """
+        self.peer_grading._find_corresponding_module_for_location = self.mock_track_changes_problem
+        response = self.peer_grading.peer_grading_problem({'location': 'mocked'})
+        self.assertEqual(response['success'], True)
+        self.assertIn("'track_changes': True", response['html'])
@@ -7,6 +7,32 @@ div.peer-grading{
     height: 75px;
   }

+  div.feedback-area.track-changes {
+    position: relative;
+    margin: 0px;
+    height: 150px;
+    border: 1px solid lightgray;
+    padding: 5px;
+    resize: vertical;
+    width: 99%;
+    overflow: auto;
+  }
+
+  div.feedback-area.track-changes, p.legend {
+    .ice-controls {
+      float: right;
+    }
+    .del {
+      position: relative;
+      text-decoration: line-through;
+      background-color: #ffc3c3;
+    }
+    .ins {
+      position: relative;
+      background-color: #c3ffc3;
+    }
+  }
+
   ul.rubric-list{
     margin: 0;
     padding: 0;
...
...@@ -26,7 +26,7 @@ ...@@ -26,7 +26,7 @@
</section> </section>
</div> </div>
</div> </div>
<section class="grading-wrapper"> <section class="grading-wrapper" data-track-changes="${'true' if track_changes else 'false'}">
<div class="grading-message"> <div class="grading-message">
</div> </div>
<h2>${_("Student Response")}</h2> <h2>${_("Student Response")}</h2>
...@@ -42,9 +42,21 @@ ...@@ -42,9 +42,21 @@
<p class="rubric-selection-container"></p> <p class="rubric-selection-container"></p>
<p class="score-selection-container"></p> <p class="score-selection-container"></p>
<h3>${_("Written Feedback")}</h3> <h3>${_("Written Feedback")}</h3>
% if track_changes:
<p>${_("Please edit your peer's submission below.")}</p>
<p class="legend">
<span class="ins">${_("This is an insertion.")}</span>&nbsp;
<span class="del">${_("This is a deletion.")}</span>&nbsp;
<span class="ice-controls">
<a href="#" class="reset-changes"><i class="icon-refresh"></i> Reset Changes</a>
</span>
</p>
<div name="feedback" class="feedback-area track-changes" contenteditable="true"></div>
% else:
<p>${_("Please include some written feedback as well.")}</p> <p>${_("Please include some written feedback as well.")}</p>
<textarea name="feedback" placeholder="Feedback for student" class="feedback-area" cols="70" ></textarea> <textarea name="feedback" placeholder="Feedback for student" class="feedback-area" cols="70" ></textarea>
<div class="flag-student-container"> ${_("This submission has explicit or pornographic content : ")} % endif
<div class="flag-student-container"> ${_("This submission has explicit or offensive content : ")}
<input type="checkbox" class="flag-checkbox" value="student_is_flagged"> <input type="checkbox" class="flag-checkbox" value="student_is_flagged">
</div> </div>
<div class="answer-unknown-container"> ${_("I do not know how to grade this question : ")} <div class="answer-unknown-container"> ${_("I do not know how to grade this question : ")}
......