Commit 40d59faa by David Ormsbee

Merge pull request #1330 from MITx/diana/open-ended-ui-updates

Rubric Integration and other UI improvements
parents 0b9262a4 1d4b674c
@@ -21,6 +21,8 @@ from .xml_module import XmlDescriptor
 from xmodule.modulestore import Location
 import self_assessment_module
 import open_ended_module
+from combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError
+from .stringify import stringify_children

 log = logging.getLogger("mitx.courseware")

@@ -138,12 +140,19 @@ class CombinedOpenEndedModule(XModule):
         # completion (doesn't matter if you self-assessed correct/incorrect).
         self._max_score = int(self.metadata.get('max_score', MAX_SCORE))

+        rubric_renderer = CombinedOpenEndedRubric(system, True)
+        try:
+            rubric_feedback = rubric_renderer.render_rubric(stringify_children(definition['rubric']))
+        except RubricParsingError:
+            log.error("Failed to parse rubric in location: {0}".format(location))
+            raise
+
         #Static data is passed to the child modules to render
         self.static_data = {
             'max_score': self._max_score,
             'max_attempts': self.max_attempts,
             'prompt': definition['prompt'],
-            'rubric': definition['rubric']
+            'rubric': definition['rubric'],
+            'display_name': self.display_name
         }
         self.task_xml = definition['task_xml']

@@ -295,6 +304,7 @@ class CombinedOpenEndedModule(XModule):
             'task_count': len(self.task_xml),
             'task_number': self.current_task_number + 1,
             'status': self.get_status(),
+            'display_name': self.display_name
         }
         return context

@@ -593,4 +603,4 @@ class CombinedOpenEndedDescriptor(XmlDescriptor, EditingDescriptor):
         for child in ['task']:
             add_child(child)

-        return elt
\ No newline at end of file
+        return elt
@@ -3,43 +3,61 @@ from lxml import etree

 log=logging.getLogger(__name__)

+class RubricParsingError(Exception):
+    pass
+
 class CombinedOpenEndedRubric(object):

-    @staticmethod
-    def render_rubric(rubric_xml, system):
+    def __init__ (self, system, view_only = False):
+        self.has_score = False
+        self.view_only = view_only
+        self.system = system
+
+    def render_rubric(self, rubric_xml):
+        '''
+        render_rubric: takes in an xml string and outputs the corresponding
+            html for that xml, given the type of rubric we're generating
+        Input:
+            rubric_xml: a string that has not been parsed into xml and that
+                represents this particular rubric
+        Output:
+            html: the html that corresponds to the xml given
+        '''
         try:
-            rubric_categories = CombinedOpenEndedRubric.extract_rubric_categories(rubric_xml)
-            html = system.render_template('open_ended_rubric.html', {'rubric_categories' : rubric_categories})
+            rubric_categories = self.extract_categories(rubric_xml)
+            html = self.system.render_template('open_ended_rubric.html',
+                    {'categories' : rubric_categories,
+                     'has_score': self.has_score,
+                     'view_only': self.view_only})
         except:
-            log.exception("Could not parse the rubric.")
-            html = rubric_xml
+            raise RubricParsingError("[render_rubric] Could not parse the rubric with xml: {0}".format(rubric_xml))
         return html

-    @staticmethod
-    def extract_rubric_categories(element):
+    def extract_categories(self, element):
         '''
         Construct a list of categories such that the structure looks like:
         [ { category: "Category 1 Name",
             options: [{text: "Option 1 Name", points: 0}, {text:"Option 2 Name", points: 5}]
           },
           { category: "Category 2 Name",
             options: [{text: "Option 1 Name", points: 0},
                       {text: "Option 2 Name", points: 1},
                       {text: "Option 3 Name", points: 2}]}]
         '''
-        element = etree.fromstring(element)
+        if isinstance(element, basestring):
+            element = etree.fromstring(element)
         categories = []
         for category in element:
             if category.tag != 'category':
-                raise Exception("[capa.inputtypes.extract_categories] Expected a <category> tag: got {0} instead".format(category.tag))
+                raise RubricParsingError("[extract_categories] Expected a <category> tag: got {0} instead".format(category.tag))
             else:
-                categories.append(CombinedOpenEndedRubric.extract_category(category))
+                categories.append(self.extract_category(category))
         return categories

-    @staticmethod
-    def extract_category(category):
+    def extract_category(self, category):
         '''
         construct an individual category
         {category: "Category 1 Name",
          options: [{text: "Option 1 text", points: 1},

@@ -47,42 +65,33 @@ class CombinedOpenEndedRubric(object):
         all sorting and auto-point generation occurs in this function
         '''
-        has_score=False
         descriptionxml = category[0]
-        optionsxml = category[1:]
         scorexml = category[1]
-        if scorexml.tag == "option":
-            optionsxml = category[1:]
-        else:
-            optionsxml = category[2:]
-            has_score=True
+        score = None
+        if scorexml.tag == 'score':
+            score_text = scorexml.text
+            optionsxml = category[2:]
+            score = int(score_text)
+            self.has_score = True
+        # if we are missing the score tag and we are expecting one
+        elif self.has_score:
+            raise RubricParsingError("[extract_category] Category {0} is missing a score".format(descriptionxml.text))

         # parse description
         if descriptionxml.tag != 'description':
-            raise Exception("[extract_category]: expected description tag, got {0} instead".format(descriptionxml.tag))
+            raise RubricParsingError("[extract_category]: expected description tag, got {0} instead".format(descriptionxml.tag))

-        if has_score:
-            if scorexml.tag != 'score':
-                raise Exception("[extract_category]: expected score tag, got {0} instead".format(scorexml.tag))
-        for option in optionsxml:
-            if option.tag != "option":
-                raise Exception("[extract_category]: expected option tag, got {0} instead".format(option.tag))

         description = descriptionxml.text
-        if has_score:
-            score = int(scorexml.text)
-        else:
-            score = 0

         cur_points = 0
         options = []
         autonumbering = True
         # parse options
         for option in optionsxml:
             if option.tag != 'option':
-                raise Exception("[extract_category]: expected option tag, got {0} instead".format(option.tag))
+                raise RubricParsingError("[extract_category]: expected option tag, got {0} instead".format(option.tag))
             else:
                 pointstr = option.get("points")
                 if pointstr:

@@ -91,25 +100,24 @@ class CombinedOpenEndedRubric(object):
                     try:
                         points = int(pointstr)
                     except ValueError:
-                        raise Exception("[extract_category]: expected points to have int, got {0} instead".format(pointstr))
+                        raise RubricParsingError("[extract_category]: expected points to have int, got {0} instead".format(pointstr))
                 elif autonumbering:
                     # use the generated one if we're in the right mode
                     points = cur_points
                     cur_points = cur_points + 1
                 else:
-                    raise Exception("[extract_category]: missing points attribute. Cannot continue to auto-create points values after a points value is explicitly dfined.")
+                    raise Exception("[extract_category]: missing points attribute. Cannot continue to auto-create points values after a points value is explicitly defined.")

+                selected = score == points
                 optiontext = option.text
-                selected = False
-                if has_score:
-                    if points == score:
-                        selected = True
-                options.append({'text': option.text, 'points': points, 'selected' : selected})
+                options.append({'text': option.text, 'points': points, 'selected': selected})

         # sort and check for duplicates
         options = sorted(options, key=lambda option: option['points'])
         CombinedOpenEndedRubric.validate_options(options)

-        return {'description': description, 'options': options, 'score' : score, 'has_score' : has_score}
+        return {'description': description, 'options': options}

     @staticmethod
     def validate_options(options):

@@ -117,12 +125,12 @@ class CombinedOpenEndedRubric(object):
         Validates a set of options. This can and should be extended to filter out other bad edge cases
         '''
         if len(options) == 0:
-            raise Exception("[extract_category]: no options associated with this category")
+            raise RubricParsingError("[extract_category]: no options associated with this category")
         if len(options) == 1:
             return
         prev = options[0]['points']
         for option in options[1:]:
             if prev == option['points']:
-                raise Exception("[extract_category]: found duplicate point values between two different options")
+                raise RubricParsingError("[extract_category]: found duplicate point values between two different options")
             else:
                 prev = option['points']
\ No newline at end of file
@@ -20,6 +20,7 @@ h2 {
   color: darken($error-red, 10%);
 }

 section.problem {
   @media print {
     display: block;

@@ -756,4 +757,49 @@ section.problem {
       }
     }
   }
+  .rubric {
+    tr {
+      margin:10px 0px;
+      height: 100%;
+    }
+    td {
+      padding: 20px 0px;
+      margin: 10px 0px;
+      height: 100%;
+    }
+    th {
+      padding: 5px;
+      margin: 5px;
+    }
+    label,
+    .view-only {
+      margin:3px;
+      position: relative;
+      padding: 15px;
+      width: 150px;
+      height:100%;
+      display: inline-block;
+      min-height: 50px;
+      min-width: 50px;
+      background-color: #CCC;
+      font-size: .9em;
+    }
+    .grade {
+      position: absolute;
+      bottom:0px;
+      right:0px;
+      margin:10px;
+    }
+    .selected-grade {
+      background: #666;
+      color: white;
+    }
+    input[type=radio]:checked + label {
+      background: #666;
+      color: white; }
+    input[class='score-selection'] {
+      display: none;
+    }
+  }
 }
@@ -37,9 +37,13 @@ section.combined-open-ended {
   .result-container
   {
     float:left;
-    width: 93%;
+    width: 100%;
     position:relative;
   }
+  h4
+  {
+    margin-bottom:10px;
+  }
 }

 section.combined-open-ended-status {

@@ -49,15 +53,19 @@ section.combined-open-ended-status {
     color: #2C2C2C;
     font-family: monospace;
     font-size: 1em;
-    padding-top: 10px;
+    padding: 10px;
+    .show-results {
+      margin-top: .3em;
+      text-align:right;
+    }
+    .show-results-button {
+      font: 1em monospace;
+    }
   }
   .statusitem-current {
-    background-color: #BEBEBE;
-    color: #2C2C2C;
-    font-family: monospace;
-    font-size: 1em;
-    padding-top: 10px;
+    background-color: #d4d4d4;
+    color: #222;
   }
   span {

@@ -93,6 +101,7 @@ section.combined-open-ended-status {

 div.result-container {
   .evaluation {
     p {
       margin-bottom: 1px;
     }

@@ -104,6 +113,7 @@ div.result-container {
   }
   .evaluation-response {
+    margin-bottom: 10px;
     header {
       text-align: right;
       a {

@@ -134,6 +144,7 @@ div.result-container {
   }
   .external-grader-message {
+    margin-bottom: 5px;
     section {
       padding-left: 20px;
       background-color: #FAFAFA;

@@ -141,6 +152,7 @@ div.result-container {
       font-family: monospace;
       font-size: 1em;
       padding-top: 10px;
+      padding-bottom:30px;
       header {
         font-size: 1.4em;
       }

@@ -221,12 +233,13 @@ div.result-container {

 div.result-container, section.open-ended-child {
   .rubric {
+    margin-bottom:25px;
     tr {
       margin:10px 0px;
       height: 100%;
     }
     td {
-      padding: 20px 0px;
+      padding: 20px 0px 25px 0px;
       margin: 10px 0px;
       height: 100%;
     }

@@ -236,16 +249,16 @@ div.result-container, section.open-ended-child {
     }
     label,
     .view-only {
-      margin:10px;
+      margin:2px;
      position: relative;
-      padding: 15px;
+      padding: 10px 15px 25px 15px;
-      width: 200px;
+      width: 145px;
      height:100%;
      display: inline-block;
      min-height: 50px;
      min-width: 50px;
      background-color: #CCC;
-      font-size: 1em;
+      font-size: .85em;
     }
     .grade {
       position: absolute;

@@ -257,12 +270,6 @@ div.result-container, section.open-ended-child {
       background: #666;
       color: white;
     }
-    input[type=radio]:checked + label {
-      background: #666;
-      color: white; }
-    input[class='score-selection'] {
-      display: none;
-    }
   }
 }

@@ -461,7 +468,6 @@ section.open-ended-child {
   p {
     line-height: 20px;
-    text-transform: capitalize;
     margin-bottom: 0;
     float: left;
   }

@@ -598,13 +604,15 @@ section.open-ended-child {
   }
 }

-div.open-ended-alert {
+div.open-ended-alert,
+.save_message {
   padding: 8px 12px;
   border: 1px solid #EBE8BF;
   border-radius: 3px;
   background: #FFFCDD;
   font-size: 0.9em;
   margin-top: 10px;
+  margin-bottom:5px;
 }

 div.capa_reset {

@@ -623,4 +631,31 @@ section.open-ended-child {
   font-size: 0.9em;
 }

+.assessment-container {
+  margin: 40px 0px 30px 0px;
+  .scoring-container
+  {
+    p
+    {
+      margin-bottom: 1em;
+    }
+    label {
+      margin: 10px;
+      padding: 5px;
+      display: inline-block;
+      min-width: 50px;
+      background-color: #CCC;
+      text-size: 1.5em;
+    }
+    input[type=radio]:checked + label {
+      background: #666;
+      color: white;
+    }
+    input[class='grade-selection'] {
+      display: none;
+    }
+  }
+}
 }
@@ -9,20 +9,34 @@ class @Collapsible
    ###
    el: container
    ###
+    # standard longform + shortform pattern
     el.find('.longform').hide()
     el.find('.shortform').append('<a href="#" class="full">See full output</a>')
+
+    # custom longform + shortform text pattern
+    short_custom = el.find('.shortform-custom')
+    # set up each one individually
+    short_custom.each (index, elt) =>
+      open_text = $(elt).data('open-text')
+      close_text = $(elt).data('close-text')
+      $(elt).append("<a href='#' class='full-custom'>"+ open_text + "</a>")
+      $(elt).find('.full-custom').click (event) => @toggleFull(event, open_text, close_text)
+
+    # collapsible pattern
     el.find('.collapsible header + section').hide()
-    el.find('.full').click @toggleFull
+
+    # set up triggers
+    el.find('.full').click (event) => @toggleFull(event, "See full output", "Hide output")
     el.find('.collapsible header a').click @toggleHint

-  @toggleFull: (event) =>
+  @toggleFull: (event, open_text, close_text) =>
     event.preventDefault()
     $(event.target).parent().siblings().slideToggle()
     $(event.target).parent().parent().toggleClass('open')
-    if $(event.target).text() == 'See full output'
-      new_text = 'Hide output'
+    if $(event.target).text() == open_text
+      new_text = close_text
     else
-      new_text = 'See full output'
+      new_text = open_text
     $(event.target).text(new_text)

  @toggleHint: (event) =>
...
@@ -109,7 +109,8 @@ class @CombinedOpenEnded
       @reset_button.hide()
       @next_problem_button.hide()
       @hint_area.attr('disabled', false)
+      if @child_state == 'done'
+        @rubric_wrapper.hide()
       if @child_type=="openended"
         @skip_button.hide()
       if @allow_reset=="True"

@@ -139,6 +140,7 @@ class @CombinedOpenEnded
       else
         @submit_button.click @message_post
     else if @child_state == 'done'
+      @rubric_wrapper.hide()
       @answer_area.attr("disabled", true)
       @hint_area.attr('disabled', true)
       @submit_button.hide()

@@ -151,7 +153,7 @@ class @CombinedOpenEnded
   find_assessment_elements: ->
-    @assessment = @$('select.assessment')
+    @assessment = @$('input[name="grade-selection"]')

   find_hint_elements: ->
     @hint_area = @$('textarea.post_assessment')

@@ -163,6 +165,7 @@ class @CombinedOpenEnded
       $.postWithPrefix "#{@ajax_url}/save_answer", data, (response) =>
         if response.success
           @rubric_wrapper.html(response.rubric_html)
+          @rubric_wrapper.show()
           @child_state = 'assessing'
           @find_assessment_elements()
           @rebind()

@@ -174,7 +177,8 @@ class @CombinedOpenEnded
   save_assessment: (event) =>
     event.preventDefault()
     if @child_state == 'assessing'
-      data = {'assessment' : @assessment.find(':selected').text()}
+      checked_assessment = @$('input[name="grade-selection"]:checked')
+      data = {'assessment' : checked_assessment.val()}
       $.postWithPrefix "#{@ajax_url}/save_assessment", data, (response) =>
         if response.success
           @child_state = response.state

@@ -183,6 +187,7 @@ class @CombinedOpenEnded
             @hint_wrapper.html(response.hint_html)
             @find_hint_elements()
           else if @child_state == 'done'
+            @rubric_wrapper.hide()
             @message_wrapper.html(response.message_html)
           @rebind()

@@ -279,4 +284,4 @@ class @CombinedOpenEnded
           delete window.queuePollerID
           location.reload()
         else
-          window.queuePollerID = window.setTimeout(@poll, 10000)
\ No newline at end of file
+          window.queuePollerID = window.setTimeout(@poll, 10000)
@@ -121,6 +121,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
             'rubric': rubric_string,
             'initial_display': self.initial_display,
             'answer': self.answer,
+            'problem_id': self.display_name
         })
         updated_grader_payload = json.dumps(parsed_grader_payload)

@@ -381,7 +382,8 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
         rubric_feedback=""
         feedback = self._convert_longform_feedback_to_html(response_items)
         if response_items['rubric_scores_complete']==True:
-            rubric_feedback = CombinedOpenEndedRubric.render_rubric(response_items['rubric_xml'], system)
+            rubric_renderer = CombinedOpenEndedRubric(system, True)
+            rubric_feedback = rubric_renderer.render_rubric(response_items['rubric_xml'])

         if not response_items['success']:
             return system.render_template("open_ended_error.html",

@@ -446,8 +448,8 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
                 'success': score_result['success'],
                 'grader_id': score_result['grader_id'][i],
                 'submission_id': score_result['submission_id'],
-                'rubric_scores_complete' : score_result['rubric_scores_complete'],
-                'rubric_xml' : score_result['rubric_xml'],
+                'rubric_scores_complete' : score_result['rubric_scores_complete'][i],
+                'rubric_xml' : score_result['rubric_xml'][i],
             }
             feedback_items.append(self._format_feedback(new_score_result, system))
         if join_feedback:
...
@@ -93,6 +93,7 @@ class OpenEndedChild(object):
         self.prompt = static_data['prompt']
         self.rubric = static_data['rubric']
+        self.display_name = static_data['display_name']

         # Used for progress / grading. Currently get credit just for
         # completion (doesn't matter if you self-assessed correct/incorrect).
...
@@ -75,7 +75,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
             'previous_answer': previous_answer,
             'ajax_url': system.ajax_url,
             'initial_rubric': self.get_rubric_html(system),
-            'initial_hint': self.get_hint_html(system),
+            'initial_hint': "",
             'initial_message': self.get_message_html(),
             'state': self.state,
             'allow_reset': self._allow_reset(),

@@ -122,7 +122,8 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
         if self.state == self.INITIAL:
             return ''

-        rubric_html = CombinedOpenEndedRubric.render_rubric(self.rubric, system)
+        rubric_renderer = CombinedOpenEndedRubric(system, True)
+        rubric_html = rubric_renderer.render_rubric(self.rubric)

         # we'll render it
         context = {'rubric': rubric_html,

@@ -235,13 +236,9 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
         d = {'success': True, }

-        if score == self.max_score():
-            self.change_state(self.DONE)
-            d['message_html'] = self.get_message_html()
-            d['allow_reset'] = self._allow_reset()
-        else:
-            self.change_state(self.POST_ASSESSMENT)
-            d['hint_html'] = self.get_hint_html(system)
+        self.change_state(self.DONE)
+        d['message_html'] = self.get_message_html()
+        d['allow_reset'] = self._allow_reset()

         d['state'] = self.state
         return d
...
@@ -42,7 +42,8 @@ class SelfAssessmentTest(unittest.TestCase):
             'max_attempts': 10,
             'rubric': etree.XML(rubric),
             'prompt': prompt,
-            'max_score': 1
+            'max_score': 1,
+            'display_name': "Name"
         }

         module = SelfAssessmentModule(test_system, self.location,

@@ -56,8 +57,6 @@ class SelfAssessmentTest(unittest.TestCase):
         self.assertEqual(module.state, module.ASSESSING)

         module.save_assessment({'assessment': '0'}, test_system)
-        self.assertEqual(module.state, module.POST_ASSESSMENT)
-        module.save_hint({'hint': 'this is a hint'}, test_system)
         self.assertEqual(module.state, module.DONE)

         d = module.reset({})
...
@@ -11,6 +11,10 @@ from django.http import HttpResponse, Http404
 from courseware.access import has_access
 from util.json_request import expect_json
 from xmodule.course_module import CourseDescriptor
+from xmodule.combined_open_ended_rubric import CombinedOpenEndedRubric, RubricParsingError
+from lxml import etree
+from mitxmako.shortcuts import render_to_string
+from xmodule.x_module import ModuleSystem

 log = logging.getLogger(__name__)

@@ -27,6 +31,7 @@ class GradingService(object):
         self.url = config['url']
         self.login_url = self.url + '/login/'
         self.session = requests.session()
+        self.system = ModuleSystem(None, None, None, render_to_string, None)

     def _login(self):
         """

@@ -98,3 +103,33 @@ class GradingService(object):
         return response

+    def _render_rubric(self, response, view_only=False):
+        """
+        Given an HTTP Response with the key 'rubric', render out the html
+        required to display the rubric and put it back into the response
+
+        returns the updated response as a dictionary that can be serialized later
+        """
+        try:
+            response_json = json.loads(response)
+            if 'rubric' in response_json:
+                rubric = response_json['rubric']
+                rubric_renderer = CombinedOpenEndedRubric(self.system, False)
+                rubric_html = rubric_renderer.render_rubric(rubric)
+                response_json['rubric'] = rubric_html
+            return response_json
+        # if we can't parse the rubric into HTML,
+        except (etree.XMLSyntaxError, RubricParsingError):
+            log.exception("Cannot parse rubric string. Raw string: {0}"
+                          .format(rubric))
+            return {'success': False,
+                    'error': 'Error displaying submission'}
+        except ValueError:
+            log.exception("Error parsing response: {0}".format(response))
+            return {'success': False,
+                    'error': "Error displaying submission"}
@@ -20,7 +20,9 @@ from grading_service import GradingServiceError
 from courseware.access import has_access
 from util.json_request import expect_json
 from xmodule.course_module import CourseDescriptor
+from xmodule.combined_open_ended_rubric import CombinedOpenEndedRubric
 from student.models import unique_id_for_user
+from lxml import etree

 log = logging.getLogger(__name__)

@@ -84,15 +86,17 @@ class PeerGradingService(GradingService):
     def get_next_submission(self, problem_location, grader_id):
         response = self.get(self.get_next_submission_url,
                             {'location': problem_location, 'grader_id': grader_id})
-        return response
+        return json.dumps(self._render_rubric(response))

-    def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key):
+    def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key, rubric_scores):
         data = {'grader_id' : grader_id,
                 'submission_id' : submission_id,
                 'score' : score,
                 'feedback' : feedback,
                 'submission_key': submission_key,
-                'location': location}
+                'location': location,
+                'rubric_scores': rubric_scores,
+                'rubric_scores_complete': True}
         return self.post(self.save_grade_url, data)

     def is_student_calibrated(self, problem_location, grader_id):

@@ -101,15 +105,19 @@ class PeerGradingService(GradingService):
     def show_calibration_essay(self, problem_location, grader_id):
         params = {'problem_id' : problem_location, 'student_id': grader_id}
-        return self.get(self.show_calibration_essay_url, params)
+        response = self.get(self.show_calibration_essay_url, params)
+        return json.dumps(self._render_rubric(response))

-    def save_calibration_essay(self, problem_location, grader_id, calibration_essay_id, submission_key, score, feedback):
+    def save_calibration_essay(self, problem_location, grader_id, calibration_essay_id, submission_key,
+                               score, feedback, rubric_scores):
         data = {'location': problem_location,
                 'student_id': grader_id,
                 'calibration_essay_id': calibration_essay_id,
                 'submission_key': submission_key,
                 'score': score,
-                'feedback': feedback}
+                'feedback': feedback,
+                'rubric_scores[]': rubric_scores,
+                'rubric_scores_complete': True}
         return self.post(self.save_calibration_essay_url, data)

     def get_problem_list(self, course_id, grader_id):

@@ -196,7 +204,7 @@ def get_next_submission(request, course_id):
                             mimetype="application/json")
     except GradingServiceError:
         log.exception("Error getting next submission. server url: {0} location: {1}, grader_id: {2}"
-                      .format(staff_grading_service().url, location, grader_id))
+                      .format(peer_grading_service().url, location, grader_id))
         return json.dumps({'success': False,
                            'error': 'Could not connect to grading service'})

@@ -216,7 +224,7 @@ def save_grade(request, course_id):
        error: if there was an error in the submission, this is the error message
     """
     _check_post(request)
-    required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback'])
+    required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]'])
     success, message = _check_required(request, required)
     if not success:
         return _err_response(message)

@@ -227,14 +235,15 @@ def save_grade(request, course_id):
     score = p['score']
     feedback = p['feedback']
     submission_key = p['submission_key']
+    rubric_scores = p.getlist('rubric_scores[]')
     try:
         response = peer_grading_service().save_grade(location, grader_id, submission_id,
-                                                     score, feedback, submission_key)
+                                                     score, feedback, submission_key, rubric_scores)
         return HttpResponse(response, mimetype="application/json")
     except GradingServiceError:
         log.exception("""Error saving grade. server url: {0}, location: {1}, submission_id:{2},
                       submission_key: {3}, score: {4}"""
-                      .format(staff_grading_service().url,
+                      .format(peer_grading_service().url,
                               location, submission_id, submission_key, score)
         )
         return json.dumps({'success': False,

@@ -273,7 +282,7 @@ def is_student_calibrated(request, course_id):
         return HttpResponse(response, mimetype="application/json")
     except GradingServiceError:
         log.exception("Error from grading service. server url: {0}, grader_id: {0}, location: {1}"
-                      .format(staff_grading_service().url, grader_id, location))
+                      .format(peer_grading_service().url, grader_id, location))
         return json.dumps({'success': False,
                            'error': 'Could not connect to grading service'})

@@ -317,9 +326,15 @@ def show_calibration_essay(request, course_id):
         return HttpResponse(response, mimetype="application/json")
     except GradingServiceError:
         log.exception("Error from grading service. server url: {0}, location: {0}"
-                      .format(staff_grading_service().url, location))
+                      .format(peer_grading_service().url, location))
         return json.dumps({'success': False,
                            'error': 'Could not connect to grading service'})
+    # if we can't parse the rubric into HTML,
+    except etree.XMLSyntaxError:
+        log.exception("Cannot parse rubric string. Raw string: {0}"
+                      .format(rubric))
+        return json.dumps({'success': False,
+                           'error': 'Error displaying submission'})


 def save_calibration_essay(request, course_id):

@@ -341,7 +356,7 @@ def save_calibration_essay(request, course_id):
     """
     _check_post(request)
-    required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback'])
+    required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]'])
     success, message = _check_required(request, required)
     if not success:
         return _err_response(message)

@@ -352,9 +367,11 @@ def save_calibration_essay(request, course_id):
     submission_key = p['submission_key']
     score = p['score']
     feedback = p['feedback']
+    rubric_scores = p.getlist('rubric_scores[]')
     try:
-        response = peer_grading_service().save_calibration_essay(location, grader_id, calibration_essay_id, submission_key, score, feedback)
+        response = peer_grading_service().save_calibration_essay(location, grader_id, calibration_essay_id,
+                                                                 submission_key, score, feedback, rubric_scores)
         return HttpResponse(response, mimetype="application/json")
     except GradingServiceError:
         log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id))
...
@@ -17,6 +17,8 @@ from courseware.access import has_access
 from util.json_request import expect_json
 from xmodule.course_module import CourseDescriptor
 from student.models import unique_id_for_user
+from xmodule.x_module import ModuleSystem
+from mitxmako.shortcuts import render_to_string

 log = logging.getLogger(__name__)

@@ -46,14 +48,14 @@ class MockStaffGradingService(object):
         self.cnt += 1
         return json.dumps({'success': True,
                            'problem_list': [
-                               json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo1', \
+                               json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo1',
                                            'problem_name': "Problem 1", 'num_graded': 3, 'num_pending': 5, 'min_for_ml': 10}),
-                               json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo2', \
+                               json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo2',
                                            'problem_name': "Problem 2", 'num_graded': 1, 'num_pending': 5, 'min_for_ml': 10})
                            ]})

-    def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped):
+    def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores):
         return self.get_next(course_id, 'fake location', grader_id)

@@ -107,12 +109,13 @@ class StaffGradingService(GradingService):
         Raises:
             GradingServiceError: something went wrong with the connection.
         """
-        return self.get(self.get_next_url,
+        response = self.get(self.get_next_url,
                         params={'location': location,
                                 'grader_id': grader_id})
+        return json.dumps(self._render_rubric(response))

-    def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped):
+    def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores):
         """
         Save a score and feedback for a submission.

@@ -129,7 +132,9 @@ class StaffGradingService(GradingService):
                 'score': score,
                 'feedback': feedback,
                 'grader_id': grader_id,
-                'skipped': skipped}
+                'skipped': skipped,
+                'rubric_scores': rubric_scores,
+                'rubric_scores_complete': True}
         return self.post(self.save_grade_url, data=data)

@@ -143,6 +148,7 @@ class StaffGradingService(GradingService):
 # importing this file doesn't create objects that may not have the right config
 _service = None

 def staff_grading_service():
     """
     Return a staff grading service instance--if settings.MOCK_STAFF_GRADING is True,

@@ -286,7 +292,7 @@ def save_grade(request, course_id):
     if request.method != 'POST':
         raise Http404

-    required = set(['score', 'feedback', 'submission_id', 'location'])
+    required = set(['score', 'feedback', 'submission_id', 'location', 'rubric_scores[]'])
     actual = set(request.POST.keys())
     missing = required - actual
     if len(missing) > 0:

@@ -299,13 +305,15 @@ def save_grade(request, course_id):
     location = p['location']
     skipped = 'skipped' in p

     try:
         result_json = staff_grading_service().save_grade(course_id,
                                                          grader_id,
                                                          p['submission_id'],
                                                          p['score'],
                                                          p['feedback'],
-                                                         skipped)
+                                                         skipped,
+                                                         p.getlist('rubric_scores[]'))
     except GradingServiceError:
         log.exception("Error saving grade")
         return _err_response('Could not connect to grading service')
...
@@ -94,7 +94,8 @@ class TestStaffGradingService(ct.PageLoader):
         data = {'score': '12',
                 'feedback': 'great!',
                 'submission_id': '123',
-                'location': self.location}
+                'location': self.location,
+                'rubric_scores[]': ['1', '2']}

         r = self.check_for_post_code(200, url, data)
         d = json.loads(r.content)
         self.assertTrue(d['success'], str(d))
...
@@ -9,5 +9,19 @@ class PeerGrading
     @message_container = $('.message-container')
     @message_container.toggle(not @message_container.is(':empty'))

+    @problem_list = $('.problem-list')
+    @construct_progress_bar()
+
+  construct_progress_bar: () =>
+    problems = @problem_list.find('tr').next()
+    problems.each( (index, element) =>
+      problem = $(element)
+      progress_bar = problem.find('.progress-bar')
+      bar_value = parseInt(problem.data('graded'))
+      bar_max = parseInt(problem.data('required')) + bar_value
+      progress_bar.progressbar({value: bar_value, max: bar_max})
+    )
+
 $(document).ready(() -> new PeerGrading())
@@ -24,15 +24,33 @@ div.peer-grading{
     color: white;
   }

-  input[name='score-selection'] {
+  input[name='score-selection'],
+  input[name='grade-selection'] {
     display: none;
   }

-  ul
+  .problem-list
   {
-    li
-    {
-      margin: 16px 0px;
-    }
+    text-align: center;
+    table-layout: auto;
+    width:100%;
+    th
+    {
+      padding: 10px;
+    }
+    td
+    {
+      padding:10px;
+    }
+    td.problem-name
+    {
+      text-align:left;
+    }
+    .ui-progressbar
+    {
+      height:1em;
+      margin:0px;
+      padding:0px;
+    }
   }

@@ -106,6 +124,7 @@ div.peer-grading{
     margin: 0px;
     background: #eee;
     height: 10em;
+    width:47.6%;
     h3
     {
       text-align:center;

@@ -120,12 +139,10 @@ div.peer-grading{
   .calibration-panel
   {
     float:left;
-    width:48%;
   }
   .grading-panel
   {
     float:right;
-    width: 48%;
   }
   .current-state
   {

@@ -159,5 +176,49 @@ div.peer-grading{
     }
   }
   padding: 40px;
+  .rubric {
+    tr {
+      margin:10px 0px;
+      height: 100%;
+    }
+    td {
+      padding: 20px 0px 25px 0px;
+      height: 100%;
+    }
+    th {
+      padding: 5px;
+      margin: 5px;
+    }
+    label,
+    .view-only {
+      margin:2px;
+      position: relative;
+      padding: 15px 15px 25px 15px;
+      width: 150px;
+      height:100%;
+      display: inline-block;
+      min-height: 50px;
+      min-width: 50px;
+      background-color: #CCC;
+      font-size: .9em;
+    }
+    .grade {
+      position: absolute;
+      bottom:0px;
+      right:0px;
+      margin:10px;
+    }
+    .selected-grade {
+      background: #666;
+      color: white;
+    }
+    input[type=radio]:checked + label {
+      background: #666;
+      color: white; }
+    input[class='score-selection'] {
+      display: none;
+    }
+  }
 }
 <section id="combined-open-ended" class="combined-open-ended" data-ajax-url="${ajax_url}" data-allow_reset="${allow_reset}" data-state="${state}" data-task-count="${task_count}" data-task-number="${task_number}">
+  <h2>${display_name}</h2>
   <div class="status-container">
-    <h4>Status</h4><br/>
     ${status | n}
   </div>
   <div class="item-container">
-    <h4>Problem</h4><br/>
+    <h4>Problem</h4>
+    <div class="problem-container">
       % for item in items:
         <div class="item">${item['content'] | n}</div>
       % endfor
+    </div>
     <input type="button" value="Reset" class="reset-button" name="reset"/>
     <input type="button" value="Next Step" class="next-step-button" name="reset"/>
   </div>
+  <a name="results" />
   <div class="result-container">
   </div>
 </section>
...
 <div class="result-container">
-  <h4>Results from Step ${task_number}</h4><br/>
+  <h4>Results from Step ${task_number}</h4>
   ${results | n}
 </div>
\ No newline at end of file
+%if status_list[0]['state'] != 'initial':
+  <h4>Status</h4>
+  <div class="status-elements">
     <section id="combined-open-ended-status" class="combined-open-ended-status">
       %for i in xrange(0,len(status_list)):
         <%status=status_list[i]%>
         %if i==len(status_list)-1:
-          <div class="statusitem-current" data-status-number="${i}">
+          <div class="statusitem statusitem-current" data-status-number="${i}">
         %else:
           <div class="statusitem" data-status-number="${i}">
         %endif

@@ -11,18 +14,21 @@
         % if status['state'] == 'initial':
           <span class="unanswered" id="status"></span>
         % elif status['state'] in ['done', 'post_assessment'] and status['correct'] == 'correct':
           <span class="correct" id="status"></span>
         % elif status['state'] in ['done', 'post_assessment'] and status['correct'] == 'incorrect':
           <span class="incorrect" id="status"></span>
         % elif status['state'] == 'assessing':
           <span class="grading" id="status"></span>
         % endif
         %if status['type']=="openended" and status['state'] in ['done', 'post_assessment']:
           <div class="show-results">
-            <a href="#" class="show-results-button">Show results from step ${status['task_number']}</a>
+            <a href="#results" class="show-results-button">Show results from Step ${status['task_number']}</a>
           </div>
         %endif
       </div>
       %endfor
     </section>
\ No newline at end of file
+  </div>
+%endif
@@ -33,8 +33,8 @@
   </div>
   <h2>Problem List</h2>
-  <ul class="problem-list">
-  </ul>
+  <table class="problem-list">
+  </table>
 </section>

 <!-- Grading View -->

@@ -54,11 +54,6 @@
       <div class="prompt-container">
       </div>
     </div>
-    <div class="rubric-wrapper">
-      <h3>Grading Rubric</h3>
-      <div class="rubric-container">
-      </div>
-    </div>
 </section>

@@ -78,6 +73,8 @@
   <div class="evaluation">
     <p class="score-selection-container">
     </p>
+    <p class="grade-selection-container">
+    </p>
     <textarea name="feedback" placeholder="Feedback for student (optional)"
               class="feedback-area" cols="70" ></textarea>
   </div>
...
@@ -10,11 +10,11 @@
 % if state == 'initial':
   <span class="unanswered" style="display:inline-block;" id="status_${id}">Unanswered</span>
 % elif state in ['done', 'post_assessment'] and correct == 'correct':
-  <span class="correct" id="status_${id}">Correct</span>
+  <span class="correct" id="status_${id}"></span> <p>Correct</p>
 % elif state in ['done', 'post_assessment'] and correct == 'incorrect':
-  <span class="incorrect" id="status_${id}">Incorrect</span>
+  <span class="incorrect" id="status_${id}"></span> <p>Incorrect. </p>
 % elif state == 'assessing':
-  <span class="grading" id="status_${id}">Submitted for grading</span>
+  <span class="grading" id="status_${id}">Submitted for grading.</span>
 % endif

 % if hidden:
...
 <section>
   <header>Feedback</header>
-  <div class="shortform">
+  <div class="shortform-custom" data-open-text='Show detailed results' data-close-text='Hide detailed results'>
     <div class="result-output">
       <p>Score: ${score}</p>
       % if grader_type == "ML":

@@ -14,4 +14,4 @@
     </div>
     ${rubric_feedback | n}
   </div>
 </section>
\ No newline at end of file
-<table class="rubric">
-  % for i in range(len(rubric_categories)):
-    <% category = rubric_categories[i] %>
-    <tr>
-      <th>
-        ${category['description']}
-        % if category['has_score'] == True:
-          (Your score: ${category['score']})
-        % endif
-      </th>
-      % for j in range(len(category['options'])):
-        <% option = category['options'][j] %>
-        <td>
-          <div class="view-only">
-            ${option['text']}
-            % if option.has_key('selected'):
-              % if option['selected'] == True:
-                <div class="selected-grade">[${option['points']} points]</div>
-              %else:
-                <div class="grade">[${option['points']} points]</div>
-              % endif
-            % else:
-              <div class="grade">[${option['points']} points]</div>
-            %endif
-          </div>
-        </td>
-      % endfor
-    </tr>
-  % endfor
-</table>
\ No newline at end of file
+<form class="rubric-template" id="inputtype_${id}">
+  <h3>Rubric</h3>
+  % if view_only and has_score:
+    <p>This is the rubric that was used to grade your submission. The highlighted selection matches how the grader feels you performed in each category.</p>
+  % elif view_only:
+    <p>Use the below rubric to rate this submission.</p>
+  % else:
+    <p>Select the criteria you feel best represents this submission in each category.</p>
+  % endif
+  <table class="rubric">
+    % for i in range(len(categories)):
+      <% category = categories[i] %>
+      <tr>
+        <th>${category['description']}</th>
+        % for j in range(len(category['options'])):
+          <% option = category['options'][j] %>
+          <td>
+            % if view_only:
+              ## if this is the selected rubric block, show it highlighted
+              % if option['selected']:
+                <div class="view-only selected-grade">
+              % else:
+                <div class="view-only">
+              % endif
+                ${option['text']}
+                <div class="grade">[${option['points']} points]</div>
+              </div>
+            % else:
+              <input type="radio" class="score-selection" name="score-selection-${i}" id="score-${i}-${j}" value="${option['points']}"/>
+              <label for="score-${i}-${j}">${option['text']}</label>
+            % endif
+          </td>
+        % endfor
+      </tr>
+    % endfor
+  </table>
+</form>
@@ -26,13 +26,37 @@
     Nothing to grade!
   </div>
 %else:
-  <ul class="problem-list">
+  <div class="problem-list-container">
+  <table class="problem-list">
+    <tr>
+      <th>Problem Name</th>
+      <th>Graded</th>
+      <th>Available</th>
+      <th>Required</th>
+      <th>Progress</th>
+    </tr>
   %for problem in problem_list:
-    <li>
-      <a href="${ajax_url}problem?location=${problem['location']}">${problem['problem_name']} (${problem['num_graded']} graded, ${problem['num_pending']} pending, required to grade ${problem['num_required']} more)</a>
-    </li>
+    <tr data-graded="${problem['num_graded']}" data-required="${problem['num_required']}">
+      <td class="problem-name">
+        <a href="${ajax_url}problem?location=${problem['location']}">${problem['problem_name']}</a>
+      </td>
+      <td>
+        ${problem['num_graded']}
+      </td>
+      <td>
+        ${problem['num_pending']}
+      </td>
+      <td>
+        ${problem['num_required']}
+      </td>
+      <td>
+        <div class="progress-bar">
+        </div>
+      </td>
+    </tr>
   %endfor
-  </ul>
+  </table>
+  </div>
 %endif
 %endif
 </div>
...
@@ -44,20 +44,13 @@
   </div>
   <div class="prompt-wrapper">
-    <div class="prompt-information-container collapsible">
-      <header><a href="javascript:void(0)">Question</a></header>
+    <h2>Question</h2>
+    <div class="prompt-information-container">
       <section>
         <div class="prompt-container">
         </div>
       </section>
     </div>
-    <div class="rubric-wrapper collapsible">
-      <header><a href="javascript:void(0)">Rubric</a></header>
-      <section>
-        <div class="rubric-container">
-        </div>
-      </section>
-    </div>
   </div>

@@ -74,6 +67,7 @@
   <input type="hidden" name="essay-id" value="" />
 </div>
 <div class="evaluation">
+  <p class="rubric-selection-container"></p>
   <p class="score-selection-container">
   </p>
   <textarea name="feedback" placeholder="Feedback for student (optional)"
...
-<div class="assessment">
+<div class="assessment-container">
   <div class="rubric">
-    <h3>Self-assess your answer with this rubric:</h3>
     ${rubric | n }
   </div>
   % if not read_only:
-    <select name="assessment" class="assessment">
-      %for i in xrange(0,max_score+1):
-        <option value="${i}">${i}</option>
-      %endfor
-    </select>
+    <div class="scoring-container">
+      <h3>Scoring</h3>
+      <p>Please select a score below:</p>
+      <div class="grade-selection">
+        %for i in xrange(0,max_score+1):
+          <% id = "score-{0}".format(i) %>
+          <input type="radio" class="grade-selection" name="grade-selection" value="${i}" id="${id}">
+          <label for="${id}">${i}</label>
+        %endfor
+      </div>
+    </div>
   % endif
 </div>