Commit 7febe2c2 by Vik Paruchuri

Integrate minimal rubric display

parent 52164f58
@@ -576,4 +576,49 @@ section.open-ended-child {
font-size: 0.9em;
}
.rubric {
tr {
margin: 10px 0px;
height: 100%;
}
td {
padding: 20px 0px;
margin: 10px 0px;
height: 100%;
}
th {
padding: 5px;
margin: 5px;
}
label,
.view-only {
margin: 10px;
position: relative;
padding: 15px;
width: 200px;
height: 100%;
display: inline-block;
min-height: 50px;
min-width: 50px;
background-color: #CCC;
font-size: 1em;
}
.grade {
position: absolute;
bottom: 0px;
right: 0px;
margin: 10px;
}
.selected-grade {
background: #666;
color: white;
}
input[type=radio]:checked + label {
background: #666;
color: white;
}
input[class='score-selection'] {
display: none;
}
}
}
@@ -376,7 +376,11 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
Return error message or feedback template
"""
log.debug(response_items)
rubric_feedback = ""
feedback = self._convert_longform_feedback_to_html(response_items)
if response_items['rubric_scores_complete']:
rubric_feedback = self.render_rubric(response_items['rubric_xml'])
if not response_items['success']:
return system.render_template("open_ended_error.html",
@@ -386,6 +390,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'grader_type': response_items['grader_type'],
'score': "{0} / {1}".format(response_items['score'], self.max_score()),
'feedback': feedback,
'rubric_feedback': rubric_feedback
})
return feedback_template
@@ -429,7 +434,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
log.error(error_message)
fail['feedback'] = error_message
return fail
#This is to support peer grading
if isinstance(score_result['score'], list):
feedback_items = []
for i in xrange(0, len(score_result['score'])):
@@ -439,7 +444,9 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'grader_type': score_result['grader_type'],
'success': score_result['success'],
'grader_id': score_result['grader_id'][i],
'submission_id': score_result['submission_id']
'submission_id': score_result['submission_id'],
'rubric_scores_complete': score_result['rubric_scores_complete'],
'rubric_xml': score_result['rubric_xml'],
}
feedback_items.append(self._format_feedback(new_score_result))
if join_feedback:
@@ -597,6 +604,104 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
html = system.render_template('open_ended.html', context)
return html
def render_rubric(self, rubric_xml):
rubric_categories = OpenEndedModule.extract_rubric_categories(rubric_xml)
html = render_to_string('open_ended_rubric.html', {'categories': rubric_categories})
return html
@staticmethod
def extract_rubric_categories(element):
'''
Construct a list of categories such that the structure looks like:
[ { description: "Category 1 Name",
    score: 2,
    options: [{text: "Option 1 Name", points: 0}, {text: "Option 2 Name", points: 5}]
  },
  { description: "Category 2 Name",
    score: 1,
    options: [{text: "Option 1 Name", points: 0},
              {text: "Option 2 Name", points: 1},
              {text: "Option 3 Name", points: 2}]
  }
]
'''
element = etree.fromstring(element)
categories = []
for category in element:
if category.tag != 'category':
raise Exception("[capa.inputtypes.extract_categories] Expected a <category> tag: got {0} instead".format(category.tag))
else:
categories.append(OpenEndedModule.extract_category(category))
return categories
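As a rough sketch (not part of this commit; the sample XML and option text below are invented for illustration), a rubric in the format extract_category expects, with a <description>, a <score>, and one or more <option> tags per <category>, would parse along these lines:

# Illustrative only: hypothetical rubric XML in the shape extract_category expects.
sample_rubric = """<rubric>
    <category>
        <description>Ideas</description>
        <score>1</score>
        <option points="0">Unclear or missing thesis</option>
        <option points="1">Clear, well-supported thesis</option>
    </category>
</rubric>"""

categories = OpenEndedModule.extract_rubric_categories(sample_rubric)
# Expected result, per extract_category below:
# [{'description': 'Ideas',
#   'score': 1,
#   'options': [{'text': 'Unclear or missing thesis', 'points': 0},
#               {'text': 'Clear, well-supported thesis', 'points': 1}]}]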
@staticmethod
def extract_category(category):
'''
construct an individual category
{category: "Category 1 Name",
options: [{text: "Option 1 text", points: 1},
{text: "Option 2 text", points: 2}]}
all sorting and auto-point generation occurs in this function
'''
descriptionxml = category[0]
scorexml = category[1]
optionsxml = category[2:]
# parse description
if descriptionxml.tag != 'description':
raise Exception("[extract_category]: expected description tag, got {0} instead".format(descriptionxml.tag))
if scorexml.tag != 'score':
raise Exception("[extract_category]: expected score tag, got {0} instead".format(descriptionxml.tag))
description = descriptionxml.text
score = int(scorexml.text)
cur_points = 0
options = []
autonumbering = True
# parse options
for option in optionsxml:
if option.tag != 'option':
raise Exception("[extract_category]: expected option tag, got {0} instead".format(option.tag))
else:
pointstr = option.get("points")
if pointstr:
autonumbering = False
# try to parse this into an int
try:
points = int(pointstr)
except ValueError:
raise Exception("[extract_category]: expected points to have int, got {0} instead".format(pointstr))
elif autonumbering:
# use the generated one if we're in the right mode
points = cur_points
cur_points = cur_points + 1
else:
raise Exception("[extract_category]: missing points attribute. Cannot continue to auto-create points values after a points value is explicitly dfined.")
optiontext = option.text
options.append({'text': optiontext, 'points': points})
# sort and check for duplicates
options = sorted(options, key=lambda option: option['points'])
OpenEndedModule.validate_options(options)
return {'description': description, 'options': options, 'score' : score}
@staticmethod
def validate_options(options):
'''
Validates a set of options. This can and should be extended to filter out other bad edge cases
'''
if len(options) == 0:
raise Exception("[extract_category]: no options associated with this category")
if len(options) == 1:
return
prev = options[0]['points']
for option in options[1:]:
if prev == option['points']:
raise Exception("[extract_category]: found duplicate point values between two different options")
else:
prev = option['points']
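For example (again a sketch, not part of the commit, with invented option values), a category whose options share a point value should be rejected by validate_options:

# Hypothetical duplicate-points input; validate_options is expected to raise here.
try:
    OpenEndedModule.validate_options([{'text': 'A', 'points': 1},
                                      {'text': 'B', 'points': 1}])
except Exception as err:
    log.debug("Rejected rubric options: {0}".format(err))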
class OpenEndedDescriptor(XmlDescriptor, EditingDescriptor):
"""
@@ -12,5 +12,6 @@
<div class="result-output">
${ feedback | n}
</div>
${rubric_feedback | n}
</div>
</section>
\ No newline at end of file
<table class="rubric">
% for category in categories:
<tr>
<th>${category['description']}</th>
% for option in category['options']:
<td>
<div class="view-only">
${option['text']}
<div class="grade">[${option['points']} points]</div>
</div>
</td>
% endfor
</tr>
% endfor
</table>
\ No newline at end of file