Commit f4addddf by Diana Huang

Merge pull request #1484 from MITx/feature/vik/improve-oe-ui

Feature/vik/improve oe ui
parents a62a575a 8447eb54
......@@ -3,6 +3,27 @@ from lxml import etree
log = logging.getLogger(__name__)
GRADER_TYPE_IMAGE_DICT = {
'SA' : '/static/images/self_assessment_icon.png',
'PE' : '/static/images/peer_grading_icon.png',
'ML' : '/static/images/ml_grading_icon.png',
'IN' : '/static/images/peer_grading_icon.png',
'BC' : '/static/images/ml_grading_icon.png',
}
HUMAN_GRADER_TYPE = {
'SA' : 'Self-Assessment',
'PE' : 'Peer-Assessment',
'IN' : 'Instructor-Assessment',
'ML' : 'AI-Assessment',
'BC' : 'AI-Assessment',
}
DO_NOT_DISPLAY = ['BC', 'IN']
LEGEND_LIST = [{'name' : HUMAN_GRADER_TYPE[k], 'image' : GRADER_TYPE_IMAGE_DICT[k]} for k in GRADER_TYPE_IMAGE_DICT.keys() if k not in DO_NOT_DISPLAY ]
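For reference, the comprehension above evaluates to roughly the following legend data ('BC' and 'IN' are filtered out; dict ordering is arbitrary in Python 2, so the order is not guaranteed):
    # Sketch of the resulting LEGEND_LIST, derived from the dicts above.
    LEGEND_LIST = [
        {'name': 'Self-Assessment', 'image': '/static/images/self_assessment_icon.png'},
        {'name': 'Peer-Assessment', 'image': '/static/images/peer_grading_icon.png'},
        {'name': 'AI-Assessment', 'image': '/static/images/ml_grading_icon.png'},
    ]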
class RubricParsingError(Exception):
def __init__(self, msg):
......@@ -16,7 +37,7 @@ class CombinedOpenEndedRubric(object):
self.view_only = view_only
self.system = system
def render_rubric(self, rubric_xml):
def render_rubric(self, rubric_xml, score_list = None):
'''
render_rubric: takes in an xml string and outputs the corresponding
html for that xml, given the type of rubric we're generating
......@@ -29,22 +50,36 @@ class CombinedOpenEndedRubric(object):
success = False
try:
rubric_categories = self.extract_categories(rubric_xml)
if score_list and len(score_list)==len(rubric_categories):
for i in xrange(0,len(rubric_categories)):
category = rubric_categories[i]
for j in xrange(0,len(category['options'])):
if score_list[i]==j:
rubric_categories[i]['options'][j]['selected'] = True
rubric_scores = [cat['score'] for cat in rubric_categories]
max_scores = map((lambda cat: cat['options'][-1]['points']), rubric_categories)
max_score = max(max_scores)
html = self.system.render_template('open_ended_rubric.html',
rubric_template = 'open_ended_rubric.html'
if self.view_only:
rubric_template = 'open_ended_view_only_rubric.html'
html = self.system.render_template(rubric_template,
{'categories': rubric_categories,
'has_score': self.has_score,
'view_only': self.view_only,
'max_score': max_score})
'max_score': max_score,
'combined_rubric' : False
})
success = True
except:
error_message = "[render_rubric] Could not parse the rubric with xml: {0}".format(rubric_xml)
log.error(error_message)
raise RubricParsingError(error_message)
return success, html
return {'success' : success, 'html' : html, 'rubric_scores' : rubric_scores}
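A minimal sketch of how a caller consumes the new dict-style return value (the renderer instance and score_list values here are illustrative, not from this diff):
    # render_rubric now returns a dict rather than a (success, html) tuple,
    # so callers pick out the keys they need.
    rubric_dict = rubric_renderer.render_rubric(rubric_xml, score_list=[1, 0, 2])
    if rubric_dict['success']:
        html = rubric_dict['html']                    # rendered rubric markup
        rubric_scores = rubric_dict['rubric_scores']  # per-category scores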
def check_if_rubric_is_parseable(self, rubric_string, location, max_score_allowed, max_score):
success, rubric_feedback = self.render_rubric(rubric_string)
rubric_dict = self.render_rubric(rubric_string)
success = rubric_dict['success']
rubric_feedback = rubric_dict['html']
if not success:
error_message = "Could not parse rubric : {0} for location {1}".format(rubric_string, location.url())
log.error(error_message)
......@@ -149,7 +184,33 @@ class CombinedOpenEndedRubric(object):
options = sorted(options, key=lambda option: option['points'])
CombinedOpenEndedRubric.validate_options(options)
return {'description': description, 'options': options}
return {'description': description, 'options': options, 'score' : score}
def render_combined_rubric(self,rubric_xml,scores,score_types,feedback_types):
success, score_tuples = CombinedOpenEndedRubric.reformat_scores_for_rendering(scores,score_types,feedback_types)
rubric_categories = self.extract_categories(rubric_xml)
max_scores = map((lambda cat: cat['options'][-1]['points']), rubric_categories)
max_score = max(max_scores)
for i in xrange(0,len(rubric_categories)):
category = rubric_categories[i]
for j in xrange(0,len(category['options'])):
rubric_categories[i]['options'][j]['grader_types'] = []
for tuple in score_tuples:
if tuple[1] == i and tuple[2] ==j:
for grader_type in tuple[3]:
rubric_categories[i]['options'][j]['grader_types'].append(grader_type)
log.debug(rubric_categories)
html = self.system.render_template('open_ended_combined_rubric.html',
{'categories': rubric_categories,
'has_score': True,
'view_only': True,
'max_score': max_score,
'combined_rubric' : True,
'grader_type_image_dict' : GRADER_TYPE_IMAGE_DICT,
'human_grader_types' : HUMAN_GRADER_TYPE,
})
return html
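A hedged sketch of a render_combined_rubric call; the argument shapes are inferred from reformat_scores_for_rendering below, and the literal values are made up:
    # Two grading attempts over a three-category rubric.
    html = rubric_renderer.render_combined_rubric(
        rubric_xml,
        scores=[[[1, 0, 2]], [[1, 1, 2]]],  # scores[i][j] is one per-category score list
        score_types=[['PE'], ['ML']],       # grader type for each score list
        feedback_types=[[''], ['']],        # feedback type for each score list
    )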
@staticmethod
......@@ -167,3 +228,79 @@ class CombinedOpenEndedRubric(object):
raise RubricParsingError("[extract_category]: found duplicate point values between two different options")
else:
prev = option['points']
@staticmethod
def reformat_scores_for_rendering(scores, score_types, feedback_types):
"""
Takes in a list of rubric scores, the types of those scores, and the feedback associated with them.
Outputs a reformatted list of score tuples: (count, rubric category, rubric score, [graders that gave this score], [feedback types]).
A worked sketch follows this method.
@param scores: list of per-attempt score lists; scores[i][j] is one per-category rubric score list
@param score_types: grader type corresponding to each score list
@param feedback_types: feedback type corresponding to each score list
@return: success flag and the reformatted list of score tuples
"""
success = False
if len(scores)==0:
log.error("Score length is 0.")
return success, ""
if len(scores) != len(score_types) or len(feedback_types) != len(scores):
log.error("Length mismatches.")
return success, ""
score_lists = []
score_type_list = []
feedback_type_list = []
for i in xrange(0,len(scores)):
score_cont_list = scores[i]
for j in xrange(0,len(score_cont_list)):
score_list = score_cont_list[j]
score_lists.append(score_list)
score_type_list.append(score_types[i][j])
feedback_type_list.append(feedback_types[i][j])
score_list_len = len(score_lists[0])
for i in xrange(0,len(score_lists)):
score_list = score_lists[i]
if len(score_list)!=score_list_len:
return success, ""
score_tuples = []
for i in xrange(0,len(score_lists)):
for j in xrange(0,len(score_lists[i])):
tuple = [1,j,score_lists[i][j],[],[]]
score_tuples, tup_ind = CombinedOpenEndedRubric.check_for_tuple_matches(score_tuples,tuple)
score_tuples[tup_ind][0] += 1
score_tuples[tup_ind][3].append(score_type_list[i])
score_tuples[tup_ind][4].append(feedback_type_list[i])
success = True
return success, score_tuples
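To make the tuple format concrete, a worked sketch with made-up inputs: two graders scoring a two-category rubric.
    # 'PE' and 'ML' agree on category 0 but differ on category 1.
    success, tuples = CombinedOpenEndedRubric.reformat_scores_for_rendering(
        scores=[[[1, 2]], [[1, 0]]],
        score_types=[['PE'], ['ML']],
        feedback_types=[[''], ['']],
    )
    # tuples now contains entries like:
    #   [2, 0, 1, ['PE', 'ML'], ['', '']]  # both graders gave 1 on category 0
    #   [1, 1, 2, ['PE'], ['']]            # only 'PE' gave 2 on category 1
    #   [1, 1, 0, ['ML'], ['']]            # only 'ML' gave 0 on category 1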
@staticmethod
def check_for_tuple_matches(tuples, tuple):
"""
Checks whether any entry in a list of tuples matches the given tuple on
category and score. If there is no match, appends a new entry for it.
@param tuples: list of existing score tuples
@param tuple: tuple to match against
@return: the (possibly extended) list of tuples, and the index of the matching entry
"""
category = tuple[1]
score = tuple[2]
tup_ind = -1
for t in xrange(0,len(tuples)):
if tuples[t][1] == category and tuples[t][2] == score:
tup_ind = t
break
if tup_ind == -1:
tuples.append([0,category,score,[],[]])
tup_ind = len(tuples)-1
return tuples, tup_ind
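And the matching helper on its own, with illustrative values:
    # A miss appends a fresh entry with count 0; the caller increments it.
    tuples, ind = CombinedOpenEndedRubric.check_for_tuple_matches([], [1, 0, 2, [], []])
    # tuples == [[0, 0, 2, [], []]], ind == 0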
......@@ -24,14 +24,11 @@ section.combined-open-ended {
@include clearfix;
.status-container
{
float:right;
width:40%;
padding-bottom: 5px;
}
.item-container
{
float:left;
width: 53%;
padding-bottom: 50px;
padding-bottom: 10px;
}
.result-container
......@@ -46,14 +43,26 @@ section.combined-open-ended {
}
}
section.legend-container {
.legenditem {
background-color : #d4d4d4;
font-size: .9em;
padding: 2px;
display: inline;
width: 20%;
}
margin-bottom: 5px;
}
section.combined-open-ended-status {
.statusitem {
background-color: #FAFAFA;
color: #2C2C2C;
font-family: monospace;
font-size: 1em;
padding: 10px;
background-color : #d4d4d4;
font-size: .9em;
padding: 2px;
display: inline;
width: 20%;
.show-results {
margin-top: .3em;
text-align:right;
......@@ -61,12 +70,12 @@ section.combined-open-ended-status {
.show-results-button {
font: 1em monospace;
}
}
}
.statusitem-current {
background-color: #d4d4d4;
background-color: #B2B2B2;
color: #222;
}
}
span {
&.unanswered {
......@@ -98,8 +107,29 @@ section.combined-open-ended-status {
}
}
div.result-container {
div.combined-rubric-container {
ul.rubric-list{
list-style-type: none;
padding:0;
margin:0;
li {
&.rubric-list-item{
margin-bottom: 2px;
padding: 0px;
}
}
}
span.rubric-category {
font-size: .9em;
}
padding-bottom: 5px;
padding-top: 10px;
}
div.result-container {
padding-top: 10px;
padding-bottom: 5px;
.evaluation {
p {
......@@ -113,9 +143,8 @@ div.result-container {
}
.evaluation-response {
margin-bottom: 10px;
margin-bottom: 2px;
header {
text-align: right;
a {
font-size: .85em;
}
......@@ -198,20 +227,6 @@ div.result-container {
}
}
.result-correct {
background: url('../images/correct-icon.png') left 20px no-repeat;
.result-actual-output {
color: #090;
}
}
.result-incorrect {
background: url('../images/incorrect-icon.png') left 20px no-repeat;
.result-actual-output {
color: #B00;
}
}
.markup-text{
margin: 5px;
padding: 20px 0px 15px 50px;
......@@ -229,6 +244,16 @@ div.result-container {
}
}
}
.rubric-result-container {
.rubric-result {
font-size: .9em;
padding: 2px;
display: inline-table;
}
padding: 2px;
margin: 0px;
display : inline;
}
}
......@@ -404,7 +429,7 @@ section.open-ended-child {
div.short-form-response {
background: #F6F6F6;
border: 1px solid #ddd;
margin-bottom: 20px;
margin-bottom: 0px;
overflow-y: auto;
height: 200px;
@include clearfix;
......@@ -478,6 +503,18 @@ section.open-ended-child {
margin-left: .75rem;
}
ul.rubric-list{
list-style-type: none;
padding:0;
margin:0;
li {
&.rubric-list-item{
margin-bottom: 0px;
padding: 0px;
}
}
}
ol {
list-style: decimal outside none;
margin-bottom: lh();
......@@ -503,9 +540,8 @@ section.open-ended-child {
}
li {
line-height: 1.4em;
margin-bottom: lh(.5);
margin-bottom: 0px;
padding: 0px;
&:last-child {
margin-bottom: 0;
}
......
......@@ -114,7 +114,9 @@ class GradingService(object):
if 'rubric' in response_json:
rubric = response_json['rubric']
rubric_renderer = CombinedOpenEndedRubric(self.system, view_only)
success, rubric_html = rubric_renderer.render_rubric(rubric)
rubric_dict = rubric_renderer.render_rubric(rubric)
success = rubric_dict['success']
rubric_html = rubric_dict['html']
response_json['rubric'] = rubric_html
return response_json
# if we can't parse the rubric into HTML,
......
......@@ -4,11 +4,11 @@ class @Rubric
# finds the scores for each rubric category
@get_score_list: () =>
# find the number of categories:
num_categories = $('table.rubric tr').length
num_categories = $('.rubric-category').length
score_lst = []
# get the score for each one
for i in [0..(num_categories-2)]
for i in [0..(num_categories-1)]
score = $("input[name='score-selection-#{i}']:checked").val()
score_lst.push(score)
......@@ -23,9 +23,8 @@ class @Rubric
@check_complete: () ->
# check to see whether or not any categories have not been scored
num_categories = $('table.rubric tr').length
# -2 because we want to skip the header
for i in [0..(num_categories-2)]
num_categories = $('.rubric-category').length
for i in [0..(num_categories-1)]
score = $("input[name='score-selection-#{i}']:checked").val()
if score == undefined
return false
......@@ -52,22 +51,30 @@ class @CombinedOpenEnded
@reset_button.click @reset
@next_problem_button = @$('.next-step-button')
@next_problem_button.click @next_problem
@status_container = @$('.status-elements')
@show_results_button=@$('.show-results-button')
@show_results_button.click @show_results
@question_header = @$('.question-header')
@question_header.click @collapse_question
# valid states: 'initial', 'assessing', 'post_assessment', 'done'
Collapsible.setCollapsibles(@el)
@submit_evaluation_button = $('.submit-evaluation-button')
@submit_evaluation_button.click @message_post
@results_container = $('.result-container')
@combined_rubric_container = $('.combined-rubric-container')
@legend_container= $('.legend-container')
@show_legend_current()
# Where to put the rubric once we load it
@el = $(element).find('section.open-ended-child')
@errors_area = @$('.error')
@answer_area = @$('textarea.answer')
@prompt_container = @$('.prompt')
@rubric_wrapper = @$('.rubric-wrapper')
@hint_wrapper = @$('.hint-wrapper')
@message_wrapper = @$('.message-wrapper')
......@@ -82,11 +89,20 @@ class @CombinedOpenEnded
@can_upload_files = false
@open_ended_child= @$('.open-ended-child')
if @task_number>1
@prompt_hide()
else if @task_number==1 and @child_state!='initial'
@prompt_hide()
@find_assessment_elements()
@find_hint_elements()
@rebind()
if @task_number>1
@show_combined_rubric_current()
@show_results_current()
# locally scoped jquery.
$: (selector) ->
$(selector, @el)
......@@ -102,7 +118,7 @@ class @CombinedOpenEnded
Collapsible.setCollapsibles(@results_container)
show_results: (event) =>
status_item = $(event.target).parent().parent()
status_item = $(event.target).parent()
status_number = status_item.data('status-number')
data = {'task_number' : status_number}
$.postWithPrefix "#{@ajax_url}/get_results", data, (response) =>
......@@ -115,6 +131,27 @@ class @CombinedOpenEnded
else
@gentle_alert response.error
show_combined_rubric_current: () =>
data = {}
$.postWithPrefix "#{@ajax_url}/get_combined_rubric", data, (response) =>
if response.success
@combined_rubric_container.after(response.html).remove()
@combined_rubric_container= $('div.combined_rubric_container')
show_status_current: () =>
data = {}
$.postWithPrefix "#{@ajax_url}/get_status", data, (response) =>
if response.success
@status_container.after(response.html).remove()
@status_container= $('.status-elements')
show_legend_current: () =>
data = {}
$.postWithPrefix "#{@ajax_url}/get_legend", data, (response) =>
if response.success
@legend_container.after(response.html).remove()
@legend_container= $('.legend-container')
message_post: (event)=>
Logger.log 'message_post', @answers
external_grader_message=$(event.target).parent().parent().parent()
......@@ -156,6 +193,11 @@ class @CombinedOpenEnded
@next_problem_button.hide()
@hide_file_upload()
@hint_area.attr('disabled', false)
if @task_number>1 or @child_state!='initial'
@show_status_current()
if @task_number==1 and @child_state=='assessing'
@prompt_hide()
if @child_state == 'done'
@rubric_wrapper.hide()
if @child_type=="openended"
......@@ -257,7 +299,8 @@ class @CombinedOpenEnded
event.preventDefault()
if @child_state == 'assessing' && Rubric.check_complete()
checked_assessment = Rubric.get_total_score()
data = {'assessment' : checked_assessment}
score_list = Rubric.get_score_list()
data = {'assessment' : checked_assessment, 'score_list' : score_list}
$.postWithPrefix "#{@ajax_url}/save_assessment", data, (response) =>
if response.success
@child_state = response.state
......@@ -267,7 +310,6 @@ class @CombinedOpenEnded
@find_hint_elements()
else if @child_state == 'done'
@rubric_wrapper.hide()
@message_wrapper.html(response.message_html)
@rebind()
else
......@@ -367,13 +409,13 @@ class @CombinedOpenEnded
window.queuePollerID = window.setTimeout(@poll, 10000)
setup_file_upload: =>
if window.File and window.FileReader and window.FileList and window.Blob
if @accept_file_upload == "True"
@can_upload_files = true
@file_upload_area.html('<input type="file" class="file-upload-box">')
@file_upload_area.show()
else
@gentle_alert 'File uploads are required for this question, but are not supported in this browser. Try the newest version of Google Chrome. Alternatively, if you have uploaded the image to the web, you can paste a link to it into the answer box.'
if @accept_file_upload == "True"
if window.File and window.FileReader and window.FileList and window.Blob
@can_upload_files = true
@file_upload_area.html('<input type="file" class="file-upload-box">')
@file_upload_area.show()
else
@gentle_alert 'File uploads are required for this question, but are not supported in this browser. Try the newest version of Google Chrome. Alternatively, if you have uploaded the image to the web, you can paste a link to it into the answer box.'
hide_file_upload: =>
if @accept_file_upload == "True"
......@@ -390,3 +432,26 @@ class @CombinedOpenEnded
# wrap this so that it can be mocked
reload: ->
location.reload()
collapse_question: () =>
@prompt_container.slideToggle()
@prompt_container.toggleClass('open')
if @question_header.text() == "(Hide)"
new_text = "(Show)"
else
new_text = "(Hide)"
@question_header.text(new_text)
prompt_show: () =>
if @prompt_container.is(":hidden")==true
@prompt_container.slideToggle()
@prompt_container.toggleClass('open')
@question_header.text("(Hide)")
prompt_hide: () =>
if @prompt_container.is(":visible")==true
@prompt_container.slideToggle()
@prompt_container.toggleClass('open')
@question_header.text("(Show)")
......@@ -180,12 +180,17 @@ class @PeerGradingProblem
@content_panel = $('.content-panel')
@grading_message = $('.grading-message')
@grading_message.hide()
@question_header = $('.question-header')
@question_header.click @collapse_question
@grading_wrapper =$('.grading-wrapper')
@calibration_feedback_panel = $('.calibration-feedback')
@interstitial_page = $('.interstitial-page')
@interstitial_page.hide()
@calibration_interstitial_page = $('.calibration-interstitial-page')
@calibration_interstitial_page.hide()
@error_container = $('.error-container')
@submission_key_input = $("input[name='submission-key']")
......@@ -201,7 +206,9 @@ class @PeerGradingProblem
@action_button = $('.action-button')
@calibration_feedback_button = $('.calibration-feedback-button')
@interstitial_page_button = $('.interstitial-page-button')
@calibration_interstitial_page_button = $('.calibration-interstitial-page-button')
@flag_student_checkbox = $('.flag-checkbox')
@collapse_question()
Collapsible.setCollapsibles(@content_panel)
......@@ -210,12 +217,21 @@ class @PeerGradingProblem
@calibration_feedback_button.click =>
@calibration_feedback_panel.hide()
@grading_wrapper.show()
@gentle_alert "Calibration essay saved. Fetched the next essay."
@is_calibrated_check()
@interstitial_page_button.click =>
@interstitial_page.hide()
@is_calibrated_check()
@calibration_interstitial_page_button.click =>
@calibration_interstitial_page.hide()
@is_calibrated_check()
@calibration_feedback_button.hide()
@calibration_feedback_panel.hide()
@error_container.hide()
@is_calibrated_check()
......@@ -233,6 +249,9 @@ class @PeerGradingProblem
fetch_submission_essay: () =>
@backend.post('get_next_submission', {location: @location}, @render_submission)
gentle_alert: (msg) =>
@grading_message.fadeIn()
@grading_message.html("<p>" + msg + "</p>")
construct_data: () ->
data =
......@@ -273,6 +292,9 @@ class @PeerGradingProblem
else if response.calibrated and @calibration == true
@calibration = false
@render_interstitial_page()
else if not response.calibrated and @calibration==null
@calibration=true
@render_calibration_interstitial_page()
else
@calibration = true
@fetch_calibration_essay()
......@@ -296,7 +318,7 @@ class @PeerGradingProblem
if response.success
@is_calibrated_check()
@grading_message.fadeIn()
@grading_message.html("<p>Grade sent successfully.</p>")
@grading_message.html("<p>Successfully saved your feedback. Fetched the next essay.</p>")
else
if response.error
@render_error(response.error)
......@@ -308,6 +330,7 @@ class @PeerGradingProblem
# check to see whether or not any categories have not been scored
if Rubric.check_complete()
# show button if we have scores for all categories
@grading_message.hide()
@show_submit_button()
@grade = Rubric.get_total_score()
......@@ -323,7 +346,7 @@ class @PeerGradingProblem
if response.success
# load in all the data
@submission_container.html("<h3>Training Essay</h3>")
@submission_container.html("")
@render_submission_data(response)
# TODO: indicate that we're in calibration mode
@calibration_panel.addClass('current-state')
......@@ -337,6 +360,7 @@ class @PeerGradingProblem
@calibration_panel.find('.grading-text').hide()
@grading_panel.find('.grading-text').hide()
@flag_student_container.hide()
@feedback_area.val("")
@submit_button.unbind('click')
@submit_button.click @submit_calibration_essay
......@@ -350,7 +374,7 @@ class @PeerGradingProblem
render_submission: (response) =>
if response.success
@submit_button.hide()
@submission_container.html("<h3>Submitted Essay</h3>")
@submission_container.html("")
@render_submission_data(response)
@calibration_panel.removeClass('current-state')
......@@ -364,6 +388,7 @@ class @PeerGradingProblem
@calibration_panel.find('.grading-text').show()
@grading_panel.find('.grading-text').show()
@flag_student_container.show()
@feedback_area.val("")
@submit_button.unbind('click')
@submit_button.click @submit_grade
......@@ -408,18 +433,25 @@ class @PeerGradingProblem
actual_score = parseInt(response.actual_score)
if score == actual_score
calibration_wrapper.append("<p>Congratulations! Your score matches the actual score!</p>")
calibration_wrapper.append("<p>Your score matches the actual score!</p>")
else
calibration_wrapper.append("<p>Please try to understand the grading critera better to be more accurate next time.</p>")
calibration_wrapper.append("<p>You may want to review the rubric again.</p>")
# disable score selection and submission from the grading interface
$("input[name='score-selection']").attr('disabled', true)
@submit_button.hide()
@calibration_feedback_button.show()
render_interstitial_page: () =>
@content_panel.hide()
@grading_message.hide()
@interstitial_page.show()
render_calibration_interstitial_page: () =>
@content_panel.hide()
@action_button.hide()
@calibration_interstitial_page.show()
render_error: (error_message) =>
@error_container.show()
@calibration_feedback_panel.hide()
......@@ -433,3 +465,12 @@ class @PeerGradingProblem
setup_score_selection: (max_score) =>
# And now hook up an event handler again
$("input[class='score-selection']").change @graded_callback
collapse_question: () =>
@prompt_container.slideToggle()
@prompt_container.toggleClass('open')
if @question_header.text() == "(Hide)"
new_text = "(Show)"
else
new_text = "(Hide)"
@question_header.text(new_text)
......@@ -306,6 +306,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'grammar': 1,
# needs to be after all the other feedback
'markup_text': 3}
do_not_render = ['topicality', 'prompt-overlap']
default_priority = 2
......@@ -360,6 +361,10 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
if len(feedback) == 0:
return format_feedback('errors', 'No feedback available')
for tag in do_not_render:
if tag in feedback:
feedback.pop(tag)
feedback_lst = sorted(feedback.items(), key=get_priority)
feedback_list_part1 = u"\n".join(format_feedback(k, v) for k, v in feedback_lst)
else:
......@@ -381,9 +386,13 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
rubric_feedback = ""
feedback = self._convert_longform_feedback_to_html(response_items)
rubric_scores = []
if response_items['rubric_scores_complete'] == True:
rubric_renderer = CombinedOpenEndedRubric(system, True)
success, rubric_feedback = rubric_renderer.render_rubric(response_items['rubric_xml'])
rubric_dict = rubric_renderer.render_rubric(response_items['rubric_xml'])
success = rubric_dict['success']
rubric_feedback = rubric_dict['html']
rubric_scores = rubric_dict['rubric_scores']
if not response_items['success']:
return system.render_template("open_ended_error.html",
......@@ -396,7 +405,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'rubric_feedback': rubric_feedback
})
return feedback_template
return feedback_template, rubric_scores
def _parse_score_msg(self, score_msg, system, join_feedback=True):
......@@ -420,7 +429,17 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
correct: Correctness of submission (Boolean)
score: Points to be assigned (numeric, can be float)
"""
fail = {'valid': False, 'score': 0, 'feedback': ''}
fail = {
'valid': False,
'score': 0,
'feedback': '',
'rubric_scores' : [[0]],
'grader_types' : [''],
'feedback_items' : [''],
'feedback_dicts' : [{}],
'grader_ids' : [0],
'submission_ids' : [0],
}
try:
score_result = json.loads(score_msg)
except (TypeError, ValueError):
......@@ -447,6 +466,11 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
#This is to support peer grading
if isinstance(score_result['score'], list):
feedback_items = []
rubric_scores = []
grader_types = []
feedback_dicts = []
grader_ids = []
submission_ids = []
for i in xrange(0, len(score_result['score'])):
new_score_result = {
'score': score_result['score'][i],
......@@ -458,7 +482,17 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'rubric_scores_complete': score_result['rubric_scores_complete'][i],
'rubric_xml': score_result['rubric_xml'][i],
}
feedback_items.append(self._format_feedback(new_score_result, system))
feedback_template, rubric_score = self._format_feedback(new_score_result, system)
feedback_items.append(feedback_template)
rubric_scores.append(rubric_score)
grader_types.append(score_result['grader_type'])
try:
feedback_dict = json.loads(score_result['feedback'][i])
except:
pass
feedback_dicts.append(feedback_dict)
grader_ids.append(score_result['grader_id'][i])
submission_ids.append(score_result['submission_id'])
if join_feedback:
feedback = "".join(feedback_items)
else:
......@@ -466,13 +500,33 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
score = int(median(score_result['score']))
else:
#This is for instructor and ML grading
feedback = self._format_feedback(score_result, system)
feedback, rubric_score = self._format_feedback(score_result, system)
score = score_result['score']
rubric_scores = [rubric_score]
grader_types = [score_result['grader_type']]
feedback_items = [feedback]
try:
feedback_dict = json.loads(score_result['feedback'])
except:
pass
feedback_dicts = [feedback_dict]
grader_ids = [score_result['grader_id']]
submission_ids = [score_result['submission_id']]
self.submission_id = score_result['submission_id']
self.grader_id = score_result['grader_id']
return {'valid': True, 'score': score, 'feedback': feedback}
return {
'valid': True,
'score': score,
'feedback': feedback,
'rubric_scores' : rubric_scores,
'grader_types' : grader_types,
'feedback_items' : feedback_items,
'feedback_dicts' : feedback_dicts,
'grader_ids' : grader_ids,
'submission_ids' : submission_ids,
}
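For orientation, a hedged sketch of the peer-grading message this branch parses; the field names come from the code above, but the literal values (and any extra fields a real grading service sends) are assumptions:
    # Hypothetical grader response with list-valued fields (peer grading).
    # rubric_scores_complete is False here so no rubric XML is rendered.
    score_msg = json.dumps({
        'success': True,
        'score': [1, 2],
        'feedback': ['{"feedback": "Good"}', '{"feedback": "Fair"}'],
        'grader_type': 'PE',
        'grader_id': [10, 11],
        'submission_id': 7,
        'rubric_scores_complete': [False, False],
        'rubric_xml': ['', ''],
    })
    result = self._parse_score_msg(score_msg, system)
    # result['score'] is the integer median of the peer scores;
    # result['grader_ids'] and result['submission_ids'] collect the id fields.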
def latest_post_assessment(self, system, short_feedback=False, join_feedback=True):
"""
......
......@@ -68,10 +68,10 @@ class OpenEndedChild(object):
#This is used to tell students where they are at in the module
HUMAN_NAMES = {
'initial': 'Started',
'assessing': 'Being scored',
'post_assessment': 'Scoring finished',
'done': 'Problem complete',
'initial': 'Not started',
'assessing': 'In progress',
'post_assessment': 'Done',
'done': 'Done',
}
def __init__(self, system, location, definition, descriptor, static_data,
......@@ -137,8 +137,6 @@ class OpenEndedChild(object):
else:
return False, {}
def latest_answer(self):
"""Empty string if not available"""
if not self.history:
......
......@@ -53,8 +53,6 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
@param descriptor: SelfAssessmentDescriptor
@return: None
"""
self.submit_message = definition['submitmessage']
self.hint_prompt = definition['hintprompt']
self.prompt = stringify_children(self.prompt)
self.rubric = stringify_children(self.rubric)
......@@ -76,8 +74,6 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
'previous_answer': previous_answer,
'ajax_url': system.ajax_url,
'initial_rubric': self.get_rubric_html(system),
'initial_hint': "",
'initial_message': self.get_message_html(),
'state': self.state,
'allow_reset': self._allow_reset(),
'child_type': 'selfassessment',
......@@ -108,7 +104,6 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
if dispatch not in handlers:
return 'Error'
log.debug(get)
before = self.get_progress()
d = handlers[dispatch](get, system)
after = self.get_progress()
......@@ -126,7 +121,9 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
return ''
rubric_renderer = CombinedOpenEndedRubric(system, False)
success, rubric_html = rubric_renderer.render_rubric(self.rubric)
rubric_dict = rubric_renderer.render_rubric(self.rubric)
success = rubric_dict['success']
rubric_html = rubric_dict['html']
# we'll render it
context = {'rubric': rubric_html,
......@@ -156,8 +153,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
else:
hint = ''
context = {'hint_prompt': self.hint_prompt,
'hint': hint}
context = {'hint': hint}
if self.state == self.POST_ASSESSMENT:
context['read_only'] = False
......@@ -168,15 +164,6 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
return system.render_template('self_assessment_hint.html', context)
def get_message_html(self):
"""
Return the appropriate version of the message view, based on state.
"""
if self.state != self.DONE:
return ""
return """<div class="save_message">{0}</div>""".format(self.submit_message)
def save_answer(self, get, system):
"""
......@@ -235,15 +222,19 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
try:
score = int(get['assessment'])
score_list = get.getlist('score_list[]')
for i in xrange(0,len(score_list)):
score_list[i] = int(score_list[i])
except ValueError:
return {'success': False, 'error': "Non-integer score value"}
return {'success': False, 'error': "Non-integer score value, or no score list"}
#Record score as assessment and rubric scores as post assessment
self.record_latest_score(score)
self.record_latest_post_assessment(json.dumps(score_list))
d = {'success': True, }
self.change_state(self.DONE)
d['message_html'] = self.get_message_html()
d['allow_reset'] = self._allow_reset()
d['state'] = self.state
......@@ -251,6 +242,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
def save_hint(self, get, system):
'''
Not used currently, as hints have been removed from the system.
Save the hint.
Returns a dict { 'success': bool,
'message_html': message_html,
......@@ -268,9 +260,18 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
self.change_state(self.DONE)
return {'success': True,
'message_html': self.get_message_html(),
'message_html': '',
'allow_reset': self._allow_reset()}
def latest_post_assessment(self, system):
latest_post_assessment = super(SelfAssessmentModule, self).latest_post_assessment(system)
try:
rubric_scores = json.loads(latest_post_assessment)
except:
log.error("Cannot parse rubric scores in self assessment module from {0}".format(latest_post_assessment))
rubric_scores = []
return [rubric_scores]
class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor):
"""
......@@ -299,7 +300,7 @@ class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor):
'hintprompt': 'some-html'
}
"""
expected_children = ['submitmessage', 'hintprompt']
expected_children = []
for child in expected_children:
if len(xml_object.xpath(child)) != 1:
raise ValueError("Self assessment definition must include exactly one '{0}' tag".format(child))
......@@ -308,9 +309,7 @@ class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor):
"""Assumes that xml_object has child k"""
return stringify_children(xml_object.xpath(k)[0])
return {'submitmessage': parse('submitmessage'),
'hintprompt': parse('hintprompt'),
}
return {}
def definition_to_xml(self, resource_fs):
'''Return an xml element representing this definition.'''
......@@ -321,7 +320,7 @@ class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor):
child_node = etree.fromstring(child_str)
elt.append(child_node)
for child in ['submitmessage', 'hintprompt']:
for child in []:
add_child(child)
return elt
......@@ -50,7 +50,7 @@ def staff_grading_notifications(course, user):
log.info("Problem with getting notifications from staff grading service.")
if pending_grading:
img_path = "/static/images/slider-handle.png"
img_path = "/static/images/grading_notification.png"
notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications}
......@@ -83,7 +83,7 @@ def peer_grading_notifications(course, user):
log.info("Problem with getting notifications from peer grading service.")
if pending_grading:
img_path = "/static/images/slider-handle.png"
img_path = "/static/images/grading_notification.png"
notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications}
......@@ -129,7 +129,7 @@ def combined_notifications(course, user):
log.exception("Problem with getting notifications from controller query service.")
if pending_grading:
img_path = "/static/images/slider-handle.png"
img_path = "/static/images/grading_notification.png"
notification_dict = {'pending_grading': pending_grading, 'img_path': img_path, 'response': notifications}
......
......@@ -181,6 +181,10 @@ class @StaffGrading
@ml_error_info_container = $('.ml-error-info-container')
@breadcrumbs = $('.breadcrumbs')
@question_header = $('.question-header')
@question_header.click @collapse_question
@collapse_question()
# model state
@state = state_no_data
......@@ -392,10 +396,10 @@ class @StaffGrading
else if @state == state_grading
@ml_error_info_container.html(@ml_error_info)
meta_list = $("<ul>")
meta_list.append("<li><span class='meta-info'>Available - </span> #{@num_pending}</li>")
meta_list.append("<li><span class='meta-info'>Graded - </span> #{@num_graded}</li>")
meta_list.append("<li><span class='meta-info'>Needed for ML - </span> #{Math.max(@min_for_ml - @num_graded, 0)}</li>")
meta_list = $("<div>")
meta_list.append("<div class='meta-info'>#{@num_pending} available | </div>")
meta_list.append("<div class='meta-info'>#{@num_graded} graded | </div>")
meta_list.append("<div class='meta-info'>#{Math.max(@min_for_ml - @num_graded, 0)} more needed to start ML </div><br/>")
@problem_meta_info.html(meta_list)
@prompt_container.html(@prompt)
......@@ -432,7 +436,17 @@ class @StaffGrading
@get_next_submission(@location)
else
@error('System got into invalid state for submission: ' + @state)
collapse_question: () =>
@prompt_container.slideToggle()
@prompt_container.toggleClass('open')
if @question_header.text() == "(Hide)"
new_text = "(Show)"
else
new_text = "(Hide)"
@question_header.text(new_text)
# for now, just create an instance and load it...
mock_backend = false
......
.rubric {
margin: 40px 0px;
margin: 0px 0px;
color: #3C3C3C;
tr {
margin:10px 0px;
margin:0px 0px;
height: 100%;
}
td {
padding: 20px 0px 25px 0px;
height: 100%;
border: 1px black solid;
text-align: center;
......@@ -21,19 +21,13 @@
.rubric-label
{
position: relative;
padding: 0px 15px 15px 15px;
width: 130px;
min-height: 50px;
min-width: 50px;
font-size: .9em;
background-color: white;
display: block;
}
.grade {
position: absolute;
bottom:0px;
right:0px;
margin:10px;
}
.selected-grade,
.selected-grade .rubric-label {
......@@ -42,11 +36,21 @@
}
input[type=radio]:checked + .rubric-label {
background: white;
color: $base-font-color; }
color: $base-font-color;
white-space:nowrap;
}
.wrappable {
white-space:normal;
}
input[class='score-selection'] {
position: relative;
margin-left: 10px;
font-size: 16px;
}
ul.rubric-list
{
list-style-type: none;
padding:0;
margin:0;
}
}
......@@ -2,19 +2,48 @@ div.staff-grading,
div.peer-grading{
textarea.feedback-area {
height: 75px;
margin: 20px;
margin: 0px;
}
ul.rubric-list{
list-style-type: none;
padding:0;
margin:0;
li {
&.rubric-list-item{
margin-bottom: 0px;
padding: 0px;
}
}
}
h1 {
margin : 0 0 0 10px;
}
h2{
a
{
font-size: .5em;
}
}
div {
margin: 10px;
margin: 0px;
&.submission-container{
overflow-y: auto;
height: 150px;
background: #F6F6F6;
border: 1px solid #ddd;
@include clearfix;
}
}
label {
margin: 10px;
padding: 5px;
@include inline-block;
margin: 0px;
padding: 2px;
min-width: 50px;
background-color: #CCC;
background-color: white;
font-size: 1.5em;
}
......@@ -36,11 +65,11 @@ div.peer-grading{
width:100%;
th
{
padding: 10px;
padding: 2px;
}
td
{
padding:10px;
padding:2px;
}
td.problem-name
{
......@@ -59,71 +88,61 @@ div.peer-grading{
.calibration-feedback-wrapper,
.grading-container
{
border: 1px solid gray;
padding: 15px;
padding: 2px;
}
.error-container
{
background-color: #FFCCCC;
padding: 15px;
padding: 2px;
margin-left: 0px;
}
.submission-wrapper
{
h3
{
margin-bottom: 15px;
margin-bottom: 2px;
}
p
{
margin-left:10px;
margin-left:2px;
}
padding: 15px;
padding: 2px;
padding-bottom: 15px;
}
.meta-info-wrapper
{
background-color: #eee;
padding:15px;
h3
{
font-size:1em;
}
ul
padding:2px;
div
{
list-style-type: none;
font-size: .85em;
li
{
margin: 5px 0px;
}
display : inline;
}
}
.message-container,
.grading-message
{
background-color: $yellow;
padding: 10px;
padding: 2px;
margin-left:0px;
}
.breadcrumbs
{
margin-top:20px;
margin-top:2px;
margin-left:0px;
margin-bottom:5px;
margin-bottom:2px;
font-size: .8em;
}
.instructions-panel
{
margin-right:20px;
margin-right:2px;
> div
{
padding: 2px;
margin: 0px;
margin-bottom: 5px;
background: #eee;
height: 10em;
width:47.6%;
h3
{
......@@ -161,8 +180,8 @@ div.peer-grading{
margin-left: 0px;
header
{
margin-top:20px;
margin-bottom:20px;
margin-top:2px;
margin-bottom:2px;
font-size: 1.2em;
}
}
......@@ -175,5 +194,7 @@ div.peer-grading{
margin-top: 20px;
}
}
padding: 40px;
padding: 15px;
border: none;
}
<section id="combined-open-ended" class="combined-open-ended" data-ajax-url="${ajax_url}" data-allow_reset="${allow_reset}" data-state="${state}" data-task-count="${task_count}" data-task-number="${task_number}" data-accept-file-upload = "${accept_file_upload}">
<h2>${display_name}</h2>
<div class="status-container">
${status | n}
${status|n}
</div>
<h2>${display_name}</h2>
<div class="item-container">
<h4>Problem</h4>
<h4>Prompt <a href="#" class="question-header">(Hide)</a> </h4>
<div class="problem-container">
% for item in items:
<div class="item">${item['content'] | n}</div>
<div class="item">${item['content'] | n}</div>
% endfor
</div>
<input type="button" value="Reset" class="reset-button" name="reset"/>
<input type="button" value="Next Step" class="next-step-button" name="reset"/>
</div>
<a name="results" />
<section class="legend-container">
</section>
<div class="combined-rubric-container">
</div>
<div class="result-container">
</div>
</section>
......
<section class="legend-container">
<div class="legenditem">
Legend
</div>
% for i in xrange(0,len(legend_list)):
<%legend_title=legend_list[i]['name'] %>
<%legend_image=legend_list[i]['image'] %>
<div class="legenditem">
${legend_title}=<img src="${legend_image}" title="${legend_title}">
</div>
% endfor
</section>
<div class="result-container">
<h4>Results from Step ${task_number}</h4>
<div class="${class_name}">
<h4>${task_name}</h4>
${results | n}
</div>
%if status_list[0]['state'] != 'initial':
<h4>Status</h4>
<div class="status-elements">
<section id="combined-open-ended-status" class="combined-open-ended-status">
%for i in xrange(0,len(status_list)):
<%status=status_list[i]%>
%if i==len(status_list)-1:
<div class="statusitem statusitem-current" data-status-number="${i}">
%else:
<div class="statusitem" data-status-number="${i}">
%endif
${status['task_number']}. ${status['human_task']} (${status['human_state']}) : ${status['score']} / ${status['max_score']}
% if status['state'] == 'initial':
<span class="unanswered" id="status"></span>
% elif status['state'] in ['done', 'post_assessment'] and status['correct'] == 'correct':
<span class="correct" id="status"></span>
% elif status['state'] in ['done', 'post_assessment'] and status['correct'] == 'incorrect':
<span class="incorrect" id="status"></span>
% elif status['state'] == 'assessing':
<span class="grading" id="status"></span>
% endif
%if status['type']=="openended" and status['state'] in ['done', 'post_assessment']:
<div class="show-results">
<a href="#results" class="show-results-button">Show results from Step ${status['task_number']}</a>
</div>
%endif
<section id="combined-open-ended-status" class="combined-open-ended-status">
<div class="statusitem">
Status
</div>
%endfor
</section>
%for i in xrange(0,len(status_list)):
<%status=status_list[i]%>
%if i==len(status_list)-1:
<div class="statusitem statusitem-current" data-status-number="${i}">
%else:
<div class="statusitem" data-status-number="${i}">
%endif
%if status['grader_type'] in grader_type_image_dict and render_via_ajax:
<% grader_image = grader_type_image_dict[status['grader_type']]%>
<img src="${grader_image}" title=${status['human_grader_type']}>
%else:
${status['human_task']}
%endif
(${status['human_state']}) ${status['score']} / ${status['max_score']}
</div>
%endfor
</section>
</div>
%endif
......@@ -42,15 +42,13 @@
<section class="prompt-wrapper">
<h2 class="prompt-name"></h2>
<div class="meta-info-wrapper">
<h3>Problem Information</h3>
<div class="problem-meta-info-container">
</div>
<h3>Machine Learning Information</h3>
<div class="ml-error-info-container">
</div>
</div>
<div class="prompt-information-container">
<h3>Question</h3>
<h3>Prompt <a href="#" class="question-header">(Hide)</a> </h3>
<div class="prompt-container">
</div>
</div>
......@@ -62,11 +60,10 @@
</div>
<section class="grading-wrapper">
<h2>Grading</h2>
<div class="grading-container">
<div class="submission-wrapper">
<h3>Student Submission</h3>
<h3>Student Response</h3>
<div class="submission-container">
</div>
</div>
......
......@@ -3,7 +3,7 @@
<div class="prompt">
${prompt|n}
</div>
<h4>Answer</h4>
<h4>Response</h4>
<textarea rows="${rows}" cols="${cols}" name="answer" class="answer short-form-response" id="input_${id}">${previous_answer|h}</textarea>
<div class="message-wrapper"></div>
......
<div class="rubric">
% for i in range(len(categories)):
<% category = categories[i] %>
<span class="rubric-category">${category['description']}</span> <br/>
<ul class="rubric-list">
% for j in range(len(category['options'])):
<% option = category['options'][j] %>
<li class="rubric-list-item">
<div class="rubric-label">
%for grader_type in category['options'][j]['grader_types']:
% if grader_type in grader_type_image_dict:
<% grader_image = grader_type_image_dict[grader_type] %>
% if grader_type in human_grader_types:
<% human_title = human_grader_types[grader_type] %>
% else:
<% human_title = grader_type %>
% endif
<img src="${grader_image}" title="${human_title}"/>
% endif
%endfor
${option['points']} points : ${option['text']}
</div>
</li>
% endfor
</ul>
% endfor
</div>
<section>
<header>Feedback</header>
<div class="shortform-custom" data-open-text='Show detailed results' data-close-text='Hide detailed results'>
<div class="result-output">
<p>Score: ${score}</p>
% if grader_type == "ML":
<p>Check below for full feedback:</p>
% endif
</div>
</div>
<div class="longform">
<div class="result-output">
${ feedback | n}
</div>
<div class="grader-feedback">
${rubric_feedback | n}
% if grader_type=="PE":
<div class="result-output">
${ feedback | n}
</div>
% endif
</div>
</section>
......@@ -33,7 +33,7 @@
<a href="${notification['url']}" class="notification-link">
<div class="notification-title">${notification['name']}</div>
%if notification['alert']:
<p class="alert-message"><img src="/static/images/white-error-icon.png" /> ${notification['alert_message']}</p>
<p class="alert-message"><img src="/static/images/grading_notification.png" /> ${notification['alert_message']}</p>
%endif
<div class="notification-description">
<p>${notification['description']}</p>
......
% for co in context_list:
% if co['grader_type'] in grader_type_image_dict:
<%grader_type=co['grader_type']%>
<% grader_image = grader_type_image_dict[grader_type] %>
% if grader_type in human_grader_types:
<% human_title = human_grader_types[grader_type] %>
% else:
<% human_title = grader_type %>
% endif
<section class="rubric-result-container">
<div class="rubric-result">
<img src="${grader_image}" title="${human_title}"/>
</div>
<div class="rubric-result">
${co['rubric_html']}
</div>
<div class="rubric-result">
%if len(co['feedback'])>2:
<div class="collapsible evaluation-response">
<header>
<a href="#">See full feedback</a>
</header>
<section class="feedback-full">
${co['feedback']}
</section>
</div>
%endif
</div>
%if grader_type!="SA":
<div class="rubric-result">
<input type="hidden" value="${co['grader_id']}" class="grader_id" />
<input type="hidden" value="${co['submission_id']}" class="submission_id" />
<div class="collapsible evaluation-response">
<header>
<a href="#">Respond to Feedback</a>
</header>
<section id="evaluation" class="evaluation">
<p>How accurate do you find this feedback?</p>
<div class="evaluation-scoring">
<ul class="scoring-list">
<li><input type="radio" name="evaluation-score" id="evaluation-score-5" value="5" /> <label for="evaluation-score-5"> Correct</label></li>
<li><input type="radio" name="evaluation-score" id="evaluation-score-4" value="4" /> <label for="evaluation-score-4"> Partially Correct</label></li>
<li><input type="radio" name="evaluation-score" id="evaluation-score-3" value="3" /> <label for="evaluation-score-3"> No Opinion</label></li>
<li><input type="radio" name="evaluation-score" id="evaluation-score-2" value="2" /> <label for="evaluation-score-2"> Partially Incorrect</label></li>
<li><input type="radio" name="evaluation-score" id="evaluation-score-1" value="1" /> <label for="evaluation-score-1"> Incorrect</label></li>
</ul>
</div>
<p>Additional comments:</p>
<textarea rows="${rows}" cols="${cols}" name="feedback" class="feedback-on-feedback" id="feedback"></textarea>
<input type="button" value="Submit Feedback" class="submit-evaluation-button" name="reset"/>
</section>
</div>
</div>
%endif
</section>
<br/>
%endif
%endfor
\ No newline at end of file
<form class="rubric-template" id="inputtype_${id}">
<form class="rubric-template" id="inputtype_${id}" xmlns="http://www.w3.org/1999/html">
<h3>Rubric</h3>
% if view_only and has_score:
<p>This is the rubric that was used to grade your submission. The highlighted selection matches how the grader feels you performed in each category.</p>
% elif view_only:
<p>Use the rubric below to rate this submission.</p>
% else:
<p>Select the criteria you feel best represents this submission in each category.</p>
% endif
<table class="rubric">
<tr class="points-header">
<th></th>
% for i in range(max_score + 1):
<th>
${i} points
</th>
% endfor
</tr>
<div class="rubric">
% for i in range(len(categories)):
<% category = categories[i] %>
<tr>
<th>${category['description']}</th>
% for j in range(len(category['options'])):
<% category = categories[i] %>
<span class="rubric-category">${category['description']}</span> <br/>
<ul class="rubric-list">
% for j in range(len(category['options'])):
<% option = category['options'][j] %>
%if option['selected']:
<td class="selected-grade">
%else:
<td>
% endif
% if view_only:
## if this is the selected rubric block, show it highlighted
<div class="rubric-label">
${option['text']}
</div>
% else:
<input type="radio" class="score-selection" name="score-selection-${i}" id="score-${i}-${j}" value="${option['points']}"/>
<label class="rubric-label" for="score-${i}-${j}">${option['text']}</label>
% endif
</td>
% endfor
</tr>
%if option['selected']:
<li class="selected-grade rubric-list-item">
%else:
<li class="rubric-list-item">
% endif
<label class="rubric-label" for="score-${i}-${j}">
<input type="radio" class="score-selection" name="score-selection-${i}" id="score-${i}-${j}" value="${option['points']}"/>
<span class="wrappable"> ${option['points']} points : ${option['text']}</span>
</label>
</li>
% endfor
</ul>
% endfor
</table>
</div>
</form>
<div class="rubric">
% for i in range(len(categories)):
<% category = categories[i] %>
% for j in range(len(category['options'])):
<% option = category['options'][j] %>
% if option['selected']:
${category['description']} : ${option['points']} |
% endif
% endfor
% endfor
</div>
......@@ -3,30 +3,17 @@
<div class="error-container"></div>
<section class="content-panel">
<h1>Peer Grading </h1>
<div class="instructions-panel">
<div class="calibration-panel">
<h3>Learning to Grade</h3>
<div class="calibration-text">
<p>Before you can do any proper peer grading, you first need to understand how your own grading compares to that of the instructor. Once your grades begin to match the instructor's, you will move on to grading your peers!</p>
</div>
<div class="grading-text">
<p>You have successfully calibrated your grading to that of the instructor and have moved on to the next step in the peer grading process.</p>
</div>
</div>
<div class="grading-panel">
<h3>Grading</h3>
<div class="calibration-text">
<p>You cannot start grading until you have graded a sufficient number of training problems and have been able to demonstrate that your scores closely match those of the instructor.</p>
</div>
<div class="grading-text">
<p>Now that you have finished training, you may grade your peers. Please keep in mind that students are allowed to respond to the grades and feedback they receive.</p>
</div>
<h3>Peer Grading</h3>
</div>
</div>
<div class="prompt-wrapper">
<h2>Question</h2>
<h2>Prompt <a href="#" class="question-header">(Hide)</a> </h2>
<div class="prompt-information-container">
<section>
<div class="prompt-container">
......@@ -34,11 +21,11 @@
</section>
</div>
</div>
</div>
<section class="grading-wrapper">
<h2>Grading</h2>
<h2>Student Response</h2>
<div class="grading-container">
<div class="submission-wrapper">
......@@ -70,7 +57,6 @@
</section>
</section>
<!-- Calibration feedback: Shown after a calibration is sent -->
<section class="calibration-feedback">
<h2>How did I do?</h2>
......@@ -81,11 +67,20 @@
<!-- Interstitial Page: Shown between calibration and grading steps -->
<section class="interstitial-page">
<h1>Congratulations!</h1>
<p> You have now completed the calibration step. You are now ready to start grading.</p>
<h1>Ready to grade!</h1>
<p> You have finished learning to grade, which means that you are now ready to start grading.</p>
<input type="button" class="interstitial-page-button" value="Start Grading!" name="interstitial-page-button" />
</section>
<!-- Calibration Interstitial Page: Shown before calibration -->
<section class="calibration-interstitial-page">
<h1>Learning to grade</h1>
<p> You have not yet finished learning to grade this problem.</p>
<p> You will now be shown a series of instructor-scored essays, and will be asked to score them yourself.</p>
<p> Once you can score the essays similarly to an instructor, you will be ready to grade your peers.</p>
<input type="button" class="calibration-interstitial-page-button" value="Start learning to grade" name="calibration-interstitial-page-button" />
</section>
<input type="button" value="Go Back" class="action-button" name="back" />
</div>
</section>
<div class="hint">
<div class="hint-prompt">
${hint_prompt}
Please enter a hint below:
</div>
<textarea name="post_assessment" class="post_assessment" cols="70" rows="5"
${'readonly="true"' if read_only else ''}>${hint}</textarea>
......
......@@ -5,7 +5,7 @@
${prompt}
</div>
<h4>Answer</h4>
<h4>Response</h4>
<div>
<textarea name="answer" class="answer short-form-response" cols="70" rows="20">${previous_answer|n}</textarea>
</div>
......@@ -14,9 +14,9 @@
<div class="rubric-wrapper">${initial_rubric}</div>
<div class="hint-wrapper">${initial_hint}</div>
<div class="hint-wrapper"></div>
<div class="message-wrapper">${initial_message}</div>
<div class="message-wrapper"></div>
<div class="file-upload"></div>
<input type="button" value="Submit" class="submit-button" name="show"/>
......