Commit 26a301f4 by Vik Paruchuri

Merge pull request #504 from edx/feature/vik/oe-ui

Feature/vik/oe ui
parents f3943c7e 21e13e44
@@ -213,7 +213,7 @@ class CombinedOpenEndedFields(object):
         help="The number of times the student can try to answer this problem.",
         default=1,
         scope=Scope.settings,
-        values={"min" : 1 }
+        values={"min": 1 }
     )
     accept_file_upload = Boolean(
         display_name="Allow File Uploads",
@@ -229,12 +229,10 @@ class CombinedOpenEndedFields(object):
     )
     due = Date(
         help="Date that this problem is due by",
-        default=None,
         scope=Scope.settings
     )
     graceperiod = Timedelta(
         help="Amount of time after the due date that submissions will be accepted",
-        default=None,
         scope=Scope.settings
     )
     version = VersionInteger(help="Current version number", default=DEFAULT_VERSION, scope=Scope.settings)
@@ -244,7 +242,7 @@ class CombinedOpenEndedFields(object):
         display_name="Problem Weight",
         help="Defines the number of points each problem is worth. If the value is not set, each problem is worth one point.",
         scope=Scope.settings,
-        values={"min" : 0 , "step": ".1"},
+        values={"min": 0, "step": ".1"},
         default=1
     )
     min_to_calibrate = Integer(
@@ -252,28 +250,28 @@ class CombinedOpenEndedFields(object):
         help="The minimum number of calibration essays each student will need to complete for peer grading.",
         default=3,
         scope=Scope.settings,
-        values={"min" : 1, "max" : 20, "step" : "1"}
+        values={"min": 1, "max": 20, "step": "1"}
     )
     max_to_calibrate = Integer(
         display_name="Maximum Peer Grading Calibrations",
         help="The maximum number of calibration essays each student will need to complete for peer grading.",
         default=6,
         scope=Scope.settings,
-        values={"min" : 1, "max" : 20, "step" : "1"}
+        values={"min": 1, "max": 20, "step": "1"}
     )
     peer_grader_count = Integer(
         display_name="Peer Graders per Response",
         help="The number of peers who will grade each submission.",
         default=3,
         scope=Scope.settings,
-        values={"min" : 1, "step" : "1", "max" : 5}
+        values={"min": 1, "step": "1", "max": 5}
     )
     required_peer_grading = Integer(
         display_name="Required Peer Grading",
         help="The number of other students each student making a submission will have to grade.",
         default=3,
         scope=Scope.settings,
-        values={"min" : 1, "step" : "1", "max" : 5}
+        values={"min": 1, "step": "1", "max": 5}
     )
     markdown = String(
         help="Markdown source of this module",
...
<section class="course-content"> <section class="course-content">
<section class="xmodule_display xmodule_CombinedOpenEndedModule" data-type="CombinedOpenEnded"> <section class="xmodule_display xmodule_CombinedOpenEndedModule" data-type="CombinedOpenEnded">
<section id="combined-open-ended" class="combined-open-ended" data-ajax-url="/courses/MITx/6.002x/2012_Fall/modx/i4x://MITx/6.002x/combinedopenended/CombinedOE" data-allow_reset="False" data-state="assessing" data-task-count="2" data-task-number="1"> <section id="combined-open-ended" class="combined-open-ended" data-ajax-url="/courses/MITx/6.002x/2012_Fall/modx/i4x://MITx/6.002x/combinedopenended/CombinedOE" data-allow_reset="False" data-state="assessing" data-task-count="2" data-task-number="1">
<h2>Problem 1</h2>
<h2>Problem 1</h2> <div class="status-container">
<div class="status-container"> <h4>Status</h4>
<h4>Status</h4> <div class="status-elements">
<div class="status-elements"> <section id="combined-open-ended-status" class="combined-open-ended-status">
<section id="combined-open-ended-status" class="combined-open-ended-status"> <div class="statusitem" data-status-number="0">
Step 1 (Problem complete) : 1 / 1
<div class="statusitem" data-status-number="0">
Step 1 (Problem complete) : 1 / 1
<span class="correct" id="status"></span> <span class="correct" id="status"></span>
</div>
</div> <div class="statusitem statusitem-current" data-status-number="1">
Step 2 (Being scored) : None / 1
<div class="statusitem statusitem-current" data-status-number="1">
Step 2 (Being scored) : None / 1
<span class="grading" id="status"></span> <span class="grading" id="status"></span>
</div>
</section>
</div>
</div> </div>
</section> <div class="item-container">
</div> <h4>Problem</h4>
</div>
<div class="item-container">
<h4>Problem</h4>
<div class="problem-container"> <div class="problem-container">
<div class="item"><section id="openended_open_ended" class="open-ended-child" data-state="assessing" data-child-type="openended"><div class="error"></div> <div class="item">
<div class="prompt"> <section id="openended_open_ended" class="open-ended-child" data-state="assessing" data-child-type="openended">
<div class="error">
Some prompt. </div>
<div class="prompt">
</div> Some prompt.
<textarea rows="30" cols="80" name="answer" class="answer short-form-response" id="input_open_ended" disabled="disabled">Test submission. Yaaaaaaay!</textarea><div class="message-wrapper"></div> </div>
<div class="grader-status"> <textarea rows="30" cols="80" name="answer" class="answer short-form-response" id="input_open_ended" disabled="disabled">
<span class="grading" id="status_open_ended">Submitted for grading.</span> Test submission. Yaaaaaaay!
</textarea>
</div> <div class="message-wrapper"></div>
<div class="grader-status">
<input type="button" value="Submit assessment" class="submit-button" name="show" style="display: none;"><input name="skip" class="skip-button" type="button" value="Skip Post-Assessment" style="display: none;"><div class="open-ended-action"></div> <span class="grading" id="status_open_ended">Submitted for grading.</span>
</div>
<span id="answer_open_ended"></span> <input type="button" value="Submit assessment" class="submit-button" name="show" style="display: none;">
</section></div> <input name="skip" class="skip-button" type="button" value="Skip Post-Assessment" style="display: none;">
<div class="open-ended-action"></div>
<span id="answer_open_ended"></span>
</section>
</div> </div>
</div>
<div class="oe-tools response-tools">
<span class="oe-tools-label"></span>
<input type="button" value="Reset" class="reset-button" name="reset" style="display: none;"> <input type="button" value="Reset" class="reset-button" name="reset" style="display: none;">
<input type="button" value="Next Step" class="next-step-button" name="reset" style="display: none;"> </div>
<input type="button" value="Next Step" class="next-step-button" name="reset" style="display: none;">
</div> </div>
<a name="results"> <a name="results">
<div class="result-container"> <div class="result-container">
</div> </div>
</a></section><a name="results"> </a>
</section>
<a name="results">
</a></section><a name="results"> </a>
</section>
</a><div><a name="results"> <a name="results">
</a><a href="https://github.com/MITx/content-mit-6002x/tree/master/combinedopenended/CombinedOE.xml">Edit</a> / </a>
<a href="#i4x_MITx_6_002x_combinedopenended_CombinedOE_xqa-modal" onclick="javascript:getlog('i4x_MITx_6_002x_combinedopenended_CombinedOE', { <div>
<a name="results">
</a>
<a href="https://github.com/MITx/content-mit-6002x/tree/master/combinedopenended/CombinedOE.xml">
Edit
</a> /
<a href="#i4x_MITx_6_002x_combinedopenended_CombinedOE_xqa-modal" onclick="javascript:getlog('i4x_MITx_6_002x_combinedopenended_CombinedOE', {
'location': 'i4x://MITx/6.002x/combinedopenended/CombinedOE', 'location': 'i4x://MITx/6.002x/combinedopenended/CombinedOE',
'xqa_key': 'KUBrWtK3RAaBALLbccHrXeD3RHOpmZ2A', 'xqa_key': 'KUBrWtK3RAaBALLbccHrXeD3RHOpmZ2A',
'category': 'CombinedOpenEndedModule', 'category': 'CombinedOpenEndedModule',
'user': 'blah' 'user': 'blah'
})" id="i4x_MITx_6_002x_combinedopenended_CombinedOE_xqa_log">QA</a> })" id="i4x_MITx_6_002x_combinedopenended_CombinedOE_xqa_log">QA</a>
</div> </div>
<div><a href="#i4x_MITx_6_002x_combinedopenended_CombinedOE_debug" id="i4x_MITx_6_002x_combinedopenended_CombinedOE_trig">Staff Debug Info</a></div> <div>
<a href="#i4x_MITx_6_002x_combinedopenended_CombinedOE_debug" id="i4x_MITx_6_002x_combinedopenended_CombinedOE_trig">
Staff Debug Info
</a>
</div>
<section id="i4x_MITx_6_002x_combinedopenended_CombinedOE_xqa-modal" class="modal xqa-modal" style="width:80%; left:20%; height:80%; overflow:auto"> <section id="i4x_MITx_6_002x_combinedopenended_CombinedOE_xqa-modal" class="modal xqa-modal" style="width:80%; left:20%; height:80%; overflow:auto">
<div class="inner-wrapper"> <div class="inner-wrapper">
......
+describe 'Rubric', ->
+  beforeEach ->
+    spyOn Logger, 'log'
+    # load up some fixtures
+    loadFixtures 'rubric.html'
+    jasmine.Clock.useMock()
+    @element = $('.combined-open-ended')
+    @location = @element.data('location')
+  describe 'constructor', ->
+    beforeEach ->
+      @rub = new Rubric @element
+    it 'rubric should properly grab the element', ->
+      expect(@rub.el).toEqual @element
+  describe 'initialize', ->
+    beforeEach ->
+      @rub = new Rubric @element
+      @rub.initialize @location
+    it 'rubric correctly sets location', ->
+      expect($(@rub.rubric_sel).data('location')).toEqual @location
+    it 'rubric correctly read', ->
+      expect(@rub.categories.length).toEqual 5
 describe 'CombinedOpenEnded', ->
   beforeEach ->
     spyOn Logger, 'log'
@@ -13,7 +40,7 @@ describe 'CombinedOpenEnded', ->
     @combined = new CombinedOpenEnded @element
   it 'set the element', ->
-    expect(@combined.element).toEqual @element
+    expect(@combined.el).toEqual @element
   it 'get the correct values from data fields', ->
     expect(@combined.ajax_url).toEqual '/courses/MITx/6.002x/2012_Fall/modx/i4x://MITx/6.002x/combinedopenended/CombinedOE'
@@ -77,7 +104,7 @@ describe 'CombinedOpenEnded', ->
       @combined.child_state = 'done'
       @combined.rebind()
       expect(@combined.answer_area.attr("disabled")).toBe("disabled")
-      expect(@combined.next_problem).toHaveBeenCalled()
+      expect(@combined.next_problem_button).toBe(":visible")
   describe 'next_problem', ->
     beforeEach ->
@@ -109,3 +136,5 @@ describe 'CombinedOpenEnded', ->
@@ -3,10 +3,20 @@
 # Can (and should be) expanded upon when our problem list
 # becomes more sophisticated
 class @PeerGrading
+  peer_grading_sel: '.peer-grading'
+  peer_grading_container_sel: '.peer-grading-container'
+  error_container_sel: '.error-container'
+  message_container_sel: '.message-container'
+  problem_button_sel: '.problem-button'
+  problem_list_sel: '.problem-list'
+  progress_bar_sel: '.progress-bar'
   constructor: (element) ->
-    @peer_grading_container = $('.peer-grading')
+    @el = element
+    @peer_grading_container = @$(@peer_grading_sel)
     @use_single_location = @peer_grading_container.data('use-single-location')
-    @peer_grading_outer_container = $('.peer-grading-container')
+    @peer_grading_outer_container = @$(@peer_grading_container_sel)
     @ajax_url = @peer_grading_container.data('ajax-url')
     if @use_single_location.toLowerCase() == "true"
@@ -14,23 +24,27 @@ class @PeerGrading
       @activate_problem()
     else
       #Otherwise, activate the panel view.
-      @error_container = $('.error-container')
+      @error_container = @$(@error_container_sel)
       @error_container.toggle(not @error_container.is(':empty'))
-      @message_container = $('.message-container')
+      @message_container = @$(@message_container_sel)
       @message_container.toggle(not @message_container.is(':empty'))
-      @problem_button = $('.problem-button')
+      @problem_button = @$(@problem_button_sel)
       @problem_button.click @show_results
-      @problem_list = $('.problem-list')
+      @problem_list = @$(@problem_list_sel)
       @construct_progress_bar()
+  # locally scoped jquery.
+  $: (selector) ->
+    $(selector, @el)
   construct_progress_bar: () =>
     problems = @problem_list.find('tr').next()
     problems.each( (index, element) =>
       problem = $(element)
-      progress_bar = problem.find('.progress-bar')
+      progress_bar = problem.find(@progress_bar_sel)
       bar_value = parseInt(problem.data('graded'))
       bar_max = parseInt(problem.data('required')) + bar_value
       progress_bar.progressbar({value: bar_value, max: bar_max})
@@ -43,10 +57,10 @@ class @PeerGrading
     if response.success
       @peer_grading_outer_container.after(response.html).remove()
       backend = new PeerGradingProblemBackend(@ajax_url, false)
-      new PeerGradingProblem(backend)
+      new PeerGradingProblem(backend, @el)
     else
       @gentle_alert response.error
   activate_problem: () =>
     backend = new PeerGradingProblemBackend(@ajax_url, false)
-    new PeerGradingProblem(backend)
+    new PeerGradingProblem(backend, @el)
 \ No newline at end of file
@@ -206,20 +206,49 @@ class CombinedOpenEndedRubric(object):
     def render_combined_rubric(self, rubric_xml, scores, score_types, feedback_types):
         success, score_tuples = CombinedOpenEndedRubric.reformat_scores_for_rendering(scores, score_types,
                                                                                       feedback_types)
+        #Get all the categories in the rubric
         rubric_categories = self.extract_categories(rubric_xml)
+        #Get a list of max scores, each entry belonging to a rubric category
         max_scores = map((lambda cat: cat['options'][-1]['points']), rubric_categories)
+        actual_scores = []
+        #Get the highest possible score across all categories
         max_score = max(max_scores)
-        for i in xrange(0, len(rubric_categories)):
-            category = rubric_categories[i]
-            for j in xrange(0, len(category['options'])):
+        #Loop through each category
+        for i, category in enumerate(rubric_categories):
+            #Loop through each option in the category
+            for j in xrange(len(category['options'])):
+                #Intialize empty grader types list
                 rubric_categories[i]['options'][j]['grader_types'] = []
-                for tuple in score_tuples:
-                    if tuple[1] == i and tuple[2] == j:
-                        for grader_type in tuple[3]:
+                #Score tuples are a flat data structure with (category, option, grader_type_list) for selected graders
+                for tup in score_tuples:
+                    if tup[1] == i and tup[2] == j:
+                        for grader_type in tup[3]:
+                            #Set the rubric grader type to the tuple grader types
                             rubric_categories[i]['options'][j]['grader_types'].append(grader_type)
+                        #Grab the score and add it to the actual scores. J will be the score for the selected
+                        #grader type
+                        if len(actual_scores)<=i:
+                            #Initialize a new list in the list of lists
+                            actual_scores.append([j])
+                        else:
+                            #If a list in the list of lists for this position exists, append to it
+                            actual_scores[i] += [j]
+        actual_scores = [sum(i) / len(i) for i in actual_scores]
+        correct = []
+        #Define if the student is "correct" (1) "incorrect" (0) or "partially correct" (.5)
+        for (i, a) in enumerate(actual_scores):
+            if int(a) == max_scores[i]:
+                correct.append(1)
+            elif int(a)==0:
+                correct.append(0)
+            else:
+                correct.append(.5)
         html = self.system.render_template('{0}/open_ended_combined_rubric.html'.format(self.TEMPLATE_DIR),
                                            {'categories': rubric_categories,
+                                            'max_scores': max_scores,
+                                            'correct' : correct,
                                             'has_score': True,
                                             'view_only': True,
                                             'max_score': max_score,
...
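Review note: the correctness bucketing introduced above can be read in isolation as the short sketch below. It is illustrative only, not code from the commit: the function is standalone, it uses explicit float averaging rather than the module's integer division, and the sample data is made up.

def bucket_correctness(actual_scores, max_scores):
    # Average each category's per-grader option indices, then map the average to
    # 1 (full credit), 0 (no credit), or .5 (partial credit), mirroring the diff above.
    averaged = [sum(scores) / float(len(scores)) for scores in actual_scores]
    correct = []
    for i, avg in enumerate(averaged):
        if int(avg) == max_scores[i]:
            correct.append(1)
        elif int(avg) == 0:
            correct.append(0)
        else:
            correct.append(.5)
    return correct

# Two categories: both graders gave full marks on the first; they split 1 and 2 (out of 3) on the second.
print(bucket_correctness([[2, 2], [1, 2]], [2, 3]))  # -> [1, 0.5]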
@@ -11,6 +11,9 @@ log = logging.getLogger(__name__)
 class GradingServiceError(Exception):
+    """
+    Exception for grading service. Shown when Open Response Assessment servers cannot be reached.
+    """
     pass
@@ -62,7 +65,6 @@ class GradingService(object):
         """
         Make a get request to the grading controller
         """
-        log.debug(params)
         op = lambda: self.session.get(url,
                                       allow_redirects=allow_redirects,
                                       params=params)
...
@@ -641,6 +641,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
         """
         # Once we close the problem, we should not allow students
         # to save answers
+        error_message = ""
         closed, msg = self.check_if_closed()
         if closed:
             return msg
@@ -650,17 +651,11 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
         # add new history element with answer and empty score and hint.
         success, data = self.append_image_to_student_answer(data)
-        error_message = ""
         if success:
-            success, allowed_to_submit, error_message = self.check_if_student_can_submit()
-            if allowed_to_submit:
-                data['student_answer'] = OpenEndedModule.sanitize_html(data['student_answer'])
-                self.new_history_entry(data['student_answer'])
-                self.send_to_grader(data['student_answer'], system)
-                self.change_state(self.ASSESSING)
-            else:
-                # Error message already defined
-                success = False
+            data['student_answer'] = OpenEndedModule.sanitize_html(data['student_answer'])
+            self.new_history_entry(data['student_answer'])
+            self.send_to_grader(data['student_answer'], system)
+            self.change_state(self.ASSESSING)
         else:
             # This is a student_facing_error
             error_message = "There was a problem saving the image in your submission. Please try a different image, or try pasting a link to an image into the answer box."
@@ -668,7 +663,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
         return {
             'success': success,
             'error': error_message,
-            'student_response': data['student_answer']
+            'student_response': data['student_answer'].replace("\n","<br/>")
         }

     def update_score(self, data, system):
@@ -699,12 +694,12 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
             score = self.latest_score()
             correct = 'correct' if self.is_submission_correct(score) else 'incorrect'
             if self.child_state == self.ASSESSING:
-                eta_string = self.get_eta()
+                eta_string = "Your response has been submitted. Please check back later for your grade."
         else:
             post_assessment = ""
             correct = ""
-            previous_answer = self.initial_display
+            previous_answer = ""
+        previous_answer = previous_answer.replace("\n","<br/>")
         context = {
             'prompt': self.child_prompt,
             'previous_answer': previous_answer,
...
@@ -58,7 +58,7 @@ class OpenEndedChild(object):
         'assessing': 'In progress',
         'post_assessment': 'Done',
         'done': 'Done',
     }

     def __init__(self, system, location, definition, descriptor, static_data,
                  instance_state=None, shared_state=None, **kwargs):
@@ -179,10 +179,11 @@ class OpenEndedChild(object):
             answer = autolink_html(answer)
             cleaner = Cleaner(style=True, links=True, add_nofollow=False, page_structure=True, safe_attrs_only=True,
                               host_whitelist=open_ended_image_submission.TRUSTED_IMAGE_DOMAINS,
-                              whitelist_tags=set(['embed', 'iframe', 'a', 'img']))
+                              whitelist_tags=set(['embed', 'iframe', 'a', 'img', 'br']))
             clean_html = cleaner.clean_html(answer)
             clean_html = re.sub(r'</p>$', '', re.sub(r'^<p>', '', clean_html))
-        except:
+            clean_html = re.sub("\n","<br/>", clean_html)
+        except Exception:
             clean_html = answer
         return clean_html
@@ -230,7 +231,7 @@ class OpenEndedChild(object):
             'max_score': self._max_score,
             'child_attempts': self.child_attempts,
             'child_created': False,
         }
         return json.dumps(state)

     def _allow_reset(self):
@@ -332,7 +333,7 @@ class OpenEndedChild(object):
         try:
             image_data.seek(0)
             image_ok = open_ended_image_submission.run_image_tests(image_data)
-        except:
+        except Exception:
             log.exception("Could not create image and check it.")

         if image_ok:
@@ -345,7 +346,7 @@ class OpenEndedChild(object):
             success, s3_public_url = open_ended_image_submission.upload_to_s3(
                 image_data, image_key, self.s3_interface
             )
-        except:
+        except Exception:
             log.exception("Could not upload image to S3.")

         return success, image_ok, s3_public_url
@@ -434,38 +435,6 @@ class OpenEndedChild(object):
         return success, string

-    def check_if_student_can_submit(self):
-        location = self.location_string
-        student_id = self.system.anonymous_student_id
-        success = False
-        allowed_to_submit = True
-        response = {}
-        # This is a student_facing_error
-        error_string = ("You need to peer grade {0} more in order to make another submission. "
-                        "You have graded {1}, and {2} are required. You have made {3} successful peer grading submissions.")
-        try:
-            response = self.peer_gs.get_data_for_location(self.location_string, student_id)
-            count_graded = response['count_graded']
-            count_required = response['count_required']
-            student_sub_count = response['student_sub_count']
-            success = True
-        except:
-            # This is a dev_facing_error
-            log.error("Could not contact external open ended graders for location {0} and student {1}".format(
-                self.location_string, student_id))
-            # This is a student_facing_error
-            error_message = "Could not contact the graders. Please notify course staff."
-            return success, allowed_to_submit, error_message
-        if count_graded >= count_required:
-            return success, allowed_to_submit, ""
-        else:
-            allowed_to_submit = False
-            # This is a student_facing_error
-            error_message = error_string.format(count_required - count_graded, count_graded, count_required,
-                                                student_sub_count)
-            return success, allowed_to_submit, error_message
-
     def get_eta(self):
         if self.controller_qs:
             response = self.controller_qs.check_for_eta(self.location_string)
...
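Review note: the sanitization path touched above boils down to roughly the following standalone sketch. It assumes lxml's HTML cleaner, as used by the module; the domain whitelist here is a made-up stand-in for open_ended_image_submission.TRUSTED_IMAGE_DOMAINS.

import re
from lxml.html.clean import Cleaner, autolink_html

# Illustrative stand-in; the real module reads this from open_ended_image_submission.
TRUSTED_IMAGE_DOMAINS = ['imgur.com', 's3.amazonaws.com']

def sanitize_html(answer):
    """Clean a student answer, keeping embeds/images from trusted hosts and <br/> line breaks."""
    try:
        answer = autolink_html(answer)
        cleaner = Cleaner(style=True, links=True, add_nofollow=False, page_structure=True,
                          safe_attrs_only=True,
                          host_whitelist=TRUSTED_IMAGE_DOMAINS,
                          whitelist_tags=set(['embed', 'iframe', 'a', 'img', 'br']))
        clean_html = cleaner.clean_html(answer)
        # Strip the wrapping <p> tag lxml adds, then keep line breaks visible in rendered HTML.
        clean_html = re.sub(r'</p>$', '', re.sub(r'^<p>', '', clean_html))
        clean_html = re.sub("\n", "<br/>", clean_html)
    except Exception:
        # Fall back to the raw answer if cleaning fails.
        clean_html = answer
    return clean_html

# Keeps the line break visible as <br/> instead of collapsing it.
print(sanitize_html("First line\nSecond line"))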
@@ -124,4 +124,4 @@ class MockPeerGradingService(object):
         ]}

     def get_data_for_location(self, problem_location, student_id):
-        return {"version": 1, "count_graded": 3, "count_required": 3, "success": True, "student_sub_count": 1}
+        return {"version": 1, "count_graded": 3, "count_required": 3, "success": True, "student_sub_count": 1, 'submissions_available' : 0}
@@ -61,6 +61,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
         else:
             previous_answer = ''
+        previous_answer = previous_answer.replace("\n","<br/>")

         context = {
             'prompt': self.child_prompt,
             'previous_answer': previous_answer,
@@ -184,14 +185,9 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
         # add new history element with answer and empty score and hint.
         success, data = self.append_image_to_student_answer(data)
         if success:
-            success, allowed_to_submit, error_message = self.check_if_student_can_submit()
-            if allowed_to_submit:
-                data['student_answer'] = SelfAssessmentModule.sanitize_html(data['student_answer'])
-                self.new_history_entry(data['student_answer'])
-                self.change_state(self.ASSESSING)
-            else:
-                # Error message already defined
-                success = False
+            data['student_answer'] = SelfAssessmentModule.sanitize_html(data['student_answer'])
+            self.new_history_entry(data['student_answer'])
+            self.change_state(self.ASSESSING)
         else:
             # This is a student_facing_error
             error_message = "There was a problem saving the image in your submission. Please try a different image, or try pasting a link to an image into the answer box."
@@ -200,7 +196,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
             'success': success,
             'rubric_html': self.get_rubric_html(system),
             'error': error_message,
-            'student_response': data['student_answer'],
+            'student_response': data['student_answer'].replace("\n","<br/>")
         }

     def save_assessment(self, data, _system):
@@ -272,8 +268,6 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
         try:
             rubric_scores = json.loads(latest_post_assessment)
         except:
-            # This is a dev_facing_error
-            log.error("Cannot parse rubric scores in self assessment module from {0}".format(latest_post_assessment))
             rubric_scores = []
         return [rubric_scores]
...
@@ -46,7 +46,6 @@ class PeerGradingFields(object):
     )
     due = Date(
         help="Due date that should be displayed.",
-        default=None,
         scope=Scope.settings)
     graceperiod = Timedelta(
         help="Amount of grace to give on the due date.",
@@ -189,9 +188,8 @@ class PeerGradingModule(PeerGradingFields, XModule):
         return json.dumps(d, cls=ComplexEncoder)

-    def query_data_for_location(self):
+    def query_data_for_location(self, location):
         student_id = self.system.anonymous_student_id
-        location = self.link_to_location
         success = False
         response = {}
@@ -229,7 +227,7 @@ class PeerGradingModule(PeerGradingFields, XModule):
             count_graded = self.student_data_for_location['count_graded']
             count_required = self.student_data_for_location['count_required']
         except:
-            success, response = self.query_data_for_location()
+            success, response = self.query_data_for_location(self.location)
             if not success:
                 log.exception(
                     "No instance data found and could not get data from controller for loc {0} student {1}".format(
@@ -312,17 +310,26 @@ class PeerGradingModule(PeerGradingFields, XModule):
             error: if there was an error in the submission, this is the error message
         """
-        required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]', 'submission_flagged', 'answer_unknown'])
-        success, message = self._check_required(data, required)
+        required = ['location', 'submission_id', 'submission_key', 'score', 'feedback', 'submission_flagged', 'answer_unknown']
+        if data.get("submission_flagged", False) in ["false", False, "False", "FALSE"]:
+            required.append("rubric_scores[]")
+        success, message = self._check_required(data, set(required))
         if not success:
             return self._err_response(message)

         data_dict = {k:data.get(k) for k in required}
-        data_dict['rubric_scores'] = data.getlist('rubric_scores[]')
+        if 'rubric_scores[]' in required:
+            data_dict['rubric_scores'] = data.getlist('rubric_scores[]')
         data_dict['grader_id'] = self.system.anonymous_student_id

         try:
             response = self.peer_gs.save_grade(**data_dict)
+            success, location_data = self.query_data_for_location(data_dict['location'])
+            #Don't check for success above because the response = statement will raise the same Exception as the one
+            #that will cause success to be false.
+            response.update({'required_done' : False})
+            if 'count_graded' in location_data and 'count_required' in location_data and int(location_data['count_graded'])>=int(location_data['count_required']):
+                response['required_done'] = True
             return response
         except GradingServiceError:
             # This is a dev_facing_error
@@ -502,7 +509,7 @@ class PeerGradingModule(PeerGradingFields, XModule):
             error_text = "Could not get list of problems to peer grade. Please notify course staff."
             log.error(error_text)
             success = False
-        except:
+        except Exception:
             log.exception("Could not contact peer grading service.")
             success = False
@@ -513,20 +520,24 @@ class PeerGradingModule(PeerGradingFields, XModule):
             '''
             try:
                 return modulestore().get_instance(self.system.course_id, location)
-            except:
+            except Exception:
                 # the linked problem doesn't exist
                 log.error("Problem {0} does not exist in this course".format(location))
                 raise

+        good_problem_list = []
         for problem in problem_list:
             problem_location = problem['location']
-            descriptor = _find_corresponding_module_for_location(problem_location)
+            try:
+                descriptor = _find_corresponding_module_for_location(problem_location)
+            except Exception:
+                continue
             if descriptor:
                 problem['due'] = descriptor.lms.due
                 grace_period = descriptor.lms.graceperiod
                 try:
                     problem_timeinfo = TimeInfo(problem['due'], grace_period)
-                except:
+                except Exception:
                     log.error("Malformed due date or grace period string for location {0}".format(problem_location))
                     raise
                 if self._closed(problem_timeinfo):
@@ -537,13 +548,14 @@ class PeerGradingModule(PeerGradingFields, XModule):
                 # if we can't find the due date, assume that it doesn't have one
                 problem['due'] = None
                 problem['closed'] = False
+            good_problem_list.append(problem)

         ajax_url = self.ajax_url
         html = self.system.render_template('peer_grading/peer_grading.html', {
             'course_id': self.system.course_id,
             'ajax_url': ajax_url,
             'success': success,
-            'problem_list': problem_list,
+            'problem_list': good_problem_list,
             'error_text': error_text,
             # Checked above
             'staff_access': False,
...
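Review note: in plain terms, save_grade now only requires rubric_scores[] when the submission was not flagged, and it reports back whether the grader has met their required peer-grading count. A minimal sketch of just that decision logic, using plain dicts as stand-ins for the request data and controller response (helper names are illustrative, not module API):

def build_required_fields(data):
    # Rubric scores are only required when the submission was not flagged.
    required = ['location', 'submission_id', 'submission_key', 'score',
                'feedback', 'submission_flagged', 'answer_unknown']
    if data.get("submission_flagged", False) in ["false", False, "False", "FALSE"]:
        required.append("rubric_scores[]")
    return required

def mark_required_done(response, location_data):
    # Annotate the grading response with whether the student has peer graded enough.
    response['required_done'] = False
    if ('count_graded' in location_data and 'count_required' in location_data
            and int(location_data['count_graded']) >= int(location_data['count_required'])):
        response['required_done'] = True
    return response

# Example: an unflagged grade from a student who has met their quota.
print(build_required_fields({"submission_flagged": "false"}))
print(mark_required_done({'success': True}, {'count_graded': 3, 'count_required': 3}))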
@@ -73,6 +73,7 @@ class OpenEndedChildTest(unittest.TestCase):
     def setUp(self):
         self.test_system = get_test_system()
+        self.test_system.open_ended_grading_interface = None
         self.openendedchild = OpenEndedChild(self.test_system, self.location,
                                              self.definition, self.descriptor, self.static_data, self.metadata)
@@ -203,7 +204,7 @@ class OpenEndedModuleTest(unittest.TestCase):
     def setUp(self):
         self.test_system = get_test_system()
+        self.test_system.open_ended_grading_interface = None
         self.test_system.location = self.location
         self.mock_xqueue = MagicMock()
         self.mock_xqueue.send_to_queue.return_value = (None, "Message")
@@ -410,6 +411,7 @@ class CombinedOpenEndedModuleTest(unittest.TestCase):
         full_definition = definition_template.format(prompt=prompt, rubric=rubric, task1=task_xml1, task2=task_xml2)
         descriptor = Mock(data=full_definition)
         test_system = get_test_system()
+        test_system.open_ended_grading_interface = None
         combinedoe_container = CombinedOpenEndedModule(
             test_system,
             descriptor,
@@ -536,6 +538,7 @@ class OpenEndedModuleXmlTest(unittest.TestCase, DummyModulestore):
     def setUp(self):
         self.test_system = get_test_system()
+        self.test_system.open_ended_grading_interface = None
         self.test_system.xqueue['interface'] = Mock(
             send_to_queue=Mock(side_effect=[1, "queued"])
         )
@@ -569,9 +572,9 @@ class OpenEndedModuleXmlTest(unittest.TestCase, DummyModulestore):
         module = self.get_module_from_location(self.problem_location, COURSE)

         #Simulate a student saving an answer
-        module.handle_ajax("save_answer", {"student_answer": self.answer})
-        status = module.handle_ajax("get_status", {})
-        self.assertTrue(isinstance(status, basestring))
+        html = module.handle_ajax("get_html", {})
+        module.handle_ajax("save_answer", {"student_answer": self.answer, "can_upload_files" : False, "student_file" : None})
+        html = module.handle_ajax("get_html", {})

         #Mock a student submitting an assessment
         assessment_dict = MockQueryDict()
@@ -579,8 +582,7 @@ class OpenEndedModuleXmlTest(unittest.TestCase, DummyModulestore):
         module.handle_ajax("save_assessment", assessment_dict)
         task_one_json = json.loads(module.task_states[0])
         self.assertEqual(json.loads(task_one_json['child_history'][0]['post_assessment']), assessment)
-        status = module.handle_ajax("get_status", {})
-        self.assertTrue(isinstance(status, basestring))
+        rubric = module.handle_ajax("get_combined_rubric", {})

         #Move to the next step in the problem
         module.handle_ajax("next_problem", {})
@@ -617,7 +619,6 @@ class OpenEndedModuleXmlTest(unittest.TestCase, DummyModulestore):
         module.handle_ajax("save_assessment", assessment_dict)
         task_one_json = json.loads(module.task_states[0])
         self.assertEqual(json.loads(task_one_json['child_history'][0]['post_assessment']), assessment)
-        module.handle_ajax("get_status", {})

         #Move to the next step in the problem
         try:
@@ -660,15 +661,11 @@ class OpenEndedModuleXmlTest(unittest.TestCase, DummyModulestore):
         #Get html and other data client will request
         module.get_html()

-        legend = module.handle_ajax("get_legend", {})
-        self.assertTrue(isinstance(legend, basestring))
-        module.handle_ajax("get_status", {})
         module.handle_ajax("skip_post_assessment", {})
-        self.assertTrue(isinstance(legend, basestring))

         #Get all results
-        module.handle_ajax("get_results", {})
+        module.handle_ajax("get_combined_rubric", {})

         #reset the problem
         module.handle_ajax("reset", {})
@@ -686,6 +683,7 @@ class OpenEndedModuleXmlAttemptTest(unittest.TestCase, DummyModulestore):
     def setUp(self):
         self.test_system = get_test_system()
+        self.test_system.open_ended_grading_interface = None
         self.test_system.xqueue['interface'] = Mock(
             send_to_queue=Mock(side_effect=[1, "queued"])
         )
@@ -702,8 +700,6 @@ class OpenEndedModuleXmlAttemptTest(unittest.TestCase, DummyModulestore):
         #Simulate a student saving an answer
         module.handle_ajax("save_answer", {"student_answer": self.answer})
-        status = module.handle_ajax("get_status", {})
-        self.assertTrue(isinstance(status, basestring))

         #Mock a student submitting an assessment
         assessment_dict = MockQueryDict()
@@ -711,8 +707,6 @@ class OpenEndedModuleXmlAttemptTest(unittest.TestCase, DummyModulestore):
         module.handle_ajax("save_assessment", assessment_dict)
         task_one_json = json.loads(module.task_states[0])
         self.assertEqual(json.loads(task_one_json['child_history'][0]['post_assessment']), assessment)
-        status = module.handle_ajax("get_status", {})
-        self.assertTrue(isinstance(status, basestring))

         #Move to the next step in the problem
         module.handle_ajax("next_problem", {})
...
@@ -61,7 +61,7 @@ class PeerGradingModuleTest(unittest.TestCase, DummyModulestore):
         Try getting data from the external grading service
         @return:
         """
-        success, data = self.peer_grading.query_data_for_location()
+        success, data = self.peer_grading.query_data_for_location(self.problem_location.url())
         self.assertEqual(success, True)

     def test_get_score(self):
...
@@ -93,7 +93,6 @@ def peer_grading_notifications(course, user):
         log.info(
             "Problem with getting notifications from peer grading service for course {0} user {1}.".format(course_id,
                                                                                                             student_id))
     if pending_grading:
         img_path = "/static/images/grading_notification.png"
@@ -154,7 +153,7 @@ def combined_notifications(course, user):
                                                            last_time_viewed)
         notifications = json.loads(controller_response)
         if notifications['success']:
-            if notifications['overall_need_to_check']:
+            if notifications['staff_needs_to_grade'] or notifications['student_needs_to_peer_grade']:
                 pending_grading = True
     except:
         #Non catastrophic error, so no real action
...
@@ -103,7 +103,6 @@ The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for t
       else if cmd == 'save_grade'
-        console.log("eval: #{data.score} pts, Feedback: #{data.feedback}")
         response =
           @mock('get_next', {location: data.location})
       # get_problem_list
@@ -147,12 +146,14 @@ The standard chunk of Lorem Ipsum used since the 1500s is reproduced below for t
 class @StaffGrading
+  grading_message_sel: '.grading-message'
   constructor: (backend) ->
     AjaxPrefix.addAjaxPrefix(jQuery, -> "")
     @backend = backend

     # all the jquery selectors
+    @el = $('.staff-grading')
     @problem_list_container = $('.problem-list-container')
     @problem_list = $('.problem-list')
@@ -224,12 +225,12 @@ class @StaffGrading
   setup_score_selection: =>
     @score_selection_container.html(@rubric)
     $('input[class="score-selection"]').change => @graded_callback()
-    Rubric.initialize(@location)
+    @rub = new Rubric(@el)
+    @rub.initialize(@location)

   graded_callback: () =>
     # show button if we have scores for all categories
-    if Rubric.check_complete()
+    if @rub.check_complete()
       @state = state_graded
       @submit_button.show()
@@ -237,7 +238,7 @@ class @StaffGrading
     #Previously, responses were submitted when hitting enter. Add in a modifier that ensures that ctrl+enter is needed.
     if event.which == 17 && @is_ctrl==false
       @is_ctrl=true
-    else if @is_ctrl==true && event.which == 13 && !@list_view && Rubric.check_complete()
+    else if @is_ctrl==true && event.which == 13 && !@list_view && @rub.check_complete()
       @submit_and_get_next()

   keyup_handler: (event) =>
@@ -252,7 +253,7 @@ class @StaffGrading
     # always clear out errors and messages on transition.
     @error_msg = ''
     @message = ''

     if response.success
       if response.problem_list
         @problems = response.problem_list
@@ -264,6 +265,7 @@ class @StaffGrading
       @error(response.error)

     @render_view()
+    @scroll_to_top()

   get_next_submission: (location) ->
     @location = location
@@ -272,13 +274,14 @@ class @StaffGrading
   skip_and_get_next: () =>
     data =
-      score: Rubric.get_total_score()
-      rubric_scores: Rubric.get_score_list()
+      score: @rub.get_total_score()
+      rubric_scores: @rub.get_score_list()
       feedback: @feedback_area.val()
       submission_id: @submission_id
       location: @location
       skipped: true
       submission_flagged: false
+    @gentle_alert "Skipped the submission."
     @backend.post('save_grade', data, @ajax_callback)

   get_problem_list: () ->
@@ -287,15 +290,21 @@ class @StaffGrading
   submit_and_get_next: () ->
     data =
-      score: Rubric.get_total_score()
-      rubric_scores: Rubric.get_score_list()
+      score: @rub.get_total_score()
+      rubric_scores: @rub.get_score_list()
       feedback: @feedback_area.val()
       submission_id: @submission_id
       location: @location
       submission_flagged: @flag_submission_checkbox.is(':checked')
+    @gentle_alert "Grades saved. Fetching the next submission to grade."
     @backend.post('save_grade', data, @ajax_callback)

+  gentle_alert: (msg) =>
+    @grading_message = $(@grading_message_sel)
+    @grading_message.html("")
+    @grading_message.fadeIn()
+    @grading_message.html("<p>" + msg + "</p>")
+
   error: (msg) ->
     @error_msg = msg
     @state = state_error
@@ -466,6 +475,15 @@ class @StaffGrading
       new_text = "(Hide)"
     @question_header.text(new_text)

+  scroll_to_top: () =>
+    #This try/catch is needed because jasmine fails with it
+    try
+      $('html, body').animate({
+        scrollTop: $(".staff-grading").offset().top
+      }, 200)
+    catch error
+      console.log("Scrolling error.")
+
 # for now, just create an instance and load it...
...
@@ -23,7 +23,11 @@
     </section>
     <section class="rubric-wrapper">
-        <h3>Rubric</h3>
+        <div class="visibility-control visibility-control-rubric">
+            <div class="inner">
+            </div>
+            <span class="section-header section-header-rubric">Rubric</span>
+        </div>
         <div class="rubric-container">
         </div>
...
.rubric-header {
background-color: #fafafa;
border-radius: 5px;
.rubric-collapse {
margin-right: $baseline/2;
}
}
.button {
display: inline-block;
}
.rubric { .rubric {
margin: 0px 0px; margin: 0;
color: #3C3C3C; color: #3C3C3C;
tr { tr {
margin:0px 0px; margin: 0;
height: 100%; height: 100%;
} }
td { td {
height: 100%; height: 100%;
border: 1px black solid; border: 1px black solid;
text-align: center; text-align: center;
} }
th { th {
padding: 5px; margin: $baseline/4;
margin: 5px; padding: $baseline/4;
text-align: center; text-align: center;
} }
.points-header th { .points-header th {
padding: 0px; padding: 0px;
} }
.rubric-label
{ .rubric-label {
position: relative; position: relative;
font-size: .9em; display: block;
display: block; font-size: .9em;
.choicegroup-correct {
//nothing
}
.choicegroup-incorrect {
display:none;
}
} }
.grade { .grade {
position: absolute; position: absolute;
bottom:0px; bottom: 0;
right:0px; right: 0;
} }
.selected-grade, .selected-grade,
.selected-grade .rubric-label { .selected-grade .rubric-label {
background: #666; background: #666;
color: white; color: white;
} }
input[type=radio]:checked + .rubric-label {
input[type=radio]:checked + .rubric-label {
background: white; background: white;
color: $base-font-color; color: $base-font-color;
white-space:nowrap; white-space:nowrap;
} }
.wrappable { .wrappable {
white-space:normal; white-space:normal;
} }
input[class='score-selection'] { input[class='score-selection'] {
position: relative; position: relative;
font-size: 16px; font-size: 16px;
} }
ul.rubric-list
{ ul.rubric-list {
list-style-type: none; margin: 0;
padding:0; padding: 0;
margin:0; list-style-type: none;
} }
} }
div.staff-grading, div.staff-grading,
div.peer-grading{ div.peer-grading{
border: 1px solid lightgray;
textarea.feedback-area { textarea.feedback-area {
margin: 0;
height: 75px; height: 75px;
margin: 0px;
} }
ul.rubric-list{ ul.rubric-list{
margin: 0;
padding: 0;
list-style-type: none; list-style-type: none;
padding:0;
margin:0;
li { li {
&.rubric-list-item{ &.rubric-list-item{
margin-bottom: 0px; margin-bottom: 0;
padding: 0px; padding: 0;
} }
} }
} }
h1 { h1 {
margin : 0 0 0 10px; margin: 0 0 0 $baseline/2;
} }
h2{ h2 {
a a {
{
text-size: .5em; text-size: .5em;
} }
} }
div { div {
margin: 0px; margin: 0;
&.submission-container{ &.submission-container{
overflow-y: auto; @include clearfix;
height: 150px; overflow-y: auto;
background: #F6F6F6; max-height: 300px;
border: 1px solid #ddd; height: auto;
@include clearfix; border: 1px solid #ddd;
background: #f6f6f6;
} }
} }
label { label {
margin: 0px; margin: 0;
padding: 2px; padding: 2px;
min-width: 50px; min-width: 50px;
background-color: white;
text-size: 1.5em; text-size: 1.5em;
} }
...@@ -58,143 +61,161 @@ div.peer-grading{ ...@@ -58,143 +61,161 @@ div.peer-grading{
display: none; display: none;
} }
.problem-list .problem-list {
{ width: 100%;
text-align: center;
table-layout: auto; table-layout: auto;
width:100%; text-align: center;
th
{ th {
padding: 2px; padding: 2px;
} }
td
{ td {
padding:2px; padding: 2px;
} }
td.problem-name
{ td.problem-name {
text-align:left; text-align: left;
} }
.ui-progressbar
{ .ui-progressbar {
height:1em; margin: 0;
margin:0px; padding: 0;
padding:0px; height: 1em;
} }
} }
.prompt-information-container, .prompt-information-container,
.rubric-wrapper, .rubric-wrapper,
.calibration-feedback-wrapper, .calibration-feedback-wrapper,
.grading-container .grading-container {
{ padding: $baseline/2 0;
padding: 2px;
} }
.error-container
{ .error-container {
background-color: #FFCCCC; margin-left: 0;
padding: 2px; padding: 2px;
margin-left: 0px; background-color: #ffcccc;
} }
.submission-wrapper
{ .submission-wrapper {
h3 padding: 2px;
{ padding-bottom: 15px;
h3 {
margin-bottom: 2px; margin-bottom: 2px;
} }
p
{ p {
margin-left:2px; margin-left: 2px;
} }
padding: 2px;
padding-bottom: 15px;
} }
.meta-info-wrapper .meta-info-wrapper {
{
background-color: #eee;
padding:2px; padding:2px;
div background-color: #eee;
{
display : inline; div {
display: inline;
} }
} }
.message-container, .message-container,
.grading-message .grading-message {
{ margin-left: 0;
background-color: $yellow;
padding: 2px; padding: 2px;
margin-left:0px; background-color: $yellow;
} }
.breadcrumbs .breadcrumbs {
{ margin: $baseline/2 $baseline/4;
margin-top:2px;
margin-left:0px;
margin-bottom:2px;
font-size: .8em; font-size: .8em;
} }
.instructions-panel .instructions-panel {
{ @include clearfix;
padding: $baseline/2;
margin-right:2px; background-color: #eee;
> div font-size: .8em;
{
padding: 2px; > div {
margin-bottom: 5px; margin-bottom: 5px;
padding: $baseline/2;
width: 49%;
background: #eee; background: #eee;
width:47.6%;
h3 h3 {
{
text-align:center;
text-transform:uppercase;
color: #777; color: #777;
text-align: center;
text-transform: uppercase;
} }
p
{ p{
color: #777; color: #777;
} }
} }
.calibration-panel .calibration-panel {
{ display: inline-block;
float:left; width: 20%;
border-radius: 3px;
} }
.grading-panel
{ .grading-panel {
float:right; display: inline-block;
width: 20%;
border-radius: 3px;
} }
.current-state .current-state {
{ background: #fff;
background: #1D9DD9;
h3, p
{
color: white;
}
} }
@include clearfix;
} }
.collapsible {
margin-left: 0;
.collapsible header {
{ margin-top: 2px;
margin-left: 0px; margin-bottom: 2px;
header
{
margin-top:2px;
margin-bottom:2px;
font-size: 1.2em; font-size: 1.2em;
} }
} }
.interstitial-page .interstitial-page {
{
text-align: center; text-align: center;
input[type=button]
{ input[type=button] {
margin-top: 20px; margin-top: $baseline;
} }
} }
padding: 15px; }
border: none;
div.peer-grading {
border-radius: $baseline/2;
padding: 0;
.peer-grading-tools {
padding: $baseline;
}
.error-container {
margin: $baseline;
border-radius: $baseline/4;
padding: $baseline/2;
}
.interstitial-page, .calibration-feedback, .calibration-interstitial-page {
padding: $baseline;
}
.prompt-wrapper {
padding: $baseline;
}
.grading-wrapper {
padding: $baseline;
}
}
div.staff-grading {
padding: $baseline;
} }
<%! from django.utils.translation import ugettext as _ %> <%! from django.utils.translation import ugettext as _ %>
<section id="combined-open-ended" class="combined-open-ended" data-location="${location}" data-ajax-url="${ajax_url}" data-allow_reset="${allow_reset}" data-state="${state}" data-task-count="${task_count}" data-task-number="${task_number}" data-accept-file-upload = "${accept_file_upload}"> <section id="combined-open-ended" class="combined-open-ended" data-location="${location}" data-ajax-url="${ajax_url}" data-allow_reset="${allow_reset}" data-state="${state}" data-task-count="${task_count}" data-task-number="${task_number}" data-accept-file-upload = "${accept_file_upload}">
<div class="status-container"> <div class="name">
${status|n} <h2>${display_name}</h2>
<div class="progress-container">
</div>
</div> </div>
<h2>${display_name}</h2> <div class="problemwrapper">
<div class="status-bar">
<table class="statustable">
<tr>
<td class="problemtype-container">
<div class="problemtype">
${_("Open Response")}
</div>
</td>
<td class="assessments-container">
<div class="assessment-text">
${_("Assessments:")}
</div>
<div class="status-container">
${status|n}
</div>
</td>
</tr>
</table>
</div>
<div class="item-container"> <div class="item-container">
<h4>Prompt <a href="#" class="question-header">(Hide)</a> </h4> <div class="visibility-control visibility-control-prompt">
<div class="problem-container"> <div class="inner">
% for item in items: </div>
<div class="item">${item['content'] | n}</div> <a href="" class="section-header section-header-prompt question-header">${_("Hide Prompt")}</a>
% endfor </div>
<div class="problem-container">
% for item in items:
<div class="item">${item['content'] | n}</div>
% endfor
</div>
<div class="oe-tools response-tools">
<span class="oe-tools-label"></span>
<input type="button" value="${_('Try Again')}" class="reset-button" name="reset"/>
</div>
</div> </div>
<input type="button" value="${_("Reset")}" class="reset-button" name="reset"/> <div class="combined-rubric-container">
<input type="button" value="${_("Next Step")}" class="next-step-button" name="reset"/> </div>
</div> <div class="oe-tools problem-tools">
<!--<span class="oe-tools-label">Once you have completed this form of assessment, you may continue. </span>-->
<input type="button" value="${_('Next Step')}" class="next-step-button" name="reset"/>
</div>
<section class="legend-container"> <section class="legend-container">
</section> </section>
<div class="combined-rubric-container">
</div>
<div class="result-container"> <div class="result-container">
</div>
</div> </div>
</section> </section>
<div class="combined-rubric-container" data-status="shown" data-number="1">
<div class="visibility-control visibility-control-rubric">
<div class="inner">
</div>
<span class="section-header section-header-rubric">Submitted Rubric</span>
</div>
<div class="written-feedback">
${error}
</div>
</div>
<div class="${class_name}"> <%! from django.utils.translation import ugettext as _ %>
<h4>${task_name}</h4> % for (i,result) in enumerate(results):
${results | n} % if 'task_name' in result and 'result' in result:
</div> <div class="combined-rubric-container"
%if i>0:
data-status="hidden" data-number="${i}">
% else:
data-status="shown" data-number="${i}">
% endif
<div class="visibility-control visibility-control-rubric">
<div class="inner">
</div>
<span class="section-header section-header-rubric">${_("Submitted Rubric")}</span>
</div>
<div class="oe-tools rubric-header">
<span class="oe-tools-label"></span>
<button class="rubric-collapse" href="#">${_("Toggle Full Rubric")}</button>
<span class="oe-tools-scores">
<span class="oe-tools-scores-label"></span>
% if len(results)>1:
<button href="#" alt="Previous" class="rubric-button rubric-previous-button"><i class="icon-chevron-left"></i></button>
% endif
${result['task_name']} from grader ${i+1}
% if len(results)>1:
<button href="#" alt="Next" class="rubric-button rubric-next-button"><i class="icon-chevron-right"></i></button>
% endif
</span>
</div>
${result['result'] | n}
<div class="written-feedback">
${result['feedback'] | n}
</div>
</div>
%endif
% endfor
<%! from django.utils.translation import ugettext as _ %> <%! from django.utils.translation import ugettext as _ %>
<div class="status-elements"> <div class="status-elements">
<section id="combined-open-ended-status" class="combined-open-ended-status"> <section id="combined-open-ended-status" class="combined-open-ended-status">
<div class="statusitem">
${_("Status")}
</div>
%for i in xrange(0,len(status_list)): %for i in xrange(0,len(status_list)):
<%status=status_list[i]%> <%status=status_list[i]%>
%if i==len(status_list)-1: %if status['current']:
<div class="statusitem statusitem-current" data-status-number="${i}"> <div class="statusitem statusitem-current" data-status-number="${i}">
%else: %else:
<div class="statusitem" data-status-number="${i}"> <div class="statusitem" data-status-number="${i}">
%endif %endif
%if status['grader_type'] in grader_type_image_dict and render_via_ajax: ${status['human_task']}
<% grader_image = grader_type_image_dict[status['grader_type']]%>
<img src="${grader_image}" title=${status['human_grader_type']}>
%else:
${status['human_task']}
%endif
(${status['human_state']})
</div> </div>
%endfor %endfor
</section> </section>
......
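The status template above iterates over status_list and now keys off a per-entry current flag to mark the active step, rather than treating the last list item as current. A minimal sketch of the data shape the template reads; the builder function and sample values are illustrative assumptions, not the module's actual API:

    # Illustrative sketch only: the real entries are assembled by the combined
    # open-ended module; the keys mirror what the Mako template renders.
    def build_status_list(tasks, current_index):
        """Return one dict per task with the fields the template reads."""
        status_list = []
        for i, (human_task, human_state) in enumerate(tasks):
            status_list.append({
                "human_task": human_task,        # e.g. "Peer Assessment"
                "human_state": human_state,      # read by the older markup only
                "current": i == current_index,   # selects the statusitem-current class
            })
        return status_list

    status_list = build_status_list(
        [("Submission", "Done"), ("Peer Assessment", "In Progress")],
        current_index=1,
    )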
...@@ -4,7 +4,11 @@ ...@@ -4,7 +4,11 @@
<div class="prompt"> <div class="prompt">
${prompt|n} ${prompt|n}
</div> </div>
<h4>${_("Response")}</h4> <div class="visibility-control visibility-control-response">
<div class="inner">
</div>
<span class="section-header section-header-response">${_("Response")}</span>
</div>
<textarea rows="${rows}" cols="${cols}" name="answer" class="answer short-form-response" id="input_${id}">${previous_answer|h}</textarea> <textarea rows="${rows}" cols="${cols}" name="answer" class="answer short-form-response" id="input_${id}">${previous_answer|h}</textarea>
<div class="message-wrapper"></div> <div class="message-wrapper"></div>
...@@ -12,7 +16,7 @@ ...@@ -12,7 +16,7 @@
% if state == 'initial': % if state == 'initial':
<span class="unanswered" style="display:inline-block;" id="status_${id}">${_("Unanswered")}</span> <span class="unanswered" style="display:inline-block;" id="status_${id}">${_("Unanswered")}</span>
% elif state == 'assessing': % elif state == 'assessing':
<span class="grading" id="status_${id}">${_("Submitted for grading.")} <span class="grading" id="status_${id}">
% if eta_message is not None: % if eta_message is not None:
${eta_message} ${eta_message}
% endif % endif
...@@ -27,8 +31,8 @@ ...@@ -27,8 +31,8 @@
<div class="file-upload"></div> <div class="file-upload"></div>
<input type="button" value="${_("Submit")}" class="submit-button" name="show"/> <input type="button" value="${_('Submit')}" class="submit-button" name="show"/>
<input name="skip" class="skip-button" type="button" value="${_("Skip Post-Assessment")}"/> <input name="skip" class="skip-button" type="button" value="${_('Skip Post-Assessment')}"/>
<div class="open-ended-action"></div> <div class="open-ended-action"></div>
......
<div class="rubric"> <div class="rubric">
% for i in range(len(categories)): % for i in range(len(categories)):
<% category = categories[i] %> <% category = categories[i] %>
<span class="rubric-category">${category['description']}</span> <br/> <span class="rubric-category">${category['description']}</span>
<ul class="rubric-list"> <ul class="rubric-list">
% for j in range(len(category['options'])): % for j in range(len(category['options'])):
<% option = category['options'][j] %> <% option = category['options'][j] %>
<li class="rubric-list-item"> %if len(category['options'][j]['grader_types'])>0:
<li class="rubric-list-item">
%else:
<li class="rubric-list-item rubric-info-item">
%endif
<div class="rubric-label"> <div class="rubric-label">
%for grader_type in category['options'][j]['grader_types']: %if len(category['options'][j]['grader_types'])>0:
% if grader_type in grader_type_image_dict: %if correct[i]==1:
<% grader_image = grader_type_image_dict[grader_type] %> <label class="choicegroup_correct wrapper-score-selection"></label>
% if grader_type in human_grader_types: %elif correct[i]==.5:
<% human_title = human_grader_types[grader_type] %> <label class="choicegroup_partialcorrect wrapper-score-selection"></label>
% else: %else:
<% human_title = grader_type %> <label class="choicegroup_incorrect wrapper-score-selection"></label>
% endif %endif
<img src="${grader_image}" title="${human_title}"/> <span class="wrappable"> ${option['points']} points : ${option['text']}</span>
% endif </label>
%endfor %else:
${option['points']} points : ${option['text']} <label class="rubric-elements-info">
<span class="wrapper-score-selection"> </span>
<span class="wrappable"> ${option['points']} points : ${option['text']}</span>
</label>
%endif
</div> </div>
</li> </li>
% endfor % endfor
......
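In the read-only rubric above, each category's correct value selects the label class: 1 maps to choicegroup_correct, .5 to choicegroup_partialcorrect, and anything else to choicegroup_incorrect. A short sketch of that mapping; the helper name is invented for illustration and is not part of the codebase:

    def choicegroup_class(correct_value):
        """Map a rubric category's correctness value to the CSS class
        the template applies (illustrative helper)."""
        if correct_value == 1:
            return "choicegroup_correct"
        if correct_value == 0.5:
            return "choicegroup_partialcorrect"
        return "choicegroup_incorrect"

    assert choicegroup_class(1) == "choicegroup_correct"
    assert choicegroup_class(0.5) == "choicegroup_partialcorrect"
    assert choicegroup_class(0) == "choicegroup_incorrect"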
<%! from django.utils.translation import ugettext as _ %> <%! from django.utils.translation import ugettext as _ %>
<% from random import randint %>
<form class="rubric-template" id="inputtype_${id}" xmlns="http://www.w3.org/1999/html"> <form class="rubric-template" id="inputtype_${id}" xmlns="http://www.w3.org/1999/html">
<h3>${_("Rubric")}</h3> <div class="visibility-control visibility-control-rubric">
<p>${_("Select the criteria you feel best represents this submission in each category.")}</p> <div class="inner">
</div>
<span class="section-header section-header-rubric">${_("Rubric")}</span>
</div>
<p>Select the criteria you feel best represents this submission in each category.</p>
<div class="rubric"> <div class="rubric">
% for i in range(len(categories)): % for i in range(len(categories)):
<% category = categories[i] %> <% category = categories[i] %>
<span class="rubric-category">${category['description']}</span> <br/> <% m = randint(0,1000) %>
<span class="rubric-category">${category['description']}</span>
<ul class="rubric-list"> <ul class="rubric-list">
% for j in range(len(category['options'])): % for j in range(len(category['options'])):
<% option = category['options'][j] %> <% option = category['options'][j] %>
...@@ -14,8 +20,8 @@ ...@@ -14,8 +20,8 @@
%else: %else:
<li class="rubric-list-item"> <li class="rubric-list-item">
% endif % endif
<label class="rubric-label" for="score-${i}-${j}"> <label class="rubric-label" for="score-${i}-${j}-${m}">
<input type="radio" class="score-selection" data-category="${i}" name="score-selection-${i}" id="score-${i}-${j}" value="${option['points']}"/> <span class="wrapper-score-selection"><input type="radio" class="score-selection" data-category="${i}" name="score-selection-${i}" id="score-${i}-${j}-${m}" value="${option['points']}"/></span>
<span class="wrappable"> ${option['points']} points : ${option['text']}</span> <span class="wrappable"> ${option['points']} points : ${option['text']}</span>
</label> </label>
</li> </li>
......
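The rubric form above now mixes a random suffix (m = randint(0, 1000)) into each radio id, presumably so that ids and their label "for" attributes stay unique when several rubric forms are rendered on the same page. A hedged sketch of the id scheme; the helper is illustrative only:

    from random import randint

    def rubric_input_id(category_index, option_index, form_suffix=None):
        """Build an id matching the template's score-${i}-${j}-${m} pattern.
        The random suffix keeps ids distinct across multiple rendered forms."""
        if form_suffix is None:
            form_suffix = randint(0, 1000)
        return "score-%d-%d-%d" % (category_index, option_index, form_suffix)

    # Two forms on the same page would normally collide on score-0-2;
    # the suffix makes each form's ids (very likely) unique.
    print(rubric_input_id(0, 2))
    print(rubric_input_id(0, 2))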
...@@ -5,20 +5,22 @@ ...@@ -5,20 +5,22 @@
<div class="prompt"> <div class="prompt">
${prompt} ${prompt}
</div> </div>
<div class="visibility-control visibility-control-response">
<h4>${_("Response")}</h4> <div class="inner">
</div>
<span class="section-header section-header-response">${_("Response")}</span>
</div>
<div> <div>
<textarea name="answer" class="answer short-form-response" cols="70" rows="20">${previous_answer|n}</textarea> <textarea name="answer" class="answer short-form-response" cols="70" rows="20">${previous_answer|n}</textarea>
</div>
<div class="open-ended-action"></div> <div class="message-wrapper"></div>
<div class="grader-status"></div>
<div class="rubric-wrapper">${initial_rubric}</div> <div class="rubric-wrapper">${initial_rubric}</div>
<div class="hint-wrapper"></div>
<div class="message-wrapper"></div>
<div class="file-upload"></div> <div class="file-upload"></div>
<input type="button" value="${_("Submit")}" class="submit-button" name="show"/> <input type="button" value="${_('Submit')}" class="submit-button" name="show"/>
<div class="open-ended-action"></div>
<span id="answer_${id}"></span>
</section> </section>
...@@ -19,75 +19,71 @@ ...@@ -19,75 +19,71 @@
<div class="staff-grading" data-ajax_url="${ajax_url}"> <div class="staff-grading" data-ajax_url="${ajax_url}">
<h1>${_("Staff grading")}</h1> <h1>${_("Staff grading")}</h1>
<div class="breadcrumbs"> <div class="breadcrumbs"></div>
</div> <div class="error-container"></div>
<div class="error-container"> <div class="message-container"></div>
</div>
<div class="message-container">
</div>
<!-- Problem List View --> <!-- Problem List View -->
<section class="problem-list-container"> <section class="problem-list-container">
<h2>${_("Instructions")}</h2> <h2>${_("Instructions")}</h2>
<div class="instructions"> <div class="instructions">
<p>${_("This is the list of problems that currently need to be graded in order to train the machine learning models. Each problem needs to be trained separately, and we have indicated the number of student submissions that need to be graded in order for a model to be generated. You can grade more than the minimum required number of submissions--this will improve the accuracy of machine learning, though with diminishing returns. You can see the current accuracy of machine learning while grading.")}</p> <p>${_("This is the list of problems that currently need to be graded in order to train the machine learning models. Each problem needs to be trained separately, and we have indicated the number of student submissions that need to be graded in order for a model to be generated. You can grade more than the minimum required number of submissions--this will improve the accuracy of machine learning, though with diminishing returns. You can see the current accuracy of machine learning while grading.")}</p>
</div> </div>
<h2>${_("Problem List")}</h2> <h2>${_("Problem List")}</h2>
<table class="problem-list"> <table class="problem-list">
</table> </table>
</section> </section>
<!-- Grading View --> <!-- Grading View -->
<section class="prompt-wrapper"> <section class="prompt-wrapper">
<h2 class="prompt-name"></h2> <div class="grading-message"></div>
<div class="meta-info-wrapper"> <h2 class="prompt-name">
<div class="problem-meta-info-container"> </h2>
</div> <div class="meta-info-wrapper">
<div class="ml-error-info-container"> <div class="problem-meta-info-container">
</div> </div>
</div> <div class="ml-error-info-container">
<div class="prompt-information-container"> </div>
</div>
<div class="prompt-information-container">
<h3>${_('Prompt')} <a href="#" class="question-header">${_("(Hide)")}</a></h3> <h3>${_('Prompt')} <a href="#" class="question-header">${_("(Hide)")}</a></h3>
<div class="prompt-container"> <div class="prompt-container">
</div>
</div> </div>
</div>
</section> </section>
<div class="action-button"> <div class="action-button">
<input type=button value="${_("Submit")}" class="action-button" name="show" /> <input type=button value="${_("Submit")}" class="action-button" name="show" />
</div> </div>
<section class="grading-wrapper"> <section class="grading-wrapper">
<div class="grading-container">
<div class="grading-container"> <div class="submission-wrapper">
<div class="submission-wrapper"> <h3>${_("Student Response")}</h3>
<h3>${_("Student Response")}</h3> <div class="submission-container">
<div class="submission-container"> </div>
</div> </div>
</div> <div class="evaluation">
<div class="evaluation"> <p class="score-selection-container">
<p class="score-selection-container"> </p>
</p> <p class="grade-selection-container">
<p class="grade-selection-container"> </p>
</p> <h3>${_("Written Feedback")}</h3>
<h3>${_("Written Feedback")}</h3> <textarea name="feedback" placeholder="${_("Feedback for student (optional)")}" class="feedback-area" cols="70" >
<textarea name="feedback" placeholder="${_("Feedback for student (optional)")}" </textarea>
class="feedback-area" cols="70" ></textarea> <p>
<p> ${_("Flag as inappropriate content for later review")} <input class="flag-checkbox" type="checkbox" />
${_("Flag as inappropriate content for later review")} <input class="flag-checkbox" type="checkbox" /> </p>
</p> </div>
</div> <div class="submission">
<input type="button" value="${_("Submit")}" class="submit-button" name="show"/>
<input type="button" value="${_("Skip")}" class="skip-button" name="skip"/>
<div class="submission"> </div>
<input type="button" value="${_("Submit")}" class="submit-button" name="show"/>
<input type="button" value="${_("Skip")}" class="skip-button" name="skip"/> </div>
</div> </section>
</div> </div>
</div>
</section> </section>
...@@ -2,59 +2,61 @@ ...@@ -2,59 +2,61 @@
<section class="container peer-grading-container"> <section class="container peer-grading-container">
<div class="peer-grading" data-ajax-url="${ajax_url}" data-use-single-location="${use_single_location}"> <div class="peer-grading" data-ajax-url="${ajax_url}" data-use-single-location="${use_single_location}">
<div class="error-container">${error_text}</div> <div class="error-container">${error_text}</div>
<h1>${_("Peer Grading")}</h1> <div class="peer-grading-tools">
<h2>${_("Instructions")}</h2> <h1 class="peer-grading-title">${_("Peer Grading")}</h1>
<p>${_("Here are a list of problems that need to be peer graded for this course.")}</p> <h2 class="peer-grading-instructions">${_("Instructions")}</h2>
% if success: <p>${_("Here are a list of problems that need to be peer graded for this course.")}</p>
% if len(problem_list) == 0: % if success:
<div class="message-container"> % if len(problem_list) == 0:
${_("Nothing to grade!")} <div class="message-container">
</div> ${_("You currently do not having any peer grading to do. In order to have peer grading to do, you need to have submitted a response to a peer grading problem. The instructor also needs to score the essays that are used to help you better understand the grading criteria.")}
%else: </div>
<div class="problem-list-container"> %else:
<table class="problem-list"> <div class="problem-list-container">
<tr> <table class="problem-list">
<th>${_("Problem Name")}</th> <tr>
<th>${_("Due date")}</th> <th>${_("Problem Name")}</th>
<th>${_("Graded")}</th> <th>${_("Due date")}</th>
<th>${_("Available")}</th> <th>${_("Graded")}</th>
<th>${_("Required")}</th> <th>${_("Available")}</th>
<th>${_("Progress")}</th> <th>${_("Required")}</th>
<th>${_("Progress")}</th>
</tr>
%for problem in problem_list:
<tr data-graded="${problem['num_graded']}" data-required="${problem['num_required']}">
<td class="problem-name">
%if problem['closed']:
${problem['problem_name']}
%else:
<a href="#problem" data-location="${problem['location']}" class="problem-button">${problem['problem_name']}</a>
%endif
</td>
<td>
% if problem['due']:
${problem['due']}
% else:
${_("No due date")}
% endif
</td>
<td>
${problem['num_graded']}
</td>
<td>
${problem['num_pending']}
</td>
<td>
${problem['num_required']}
</td>
<td>
<div class="progress-bar">
</div>
</td>
</tr> </tr>
%for problem in problem_list: %endfor
<tr data-graded="${problem['num_graded']}" data-required="${problem['num_required']}"> </table>
<td class="problem-name"> </div>
%if problem['closed']: %endif
${problem['problem_name']}
%else:
<a href="#problem" data-location="${problem['location']}" class="problem-button">${problem['problem_name']}</a>
%endif
</td>
<td>
% if problem['due']:
${problem['due']}
% else:
${_("No due date")}
% endif
</td>
<td>
${problem['num_graded']}
</td>
<td>
${problem['num_pending']}
</td>
<td>
${problem['num_required']}
</td>
<td>
<div class="progress-bar">
</div>
</td>
</tr>
%endfor
</table>
</div>
%endif %endif
%endif </div>
</div> </div>
</section> </section>
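Each row in the problem list above carries data-graded and data-required attributes, which the client-side progress bar presumably consumes. A sketch of the per-problem dict the template reads and of the implied progress calculation; the helper and sample values are illustrative:

    def grading_progress(problem):
        """Fraction of required peer grading completed, clamped to 1.0."""
        required = problem.get("num_required", 0)
        if required <= 0:
            return 1.0
        return min(problem["num_graded"] / float(required), 1.0)

    problem = {
        "problem_name": "Essay 1",
        "location": "i4x://org/course/combinedopenended/Essay1",  # illustrative location
        "closed": False,
        "due": None,          # template prints "No due date" when missing
        "num_graded": 1,
        "num_pending": 4,
        "num_required": 3,
    }
    print("%.0f%% graded" % (100 * grading_progress(problem)))  # 33% graded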
...@@ -4,98 +4,95 @@ ...@@ -4,98 +4,95 @@
<div class="error-container"></div> <div class="error-container"></div>
<section class="content-panel"> <section class="content-panel">
<div class="instructions-panel"> <div class="instructions-panel">
<div class="calibration-panel"> <div class="calibration-panel">
<h3>${_("Learning to Grade")}</h3> <h3>${_("Learning to Grade")}</h3>
</div> </div>
<div class="grading-panel"> <div class="grading-panel">
<h3>${_("Peer Grading")}</h3> <h3>${_("Peer Grading")}</h3>
</div>
</div> </div>
</div>
<div class="prompt-wrapper"> <div class="prompt-wrapper">
<h2>${_('Prompt')} <a href="#" class="question-header">${_('(Hide)')}</a></h2> <div class="visibility-control visibility-control-prompt">
<div class="prompt-information-container"> <div class="inner">
<section> </div>
<div class="prompt-container"> <a href="" class="section-header section-header-prompt question-header">${_('Hide Prompt')}</a>
</div>
<div class="prompt-information-container">
<section>
<div class="prompt-container">
</div>
</section>
</div>
</div>
<section class="grading-wrapper">
<div class="grading-message">
</div>
<h2>${_("Student Response")}</h2>
<div class="grading-container">
<div class="submission-wrapper">
<h3></h3>
<div class="submission-container">
</div>
<input type="hidden" name="submission-key" value="" />
<input type="hidden" name="essay-id" value="" />
</div>
<div class="evaluation">
<p class="rubric-selection-container"></p>
<p class="score-selection-container"></p>
<h3>${_("Written Feedback")}</h3>
<p>${_("Please include some written feedback as well.")}</p>
<textarea name="feedback" placeholder="Feedback for student" class="feedback-area" cols="70" ></textarea>
<div class="flag-student-container"> ${_("This submission has explicit or pornographic content : ")}
<input type="checkbox" class="flag-checkbox" value="student_is_flagged">
</div>
<div class="answer-unknown-container"> ${_("I do not know how to grade this question : ")}
<input type="checkbox" class="answer-unknown-checkbox" value="answer_is_unknown">
</div>
</div>
<div class="submission">
<input type="button" value="${_("Submit")}" class="submit-button" name="show"/>
</div>
</div> </div>
</section> </section>
</section>
<!-- Calibration feedback: Shown after a calibration is sent -->
<section class="calibration-feedback">
<h2>${_("How did I do?")}</h2>
<div class="calibration-feedback-wrapper">
</div> </div>
<input type="button" class="calibration-feedback-button" value="${_("Continue")}" name="calibration-feedback-button" />
</section>
</div> <!-- Interstitial Page: Shown between calibration and grading steps -->
<section class="interstitial-page">
<h1>${_("Ready to grade!")}</h1>
<p>${_("You have finished learning to grade, which means that you are now ready to start grading.")}</p>
<input type="button" class="interstitial-page-button" value="${_("Start Grading!")}" name="interstitial-page-button" />
</section>
<section class="grading-wrapper"> <!-- Calibration Interstitial Page: Shown before calibration -->
<h2>${_("Student Response")}</h2> <section class="calibration-interstitial-page">
<h1>${_("Learning to grade")}</h1>
<p>${_("You have not yet finished learning to grade this problem.")}</p>
<p>${_("You will now be shown a series of instructor-scored essays, and will be asked to score them yourself.")}</p>
<p>${_("Once you can score the essays similarly to an instructor, you will be ready to grade your peers.")}</p>
<input type="button" class="calibration-interstitial-page-button" value="${_("Start learning to grade")}" name="calibration-interstitial-page-button" />
</section>
<div class="grading-container"> <!-- Flag submission confirmation dialog -->
<div class="submission-wrapper"> <section class="flag-submission-confirmation">
<h3></h3> <h4>${_("Are you sure that you want to flag this submission?")}</h4>
<div class="submission-container"> <p>
${_("You are about to flag a submission. You should only flag a submission that contains explicit or offensive content. If the submission is not addressed to the question or is incorrect, you should give it a score of zero and accompanying feedback instead of flagging it.")}
</p>
<div>
<input type="button" class="flag-submission-removal-button" value="${_("Remove Flag")}" name="calibration-interstitial-page-button" />
<input type="button" class="flag-submission-confirmation-button" value="${_("Keep Flag")}" name="calibration-interstitial-page-button" />
</div> </div>
<input type="hidden" name="submission-key" value="" /> </section>
<input type="hidden" name="essay-id" value="" />
</div>
<div class="evaluation">
<p class="rubric-selection-container"></p>
<p class="score-selection-container">
</p>
<h3>${_("Written Feedback")}</h3>
<p>${_("Please include some written feedback as well.")}</p>
<textarea name="feedback" placeholder="Feedback for student"
class="feedback-area" cols="70" ></textarea>
<div class="flag-student-container"> ${_("This submission has explicit or pornographic content : ")}<input type="checkbox" class="flag-checkbox" value="student_is_flagged"> </div>
<div class="answer-unknown-container"> ${_("I do not know how to grade this question : ")}<input type="checkbox" class="answer-unknown-checkbox" value="answer_is_unknown"></div>
</div>
<div class="submission">
<input type="button" value="${_("Submit")}" class="submit-button" name="show"/>
</div>
</div>
<div class="grading-message">
</div>
</section>
</section>
<!-- Calibration feedback: Shown after a calibration is sent -->
<section class="calibration-feedback">
<h2>${_("How did I do?")}</h2>
<div class="calibration-feedback-wrapper">
</div>
<input type="button" class="calibration-feedback-button" value="${_("Continue")}" name="calibration-feedback-button" />
</section>
<!-- Interstitial Page: Shown between calibration and grading steps -->
<section class="interstitial-page">
<h1>${_("Ready to grade!")}</h1>
<p>${_("You have finished learning to grade, which means that you are now ready to start grading.")}</p>
<input type="button" class="interstitial-page-button" value="${_("Start Grading!")}" name="interstitial-page-button" />
</section>
<!-- Calibration Interstitial Page: Shown before calibration -->
<section class="calibration-interstitial-page">
<h1>${_("Learning to grade")}</h1>
<p>${_("You have not yet finished learning to grade this problem.")}</p>
<p>${_("You will now be shown a series of instructor-scored essays, and will be asked to score them yourself.")}</p>
<p>${_("Once you can score the essays similarly to an instructor, you will be ready to grade your peers.")}</p>
<input type="button" class="calibration-interstitial-page-button" value="${_("Start learning to grade")}" name="calibration-interstitial-page-button" />
</section>
<!-- Flag submission confirmation dialog -->
<section class="flag-submission-confirmation">
<h4>${_("Are you sure that you want to flag this submission?")}</h4>
<p>
${_("You are about to flag a submission. You should only flag a submission that contains explicit or offensive content. If the submission is not addressed to the question or is incorrect, you should give it a score of zero and accompanying feedback instead of flagging it.")}
</p>
<div>
<input type="button" class="flag-submission-removal-button" value="${_("Remove Flag")}" name="calibration-interstitial-page-button" />
<input type="button" class="flag-submission-confirmation-button" value="${_("Keep Flag")}" name="calibration-interstitial-page-button" />
</div>
</section>
<input type="button" value="${_("Go Back")}" class="action-button" name="back" /> <input type="button" value="${_("Go Back")}" class="action-button" name="back" />
</div> </div>
</section> </section>