Commit e789c382 by VikParuchuri

Merge pull request #1837 from MITx/fix/vik/sa-and-coe

Fix/vik/sa and coe
parents 2314c609 5125d8b0
......@@ -14,7 +14,7 @@ from xmodule.open_ended_grading_classes.xblock_field_types import StringyFloat
log = logging.getLogger("mitx.courseware")
V1_SETTINGS_ATTRIBUTES = ["display_name", "attempts", "is_graded", "accept_file_upload",
"skip_spelling_checks", "due", "graceperiod"]
"skip_spelling_checks", "due", "graceperiod", "weight"]
V1_STUDENT_ATTRIBUTES = ["current_task_number", "task_states", "state",
"student_attempts", "ready_to_reset"]
......
......@@ -122,6 +122,7 @@ div.combined-rubric-container {
span.rubric-category {
font-size: .9em;
font-weight: bold;
padding-bottom: 5px;
padding-top: 10px;
}
......
......@@ -90,6 +90,7 @@ class @CombinedOpenEnded
@element=element
@reinitialize(element)
$(window).keydown @keydown_handler
$(window).keyup @keyup_handler
reinitialize: (element) ->
@wrapper=$(element).find('section.xmodule_CombinedOpenEndedModule')
......@@ -104,6 +105,7 @@ class @CombinedOpenEnded
@location = @el.data('location')
# set up handlers for click tracking
Rubric.initialize(@location)
@is_ctrl = false
@allow_reset = @el.data('allow_reset')
@reset_button = @$('.reset-button')
......@@ -322,6 +324,7 @@ class @CombinedOpenEnded
save_answer: (event) =>
event.preventDefault()
max_filesize = 2*1000*1000 #2MB
pre_can_upload_files = @can_upload_files
if @child_state == 'initial'
files = ""
if @can_upload_files == true
......@@ -353,6 +356,7 @@ class @CombinedOpenEnded
@find_assessment_elements()
@rebind()
else
@can_upload_files = pre_can_upload_files
@gentle_alert response.error
$.ajaxWithPrefix("#{@ajax_url}/save_answer",settings)
......@@ -360,10 +364,17 @@ class @CombinedOpenEnded
else
@errors_area.html(@out_of_sync_message)
keydown_handler: (e) =>
# only do anything when the key pressed is the 'enter' key
if e.which == 13 && @child_state == 'assessing' && Rubric.check_complete()
@save_assessment(e)
keydown_handler: (event) =>
#Previously, responses were submitted when hitting enter. Add in a modifier that ensures that ctrl+enter is needed.
if event.which == 17 && @is_ctrl==false
@is_ctrl=true
else if @is_ctrl==true && event.which == 13 && @child_state == 'assessing' && Rubric.check_complete()
@save_assessment(event)
keyup_handler: (event) =>
#Handle keyup event when ctrl key is released
if event.which == 17 && @is_ctrl==true
@is_ctrl=false
save_assessment: (event) =>
event.preventDefault()
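The same ctrl+enter guard recurs in the peer grading and staff grading handlers further down. A minimal Python sketch of the shared state machine (the class name is illustrative; key codes follow the browser convention, 17 = Ctrl and 13 = Enter):

```python
# Sketch of the ctrl+enter submission guard used by these keydown/keyup handlers.
class CtrlEnterGuard(object):
    def __init__(self):
        self.is_ctrl = False

    def keydown(self, key_code, ready_to_submit):
        """Return True when the submission should fire."""
        if key_code == 17:
            self.is_ctrl = True
            return False
        # Enter alone no longer submits; Ctrl must be held down as well.
        return self.is_ctrl and key_code == 13 and ready_to_submit

    def keyup(self, key_code):
        # Releasing Ctrl clears the modifier state.
        if key_code == 17:
            self.is_ctrl = False
```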
......@@ -482,8 +493,10 @@ class @CombinedOpenEnded
if @accept_file_upload == "True"
if window.File and window.FileReader and window.FileList and window.Blob
@can_upload_files = true
@file_upload_area.html('<input type="file" class="file-upload-box">')
@file_upload_area.html('<input type="file" class="file-upload-box"><img class="file-upload-preview" src="#" alt="Uploaded image" />')
@file_upload_area.show()
$('.file-upload-preview').hide()
$('.file-upload-box').change @preview_image
else
@gentle_alert 'File uploads are required for this question, but are not supported in this browser. Try the newest version of google chrome. Alternatively, if you have uploaded the image to the web, you can paste a link to it into the answer box.'
......@@ -539,3 +552,28 @@ class @CombinedOpenEnded
log_feedback_selection: (event) ->
target_selection = $(event.target).val()
Logger.log 'oe_feedback_response_selected', {value: target_selection}
remove_attribute: (name) =>
if $('.file-upload-preview').attr(name)
$('.file-upload-preview')[0].removeAttribute(name)
preview_image: () =>
if $('.file-upload-box')[0].files && $('.file-upload-box')[0].files[0]
reader = new FileReader()
reader.onload = (e) =>
max_dim = 150
@remove_attribute('src')
@remove_attribute('height')
@remove_attribute('width')
$('.file-upload-preview').attr('src', e.target.result)
height_px = $('.file-upload-preview')[0].height
width_px = $('.file-upload-preview')[0].width
scale_factor = 0
if height_px>width_px
scale_factor = height_px/max_dim
else
scale_factor = width_px/max_dim
$('.file-upload-preview')[0].width = width_px/scale_factor
$('.file-upload-preview')[0].height = height_px/scale_factor
$('.file-upload-preview').show()
reader.readAsDataURL($('.file-upload-box')[0].files[0])
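The preview scales the larger image dimension down to max_dim pixels while preserving the aspect ratio. A sketch of the same arithmetic (the function name is illustrative):

```python
def scaled_preview_size(width_px, height_px, max_dim=150):
    """Scale so the larger dimension equals max_dim, keeping aspect ratio."""
    if height_px > width_px:
        scale_factor = height_px / float(max_dim)
    else:
        scale_factor = width_px / float(max_dim)
    return width_px / scale_factor, height_px / scale_factor

# A 600x400 upload scales by 600/150 = 4, yielding a 150x100 preview.
```

Note that, as in the CoffeeScript above, an image smaller than max_dim on both sides gets a scale factor below 1 and is enlarged rather than left at its natural size.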
......@@ -161,6 +161,7 @@ class @PeerGradingProblem
constructor: (backend) ->
@prompt_wrapper = $('.prompt-wrapper')
@backend = backend
@is_ctrl = false
# get the location of the problem
......@@ -183,6 +184,12 @@ class @PeerGradingProblem
@grading_message.hide()
@question_header = $('.question-header')
@question_header.click @collapse_question
@flag_submission_confirmation = $('.flag-submission-confirmation')
@flag_submission_confirmation_button = $('.flag-submission-confirmation-button')
@flag_submission_removal_button = $('.flag-submission-removal-button')
@flag_submission_confirmation_button.click @close_dialog_box
@flag_submission_removal_button.click @remove_flag
@grading_wrapper =$('.grading-wrapper')
@calibration_feedback_panel = $('.calibration-feedback')
......@@ -212,6 +219,7 @@ class @PeerGradingProblem
@answer_unknown_checkbox = $('.answer-unknown-checkbox')
$(window).keydown @keydown_handler
$(window).keyup @keyup_handler
@collapse_question()
......@@ -233,9 +241,13 @@ class @PeerGradingProblem
@calibration_interstitial_page.hide()
@is_calibrated_check()
@flag_student_checkbox.click =>
@flag_box_checked()
@calibration_feedback_button.hide()
@calibration_feedback_panel.hide()
@error_container.hide()
@flag_submission_confirmation.hide()
@is_calibrated_check()
......@@ -283,6 +295,17 @@ class @PeerGradingProblem
#
##########
remove_flag: () =>
@flag_student_checkbox.removeAttr("checked")
@close_dialog_box()
close_dialog_box: () =>
$( ".flag-submission-confirmation" ).dialog('close')
flag_box_checked: () =>
if @flag_student_checkbox.is(':checked')
$( ".flag-submission-confirmation" ).dialog({ height: 400, width: 400 })
# called after we perform an is_student_calibrated check
calibration_check_callback: (response) =>
if response.success
......@@ -338,13 +361,19 @@ class @PeerGradingProblem
@grade = Rubric.get_total_score()
keydown_handler: (event) =>
if event.which == 13 && @submit_button.is(':visible')
#Previously, responses were submitted when hitting enter. Add in a modifier that ensures that ctrl+enter is needed.
if event.which == 17 && @is_ctrl==false
@is_ctrl=true
else if event.which == 13 && @submit_button.is(':visible') && @is_ctrl==true
if @calibration
@submit_calibration_essay()
else
@submit_grade()
keyup_handler: (event) =>
#Handle keyup event when ctrl key is released
if event.which == 17 && @is_ctrl==true
@is_ctrl=false
##########
......@@ -443,7 +472,6 @@ class @PeerGradingProblem
calibration_wrapper = $('.calibration-feedback-wrapper')
calibration_wrapper.html("<p>The score you gave was: #{@grade}. The actual score is: #{response.actual_score}</p>")
score = parseInt(@grade)
actual_score = parseInt(response.actual_score)
......@@ -452,6 +480,11 @@ class @PeerGradingProblem
else
calibration_wrapper.append("<p>You may want to review the rubric again.</p>")
if response.actual_rubric != undefined
calibration_wrapper.append("<div>Instructor Scored Rubric: #{response.actual_rubric}</div>")
if response.actual_feedback!=undefined
calibration_wrapper.append("<div>Instructor Feedback: #{response.actual_feedback}</div>")
# disable score selection and submission from the grading interface
$("input[name='score-selection']").attr('disabled', true)
@submit_button.hide()
......
......@@ -131,6 +131,7 @@ class CombinedOpenEndedV1Module():
self.state = instance_state.get('state', self.INITIAL)
self.student_attempts = instance_state.get('student_attempts', 0)
self.weight = instance_state.get('weight', 1)
#Allow reset is true if student has failed the criteria to move to the next child task
self.ready_to_reset = instance_state.get('ready_to_reset', False)
......@@ -144,7 +145,7 @@ class CombinedOpenEndedV1Module():
grace_period_string = self.instance_state.get('graceperiod', None)
try:
self.timeinfo = TimeInfo(due_date, grace_period_string)
except:
except Exception:
log.error("Error parsing due date information in location {0}".format(location))
raise
self.display_due_date = self.timeinfo.display_due_date
......@@ -362,7 +363,7 @@ class CombinedOpenEndedV1Module():
# if link.startswith(XASSET_SRCREF_PREFIX):
# Placing try except so that if the error is fixed, this code will start working again.
return_html = rewrite_links(html, self.rewrite_content_links)
except:
except Exception:
pass
return return_html
......@@ -402,6 +403,7 @@ class CombinedOpenEndedV1Module():
self.static_data, instance_state=task_state)
last_response = task.latest_answer()
last_score = task.latest_score()
all_scores = task.all_scores()
last_post_assessment = task.latest_post_assessment(self.system)
last_post_feedback = ""
feedback_dicts = [{}]
......@@ -417,13 +419,18 @@ class CombinedOpenEndedV1Module():
else:
last_post_evaluation = task.format_feedback_with_evaluation(self.system, last_post_assessment)
last_post_assessment = last_post_evaluation
try:
rubric_data = task._parse_score_msg(task.child_history[-1].get('post_assessment', ""), self.system)
rubric_scores = rubric_data['rubric_scores']
grader_types = rubric_data['grader_types']
feedback_items = rubric_data['feedback_items']
feedback_dicts = rubric_data['feedback_dicts']
grader_ids = rubric_data['grader_ids']
submission_ids = rubric_data['submission_ids']
except Exception:
log.debug("Could not parse rubric data from child history. "
"Likely we have not yet initialized a previous step, so this is perfectly fine.")
rubric_data = {}
rubric_scores = rubric_data.get('rubric_scores')
grader_types = rubric_data.get('grader_types')
feedback_items = rubric_data.get('feedback_items')
feedback_dicts = rubric_data.get('feedback_dicts')
grader_ids = rubric_data.get('grader_ids')
submission_ids = rubric_data.get('submission_ids')
elif task_type == "selfassessment":
rubric_scores = last_post_assessment
grader_types = ['SA']
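The replacement above wraps the rubric parse in a try/except and falls back to an empty dict, so each field degrades to None instead of raising when a previous step has not been initialized. A sketch of the pattern (the parse callable is illustrative):

```python
def safe_rubric_fields(parse_fn, raw_message):
    """Parse rubric data, degrading every field to None on failure."""
    try:
        rubric_data = parse_fn(raw_message)
    except Exception:
        # A previous step may not be initialized yet; that is expected.
        rubric_data = {}
    keys = ('rubric_scores', 'grader_types', 'feedback_items',
            'feedback_dicts', 'grader_ids', 'submission_ids')
    return {k: rubric_data.get(k) for k in keys}
```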
......@@ -441,7 +448,7 @@ class CombinedOpenEndedV1Module():
human_state = task.HUMAN_NAMES[state]
else:
human_state = state
if len(grader_types) > 0:
if grader_types is not None and len(grader_types) > 0:
grader_type = grader_types[0]
else:
grader_type = "IN"
......@@ -454,6 +461,7 @@ class CombinedOpenEndedV1Module():
last_response_dict = {
'response': last_response,
'score': last_score,
'all_scores': all_scores,
'post_assessment': last_post_assessment,
'type': task_type,
'max_score': max_score,
......@@ -732,10 +740,37 @@ class CombinedOpenEndedV1Module():
"""
max_score = None
score = None
if self.check_if_done_and_scored():
last_response = self.get_last_response(self.current_task_number)
max_score = last_response['max_score']
score = last_response['score']
if self.is_scored and self.weight is not None:
#Finds the maximum score of all student attempts and keeps it.
score_mat = []
for i in xrange(0, len(self.task_states)):
#For each task, extract all student scores on that task (each attempt for each task)
last_response = self.get_last_response(i)
max_score = last_response.get('max_score', None)
score = last_response.get('all_scores', None)
if score is not None:
#Convert none scores and weight scores properly
for z in xrange(0, len(score)):
if score[z] is None:
score[z] = 0
score[z] *= float(self.weight)
score_mat.append(score)
if len(score_mat) > 0:
#Currently, assume that the final step is the correct one, and that those are the final scores.
#This will change in the future, which is why the machinery above exists to extract all scores on all steps
#TODO: better final score handling.
scores = score_mat[-1]
score = max(scores)
else:
score = 0
if max_score is not None:
#Weight the max score if it is not None
max_score *= float(self.weight)
else:
#Without a max_score, we cannot have a score!
score = None
score_dict = {
'score': score,
......
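In effect, the new get_score weights every recorded attempt on the final task and keeps the best one, with None scores counted as zero. A condensed sketch of that selection (names are illustrative):

```python
def weighted_best_score(all_scores, max_score, weight):
    """Best weighted score across attempts, plus the weighted max score."""
    if max_score is None:
        # Without a max_score, we cannot have a score.
        return None, None
    scores = [(s or 0) * float(weight) for s in (all_scores or [])]
    best = max(scores) if scores else 0
    return best, max_score * float(weight)

# e.g. attempts [None, 2, 3] with max_score 4 and weight 0.5 -> (1.5, 2.0)
```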
......@@ -72,7 +72,8 @@ class OpenEndedChild(object):
try:
instance_state = json.loads(instance_state)
except:
log.error("Could not load instance state for open ended. Setting it to nothing.: {0}".format(instance_state))
log.error(
"Could not load instance state for open ended. Setting it to nothing.: {0}".format(instance_state))
else:
instance_state = {}
......@@ -81,8 +82,8 @@ class OpenEndedChild(object):
# element.
# Scores are on scale from 0 to max_score
self.child_history=instance_state.get('child_history',[])
self.child_state=instance_state.get('child_state', self.INITIAL)
self.child_history = instance_state.get('child_history', [])
self.child_state = instance_state.get('child_state', self.INITIAL)
self.child_created = instance_state.get('child_created', False)
self.child_attempts = instance_state.get('child_attempts', 0)
......@@ -162,6 +163,12 @@ class OpenEndedChild(object):
return None
return self.child_history[-1].get('score')
def all_scores(self):
"""None if not available"""
if not self.child_history:
return None
return [self.child_history[i].get('score') for i in xrange(0, len(self.child_history))]
def latest_post_assessment(self, system):
"""Empty string if not available"""
if not self.child_history:
......
......@@ -291,7 +291,7 @@ class SelfAssessmentDescriptor():
template_dir_name = "selfassessment"
def __init__(self, system):
self.system =system
self.system = system
@classmethod
def definition_from_xml(cls, xml_object, system):
......
......@@ -15,6 +15,7 @@ from xmodule.open_ended_grading_classes.xblock_field_types import StringyFloat
from xmodule.fields import Date
from xmodule.open_ended_grading_classes.peer_grading_service import PeerGradingService, GradingServiceError, MockPeerGradingService
from open_ended_grading_classes import combined_open_ended_rubric
log = logging.getLogger(__name__)
......@@ -178,8 +179,14 @@ class PeerGradingModule(PeerGradingFields, XModule):
pass
def get_score(self):
max_score = None
score = None
score_dict = {
'score': score,
'total': max_score,
}
if self.use_for_single_location not in TRUE_DICT or self.is_graded not in TRUE_DICT:
return None
return score_dict
try:
count_graded = self.student_data_for_location['count_graded']
......@@ -198,10 +205,11 @@ class PeerGradingModule(PeerGradingFields, XModule):
#Ensures that once a student receives a final score for peer grading, that it does not change.
self.student_data_for_location = response
score_dict = {
'score': int(count_graded >= count_required and count_graded>0) * int(self.weight),
'total': self.max_grade * int(self.weight),
}
if self.weight is not None:
score = int(count_graded >= count_required and count_graded > 0) * float(self.weight)
total = self.max_grade * float(self.weight)
score_dict['score'] = score
score_dict['total'] = total
return score_dict
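The peer grading score is binary: full weighted credit once the student has graded at least the required number of essays, zero otherwise, and None when no weight is set. A sketch (the function name is illustrative):

```python
def peer_grading_score(count_graded, count_required, max_grade, weight):
    """Full weighted credit once enough essays are graded, else zero."""
    if weight is None:
        return None, None
    done = count_graded >= count_required and count_graded > 0
    return int(done) * float(weight), max_grade * float(weight)

# e.g. 3 graded of 3 required, max_grade 1, weight 2 -> (2.0, 2.0)
```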
......@@ -384,8 +392,7 @@ class PeerGradingModule(PeerGradingFields, XModule):
# if we can't parse the rubric into HTML,
except etree.XMLSyntaxError:
#This is a dev_facing_error
log.exception("Cannot parse rubric string. Raw string: {0}"
.format(rubric))
log.exception("Cannot parse rubric string.")
#This is a student_facing_error
return {'success': False,
'error': 'Error displaying submission. Please notify course staff.'}
......@@ -425,12 +432,15 @@ class PeerGradingModule(PeerGradingFields, XModule):
try:
response = self.peer_gs.save_calibration_essay(location, grader_id, calibration_essay_id,
submission_key, score, feedback, rubric_scores)
if 'actual_rubric' in response:
rubric_renderer = combined_open_ended_rubric.CombinedOpenEndedRubric(self.system, True)
response['actual_rubric'] = rubric_renderer.render_rubric(response['actual_rubric'])['html']
return response
except GradingServiceError:
#This is a dev_facing_error
log.exception(
"Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(
location, submission_id, submission_key, grader_id))
"Error saving calibration grade, location: {0}, submission_key: {1}, grader_id: {2}".format(
location, submission_key, grader_id))
#This is a student_facing_error
return self._err_response('There was an error saving your score. Please notify course staff.')
......@@ -577,5 +587,5 @@ class PeerGradingDescriptor(PeerGradingFields, RawDescriptor):
stores_state = True
has_score = True
always_recalculate_grades=True
always_recalculate_grades = True
template_dir_name = "peer_grading"
---
metadata:
display_name: Open Ended Response
max_attempts: 1
attempts: 1
is_graded: False
version: 1
display_name: Open Ended Response
skip_spelling_checks: False
accept_file_upload: False
weight: ""
......
---
metadata:
display_name: Peer Grading Interface
attempts: 1
use_for_single_location: False
link_to_location: None
is_graded: False
......
......@@ -310,19 +310,24 @@ def save_grade(request, course_id):
if request.method != 'POST':
raise Http404
required = set(['score', 'feedback', 'submission_id', 'location', 'submission_flagged', 'rubric_scores[]'])
actual = set(request.POST.keys())
p = request.POST
required = set(['score', 'feedback', 'submission_id', 'location', 'submission_flagged'])
skipped = 'skipped' in p
#If the instructor has skipped grading the submission, then there will not be any rubric scores.
#Only add in the rubric scores if the instructor has not skipped.
if not skipped:
required|=set(['rubric_scores[]'])
actual = set(p.keys())
missing = required - actual
if len(missing) > 0:
return _err_response('Missing required keys {0}'.format(
', '.join(missing)))
grader_id = unique_id_for_user(request.user)
p = request.POST
location = p['location']
skipped = 'skipped' in p
try:
result_json = staff_grading_service().save_grade(course_id,
......
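The view only demands rubric_scores[] when the submission was not skipped. A sketch of the validation step under that assumption (the helper name is illustrative; the field names mirror the view above):

```python
def missing_save_grade_keys(post_params):
    """Return the set of required POST keys absent from the request."""
    required = set(['score', 'feedback', 'submission_id', 'location',
                    'submission_flagged'])
    # A skipped submission carries no rubric scores, so do not require them.
    if 'skipped' not in post_params:
        required |= set(['rubric_scores[]'])
    return required - set(post_params.keys())

# e.g. a skipped POST that lacks rubric_scores[] validates cleanly.
```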
......@@ -97,7 +97,7 @@ class TestStaffGradingService(LoginEnrollmentTestCase):
self.assertIsNotNone(d['rubric'])
def test_save_grade(self):
def save_grade_base(self,skip=False):
self.login(self.instructor, self.password)
url = reverse('staff_grading_save_grade', kwargs={'course_id': self.course_id})
......@@ -108,12 +108,20 @@ class TestStaffGradingService(LoginEnrollmentTestCase):
'location': self.location,
'submission_flagged': "true",
'rubric_scores[]': ['1', '2']}
if skip:
data.update({'skipped' : True})
r = self.check_for_post_code(200, url, data)
d = json.loads(r.content)
self.assertTrue(d['success'], str(d))
self.assertEquals(d['submission_id'], self.mock_service.cnt)
def test_save_grade(self):
self.save_grade_base(skip=False)
def test_save_grade_skip(self):
self.save_grade_base(skip=True)
def test_get_problem_list(self):
self.login(self.instructor, self.password)
......
......@@ -185,6 +185,7 @@ class @StaffGrading
$(window).keydown @keydown_handler
$(window).keyup @keyup_handler
@question_header = $('.question-header')
@question_header.click @collapse_question
@collapse_question()
......@@ -206,6 +207,7 @@ class @StaffGrading
@num_pending = 0
@score_lst = []
@grade = null
@is_ctrl = false
@problems = null
......@@ -231,10 +233,18 @@ class @StaffGrading
@state = state_graded
@submit_button.show()
keydown_handler: (e) =>
if e.which == 13 && !@list_view && Rubric.check_complete()
keydown_handler: (event) =>
#Previously, responses were submitted when hitting enter. Add in a modifier that ensures that ctrl+enter is needed.
if event.which == 17 && @is_ctrl==false
@is_ctrl=true
else if @is_ctrl==true && event.which == 13 && !@list_view && Rubric.check_complete()
@submit_and_get_next()
keyup_handler: (event) =>
#Handle keyup event when ctrl key is released
if event.which == 17 && @is_ctrl==true
@is_ctrl=false
set_button_text: (text) =>
@action_button.attr('value', text)
......
......@@ -43,8 +43,8 @@
<p>Please include some written feedback as well.</p>
<textarea name="feedback" placeholder="Feedback for student"
class="feedback-area" cols="70" ></textarea>
<div class="flag-student-container"> <input type="checkbox" class="flag-checkbox" value="student_is_flagged"> Flag this submission for review by course staff (use if the submission contains inappropriate content) </div>
<div class="answer-unknown-container"> <input type="checkbox" class="answer-unknown-checkbox" value="answer_is_unknown"> I do not know how to grade this question </div>
<div class="flag-student-container"> This submission has explicit or pornographic content : <input type="checkbox" class="flag-checkbox" value="student_is_flagged"> </div>
<div class="answer-unknown-container"> I do not know how to grade this question : <input type="checkbox" class="answer-unknown-checkbox" value="answer_is_unknown"></div>
</div>
......@@ -82,6 +82,19 @@
<input type="button" class="calibration-interstitial-page-button" value="Start learning to grade" name="calibration-interstitial-page-button" />
</section>
<!-- Flag submission confirmation dialog -->
<section class="flag-submission-confirmation">
<h4> Are you sure that you want to flag this submission?</h4>
<p>
You are about to flag a submission. You should only flag a submission that contains explicit or offensive content. If the submission is not addressed to the question or is incorrect, you should give it a score of zero and accompanying feedback instead of flagging it.
</p>
<div>
<input type="button" class="flag-submission-removal-button" value="Remove Flag" name="calibration-interstitial-page-button" />
<input type="button" class="flag-submission-confirmation-button" value="Keep Flag" name="calibration-interstitial-page-button" />
</div>
</section>
<input type="button" value="Go Back" class="action-button" name="back" />
</div>
</section>