Commit b2bc882c by gradyward Committed by Will Daly

Ready For Code Review. Some things still remain to be done, but they are all…

Ready For Code Review.  Some things still remain to be done, but they are all stylistic Front end things:
- Styles
- Styles
- Styles
- Radio Button Inheritance on deletion
- Feedback prompt followthrough
- Comprehensive front end testing
parent 421ad263
......@@ -28,32 +28,35 @@
<li class="openassessment_criterion" id="openassessment_criterion_1">
<div class="openassessment_criterion_header" id="openassessment_criterion_header_1">
<input class="openassessment_display_criterion>" id="openassessment_display_criterion_1" type="checkbox" checked="1">
<label class="openassessment_criterion_header_title" for="openassessment_display_criterion_1">
<h6 class="openassessment_criterion_header_title">
{% trans "Criterion C-C-C"%}
</label>
</h6>
<div class="openassessment_rubric_remove_button" id="openassessment_criterion_1_remove">
<h2>{% trans "Remove" %}</h2>
</div>
</div>
<div class="openassessment_criterion_body" id="openassessment_criterion_body_1">
<div class="openassessment_line_input">
<label for="openassessment_criterion_1_name" class="openassessment_criterion_name_label">
{% trans "Criterion Name"%}
</label>
<input id="openassessment_criterion_1_name" class="openassessment_criterion_name" type="text">
</div>
<div class="openassessment_line_input">
<label for="openassessment_criterion_1_prompt" class="openassessment_criterion_prompt_label">
{% trans "Criterion Prompt"%}
</label>
<textarea id="openassessment_criterion_1_prompt" class="openassessment_criterion_prompt"></textarea>
</div>
<div class="openassessment_criterion_body wrapper-comp-settings" id="openassessment_criterion_body_1">
<ul class="list-input settings-list openassessment_criterion_basic_editor">
<li class="field comp-setting-entry">
<div class="wrapper-comp-settings">
<label for="openassessment_criterion_1_name" class="openassessment_criterion_name_label setting-label">
{% trans "Criterion Name"%}
</label>
<input id="openassessment_criterion_1_name" class="openassessment_criterion_name input setting-input" type="text">
</div>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-settings">
<label for="openassessment_criterion_1_prompt" class="openassessment_criterion_prompt_label setting-label">
{% trans "Criterion Prompt"%}
</label>
<textarea id="openassessment_criterion_1_prompt" class="openassessment_criterion_prompt setting-input"></textarea>
</div>
</li>
</ul>
<ul id="openassessment_criterion_1_options" class="openassessment_criterion_option_list">
<li id=openassessment_criterion_1_option_1 class="openassessment_criterion_option">
<div class="openassessment_option_header">
<span class="openassessment_option_header_title">
{% trans "Option O-O-O" %}
......@@ -63,23 +66,33 @@
</div>
</div>
<div class="openasssessment_line_input openassessment_criterion_option_point_wrapper">
<label for="openassessment_criterion_1_option_1_points" class="openassessment_criterion_option_points_label">
{% trans "Option Points"%}
</label>
<input id="openassessment_criterion_1_option_1_points" class="openassessment_criterion_option_points" type="number">
</div>
<div class="openasssessment_line_input openassessment_criterion_option_name_wrapper">
<label for="openassessment_criterion_1_option_1_name" class="openassessment_criterion_option_name_label">
{% trans "Option Name"%}
</label>
<input id="openassessment_criterion_1_option_1_name" class="openassessment_criterion_option_name" type="text">
</div>
<div class="openasssessment_line_input openassessment_criterion_option_explanation_wrapper">
<label for="openassessment_criterion_1_option_1_explanation" class="openassessment_criterion_option_explanation_label">
{% trans "Option Explanation"%}
</label>
<textarea id="openassessment_criterion_1_option_1_explanation" class="openassessment_criterion_option_explanation"></textarea>
<div class="wrapper-comp-settings">
<ul class="list-input settings-list">
<li class="field comp-setting-entry openassessment_criterion_option_name_wrapper">
<div class="wrapper-comp-setting">
<label for="openassessment_criterion_1_option_1_name" class="openassessment_criterion_option_name_label setting-label">
{% trans "Option Name"%}
</label>
<input id="openassessment_criterion_1_option_1_name" class="openassessment_criterion_option_name input input-label" type="text">
</div>
</li>
<li class="field comp-setting-entry openassessment_criterion_option_point_wrapper">
<div class="wrapper-comp-setting">
<label for="openassessment_criterion_1_option_1_points" class="openassessment_criterion_option_points_label setting-label">
{% trans "Option Points"%}
</label>
<input id="openassessment_criterion_1_option_1_points" class="openassessment_criterion_option_points input setting-input" type="number">
</div>
</li>
<li class="field comp-setting-entry openassessment_criterion_option_explanation_wrapper">
<div class="wrapper-comp-setting">
<label for="openassessment_criterion_1_option_1_explanation" class="openassessment_criterion_option_explanation_label setting-label">
{% trans "Option Explanation"%}
</label>
<textarea id="openassessment_criterion_1_option_1_explanation" class="openassessment_criterion_option_explanation setting-input"></textarea>
</div>
</li>
</ul>
</div>
</li>
......@@ -89,76 +102,97 @@
<h2>{% trans "Add Another Option"%}</h2>
</div>
<hr>
<div id="openassessment_criterion_1_feedback_wrapper" class="openassessment_criterion_feedback_wrapper">
<div class="openassessment_criterion_feedback_header">
<span class="openassessment_criterion_feedback_header_open">{% trans "Criterion Feedback" %}</span>
<span class="openassessment_criterion_feedback_header_closed">{% trans "Add Criterion Feedback" %}</span>
<div class="openassessment_rubric_remove_button openassessment_feedback_remove_button" id="openassessment_criterion_1_feedback_remove">
<h2>{% trans "Remove" %}</h2>
</div>
</div>
<div class="openassessment_line_input openassessment_criterion_feedback_direction">
<label for="openassessment_criterion_feedback">
{% trans "Feedback Direction" %}
</label>
<textarea id="openassessment_criterion_feedback" class="openassessment_criterion_feedback"></textarea>
</div>
<div id="openassessment_criterion_1_feedback_wrapper" class="openassessment_criterion_feedback_wrapper wrapper-comp-settings">
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_criterion_1_feedback" class="setting-label">{% trans "Criterion Feeedback" %}</label>
<select id="openassessment_criterion_1_feedback" class="input setting-input">
<option value="disabled">{% trans "Disabled" %}</option>
<option value="optional">{% trans "Optional" %}</option>
<option value="required">{% trans "Required" %}</option>
</select>
</div>
<p class="setting-help">
{% trans "Select one of the options above. This describes whether or not the student will have to provide criterion feedback." %}
</p>
</li>
</ul>
</div>
</div>
</li>
</ul>
<div id="openassessment_rubric_add_criterion">
<h2>
<h6>
{% trans "Add Another Criterion"%}
</h2>
</h6>
</div>
<div id="openassessment_rubric_feedback_wrapper">
<div id="openassessment_rubric_feedback_header">
<div id="openassessment_rubric_feedback_wrapper" class="wrapper-comp-settings">
<div id="openassessment_rubric_feedback_header_open">
<span>
{% trans "Rubric Feedback" %}
</span>
<div class="openassessment_rubric_remove_button" id="openassessment_rubric_direction_remove">
<div class="openassessment_rubric_remove_button" id="openassessment_rubric_feedback_remove">
<h2>{% trans "Remove" %}</h2>
</div>
<div id="openassessment_rubric_feedback_header_closed">
<h2>
{% trans "Add Rubric Feedback" %}
</h2>
</div>
<div class="openassessment_line_input">
<label for="openassessment_rubric_feedback">
{% trans "Feedback Direction" %}
</label>
<textarea id="openassessment_rubric_feedback"></textarea>
</div>
</div>
<div id="openassessment_make_invisible">
<textarea id="openassessment_rubric_editor"></textarea>
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting" id="openassessment_rubric_feedback_input_wrapper">
<label for="openassessment_rubric_feedback" class="setting-label">{% trans "Feedback Direction" %}</label>
<textarea id="openassessment_rubric_feedback" class="input setting-input"></textarea>
</div>
</li>
<p class="setting-help">
{% trans "If you would like your students to be able to provide feedback on the rubric, add a prompt to ask them for it." %}
</p>
</ul>
</div>
</div>
<div id="oa_settings_editor_wrapper" class="oa_editor_content_wrapper">
<div id="oa_settings_editor_wrapper" class="oa_editor_content_wrapper wrapper-comp-settings">
<div id="oa_basic_settings_editor">
<div id="openassessment_title_editor_wrapper">
<label for="openassessment_title_editor">{% trans "Display Name "%}</label>
<input type="text" id="openassessment_title_editor">
</div>
<p class="openassessment_description">{% trans "This name appears when you hover over the unit in the course ribbon at the top of the page." %}</p>
<hr>
<div class="openassessment_due_date_editor">
<div class="openassessment_left_text_field_wrapper">
<label for="openassessment_submission_start_editor">{% trans "Response Submission Start Date"%} </label>
<input type="text" class="openassessment_date_field" id="openassessment_submission_start_editor">
<ul id="oa_basic_settings_editor" class="list-input settings-list">
<li id="openassessment_title_editor_wrapper" class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_title_editor" class="setting-label">{% trans "Display Name "%}</label>
<input type="text" id="openassessment_title_editor" class="input setting-input">
</div>
<p class="setting-help">{% trans "This name appears when you hover over the unit in the course ribbon at the top of the page." %}</p>
</li>
<li class="openassessment_date_editor field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_submission_start_editor" class="setting-label">{% trans "Response Submission Start Date"%} </label>
<input type="datetime-local" class="input setting-input" id="openassessment_submission_start_editor">
</div>
<div class="openassessment_right_text_field_wrapper">
<label for="openassessment_submission_due_editor">{% trans "Response Submission Due Date" %}</label>
<input type="text" class="openassessment_date_field" id="openassessment_submission_due_editor">
<p class="setting-help">{% trans "The date at which submissions will first be accepted." %}</p>
</li>
<li class="openassessment_date_editor field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_submission_due_editor" class="setting-label">{% trans "Response Submission Due Date" %}</label>
<input type="datetime-local" class="input setting-input" id="openassessment_submission_due_editor">
</div>
</div>
</div>
<p class="setting-help">{% trans "The date at which submissions will stop being accepted." %}</p>
</li>
<li id="openassessment_sumbission_image_wrapper" class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="openassessment_submission_image_editor" class="setting-label">{% trans "Allow Image Submissions"%}</label>
<select id="openassessment_submission_image_editor" class = "input setting-input" name="image submission">
<option value="0">{% trans "Disabled"%}</option>
<option value="1">{% trans "Enabled"%}</option>
</select>
</div>
<p class="setting-help">{% trans "If enabled, students will be able to submit an image along with their open response." %}</p>
</li>
</ul>
<p class="openassessment_description" id="openassessment_step_select_description">
{% trans "Select the steps that students must complete. All steps are optional, but every assignment must include at least one step." %}
......@@ -169,14 +203,16 @@
<input type="checkbox" id="include_student_training">
<label for="include_student_training">{% trans "Step: Student Training" %}</label>
</div>
<p id="student_training_description_closed" class="openassessment_description_closed">
{% trans "Students learn to assess responses by scoring pre-assessed sample responses that the instructor provides. Students move to the next step when the scores they give match the instructor's scores." %}
</p>
<div id="student_training_settings_editor" class="assessment_settings_wrapper">
<p class="openassessment_description">
{% trans "Enter one or more sample responses that you've created, together with the scores you would give those responses. Be sure to format the responses and scores according to the placeholder text below." %}
<div class = "openassessment_assessment_module_editor">
<p id="student_training_description_closed" class="openassessment_description_closed">
{% trans "Students learn to assess responses by scoring pre-assessed sample responses that the instructor provides. Students move to the next step when the scores they give match the instructor's scores. Note that Student Training Requires that the Peer Assessment module is also selected." %}
</p>
<textarea id="student_training_examples"></textarea>
<div id="student_training_settings_editor" class="assessment_settings_wrapper">
<p class="openassessment_description">
{% trans "Enter one or more sample responses that you've created, together with the scores you would give those responses. Be sure to format the responses and scores according to the placeholder text below." %}
</p>
<textarea id="student_training_examples"></textarea>
</div>
</div>
</div>
......@@ -185,30 +221,44 @@
<input type="checkbox" id="include_peer_assessment">
<label for="include_peer_assessment">{% trans "Step: Peer Assessment" %}</label>
</div>
<p id="peer_assessment_description_closed" class="openassessment_description_closed">
{% trans "Students assess a specified number of other students' responses using the rubric for the assignment." %}
</p>
<div id="peer_assessment_settings_editor" class="assessment_settings_wrapper">
<p class="openassessment_description">
{% trans "Specify the following values for the peer assessment step. The numeric grading requirements must be given a value." %}
<div class = "openassessment_assessment_module_editor">
<p id="peer_assessment_description_closed" class="openassessment_description_closed">
{% trans "Students assess a specified number of other students' responses using the rubric for the assignment." %}
</p>
<div class="openassessment_indent_line_input">
<label for="peer_assessment_must_grade" class="openassessment_peer_fixed_width">{% trans "Each student must assess X peer responses" %}</label>
<input id="peer_assessment_must_grade" class="openassessment_number_field" type="text">
</div>
<div class="openassessment_indent_line_input">
<label for="peer_assessment_graded_by" class="openassessment_peer_fixed_width"> {% trans "Each response must be assessed by at least X students" %}</label>
<input id="peer_assessment_graded_by" class="openassessment_number_field" type="text">
</div>
<div class="openassessment_due_date_editor">
<div class="openassessment_left_text_field_wrapper">
<label for="peer_assessment_start_date">{% trans "Start Date" %}</label>
<input id="peer_assessment_start_date" type="text" class="openassessment_date_field">
</div>
<div class="openassessment_right_text_field_wrapper">
<label for="peer_assessment_due_date">{% trans "Due Date" %}</label>
<input id="peer_assessment_due_date" type="text" class="openassessment_date_field">
</div>
<div id="peer_assessment_settings_editor" class="assessment_settings_wrapper">
<p class="openassessment_description">
{% trans "Specify the following values for the peer assessment step. The numeric grading requirements must be given a value." %}
</p>
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_must_grade" class="setting-label">{% trans "Must Grade" %}</label>
<input id="peer_assessment_must_grade" class="input setting-input" type="number">
</div>
<p class="setting-help">{% trans "Each student must assess this number of peer responses in order to recieve a grade."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_graded_by" class="setting-label"> {% trans "Graded By" %}</label>
<input id="peer_assessment_graded_by" class="input setting-input" type="number">
</div>
<p class="setting-help">{% trans "Each response must be assessed by at least this many students in order to tabulate a score."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_start_date" class="setting-label">{% trans "Start Date" %}</label>
<input id="peer_assessment_start_date" type="datetime-local" class="input setting-input">
</div>
<p class="setting-help">{% trans "If desired, specify a start date for the peer assessment period. If no date is specified, peer assessment can begin when submissions begin."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="peer_assessment_due_date" class="setting-label">{% trans "Due Date" %}</label>
<input id="peer_assessment_due_date" type="datetime-local" class="input setting-input">
</div>
<p class="setting-help">{% trans "If desired, specify a due date for the peer assessment period. If no date is specified, peer assessment can run as long as the problem is open."%}</p>
</li>
</ul>
</div>
</div>
</div>
......@@ -218,22 +268,30 @@
<input id="include_self_assessment" type="checkbox">
<label for="include_self_assessment">{% trans "Step: Self Assessment" %}</label>
</div>
<p id="self_assessment_description_closed" class="openassessment_description_closed">
{% trans "Students assess their own responses using the rubric for the assignment." %}
</p>
<div id="self_assessment_settings_editor" class="assessment_settings_wrapper">
<p class="openassessment_description">
{% trans "Specify start and due dates for the self assessment step. To allow self assessment to run as long as the assignment is open, leave both fields blank." %}
<div class="openassessment_assessment_module_editor">
<p id="self_assessment_description_closed" class="openassessment_description_closed">
{% trans "Students assess their own responses using the rubric for the assignment." %}
</p>
<div class="openassessment_due_date_editor">
<div class="openassessment_left_text_field_wrapper">
<label for="self_assessment_start_date">{% trans "Start Date" %}</label>
<input id="self_assessment_start_date" type="text" class="openassessment_date_field">
</div>
<div class="openassessment_right_text_field_wrapper">
<label for="self_assessment_due_date">{% trans "Due Date" %}</label>
<input id="self_assessment_due_date" type="text" class="openassessment_date_field">
</div>
<div id="self_assessment_settings_editor" class="assessment_settings_wrapper">
<p class="openassessment_description">
{% trans "Specify start and due dates for the self assessment step. To allow self assessment to run as long as the assignment is open, leave both fields blank." %}
</p>
<ul class="list-input settings-list">
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="self_assessment_start_date" class="setting-label">{% trans "Start Date" %}</label>
<input id="self_assessment_start_date" type="datetime-local" class="input setting-input">
</div>
<p class="setting-help">{% trans "If desired, specify a start date for the self assessment period. If no date is specified, self assessment can begin when submissions begin."%}</p>
</li>
<li class="field comp-setting-entry">
<div class="wrapper-comp-setting">
<label for="self_assessment_due_date" class="setting-label">{% trans "Due Date" %}</label>
<input id="self_assessment_due_date" type="datetime-local" class="input setting-input">
</div>
<p class="setting-help">{% trans "If desired, specify a due date for the self assessment period. If no date is specified, self assessment can run as long as the problem is open."%}</p>
</li>
</ul>
</div>
</div>
</div>
......@@ -243,19 +301,21 @@
<input id="include_ai_assessment" type="checkbox">
<label for="include_ai_assessment">{% trans "Step: Example-Based Assessment" %}</label>
</div>
<p id="ai_assessment_description_closed" class="openassessment_description_closed">
{% trans "An algorithm assesses students' responses by comparing the responses to pre-assessed sample responses that the instructor provides."%}
</p>
<div id="ai_assessment_settings_editor" class="assessment_settings_wrapper">
<p class="openassessment_description">
{% trans "Enter one or more sample responses that you've created, together with the scores you would give those responses. Be sure to format the responses and scores according to the placeholder text below. The algorithm assesses students' responses by comparing them to the sample responses and scores that you provide."%}
<div class="openassessment_assessment_module_editor">
<p id="ai_assessment_description_closed" class="openassessment_description_closed">
{% trans "An algorithm assesses students' responses by comparing the responses to pre-assessed sample responses that the instructor provides."%}
</p>
<label for="ai_training_examples">{% trans "Sample Responses" %}</label>
<textarea id="ai_training_examples"></textarea>
<div id="ai_assessment_settings_editor" class="assessment_settings_wrapper">
<p class="openassessment_description">
{% trans "Enter one or more sample responses that you've created, together with the scores you would give those responses. Be sure to format the responses and scores according to the placeholder text below. The algorithm assesses students' responses by comparing them to the sample responses and scores that you provide."%}
</p>
<textarea id="ai_training_examples"></textarea>
</div>
</div>
</div>
</div>
</div>
<div class="openassessment_editor_buttons xblock-actions">
......
......@@ -2157,9 +2157,10 @@ hr.divider,
#openassessment-editor .oa_editor_content_wrapper {
height: 100%;
width: 100%;
border-radius: 4px;
border: 1px solid #414243;
background-color: #f5f5f5; }
border-radius: 3px;
border: 1px solid #838486;
background-color: #f5f5f5;
overflow-y: scroll; }
#openassessment-editor #openassessment_prompt_editor {
width: 100%;
height: 100%;
......@@ -2167,13 +2168,19 @@ hr.divider,
border: none;
border-radius: 4px;
padding: 10px; }
#openassessment-editor #openassessment_prompt_editor textarea {
font-size: 14px;
border: none;
overflow: auto;
outline: none;
-webkit-box-shadow: none;
-moz-box-shadow: none;
box-shadow: none; }
#openassessment-editor #openassessment_rubric_editor {
width: 100%;
height: 100%;
display: none; }
height: 100%; }
#openassessment-editor #oa_basic_settings_editor {
padding: 20px 20px;
border-bottom: 1px solid #414243; }
border-bottom: 1px solid #838486; }
#openassessment-editor #oa_basic_settings_editor #openassessment_title_editor_wrapper label {
width: 25%;
text-align: left; }
......@@ -2181,45 +2188,20 @@ hr.divider,
width: 45%;
min-width: 100px; }
#openassessment-editor #openassessment_step_select_description {
padding: 10px; }
padding: 10px 10px 0 10px;
text-align: center;
font-size: 80%; }
#openassessment-editor .openassessment_assessment_module_settings_editor {
margin-bottom: 10px;
padding-bottom: 10px;
border-bottom: 1px solid #dadbdc; }
#openassessment-editor .openassessment_indent_line_input {
padding: 5px 20px; }
#openassessment-editor #oa_settings_editor_wrapper {
padding: 0 10px;
overflow-y: scroll; }
#openassessment-editor #oa_rubric_editor_wrapper {
overflow-y: scroll; }
#openassessment-editor #openassessment_title_editor {
width: 300px;
margin-left: 50px; }
overflow-y: scroll;
padding: 5px;
margin: 10px;
border: 1px solid lightgray;
border-radius: 3px; }
#openassessment-editor .openassessment_description, #openassessment-editor .openassessment_description_closed {
font-size: 75%;
margin: 0; }
#openassessment-editor .openassessment_date_field {
width: 130px; }
#openassessment-editor .openassessment_number_field {
width: 25px; }
#openassessment-editor .openassessment_peer_fixed_width {
width: 45%;
display: inline-block; }
#openassessment-editor .openassessment_text_field_wrapper, #openassessment-editor .openassessment_right_text_field_wrapper, #openassessment-editor .openassessment_left_text_field_wrapper {
width: 50%;
text-align: center; }
#openassessment-editor .openassessment_right_text_field_wrapper {
float: right; }
#openassessment-editor .openassessment_left_text_field_wrapper {
float: left; }
#openassessment-editor .openassessment_due_date_editor {
height: 30px; }
#openassessment-editor .openassessment_inclusion_wrapper {
background-color: #dadbdc;
padding: 2.5px 5px;
margin: 2.5px 5px;
border-radius: 2.5px; }
margin: 2.5px 5px; }
#openassessment-editor .openassessment_inclusion_wrapper input[type="checkbox"] {
display: none; }
#openassessment-editor .openassessment_inclusion_wrapper input[type="checkbox"] + label:before {
......@@ -2230,7 +2212,8 @@ hr.divider,
height: auto;
content: "\f096"; }
#openassessment-editor .openassessment_inclusion_wrapper input[type="checkbox"]:checked + label:before {
content: "\f046"; }
content: "\f046";
color: #009fe6; }
#openassessment-editor label {
padding-right: 10px; }
#openassessment-editor .xblock_actions {
......@@ -2251,27 +2234,26 @@ hr.divider,
color: whitesmoke;
text-transform: uppercase;
outline-color: transparent; }
#openassessment-editor hr {
background-color: transparent;
color: #414243;
height: 1px;
border: 0px;
clear: both; }
#openassessment-editor .openassessment_assessment_module_editor {
padding: 2.5px 0px; }
#openassessment-editor .openassessment_assessment_module_editor .openassessment_description, #openassessment-editor .openassessment_assessment_module_editor .openassessment_description_closed {
padding-left: 15px; }
#openassessment-editor #oa_rubric_editor_wrapper .wrapper-comp-settings {
display: initial; }
#openassessment-editor #oa_rubric_editor_wrapper #openassessment_rubric_instructions {
background-color: #c8c9ca;
border-bottom: 1px solid #414243;
padding: 10px; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion {
border: 1px dashed #dadbdc;
margin: 5px;
padding-bottom: 10px; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_header {
margin: 10px;
padding: 5px;
border-bottom: 1px solid #414243; }
border-bottom: 1px solid #414243;
overflow: auto; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_header input[type="checkbox"] {
display: none; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_header input[type="checkbox"] + label:before {
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_header input[type="checkbox"] + h6:before {
font-family: "FontAwesome";
display: inline-block;
margin-right: 10px;
......@@ -2281,8 +2263,10 @@ hr.divider,
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_header input[type="checkbox"]:checked + label:before {
content: "\f078"; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_header .openassessment_criterion_header_title {
font-size: 125%;
text-transform: uppercase; }
text-transform: uppercase;
width: 50%;
display: inline-block;
float: left; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_add_option {
background-color: #838486;
padding: 5px;
......@@ -2293,6 +2277,21 @@ hr.divider,
content: "\f067";
display: inline-block;
margin: 0 5px; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_basic_editor .comp-setting-entry {
padding-right: 0;
margin-right: 10px;
overflow: auto; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_basic_editor .comp-setting-entry .wrapper-comp-settings input {
font-size: 11px;
float: right;
width: 70%; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_basic_editor .comp-setting-entry .wrapper-comp-settings .openassessment_criterion_prompt {
padding: 10px;
width: 70%;
float: right; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_basic_editor .comp-setting-entry .wrapper-comp-settings label {
padding: 0;
margin: 0; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_feedback_wrapper .openassessment_criterion_feedback_header {
background-color: #c8c9ca;
padding: 5px;
......@@ -2312,34 +2311,51 @@ hr.divider,
padding: 5px; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_option_header {
background-color: #c8c9ca;
padding: 5px;
padding: 5px 5px 5px 10px;
margin: 5px 5px 8px 5px;
border-radius: 3px; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_point_wrapper {
width: 30%;
width: 40%;
border-top: none;
padding: 5px 5px 5px 0px;
float: left;
padding: 0 10px; }
margin: 0; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_point_wrapper label {
width: 62.5%; }
width: 40%;
vertical-align: middle;
padding: 0;
margin: 0; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_point_wrapper input {
width: 40px; }
padding: 10px;
float: right;
width: 55%;
font-size: 11px; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_name_wrapper {
width: 70%;
float: right; }
float: left;
width: 60%;
padding: 5px 10px 5px 20px;
border-top: 0;
margin: 0; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_name_wrapper label {
width: 40%;
padding-right: 10px; }
width: 25%;
vertical-align: middle;
padding: 0;
margin: 0; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_name_wrapper input {
width: 60%; }
padding: 10px;
font-size: 11px;
width: 60%;
float: right; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_explanation_wrapper {
padding: 15px 5px 0px 5px;
padding: 10px 5px 0px 20px;
width: 100%;
display: inline-block; }
display: inline-block;
margin: 0; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_explanation_wrapper label {
width: 30%;
text-align: left;
padding-left: 15px; }
width: 25%;
text-align: left; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_explanation_wrapper textarea {
padding: 10px;
width: 70%;
float: right; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_line_input {
......@@ -2355,10 +2371,10 @@ hr.divider,
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_line_input textarea {
width: 70%;
float: right; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_large_text_input, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_explanation_wrapper textarea, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_line_input textarea {
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_large_text_input, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_basic_editor .comp-setting-entry .wrapper-comp-settings .openassessment_criterion_prompt, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_explanation_wrapper textarea, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_line_input textarea {
height: 70px;
width: 70%; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_input_styling, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_point_wrapper input, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_large_text_input, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_explanation_wrapper textarea, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_line_input textarea {
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_input_styling, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_large_text_input, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion .openassessment_criterion_basic_editor .comp-setting-entry .wrapper-comp-settings .openassessment_criterion_prompt, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_criterion_option .openassessment_criterion_option_explanation_wrapper textarea, #openassessment-editor #oa_rubric_editor_wrapper .openassessment_line_input textarea {
resize: none;
box-sizing: border-box;
border: 1px solid #b2b2b2;
......@@ -2384,20 +2400,30 @@ hr.divider,
font-size: 80%;
float: right;
display: inline-block; }
#openassessment-editor #oa_rubric_editor_wrapper #openassessment_rubric_feedback_wrapper {
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_rubric_feedback_wrapper {
padding: 0 10px; }
#openassessment-editor #oa_rubric_editor_wrapper #openassessment_rubric_feedback_wrapper #openassessment_rubric_feedback_header {
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_rubric_feedback_wrapper .openassessment_rubric_feedback_header {
margin-top: 10px;
border-bottom: 1px solid #414243;
font-size: 125%;
padding: 10px;
padding-right: 20px; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_rubric_feedback_wrapper .openassessment_feedback_radio_toggle input[type="radio"] {
display: none; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_rubric_feedback_wrapper .openassessment_feedback_radio_toggle input[type="radio"] + label:before {
font-family: "FontAwesome";
display: inline-block;
margin-right: 10px;
width: auto;
height: auto;
content: "\f10c"; }
#openassessment-editor #oa_rubric_editor_wrapper .openassessment_rubric_feedback_wrapper .openassessment_feedback_radio_toggle input[type="radio"]:checked + label:before {
content: "\f05d"; }
#openassessment-editor #oa_rubric_editor_wrapper #openassessment_rubric_add_criterion {
font-size: 125%;
background-color: #838486;
padding: 10px;
margin: 10px, 0; }
#openassessment-editor #oa_rubric_editor_wrapper #openassessment_rubric_add_criterion h2:before {
#openassessment-editor #oa_rubric_editor_wrapper #openassessment_rubric_add_criterion h6:before {
font-family: "FontAwesome";
display: inline-block;
margin-left: 5px;
......@@ -2412,9 +2438,6 @@ hr.divider,
border: 0px;
clear: both; }
#openassessment_make_invisible {
display: none; }
.modal-content {
height: 470px !important;
background-color: #e5e5e5; }
......
......@@ -89,26 +89,24 @@ function OpenAssessmentBlock(runtime, element) {
OpenAssessment.StudioView = function(runtime, element, server) {
this.runtime = runtime;
this.server = server;
var liveElement = $(element);
this.promptBox = $("#openassessment_prompt_editor", liveElement).get(0);
this.titleField = $("#openassessment_title_editor", liveElement).first().get(0);
this.submissionStartField = $("#openassessment_submission_start_editor", liveElement).first().get(0);
this.submissionDueField = $("#openassessment_submission_due_editor", liveElement).first().get(0);
this.hasPeer = $("#include_peer_assessment", liveElement);
this.hasSelf = $("#include_self_assessment", liveElement);
this.hasAI = $("#include_ai_assessment", liveElement);
this.hasTraining = $("#include_student_training", liveElement);
this.peerMustGrade = $("#peer_assessment_must_grade", liveElement);
this.peerGradedBy = $("#peer_assessment_graded_by", liveElement);
this.peerStart = $("#peer_assessment_start_date", liveElement);
this.peerDue = $("#peer_assessment_due_date", liveElement);
this.selfStart = $("#self_assessment_start_date", liveElement);
this.selfDue = $("#self_assessment_due_date", liveElement);
this.rubricXmlBox = CodeMirror.fromTextArea($("#openassessment_rubric_editor", liveElement).first().get(0), {
mode: "xml",
lineNumbers: true,
lineWrapping: true
});
this.liveElement = $(element);
var liveElement = this.liveElement;
this.settingsFieldSelectors = {
promptBox: $("#openassessment_prompt_editor", liveElement),
titleField: $("#openassessment_title_editor", liveElement),
submissionStartField: $("#openassessment_submission_start_editor", liveElement),
submissionDueField: $("#openassessment_submission_due_editor", liveElement),
hasPeer: $("#include_peer_assessment", liveElement),
hasSelf: $("#include_self_assessment", liveElement),
hasAI: $("#include_ai_assessment", liveElement),
hasTraining: $("#include_student_training", liveElement),
peerMustGrade: $("#peer_assessment_must_grade", liveElement),
peerGradedBy: $("#peer_assessment_graded_by", liveElement),
peerStart: $("#peer_assessment_start_date", liveElement),
peerDue: $("#peer_assessment_due_date", liveElement),
selfStart: $("#self_assessment_start_date", liveElement),
selfDue: $("#self_assessment_due_date", liveElement)
};
this.aiTrainingExamplesCodeBox = CodeMirror.fromTextArea($("#ai_training_examples", liveElement).first().get(0), {
mode: "xml",
lineNumbers: true,
......@@ -124,13 +122,15 @@ OpenAssessment.StudioView = function(runtime, element, server) {
this.criterionHtmlTemplate = criterionHtml.replace(new RegExp("1", "g"), "C-C-C");
var optionBodyHtml = $("#openassessment_criterion_1_option_1", liveElement).html();
var optionHtml = '<li id=openassessment_criterion_1_option_1 class="openassessment_criterion_option">' + optionBodyHtml + "</li>";
var criterionsReplaced = optionHtml.replace(new RegExp("criterion_1", "g"), "criterion_C-C-C");
this.optionHtmlTemplate = criterionsReplaced.replace(new RegExp("option_1", "g"), "option_O-O-O");
var criteriaReplaced = optionHtml.replace(new RegExp("criterion_1", "g"), "criterion_C-C-C");
this.optionHtmlTemplate = criteriaReplaced.replace(new RegExp("option_1", "g"), "option_O-O-O");
this.numberOfCriteria = 0;
this.numberOfOptions = [];
this.rubricCriteriaSelectors = [];
this.rubricFeedbackPrompt = $("#openassessment_rubric_feedback", liveElement);
this.hasRubricFeedbackPrompt = true;
$("#openassessment_criterion_list", liveElement).empty();
this.addNewCriterionToRubric(liveElement);
this.addNewCriterionToRubric();
var view = this;
$(".openassessment_save_button", liveElement).click(function(eventData) {
view.save();
......@@ -138,85 +138,96 @@ OpenAssessment.StudioView = function(runtime, element, server) {
$(".openassessment_cancel_button", liveElement).click(function(eventData) {
view.cancel();
});
$(".openassessment_editor_content_and_tabs", liveElement).tabs({
activate: function(event, ui) {
view.rubricXmlBox.refresh();
}
});
$("#include_peer_assessment", liveElement).change(function() {
if (this.checked) {
$("#peer_assessment_description_closed", liveElement).fadeOut("fast");
$("#peer_assessment_settings_editor", liveElement).fadeIn();
} else {
$("#peer_assessment_settings_editor", liveElement).fadeOut("fast");
$("#peer_assessment_description_closed", liveElement).fadeIn();
}
});
$("#include_self_assessment", liveElement).change(function() {
if (this.checked) {
$("#self_assessment_description_closed", liveElement).fadeOut("fast");
$("#self_assessment_settings_editor", liveElement).fadeIn();
} else {
$("#self_assessment_settings_editor", liveElement).fadeOut("fast");
$("#self_assessment_description_closed", liveElement).fadeIn();
}
});
$("#include_ai_assessment", liveElement).change(function() {
if (this.checked) {
$("#ai_assessment_description_closed", liveElement).fadeOut("fast");
$("#ai_assessment_settings_editor", liveElement).fadeIn();
} else {
$("#ai_assessment_settings_editor", liveElement).fadeOut("fast");
$("#ai_assessment_description_closed", liveElement).fadeIn();
}
});
$("#include_student_training", liveElement).change(function() {
if (this.checked) {
$("#student_training_description_closed", liveElement).fadeOut("fast");
$("#student_training_settings_editor", liveElement).fadeIn();
} else {
$("#student_training_settings_editor", liveElement).fadeOut("fast");
$("#student_training_description_closed", liveElement).fadeIn();
}
});
$(".openassessment_editor_content_and_tabs", liveElement).tabs();
view.addSettingsAssessmentCheckboxListener("ai_assessment", liveElement);
view.addSettingsAssessmentCheckboxListener("self_assessment", liveElement);
view.addSettingsAssessmentCheckboxListener("peer_assessment", liveElement);
view.addSettingsAssessmentCheckboxListener("student_training", liveElement);
$("#openassessment_rubric_add_criterion", liveElement).click(function(eventData) {
view.addNewCriterionToRubric(liveElement);
});
$("#openassessment_rubric_feedback_remove", liveElement).click(function(eventData) {
$("#openassessment_rubric_feedback_header_open", liveElement).fadeOut();
$("#openassessment_rubric_feedback_input_wrapper", liveElement).fadeOut();
$("#openassessment_rubric_feedback_header_closed", liveElement).fadeIn();
view.hasRubricFeedbackPrompt = false;
});
$("#openassessment_rubric_feedback_header_closed", liveElement).click(function(eventData) {
$("#openassessment_rubric_feedback_header_closed", liveElement).fadeOut();
$("#openassessment_rubric_feedback_header_open", liveElement).fadeIn();
$("#openassessment_rubric_feedback_input_wrapper", liveElement).fadeIn();
view.hasRubricFeedbackPrompt = true;
});
$("#openassessment_rubric_feedback_header_closed", liveElement).hide();
};
OpenAssessment.StudioView.prototype = {
load: function() {
var view = this;
this.server.loadEditorContext().done(function(prompt, rubricXml, title, subStart, subDue, assessments) {
view.rubricXmlBox.setValue(rubricXml);
view.submissionStartField.value = subStart;
view.submissionDueField.value = subDue;
view.promptBox.value = prompt;
view.titleField.value = title;
view.hasTraining.prop("checked", false).change();
view.hasPeer.prop("checked", false).change();
view.hasSelf.prop("checked", false).change();
view.hasAI.prop("checked", false).change();
this.server.loadEditorContext().done(function(prompt, rubric, title, subStart, subDue, assessments) {
view.settingsFieldSelectors.submissionStartField.prop("value", subStart);
view.settingsFieldSelectors.submissionDueField.prop("value", subDue);
view.settingsFieldSelectors.promptBox.prop("value", prompt);
view.settingsFieldSelectors.titleField.prop("value", title);
view.settingsFieldSelectors.hasTraining.prop("checked", false).change();
view.settingsFieldSelectors.hasPeer.prop("checked", false).change();
view.settingsFieldSelectors.hasSelf.prop("checked", false).change();
view.settingsFieldSelectors.hasAI.prop("checked", false).change();
for (var i = 0; i < assessments.length; i++) {
var assessment = assessments[i];
if (assessment.name == "peer-assessment") {
view.peerMustGrade.prop("value", assessment.must_grade);
view.peerGradedBy.prop("value", assessment.must_be_graded_by);
view.peerStart.prop("value", assessment.start);
view.peerDue.prop("value", assessment.due);
view.hasPeer.prop("checked", true).change();
view.settingsFieldSelectors.peerMustGrade.prop("value", assessment.must_grade);
view.settingsFieldSelectors.peerGradedBy.prop("value", assessment.must_be_graded_by);
view.settingsFieldSelectors.peerStart.prop("value", assessment.start);
view.settingsFieldSelectors.peerDue.prop("value", assessment.due);
view.settingsFieldSelectors.hasPeer.prop("checked", true).change();
} else if (assessment.name == "self-assessment") {
view.selfStart.prop("value", assessment.start);
view.selfDue.prop("value", assessment.due);
view.hasSelf.prop("checked", true).change();
view.settingsFieldSelectors.selfStart.prop("value", assessment.start);
view.settingsFieldSelectors.selfDue.prop("value", assessment.due);
view.settingsFieldSelectors.hasSelf.prop("checked", true).change();
} else if (assessment.name == "example-based-assessment") {
view.aiTrainingExamplesCodeBox.setValue(assessment.examples);
view.hasAI.prop("checked", true).change();
view.settingsFieldSelectors.aiTrainingExamplesCodeBox.setValue(assessment.examples);
view.settingsFieldSelectors.hasAI.prop("checked", true).change();
} else if (assessment.name == "student-training") {
view.studentTrainingExamplesCodeBox.setValue(assessment.examples);
view.hasTraining.prop("checked", true).change();
view.settingsFieldSelectors.hasTraining.prop("checked", true).change();
}
}
while (view.numberOfCriteria < rubric.criteria.length) {
view.addNewCriterionToRubric();
}
while (view.numberOfCriteria > rubric.criteria.length) {
view.removeCriterionFromRubric(1);
}
for (i = 0; i < rubric.criteria.length; i++) {
while (view.numberOfOptions[i + 1] < rubric.criteria[i].options.length) {
view.addNewOptionToCriterion(view.liveElement, i + 1);
}
while (view.numberOfOptions[i + 1] > rubric.criteria[i].options.length) {
view.removeOptionFromCriterion(view.liveElement, i + 1, 1);
}
}
for (i = 0; i < rubric.criteria.length; i++) {
var criterion = rubric.criteria[i];
var selectors = view.rubricCriteriaSelectors[i + 1];
selectors.name.prop("value", criterion.name);
selectors.prompt.prop("value", criterion.prompt);
selectors.feedback = criterion.feedback;
for (var j = 0; j < criterion.options.length; j++) {
var option = criterion.options[j];
var optionSelectors = selectors.options[j + 1];
optionSelectors.name.prop("value", option.name);
optionSelectors.points.prop("value", option.points);
optionSelectors.explanation.prop("value", option.explanation);
}
}
if (rubric.feedbackprompt) {
view.rubricFeedbackPrompt.prop("value", rubric.feedbackprompt);
view.hasRubricFeedbackPrompt = true;
} else {
view.rubricFeedbackPrompt.prop("value", "");
view.hasRubricFeedbackPrompt = false;
}
}).fail(function(msg) {
view.showError(msg);
});
......@@ -233,14 +244,26 @@ OpenAssessment.StudioView.prototype = {
view.showError(errMsg);
});
},
addSettingsAssessmentCheckboxListener: function(name, liveElement) {
$("#include_" + name, liveElement).change(function() {
if (this.checked) {
$("#" + name + "_description_closed", liveElement).fadeOut("fast");
$("#" + name + "_settings_editor", liveElement).fadeIn();
} else {
$("#" + name + "_settings_editor", liveElement).fadeOut("fast");
$("#" + name + "_description_closed", liveElement).fadeIn();
}
});
},
confirmPostReleaseUpdate: function(onConfirm) {
var msg = gettext("This problem has already been released. Any changes will apply only to future assessments.");
if (confirm(msg)) {
onConfirm();
}
},
addNewCriterionToRubric: function(liveElement) {
addNewCriterionToRubric: function() {
var view = this;
var liveElement = this.liveElement;
var newCriterionID = this.numberOfCriteria + 1;
this.numberOfCriteria += 1;
this.numberOfOptions[newCriterionID] = 0;
......@@ -253,8 +276,9 @@ OpenAssessment.StudioView.prototype = {
name: $(".openassessment_criterion_name", liveElement).first(),
prompt: $(".openassessment_criterion_prompt", liveElement).first(),
options: [],
feedback: $(".openassessment_criterion_feedbac", liveElement).first()
feedback: "disabled"
};
$("input:radio[value=disabled]", liveElement).prop("checked", true);
view.addNewOptionToCriterion(liveElement, newCriterionID);
$("#openassessment_display_criterion_" + newCriterionID, liveElement).change(function() {
if (this.checked) {
......@@ -264,25 +288,7 @@ OpenAssessment.StudioView.prototype = {
}
});
$("#openassessment_criterion_" + newCriterionID + "_remove", liveElement).click(function(eventData) {
var numCriteria = view.numberOfCriteria;
var selectors = view.rubricCriteriaSelectors;
for (var i = newCriterionID; i < numCriteria; i++) {
selectors[i].name.prop("value", selectors[i + 1].name.prop("value"));
selectors[i].prompt.prop("value", selectors[i + 1].prompt.prop("value"));
selectors[i].feedback.prop("value", selectors[i + 1].feedback.prop("value"));
var options1 = selectors[i].options;
var options2 = selectors[i].options;
var numOptions = view.numberOfOptions[i + 1];
for (var j = 1; j < numOptions; j++) {
options1[j].points.prop("value", options2[j].points.prop("value"));
options1[j].name.prop("value", options2[j].name.prop("value"));
options1[j].explanation.prop("value", options2[j].explanation.prop("value"));
}
}
view.rubricCriteriaSelectors[view.rubricCriteriaSelectors.length].criterion.remove();
view.rubricCriteriaSelectors = view.rubricCriteriaSelectors.slice(0, numCriteria);
view.numberOfOptions = view.numberOfOptions.slice(0, numCriteria);
view.numberOfCriteria -= 1;
view.removeCriterionFromRubric(newCriterionID);
});
$("#openassessment_criterion_" + newCriterionID + "_add_option", liveElement).click(function(eventData) {
view.addNewOptionToCriterion(liveElement, newCriterionID);
......@@ -292,15 +298,46 @@ OpenAssessment.StudioView.prototype = {
$(".openassessment_criterion_feedback_header_open", liveElement).fadeOut();
$(".openassessment_criterion_feedback_header_closed", liveElement).fadeIn();
$(".openassessment_feedback_remove_button", liveElement).fadeOut();
view.rubricCriteriaSelectors[newCriterionID].hasFeedback = false;
});
$(".openassessment_criterion_feedback_header_closed", liveElement).click(function(eventData) {
$(".openassessment_criterion_feedback_direction", liveElement).fadeIn();
$(".openassessment_criterion_feedback_header_open", liveElement).fadeIn();
$(".openassessment_criterion_feedback_header_closed", liveElement).fadeOut();
$(".openassessment_feedback_remove_button", liveElement).fadeIn();
view.rubricCriteriaSelectors[newCriterionID].hasFeedback = true;
});
$(".openassessment_criterion_feedback_header_closed", liveElement).hide();
},
removeCriterionFromRubric: function(criterionToRemove) {
var view = this;
var numCriteria = view.numberOfCriteria;
var selectors = view.rubricCriteriaSelectors;
for (var i = criterionToRemove; i < numCriteria; i++) {
selectors[i].name.prop("value", selectors[i + 1].name.prop("value"));
selectors[i].prompt.prop("value", selectors[i + 1].prompt.prop("value"));
selectors[i].feedback = selectors[i + 1].feedback;
$('input:radio[value="disabled"]', selectors[i].criterion).prop("checked", true);
while (view.numberOfOptions[i] < view.numberOfOptions[i + 1]) {
view.addNewOptionToCriterion(selectors[i].criteria, i);
}
while (view.numberOfOptions[i] > view.numberOfOptions[i + 1]) {
view.removeOptionFromCriterion(selectors[i].criteria, i, 1);
}
var options1 = selectors[i].options;
var options2 = selectors[i + 1].options;
var numOptions2 = view.numberOfOptions[i + 1];
for (var j = 1; j < numOptions2; j++) {
options1[j].points.prop("value", options2[j].points.prop("value"));
options1[j].name.prop("value", options2[j].name.prop("value"));
options1[j].explanation.prop("value", options2[j].explanation.prop("value"));
}
}
view.rubricCriteriaSelectors[view.rubricCriteriaSelectors.length - 1].criterion.remove();
view.rubricCriteriaSelectors = view.rubricCriteriaSelectors.slice(0, numCriteria);
view.numberOfOptions = view.numberOfOptions.slice(0, numCriteria);
view.numberOfCriteria -= 1;
},
addNewOptionToCriterion: function(liveElement, criterionID) {
var view = this;
var newOptionID = this.numberOfOptions[criterionID] + 1;
......@@ -317,42 +354,74 @@ OpenAssessment.StudioView.prototype = {
explanation: $("#openassessment_criterion_" + criterionID + "_option_" + newOptionID + "_explanation", liveElement)
};
$("#openassessment_criterion_" + criterionID + "_option_" + newOptionID + "_remove", liveElement).click(function(eventData) {
var numberOfOptions = view.numberOfOptions[criterionID];
var optionSelectors = view.rubricCriteriaSelectors[criterionID].options;
for (var i = newOptionID; i < numberOfOptions; i++) {
optionSelectors[i].points.prop("value", optionSelectors[i + 1].points.prop("value"));
optionSelectors[i].name.prop("value", optionSelectors[i + 1].name.prop("value"));
optionSelectors[i].explanation.prop("value", optionSelectors[i + 1].explanation.prop("value"));
}
optionSelectors[optionSelectors.length - 1].option.remove();
view.rubricCriteriaSelectors[criterionID].options = view.rubricCriteriaSelectors[criterionID].options.slice(0, optionSelectors.length - 1);
view.numberOfOptions[criterionID] -= 1;
view.removeOptionFromCriterion(liveElement, criterionID, newOptionID);
});
},
removeOptionFromCriterion: function(liveElement, criterionID, optionToRemove) {
var view = this;
var numberOfOptions = view.numberOfOptions[criterionID];
var optionSelectors = view.rubricCriteriaSelectors[criterionID].options;
for (var i = optionToRemove; i < numberOfOptions; i++) {
optionSelectors[i].points.prop("value", optionSelectors[i + 1].points.prop("value"));
optionSelectors[i].name.prop("value", optionSelectors[i + 1].name.prop("value"));
optionSelectors[i].explanation.prop("value", optionSelectors[i + 1].explanation.prop("value"));
}
optionSelectors[optionSelectors.length - 1].option.remove();
view.rubricCriteriaSelectors[criterionID].options = view.rubricCriteriaSelectors[criterionID].options.slice(0, optionSelectors.length - 1);
view.numberOfOptions[criterionID] -= 1;
},
updateEditorContext: function() {
this.runtime.notify("save", {
state: "start"
});
var prompt = this.promptBox.value;
var rubricXml = this.rubricXmlBox.getValue();
var title = this.titleField.value;
var subStart = this.submissionStartField.value;
var subDue = this.submissionDueField.value;
var prompt = this.settingsFieldSelectors.promptBox.prop("value");
var title = this.settingsFieldSelectors.titleField.prop("value");
var subStart = this.settingsFieldSelectors.submissionStartField.prop("value");
var subDue = this.settingsFieldSelectors.submissionDueField.prop("value");
var rubricCriteria = [];
for (var i = 1; i <= this.numberOfCriteria; i++) {
var selectorDict = this.rubricCriteriaSelectors[i];
var criterionValueDict = {
order_num: i - 1,
name: selectorDict.name.prop("value"),
prompt: selectorDict.prompt.prop("value"),
feedback: $('input[name="criterion_' + i + '_feedback"]:checked', selectorDict.criterion).val()
};
var optionSelectorList = selectorDict.options;
var optionValueList = [];
for (var j = 1; j <= this.numberOfOptions[i]; j++) {
var optionSelectors = optionSelectorList[j];
optionValueList = optionValueList.concat([ {
order_num: j - 1,
points: optionSelectors.points.prop("value"),
name: optionSelectors.name.prop("value"),
explanation: optionSelectors.explanation.prop("value")
} ]);
}
criterionValueDict.options = optionValueList;
rubricCriteria = rubricCriteria.concat([ criterionValueDict ]);
}
var rubric = {
criteria: rubricCriteria
};
if (this.hasRubricFeedbackPrompt) {
rubric.feedbackprompt = this.rubricFeedbackPrompt.prop("value");
}
var assessments = [];
if (this.hasTraining.prop("checked")) {
if (this.settingsFieldSelectors.hasTraining.prop("checked")) {
assessments[assessments.length] = {
name: "student-training",
examples: this.studentTrainingExamplesCodeBox.getValue()
};
}
if (this.hasPeer.prop("checked")) {
if (this.settingsFieldSelectors.hasPeer.prop("checked")) {
var assessment = {
name: "peer-assessment",
must_grade: parseInt(this.peerMustGrade.prop("value")),
must_be_graded_by: parseInt(this.peerGradedBy.prop("value"))
must_grade: parseInt(this.settingsFieldSelectors.peerMustGrade.prop("value")),
must_be_graded_by: parseInt(this.settingsFieldSelectors.peerGradedBy.prop("value"))
};
var startStr = this.peerStart.prop("value");
var dueStr = this.peerDue.prop("value");
var startStr = this.settingsFieldSelectors.peerStart.prop("value");
var dueStr = this.settingsFieldSelectors.peerDue.prop("value");
if (startStr) {
assessment = $.extend(assessment, {
start: startStr
......@@ -365,12 +434,12 @@ OpenAssessment.StudioView.prototype = {
}
assessments[assessments.length] = assessment;
}
if (this.hasSelf.prop("checked")) {
if (this.settingsFieldSelectors.hasSelf.prop("checked")) {
assessment = {
name: "self-assessment"
};
startStr = this.selfStart.prop("value");
dueStr = this.selfDue.prop("value");
startStr = this.settingsFieldSelectors.selfStart.prop("value");
dueStr = this.settingsFieldSelectors.selfDue.prop("value");
if (startStr) {
assessment = $.extend(assessment, {
start: startStr
......@@ -383,14 +452,14 @@ OpenAssessment.StudioView.prototype = {
}
assessments[assessments.length] = assessment;
}
if (this.hasAI.prop("checked")) {
if (this.settingsFieldSelectors.hasAI.prop("checked")) {
assessments[assessments.length] = {
name: "example-based-assessment",
examples: this.aiTrainingExamplesCodeBox.getValue()
};
}
var view = this;
this.server.updateEditorContext(prompt, rubricXml, title, subStart, subDue, assessments).done(function() {
this.server.updateEditorContext(prompt, rubric, title, subStart, subDue, assessments).done(function() {
view.runtime.notify("save", {
state: "end"
});
......@@ -1184,11 +1253,11 @@ OpenAssessment.Server.prototype = {
});
}).promise();
},
updateEditorContext: function(prompt, rubricXml, title, sub_start, sub_due, assessments) {
updateEditorContext: function(prompt, rubric, title, sub_start, sub_due, assessments) {
var url = this.url("update_editor_context");
var payload = JSON.stringify({
prompt: prompt,
rubric: rubricXml,
rubric: rubric,
title: title,
submission_start: sub_start,
submission_due: sub_due,
......
......@@ -13,7 +13,6 @@ describe("OpenAssessment.StudioView", function() {
this.loadError = false;
this.updateError = false;
this.promptBox = "";
this.rubricXmlBox = "";
this.titleField = "";
this.submissionStartField = "";
this.submissionDueField = "";
......@@ -36,13 +35,33 @@ describe("OpenAssessment.StudioView", function() {
this.isReleased = false;
this.rubric = {
prompt: 'This is the feedback prompt',
criteria: [
{
order_num: 0,
name: 'This is the criterion name',
prompt: 'this is the criterion prompt',
feedback: 'disabled',
options: [
{
order_num: 0,
name: 'Did real bad',
points: 0,
explanation: 'Showed as little effort as I did making this test case interesting.'
}
]
}
]
};
this.errorPromise = $.Deferred(function(defer) {
defer.rejectWith(this, ['Test error']);
}).promise();
this.loadEditorContext = function() {
var prompt = this.promptBox;
var rubric = this.rubricXmlBox;
var rubric = this.rubric;
var title = this.titleField;
var submission_start = this.submissionStartField;
var submission_due = this.submissionDueField;
......@@ -86,10 +105,10 @@ describe("OpenAssessment.StudioView", function() {
}
};
this.updateEditorContext = function(prompt, rubricXml, title, sub_start, sub_due, assessments) {
this.updateEditorContext = function(prompt, rubric, title, sub_start, sub_due, assessments) {
if (!this.updateError) {
this.promptBox = prompt;
this.rubricXmlBox = rubricXml;
this.rubric = rubric;
this.titleField = title;
this.submissionStartField = sub_start;
this.submissionDueField = sub_due;
......@@ -140,15 +159,32 @@ describe("OpenAssessment.StudioView", function() {
var view = null;
var prompt = "How much do you like waffles?";
var rubric =
"<rubric>" +
"<criterion>"+
"<name>Proper Appreciation of Gravity</name>"+
"<prompt>How much respect did the person give waffles?</prompt>"+
"<option points=\"0\"><name>No</name><explanation>Not enough</explanation></option>"+
"<option points=\"2\"><name>Yes</name><explanation>An appropriate Amount</explanation></option>"+
"</criterion>"+
"</rubric>";
var rubric = {
criteria: [
{
order_num: 0,
name: "Proper appreciation of Gravity",
prompt: "How much respect did the person give waffles?",
feedback: "disabled",
options: [
{
order_num: 0,
points: 0,
name: "No",
explanation: "Not enough"
},
{
order_num: 1,
points: 2,
name: "Yes",
explanation: "An appropriate Amount"
}
]
}
]
};
var title = "The most important of all questions.";
var subStart = "";
var subDue = "2014-10-1T10:00:00";
......@@ -207,16 +243,14 @@ describe("OpenAssessment.StudioView", function() {
view.load();
// Expect that the XML definition(s) were loaded
var rubric = view.rubricXmlBox.getValue();
var prompt = view.promptBox.value;
var prompt = view.settingsFieldSelectors.promptBox.prop('value');
expect(prompt).toEqual('');
expect(rubric).toEqual('');
});
it("saves the Editor Context definition", function() {
// Update the Context
view.titleField.value = 'THIS IS THE NEW TITLE';
view.settingsFieldSelectors.titleField.prop('value', 'THIS IS THE NEW TITLE');
// Save the updated editor definition
view.save();
......@@ -249,25 +283,24 @@ describe("OpenAssessment.StudioView", function() {
server.updateEditorContext(prompt, rubric, title, subStart, subDue, assessments);
view.load();
expect(view.promptBox.value).toEqual(prompt);
expect(view.rubricXmlBox.getValue()).toEqual(rubric);
expect(view.titleField.value).toEqual(title);
expect(view.submissionStartField.value).toEqual(subStart);
expect(view.submissionDueField.value).toEqual(subDue);
expect(view.hasPeer.prop('checked')).toEqual(true);
expect(view.hasSelf.prop('checked')).toEqual(true);
expect(view.hasAI.prop('checked')).toEqual(false);
expect(view.hasTraining.prop('checked')).toEqual(true);
expect(view.peerMustGrade.prop('value')).toEqual('5');
expect(view.peerGradedBy.prop('value')).toEqual('3');
expect(view.peerDue.prop('value')).toEqual("");
expect(view.selfStart.prop('value')).toEqual("");
expect(view.selfDue.prop('value')).toEqual("");
expect(view.settingsFieldSelectors.promptBox.prop('value')).toEqual(prompt);
expect(view.settingsFieldSelectors.titleField.prop('value')).toEqual(title);
expect(view.settingsFieldSelectors.submissionStartField.prop('value')).toEqual(subStart);
expect(view.settingsFieldSelectors.submissionDueField.prop('value')).toEqual(subDue);
expect(view.settingsFieldSelectors.hasPeer.prop('checked')).toEqual(true);
expect(view.settingsFieldSelectors.hasSelf.prop('checked')).toEqual(true);
expect(view.settingsFieldSelectors.hasAI.prop('checked')).toEqual(false);
expect(view.settingsFieldSelectors.hasTraining.prop('checked')).toEqual(true);
expect(view.settingsFieldSelectors.peerMustGrade.prop('value')).toEqual('5');
expect(view.settingsFieldSelectors.peerGradedBy.prop('value')).toEqual('3');
expect(view.settingsFieldSelectors.peerDue.prop('value')).toEqual("");
expect(view.settingsFieldSelectors.selfStart.prop('value')).toEqual("");
expect(view.settingsFieldSelectors.selfDue.prop('value')).toEqual("");
expect(view.aiTrainingExamplesCodeBox.getValue()).toEqual("");
expect(view.studentTrainingExamplesCodeBox.getValue()).toEqual(assessments[0].examples);
expect(view.peerStart.prop('value')).toEqual("2014-10-04T00:00:00");
expect(view.settingsFieldSelectors.peerStart.prop('value')).toEqual("2014-10-04T00:00:00");
view.titleField.value = "This is the new title.";
view.settingsFieldSelectors.titleField.prop('value', "This is the new title.");
view.updateEditorContext();
expect(server.titleField).toEqual("This is the new title.");
......
......@@ -15,37 +15,27 @@ OpenAssessment.StudioView = function(runtime, element, server) {
this.runtime = runtime;
this.server = server;
//Instantiates JQuery variables which will allow manipulation and display controls.
var liveElement = $(element);
this.promptBox = $('#openassessment_prompt_editor', liveElement).get(0);
this.titleField = $('#openassessment_title_editor', liveElement).first().get(0);
this.submissionStartField = $('#openassessment_submission_start_editor', liveElement).first().get(0);
this.submissionDueField = $('#openassessment_submission_due_editor', liveElement).first().get(0);
// Finds our boolean checkboxes that indicate the assessment definition
this.hasPeer = $('#include_peer_assessment', liveElement);
this.hasSelf = $('#include_self_assessment', liveElement);
this.hasAI = $('#include_ai_assessment', liveElement);
this.hasTraining = $('#include_student_training', liveElement);
this.peerMustGrade = $('#peer_assessment_must_grade', liveElement);
this.peerGradedBy = $('#peer_assessment_graded_by', liveElement);
this.peerStart = $('#peer_assessment_start_date', liveElement);
this.peerDue = $('#peer_assessment_due_date', liveElement);
this.selfStart = $('#self_assessment_start_date', liveElement);
this.selfDue = $('#self_assessment_due_date', liveElement);
//Instantiates our codemirror codeboxes
this.rubricXmlBox = CodeMirror.fromTextArea(
$('#openassessment_rubric_editor', liveElement).first().get(0),
{mode: "xml", lineNumbers: true, lineWrapping: true}
);
this.liveElement = $(element);
var liveElement = this.liveElement;
// Instantiates JQuery selector variables which will allow manipulation and display controls.
this.settingsFieldSelectors = {
promptBox: $('#openassessment_prompt_editor', liveElement),
titleField: $('#openassessment_title_editor', liveElement),
submissionStartField: $('#openassessment_submission_start_editor', liveElement),
submissionDueField: $('#openassessment_submission_due_editor', liveElement),
hasPeer: $('#include_peer_assessment', liveElement),
hasSelf: $('#include_self_assessment', liveElement),
hasAI: $('#include_ai_assessment', liveElement),
hasTraining: $('#include_student_training', liveElement),
peerMustGrade: $('#peer_assessment_must_grade', liveElement),
peerGradedBy: $('#peer_assessment_graded_by', liveElement),
peerStart: $('#peer_assessment_start_date', liveElement),
peerDue: $('#peer_assessment_due_date', liveElement),
selfStart: $('#self_assessment_start_date', liveElement),
selfDue: $('#self_assessment_due_date', liveElement)
};
this.aiTrainingExamplesCodeBox = CodeMirror.fromTextArea(
$('#ai_training_examples', liveElement).first().get(0),
......@@ -57,7 +47,7 @@ OpenAssessment.StudioView = function(runtime, element, server) {
{mode: "xml", lineNumbers: true, lineWrapping: true}
);
// Caputres the HTML definition of the original criterion element. This will be the template
// Captures the HTML definition of the original criterion element. This will be the template
// used for all other criterion creations
var criterionBodyHtml = $("#openassessment_criterion_1", liveElement).html();
// Adds the wrapping LI tag which is not captured by the find element.
......@@ -72,82 +62,62 @@ OpenAssessment.StudioView = function(runtime, element, server) {
var optionBodyHtml = $("#openassessment_criterion_1_option_1", liveElement).html();
var optionHtml = '<li id=openassessment_criterion_1_option_1 class="openassessment_criterion_option">' +
optionBodyHtml + '</li>';
var criterionsReplaced = optionHtml.replace(new RegExp("criterion_1", "g"), "criterion_C-C-C");
this.optionHtmlTemplate = criterionsReplaced.replace(new RegExp("option_1", "g"), "option_O-O-O");
var criteriaReplaced = optionHtml.replace(new RegExp("criterion_1", "g"), "criterion_C-C-C");
this.optionHtmlTemplate = criteriaReplaced.replace(new RegExp("option_1", "g"), "option_O-O-O");
// Start us off with an empty setup, and uses the adding method to add a critera (which in turn will
// add an option. This design choice was made to ensure consistent practices in adding and removing,
// the logic of which is all maintained in the function call.
// Start us off with an empty setup, and uses the adding method to add a criteria (which in turn will
// add an option). This design choice was made to ensure consistent practices in adding and removing,
// the logic of which is all maintained in the function calls.
this.numberOfCriteria = 0;
this.numberOfOptions = [];
this.rubricCriteriaSelectors = [];
this.rubricFeedbackPrompt = $('#openassessment_rubric_feedback', liveElement);
this.hasRubricFeedbackPrompt = true;
$('#openassessment_criterion_list', liveElement).empty();
this.addNewCriterionToRubric(liveElement);
this.addNewCriterionToRubric();
// Install click handlers
var view = this;
$('.openassessment_save_button', liveElement) .click(
function (eventData) {
// Installs the save and cancel buttons
$('.openassessment_save_button', liveElement) .click( function (eventData) {
view.save();
});
});
$('.openassessment_cancel_button', liveElement) .click(
function (eventData) {
$('.openassessment_cancel_button', liveElement) .click( function (eventData) {
view.cancel();
});
$('.openassessment_editor_content_and_tabs', liveElement) .tabs({
activate: function (event, ui){
view.rubricXmlBox.refresh();
}
});
$('#include_peer_assessment', liveElement) .change(function () {
if (this.checked){
$("#peer_assessment_description_closed", liveElement).fadeOut('fast');
$("#peer_assessment_settings_editor", liveElement).fadeIn();
} else {
$("#peer_assessment_settings_editor", liveElement).fadeOut('fast');
$("#peer_assessment_description_closed", liveElement).fadeIn();
}
});
// Adds the tabbing functionality
$('.openassessment_editor_content_and_tabs', liveElement) .tabs();
$('#include_self_assessment', liveElement) .change(function () {
if (this.checked){
$("#self_assessment_description_closed", liveElement).fadeOut('fast');
$("#self_assessment_settings_editor", liveElement).fadeIn();
} else {
$("#self_assessment_settings_editor", liveElement).fadeOut('fast');
$("#self_assessment_description_closed", liveElement).fadeIn();
}
// Installs all of the checkbox listeners in the settings tab
view.addSettingsAssessmentCheckboxListener("ai_assessment", liveElement);
view.addSettingsAssessmentCheckboxListener("self_assessment", liveElement);
view.addSettingsAssessmentCheckboxListener("peer_assessment", liveElement);
view.addSettingsAssessmentCheckboxListener("student_training", liveElement);
$('#openassessment_rubric_add_criterion', liveElement) .click( function (eventData) {
view.addNewCriterionToRubric(liveElement);
});
$('#include_ai_assessment', liveElement) .change(function () {
if (this.checked){
$("#ai_assessment_description_closed", liveElement).fadeOut('fast');
$("#ai_assessment_settings_editor", liveElement).fadeIn();
} else {
$("#ai_assessment_settings_editor", liveElement).fadeOut('fast');
$("#ai_assessment_description_closed", liveElement).fadeIn();
}
// Adds a listener which removes rubric feedback
$("#openassessment_rubric_feedback_remove", liveElement). click( function(eventData){
$("#openassessment_rubric_feedback_header_open", liveElement).fadeOut();
$("#openassessment_rubric_feedback_input_wrapper", liveElement).fadeOut();
$("#openassessment_rubric_feedback_header_closed", liveElement).fadeIn();
view.hasRubricFeedbackPrompt = false;
});
$('#include_student_training', liveElement) .change(function () {
if (this.checked){
$("#student_training_description_closed", liveElement).fadeOut('fast');
$("#student_training_settings_editor", liveElement).fadeIn();
} else {
$("#student_training_settings_editor", liveElement).fadeOut('fast');
$("#student_training_description_closed", liveElement).fadeIn();
}
// Adds a listener which adds rubric feedback if not already displayed.
$("#openassessment_rubric_feedback_header_closed", liveElement). click( function(eventData){
$("#openassessment_rubric_feedback_header_closed", liveElement).fadeOut();
$("#openassessment_rubric_feedback_header_open", liveElement).fadeIn();
$("#openassessment_rubric_feedback_input_wrapper", liveElement).fadeIn();
view.hasRubricFeedbackPrompt = true;
});
$('#openassessment_rubric_add_criterion', liveElement).click(
function (eventData) {
view.addNewCriterionToRubric(liveElement);
}
);
// Initially Hides the rubric "add rubric feedback" div
$("#openassessment_rubric_feedback_header_closed", liveElement).hide();
};
......@@ -159,36 +129,80 @@ OpenAssessment.StudioView.prototype = {
load: function () {
var view = this;
this.server.loadEditorContext().done(
function (prompt, rubricXml, title, subStart, subDue, assessments) {
view.rubricXmlBox.setValue(rubricXml);
view.submissionStartField.value = subStart;
view.submissionDueField.value = subDue;
view.promptBox.value = prompt;
view.titleField.value = title;
view.hasTraining.prop('checked', false).change();
view.hasPeer.prop('checked', false).change();
view.hasSelf.prop('checked', false).change();
view.hasAI.prop('checked', false).change();
function (prompt, rubric, title, subStart, subDue, assessments) {
view.settingsFieldSelectors.submissionStartField.prop('value', subStart);
view.settingsFieldSelectors.submissionDueField.prop('value', subDue);
view.settingsFieldSelectors.promptBox.prop('value', prompt);
view.settingsFieldSelectors.titleField.prop('value', title);
view.settingsFieldSelectors.hasTraining.prop('checked', false).change();
view.settingsFieldSelectors.hasPeer.prop('checked', false).change();
view.settingsFieldSelectors.hasSelf.prop('checked', false).change();
view.settingsFieldSelectors.hasAI.prop('checked', false).change();
for (var i = 0; i < assessments.length; i++) {
var assessment = assessments[i];
if (assessment.name == 'peer-assessment') {
view.peerMustGrade.prop('value', assessment.must_grade);
view.peerGradedBy.prop('value', assessment.must_be_graded_by);
view.peerStart.prop('value', assessment.start);
view.peerDue.prop('value', assessment.due);
view.hasPeer.prop('checked', true).change();
view.settingsFieldSelectors.peerMustGrade.prop('value', assessment.must_grade);
view.settingsFieldSelectors.peerGradedBy.prop('value', assessment.must_be_graded_by);
view.settingsFieldSelectors.peerStart.prop('value', assessment.start);
view.settingsFieldSelectors.peerDue.prop('value', assessment.due);
view.settingsFieldSelectors.hasPeer.prop('checked', true).change();
} else if (assessment.name == 'self-assessment') {
view.selfStart.prop('value', assessment.start);
view.selfDue.prop('value', assessment.due);
view.hasSelf.prop('checked', true).change();
view.settingsFieldSelectors.selfStart.prop('value', assessment.start);
view.settingsFieldSelectors.selfDue.prop('value', assessment.due);
view.settingsFieldSelectors.hasSelf.prop('checked', true).change();
} else if (assessment.name == 'example-based-assessment') {
view.aiTrainingExamplesCodeBox.setValue(assessment.examples);
view.hasAI.prop('checked', true).change();
view.settingsFieldSelectors.aiTrainingExamplesCodeBox.setValue(assessment.examples);
view.settingsFieldSelectors.hasAI.prop('checked', true).change();
} else if (assessment.name == 'student-training') {
view.studentTrainingExamplesCodeBox.setValue(assessment.examples);
view.hasTraining.prop('checked', true).change();
view.settingsFieldSelectors.hasTraining.prop('checked', true).change();
}
}
// Corrects the length of the number of criteria
while(view.numberOfCriteria < rubric.criteria.length){
view.addNewCriterionToRubric();
}
while(view.numberOfCriteria > rubric.criteria.length){
view.removeCriterionFromRubric(1)
}
// Corrects the number of options in each criterion
for (i = 0; i < rubric.criteria.length; i++){
while(view.numberOfOptions[i+1] < rubric.criteria[i].options.length){
view.addNewOptionToCriterion(view.liveElement, i+1);
}
while(view.numberOfOptions[i+1] > rubric.criteria[i].options.length){
view.removeOptionFromCriterion(view.liveElement, i+1, 1);
}
}
// Inserts the data from the rubric into the GUI's fields
for (i = 0; i < rubric.criteria.length; i++){
var criterion = rubric.criteria[i];
var selectors = view.rubricCriteriaSelectors[i+1];
// Transfers the Criteria Fields
selectors.name.prop('value', criterion.name);
selectors.prompt.prop('value', criterion.prompt);
selectors.feedback = criterion.feedback;
for (var j = 0; j < criterion.options.length; j++){
var option = criterion.options[j];
var optionSelectors = selectors.options[j+1];
// Transfers all of the option data.
optionSelectors.name.prop('value', option.name);
optionSelectors.points.prop('value', option.points);
optionSelectors.explanation.prop('value', option.explanation);
}
}
if (rubric.feedbackprompt){
view.rubricFeedbackPrompt.prop('value', rubric.feedbackprompt);
view.hasRubricFeedbackPrompt = true;
} else {
view.rubricFeedbackPrompt.prop('value', "");
view.hasRubricFeedbackPrompt = false;
}
}).fail(function (msg) {
view.showError(msg);
}
......@@ -219,6 +233,23 @@ OpenAssessment.StudioView.prototype = {
},
/**
* A simple helper method which constructs checkbox listeners for all of our assessment modules
* @param name name of assessment module to install listener on
* @param liveElement the live DOM selector
*/
addSettingsAssessmentCheckboxListener: function (name, liveElement) {
$("#include_" + name , liveElement) .change(function () {
if (this.checked){
$("#" + name + "_description_closed", liveElement).fadeOut('fast');
$("#" + name + "_settings_editor", liveElement).fadeIn();
} else {
$("#" + name + "_settings_editor", liveElement).fadeOut('fast');
$("#" + name + "_description_closed", liveElement).fadeIn();
}
});
},
/**
Make the user confirm that he/she wants to update a problem
that has already been released.
......@@ -238,9 +269,10 @@ OpenAssessment.StudioView.prototype = {
Initializes a new criterion for the rubric. Has multiple elements. This block of code dictates
the methodology that we add and remove rubric criteria
*/
addNewCriterionToRubric: function (liveElement){
addNewCriterionToRubric: function (){
var view = this;
var liveElement = this.liveElement;
// Always appends the new criterion to the end of the list, and we force linear ordering.
var newCriterionID = this.numberOfCriteria + 1;
this.numberOfCriteria += 1;
......@@ -262,9 +294,12 @@ OpenAssessment.StudioView.prototype = {
name: $('.openassessment_criterion_name', liveElement).first(),
prompt: $('.openassessment_criterion_prompt', liveElement).first(),
options: [],
feedback: $('.openassessment_criterion_feedbac', liveElement).first()
feedback: 'disabled'
};
// Defaults to no feedback
$('input:radio[value=disabled]', liveElement).prop('checked', true);
view.addNewOptionToCriterion(liveElement, newCriterionID);
// Adds a listener that will collapse/expand the criterion on click.
......@@ -278,40 +313,12 @@ OpenAssessment.StudioView.prototype = {
// Adds a listener which will delete the criterion on a click of the remove button
// The methodology for deletion is to shift all information from previous elements down into
$("#openassessment_criterion_" + newCriterionID + "_remove", liveElement) .click( function(eventData) {
var numCriteria = view.numberOfCriteria;
var selectors = view.rubricCriteriaSelectors;
// shifts all data from "higher up" criterions down one in order to allow us to delete the last
// element without deleting information input by the user
for (var i = newCriterionID; i < numCriteria; i++){
selectors[i].name.prop('value', selectors[i+1].name.prop('value'));
selectors[i].prompt.prop('value', selectors[i+1].prompt.prop('value'));
selectors[i].feedback.prop('value', selectors[i+1].feedback.prop('value'));
var options1 = selectors[i].options;
var options2 = selectors[i].options;
var numOptions = view.numberOfOptions[i+1];
for (var j = 1; j < numOptions; j++){
options1[j].points.prop('value', options2[j].points.prop('value'));
options1[j].name.prop('value', options2[j].name.prop('value'));
options1[j].explanation.prop('value', options2[j].explanation.prop('value'));
}
}
// Physically removes the rubric criteria from the DOM
view.rubricCriteriaSelectors[view.rubricCriteriaSelectors.length].criterion.remove();
// Deletes the criteria from our three tracking statistics/structures
view.rubricCriteriaSelectors = view.rubricCriteriaSelectors.slice(0,numCriteria);
view.numberOfOptions = view.numberOfOptions.slice(0, numCriteria);
view.numberOfCriteria -= 1;
$("#openassessment_criterion_" + newCriterionID + "_remove", liveElement) .click( function (eventData) {
view.removeCriterionFromRubric(newCriterionID);
});
// Adds a listener which will add another option to the Criterion's definition.
$("#openassessment_criterion_" + newCriterionID + "_add_option", liveElement).click( function(eventData){
$("#openassessment_criterion_" + newCriterionID + "_add_option", liveElement).click( function (eventData) {
view.addNewOptionToCriterion(liveElement, newCriterionID);
});
......@@ -321,6 +328,7 @@ OpenAssessment.StudioView.prototype = {
$(".openassessment_criterion_feedback_header_open", liveElement).fadeOut();
$(".openassessment_criterion_feedback_header_closed", liveElement).fadeIn();
$(".openassessment_feedback_remove_button", liveElement).fadeOut();
view.rubricCriteriaSelectors[newCriterionID].hasFeedback = false;
});
// Adds a listener which adds criterion feedback if not already displayed.
......@@ -329,6 +337,7 @@ OpenAssessment.StudioView.prototype = {
$(".openassessment_criterion_feedback_header_open", liveElement).fadeIn();
$(".openassessment_criterion_feedback_header_closed", liveElement).fadeOut();
$(".openassessment_feedback_remove_button", liveElement).fadeIn();
view.rubricCriteriaSelectors[newCriterionID].hasFeedback = true;
});
// Hides the criterion header used for adding
......@@ -337,6 +346,56 @@ OpenAssessment.StudioView.prototype = {
},
/**
* Removes a specified criterion from the problem's rubric definition. Changes are made in the DOM,
* in support/control structures.
* @param criterionToRemove
*/
removeCriterionFromRubric: function(criterionToRemove){
var view = this;
var numCriteria = view.numberOfCriteria;
var selectors = view.rubricCriteriaSelectors;
// Shifts all data from "higher up" criteria down one in order to allow us to delete the last
// element without deleting information input by the user
for (var i = criterionToRemove; i < numCriteria; i++){
// Shifts all criterion field values
selectors[i].name.prop('value', selectors[i+1].name.prop('value'));
selectors[i].prompt.prop('value', selectors[i+1].prompt.prop('value'));
selectors[i].feedback = selectors[i+1].feedback;
$('input:radio[value="disabled"]', selectors[i].criterion).prop('checked', true);
// Ensures that we won't delete information during the shift by ensuring that the option lists are of the
//same length. Note it doesn't matter what we add or delete, simply that the lengths add up.
while (view.numberOfOptions[i] < view.numberOfOptions[i+1]){
view.addNewOptionToCriterion(selectors[i].criteria, i);
}
while (view.numberOfOptions[i] > view.numberOfOptions[i+1]){
view.removeOptionFromCriterion(selectors[i].criteria, i, 1);
}
// Transfers all data from each option to the next within a criterion.
var options1 = selectors[i].options;
var options2 = selectors[i+1].options;
var numOptions2 = view.numberOfOptions[i+1];
for (var j = 1; j < numOptions2; j++){
options1[j].points.prop('value', options2[j].points.prop('value'));
options1[j].name.prop('value', options2[j].name.prop('value'));
options1[j].explanation.prop('value', options2[j].explanation.prop('value'));
}
}
// Physically removes the rubric criteria from the DOM
view.rubricCriteriaSelectors[view.rubricCriteriaSelectors.length - 1].criterion.remove();
// Deletes the criteria from our three tracking statistics/structures
view.rubricCriteriaSelectors = view.rubricCriteriaSelectors.slice(0,numCriteria);
view.numberOfOptions = view.numberOfOptions.slice(0, numCriteria);
view.numberOfCriteria -= 1;
},
/**
* Initializes a new option for a given criterion. This code block dictates the methodology for
* adding and removing options to a rubric.
* @param liveElement A selector representing the current state of the Criterion DOM
......@@ -372,28 +431,41 @@ OpenAssessment.StudioView.prototype = {
// are always increasing by one, and that the data doesn't remain tethered to where it was entered.
$("#openassessment_criterion_" + criterionID + "_option_" + newOptionID + "_remove", liveElement).click(
function(eventData){
var numberOfOptions = view.numberOfOptions[criterionID];
var optionSelectors = view.rubricCriteriaSelectors[criterionID].options;
// Shifts all data down, then deletes the last element, to create the appearance we deleted the given
// elements.
for (var i = newOptionID; i < numberOfOptions; i++){
// Utilizes stored selectors to perform the swaps.
optionSelectors[i].points.prop('value', optionSelectors[i+1].points.prop('value'));
optionSelectors[i].name.prop('value', optionSelectors[i+1].name.prop('value'));
optionSelectors[i].explanation.prop('value', optionSelectors[i+1].explanation.prop('value'));
}
optionSelectors[optionSelectors.length - 1].option.remove();
view.rubricCriteriaSelectors[criterionID].options =
view.rubricCriteriaSelectors[criterionID].options.slice(0, (optionSelectors.length - 1));
view.numberOfOptions[criterionID] -= 1;
view.removeOptionFromCriterion(liveElement, criterionID, newOptionID);
}
)
},
/**
* Removes a specified element from the DOM and from all tracking data. Note that no action is
* taken against the specified element, rather, data is shifted down the chain (to construct the
* illusion that the specified element was deleted), and then the last element is actually deleted.
* @param liveElement A selector for the criterion that we are deleting from
* @param criterionID The criterion ID that we are deleting from
* @param optionToRemove The option ID (discriminator really) that we are "deleting"
*/
removeOptionFromCriterion: function(liveElement, criterionID, optionToRemove){
var view = this;
var numberOfOptions = view.numberOfOptions[criterionID];
var optionSelectors = view.rubricCriteriaSelectors[criterionID].options;
// Shifts all data down, then deletes the last element, to create the appearance we deleted the given
// elements.
for (var i = optionToRemove; i < numberOfOptions; i++){
// Utilizes stored selectors to perform the swaps.
optionSelectors[i].points.prop('value', optionSelectors[i+1].points.prop('value'));
optionSelectors[i].name.prop('value', optionSelectors[i+1].name.prop('value'));
optionSelectors[i].explanation.prop('value', optionSelectors[i+1].explanation.prop('value'));
}
optionSelectors[optionSelectors.length - 1].option.remove();
view.rubricCriteriaSelectors[criterionID].options =
view.rubricCriteriaSelectors[criterionID].options.slice(0, (optionSelectors.length - 1));
view.numberOfOptions[criterionID] -= 1;
},
/**
Save the updated XML definition to the server.
**/
updateEditorContext: function () {
......@@ -402,29 +474,61 @@ OpenAssessment.StudioView.prototype = {
this.runtime.notify('save', {state: 'start'});
// Send the updated XML to the server
var prompt = this.promptBox.value;
var rubricXml = this.rubricXmlBox.getValue();
var title = this.titleField.value;
var subStart = this.submissionStartField.value;
var subDue = this.submissionDueField.value;
var prompt = this.settingsFieldSelectors.promptBox.prop('value');
var title = this.settingsFieldSelectors.titleField.prop('value');
var subStart = this.settingsFieldSelectors.submissionStartField.prop('value');
var subDue = this.settingsFieldSelectors.submissionDueField.prop('value');
// Grabs values from all of our fields, and stores them in a format which can be easily validated.
var rubricCriteria = [];
for (var i = 1; i <= this.numberOfCriteria; i++){
var selectorDict = this.rubricCriteriaSelectors[i];
var criterionValueDict = {
order_num: i - 1,
name: selectorDict.name.prop('value'),
prompt: selectorDict.prompt.prop('value'),
feedback: $('input[name="criterion_'+ i +'_feedback"]:checked', selectorDict.criterion).val()
};
var optionSelectorList = selectorDict.options;
var optionValueList = [];
for (var j = 1; j <= this.numberOfOptions[i]; j++){
var optionSelectors = optionSelectorList[j];
optionValueList = optionValueList.concat([{
order_num: j-1,
points: optionSelectors.points.prop('value'),
name: optionSelectors.name.prop('value'),
explanation: optionSelectors.explanation.prop('value')
}]);
}
criterionValueDict.options = optionValueList;
rubricCriteria = rubricCriteria.concat([criterionValueDict]);
}
var rubric = { 'criteria': rubricCriteria };
if (this.hasRubricFeedbackPrompt){
rubric.feedbackprompt = this.rubricFeedbackPrompt.prop('value');
}
var assessments = [];
if (this.hasTraining.prop('checked')){
if (this.settingsFieldSelectors.hasTraining.prop('checked')){
assessments[assessments.length] = {
"name": "student-training",
"examples": this.studentTrainingExamplesCodeBox.getValue()
};
}
if (this.hasPeer.prop('checked')) {
if (this.settingsFieldSelectors.hasPeer.prop('checked')) {
var assessment = {
"name": "peer-assessment",
"must_grade": parseInt(this.peerMustGrade.prop('value')),
"must_be_graded_by": parseInt(this.peerGradedBy.prop('value'))
"must_grade": parseInt(this.settingsFieldSelectors.peerMustGrade.prop('value')),
"must_be_graded_by": parseInt(this.settingsFieldSelectors.peerGradedBy.prop('value'))
};
var startStr = this.peerStart.prop('value');
var dueStr = this.peerDue.prop('value');
var startStr = this.settingsFieldSelectors.peerStart.prop('value');
var dueStr = this.settingsFieldSelectors.peerDue.prop('value');
if (startStr){
assessment = $.extend(assessment, {"start": startStr})
}
......@@ -434,12 +538,12 @@ OpenAssessment.StudioView.prototype = {
assessments[assessments.length] = assessment;
}
if (this.hasSelf.prop('checked')) {
if (this.settingsFieldSelectors.hasSelf.prop('checked')) {
assessment = {
"name": "self-assessment"
};
startStr = this.selfStart.prop('value');
dueStr = this.selfDue.prop('value');
startStr = this.settingsFieldSelectors.selfStart.prop('value');
dueStr = this.settingsFieldSelectors.selfDue.prop('value');
if (startStr){
assessment = $.extend(assessment, {"start": startStr})
}
......@@ -449,7 +553,7 @@ OpenAssessment.StudioView.prototype = {
assessments[assessments.length] = assessment;
}
if (this.hasAI.prop('checked')) {
if (this.settingsFieldSelectors.hasAI.prop('checked')) {
assessments[assessments.length] = {
"name": "example-based-assessment",
"examples": this.aiTrainingExamplesCodeBox.getValue()
......@@ -457,7 +561,7 @@ OpenAssessment.StudioView.prototype = {
}
var view = this;
this.server.updateEditorContext(prompt, rubricXml, title, subStart, subDue, assessments).done(function () {
this.server.updateEditorContext(prompt, rubric, title, subStart, subDue, assessments).done(function () {
// Notify the client-side runtime that we finished saving
// so it can hide the "Saving..." notification.
view.runtime.notify('save', {state: 'end'});
......
......@@ -438,11 +438,11 @@ OpenAssessment.Server.prototype = {
function(err) { console.log(err); }
);
**/
updateEditorContext: function(prompt, rubricXml, title, sub_start, sub_due, assessments) {
updateEditorContext: function(prompt, rubric, title, sub_start, sub_due, assessments) {
var url = this.url('update_editor_context');
var payload = JSON.stringify({
'prompt': prompt,
'rubric': rubricXml,
'rubric': rubric,
'title': title,
'submission_start': sub_start,
'submission_due': sub_due,
......
......@@ -207,9 +207,10 @@
.oa_editor_content_wrapper {
height: 100%;
width: 100%;
border-radius: 4px;
border: 1px solid $edx-gray-d3;
border-radius: 3px;
border: 1px solid $edx-gray-d1;
background-color: #f5f5f5;
overflow-y: scroll;
}
#openassessment_prompt_editor {
......@@ -219,17 +220,25 @@
border: none;
border-radius: 4px;
padding: 10px;
textarea{
font-size: 14px;
border: none;
overflow: auto;
outline: none;
-webkit-box-shadow: none;
-moz-box-shadow: none;
box-shadow: none;
}
}
#openassessment_rubric_editor {
width: 100%;
height: 100%;
display: none;
}
#oa_basic_settings_editor {
padding: 20px 20px;
border-bottom: 1px solid $edx-gray-d3;
border-bottom: 1px solid $edx-gray-d1;
#openassessment_title_editor_wrapper{
label{
width: 25%;
......@@ -243,31 +252,17 @@
}
#openassessment_step_select_description{
padding: 10px;
padding: 10px 10px 0 10px;
text-align: center;
font-size: 80%;
}
.openassessment_assessment_module_settings_editor{
margin-bottom: 10px;
padding-bottom: 10px;
border-bottom: 1px solid $edx-gray-l3;
}
.openassessment_indent_line_input{
padding: 5px 20px;
}
#oa_settings_editor_wrapper {
padding: 0 10px;
overflow-y: scroll;
}
#oa_rubric_editor_wrapper{
overflow-y: scroll;
}
#openassessment_title_editor {
width: 300px;
margin-left: 50px;
padding: 5px;
margin: 10px;
border: 1px solid lightgray;
border-radius: 3px;
}
.openassessment_description{
......@@ -275,46 +270,12 @@
margin: 0;
}
.openassessment_date_field{
width: 130px;
}
.openassessment_number_field{
width: 25px;
}
.openassessment_peer_fixed_width{
width: 45%;
display: inline-block;
}
.openassessment_description_closed{
@extend .openassessment_description;
}
.openassessment_text_field_wrapper{
width: 50%;
text-align: center;
}
.openassessment_right_text_field_wrapper {
@extend .openassessment_text_field_wrapper;
float: right;
}
.openassessment_left_text_field_wrapper {
@extend .openassessment_text_field_wrapper;
float: left;
}
.openassessment_due_date_editor{
height: 30px;
}
.openassessment_inclusion_wrapper{
background-color: $edx-gray-l3;
padding: ($baseline-v/8) ($baseline-h/8);
margin: ($baseline-v/8) ($baseline-h/8);
border-radius: ($baseline-v)/8;
margin: 2.5px 5px;
input[type="checkbox"]{
display: none;
......@@ -331,6 +292,7 @@
input[type="checkbox"]:checked + label:before{
content: "\f046";
color: #009fe6;
}
}
......@@ -364,16 +326,19 @@
}
}
hr {
background-color: transparent;
color: $edx-gray-d3;
height: 1px;
border: 0px;
clear: both;
.openassessment_assessment_module_editor{
padding: 2.5px 0px;
.openassessment_description{
padding-left: 15px;
}
}
#oa_rubric_editor_wrapper{
.wrapper-comp-settings{
display: initial;
}
#openassessment_rubric_instructions{
background-color: $edx-gray-l2;
border-bottom: 1px solid $edx-gray-d3;
......@@ -381,20 +346,19 @@
}
.openassessment_criterion {
border: 1px dashed $edx-gray-l3;
margin: 5px;
padding-bottom: 10px;
.openassessment_criterion_header {
margin: 10px;
padding: 5px;
border-bottom: 1px solid $edx-gray-d3;
overflow: auto;
input[type="checkbox"] {
display: none;
}
input[type="checkbox"] + label:before {
input[type="checkbox"] + h6:before {
font-family: "FontAwesome";
display: inline-block;
margin-right: ($baseline-h/4);
......@@ -408,8 +372,10 @@
}
.openassessment_criterion_header_title {
font-size: 125%;
text-transform: uppercase;
width: 50%;
display: inline-block;
float: left;
}
.openassessment_criterion_header_remove {
......@@ -430,6 +396,33 @@
border-radius: 3px;
}
.openassessment_criterion_basic_editor{
.comp-setting-entry{
padding-right: 0;
margin-right: 10px;
overflow: auto;
.wrapper-comp-settings{
input{
font-size: 11px;
float: right;
width: 70%
}
.openassessment_criterion_prompt{
padding: 10px;
@extend .openassessment_large_text_input;
width: 70%;
float: right;
}
label{
padding: 0;
margin: 0;
}
}
}
}
.openassessment_criterion_feedback_wrapper{
.openassessment_criterion_feedback_header {
......@@ -463,7 +456,7 @@
.openassessment_option_header{
background-color: $edx-gray-l2;
padding: 5px;
padding: 5px 5px 5px 10px;
margin: 5px 5px 8px 5px;
border-radius: 3px;
......@@ -473,44 +466,62 @@
}
.openassessment_criterion_option_point_wrapper{
width: 30%;
width: 40%;
border-top: none;
padding: 5px 5px 5px 0px;
float: left;
padding: 0 10px;
margin: 0;
label{
width: 62.5%
width: 40%;
vertical-align: middle;
padding: 0;
margin: 0;
}
input{
width: 40px;
@extend .openassessment_input_styling
padding: 10px;
float: right;
width: 55%;
font-size: 11px;
}
}
.openassessment_criterion_option_name_wrapper{
width: 70%;
float: right;
float: left;
width: 60%;
padding: 5px 10px 5px 20px;
border-top: 0;
margin: 0;
label{
width: 40%;
padding-right: 10px;
width: 25%;
vertical-align: middle;
padding: 0;
margin: 0;
}
input{
padding: 10px;
font-size: 11px;
width: 60%;
float: right;
}
}
.openassessment_criterion_option_explanation_wrapper{
padding: 15px 5px 0px 5px;
padding: 10px 5px 0px 20px;
width: 100%;
display: inline-block;
margin: 0;
label{
width: 30%;
width: 25%;
text-align: left;
padding-left:15px;
}
textarea{
padding: 10px;
@extend .openassessment_large_text_input;
width: 70%;
float: right;
......@@ -577,23 +588,42 @@
margin: 0 5px;
float: right;
}
#openassessment_rubric_feedback_wrapper{
.openassessment_rubric_feedback_wrapper{
padding: 0 10px;
#openassessment_rubric_feedback_header{
margin-top: 10px;
border-bottom: 1px solid $edx-gray-d3;
font-size: 125%;
padding: 10px;
padding-right: 20px;
.openassessment_rubric_feedback_header{
margin-top: 10px;
border-bottom: 1px solid $edx-gray-d3;
font-size: 125%;
padding: 10px;
padding-right: 20px;
}
.openassessment_feedback_radio_toggle{
input[type="radio"]{
display: none;
}
input[type="radio"] + label:before{
font-family: "FontAwesome";
display: inline-block;
margin-right: ($baseline-h/4);
width: auto;
height: auto;
content: "\f10c";
}
input[type="radio"]:checked + label:before{
content: "\f05d";
}
}
}
#openassessment_rubric_add_criterion{
font-size: 125%;
h2:before{
h6:before{
font-family: "FontAwesome";
display: inline-block;
margin-left: 5px;
......@@ -620,7 +650,7 @@
}
#openassessment_make_invisible{
display: none;
}
.modal-content {
......
......@@ -69,7 +69,7 @@ class StudioMixin(object):
return {'success': False, 'msg': _('Error updating XBlock configuration')}
try:
rubric = xml.parse_rubric_xml_str(data["rubric"])
rubric = verify_rubric_format(data['rubric'])
submission_due = xml.parse_date(data["submission_due"], name="submission due date")
submission_start = xml.parse_date(data["submission_start"], name="submission start date")
assessments = parse_assessment_dictionaries(data["assessments"])
......@@ -82,8 +82,8 @@ class StudioMixin(object):
return {'success': False, 'msg': _('Validation error: {error}').format(error=msg)}
self.update(
rubric['criteria'],
rubric['feedbackprompt'],
rubric,
rubric.get('feedbackprompt', None),
assessments,
submission_due,
submission_start,
......@@ -112,12 +112,9 @@ class StudioMixin(object):
"""
try:
rubric = xml.serialize_rubric_to_xml_str(self)
# Copies the rubric assessments so that we can change student training examples from dict -> str without
# negatively modifying the openassessmentblock definition.
assessment_list = copy.deepcopy(self.rubric_assessments)
# Finds the student training dictionary, if it exists, and replaces the examples with their XML definition
student_training_dictionary = [d for d in assessment_list if d["name"] == "student-training"]
if student_training_dictionary:
......@@ -140,10 +137,14 @@ class StudioMixin(object):
submission_start = self.submission_start if self.submission_start else ''
rubric_dict = { 'criteria' : self.rubric_criteria }
rubric_dict['feedbackprompt'] = unicode(self.rubric_feedback_prompt)
return {
'success': True,
'msg': '',
'rubric': rubric,
'rubric': rubric_dict,
'prompt': self.prompt,
'submission_due': submission_due,
'submission_start': submission_start,
......@@ -240,4 +241,128 @@ def parse_assessment_dictionaries(input_assessments):
# Update the list of assessments
assessments_list.append(assessment_dict)
return assessments_list
\ No newline at end of file
return assessments_list
def verify_rubric_format(rubric):
"""
Verifies that the rubric that was passed in follows the conventions that we expect, including
types and structure. The code documents itself well here.
Args:
rubric (dict): Unsanitized version of our rubric. Usually taken from the GUI.
Returns:
rubric (dict): Sanitized version of the same form.
Raises:
UpdateFromXMLError
"""
# import pudb, sys as __sys;__sys.stdout=__sys.__stdout__;pudb.set_trace() # -={XX}=-={XX}=-={XX}=
if not isinstance(rubric, dict):
# import pudb,sys as __sys;__sys.stdout=__sys.__stdout__;pudb.set_trace() # -={XX}=-={XX}=-={XX}=
raise UpdateFromXmlError(_("The given rubric was not a dictionary of the form {criteria: [criteria1, criteria2...]}"))
if "criteria" not in rubric.keys():
raise UpdateFromXmlError(_("The given rubric did not contain a key for a list of criteria, and is invalid"))
if len((set(rubric.keys()) - {'prompt', 'criteria'})) > 0:
unexpected_keys = list(set(rubric.keys()) - {"prompt", "criteria"})
raise UpdateFromXmlError(_("The following keys were included in the rubric when they were not allowed to be: {}".format(unexpected_keys)))
if rubric.get('prompt', False):
if not isinstance(rubric['prompt'], basestring):
# import pudb,sys as __sys;__sys.stdout=__sys.__stdout__;pudb.set_trace() # -={XX}=-={XX}=-={XX}=
raise UpdateFromXmlError(_("The given rubric's feedback prompt was invalid, it must be a string."))
criteria = rubric["criteria"]
if not isinstance(criteria, list):
raise UpdateFromXmlError(_("The criteria term in the rubric dictionary corresponds to a non-list object."))
sanitized_criteria = []
for criterion in criteria:
if not isinstance(criterion, dict):
raise UpdateFromXmlError(_("A criterion given was not a dictionary."))
criterion = dict(criterion)
expected_keys = {'order_num', 'name', 'prompt', 'options', 'feedback'}
unexpected_keys = list(set(criterion.keys()) - expected_keys)
missing_keys = list(expected_keys - set(criterion.keys()))
if missing_keys:
raise UpdateFromXmlError(_("The following keys were missing from the Definition of one or more criteria: {}".format(missing_keys)))
if unexpected_keys:
raise UpdateFromXmlError(_("The following extraneous keys were found in the definition for one or more criteria: {}".format(unexpected_keys)))
name = str(criterion['name'])
prompt = str(criterion['prompt'])
feedback = str(criterion['feedback'])
try:
order_num = int(criterion['order_num'])
except (TypeError, ValueError):
raise UpdateFromXmlError(_("The order_num value must be an integer."))
if not isinstance(criterion['options'], list):
raise UpdateFromXmlError(_("The dictionary entry for 'options' in a criteria's dictionary definition must be a list."))
options = criterion['options']
sanitized_options = []
for option in options:
if not isinstance(option, dict):
raise UpdateFromXmlError(_("An option given was not a dictionary."))
expected_keys = {'order_num','name', 'points', 'explanation'}
unexpected_keys = list(set(option.keys()) - expected_keys)
missing_keys = list(expected_keys - set(option.keys()))
if missing_keys:
raise UpdateFromXmlError(_("The following keys were missing from the Definition of one or more options: {}".format(missing_keys)))
if unexpected_keys:
raise UpdateFromXmlError(_("The following extraneous keys were found in the definition for one or more options: {}".format(unexpected_keys)))
option_name = str(option['name'])
option_explanation = str(option['explanation'])
try:
option_points = int(option['points'])
except (TypeError, ValueError):
raise UpdateFromXmlError(_("All option point values must be integers."))
option_dict = {
"order_num": option['order_num'],
"name": option_name,
"explanation": option_explanation,
"points": option_points
}
sanitized_options.append(option_dict)
criterion_dict = {
"order_num": order_num,
"name": name,
"prompt": prompt,
"options": sanitized_options,
"feedback": feedback
}
sanitized_criteria.append(criterion_dict)
sanitized_rubric = {
'criteria': sanitized_criteria
}
if rubric.get('prompt'):
sanitized_rubric['prompt'] = str(rubric.get('prompt'))
return sanitized_rubric
\ No newline at end of file
{
"simple": {
"rubric": [
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<!-- no options -->",
"</criterion>",
"</rubric>"
],
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
}
]
}
]
},
"prompt": "My new prompt.",
"submission_due": "4014-02-27T09:46:28",
"submission_start": "4014-02-10T09:46:28",
......
......@@ -21,17 +21,31 @@
"expected_error": "error"
},
"no_prompt": {
"rubric": [
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>"
],
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"title": "My new title.",
"assessments": [
{
......@@ -52,17 +66,31 @@
"expected_error": "error"
},
"no_submission_due": {
"rubric": [
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>"
],
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
......@@ -83,17 +111,31 @@
"expected_error": "error"
},
"invalid_dates_one": {
"rubric": [
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>"
],
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
......@@ -111,23 +153,37 @@
"due": ""
}
],
"submission_due": "2012-02-27T09:46:28",
"submission_start": "2015-02-10T09:46:28",
"expected_error": "cannot be later"
"submission_due": "2012-02-27T09:46:28",
"submission_start": "2015-02-10T09:46:28",
"expected_error": "cannot be later"
},
"invalid_dates_two": {
"rubric": [
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>"
],
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
......@@ -145,9 +201,486 @@
"due": "2003-01-02T00:00:00"
}
],
"submission_due": "2012-02-27T09:46:28",
"submission_start": "",
"expected_error": "cannot be later"
"submission_due": "2012-02-27T09:46:28",
"submission_start": "",
"expected_error": "cannot be later"
},
"order num is string": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": "Hello",
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "must be an integer"
},
"rubric not a dictionary": {
"rubric": [],
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "dictionary of the form"
},
"feedback missing": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
]
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "feedback"
},
"extra rubric keys": {
"rubric": {
"prompt": "Test Prompt",
"hellooooo": "waddup",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "following keys"
},
"expected rubric keys missing": {
"rubric": {
"prompt": "Test Prompt"
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "did not contain a key"
},
"prompt not a string": {
"rubric": {
"prompt": 5,
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "string"
},
"criterion not a dict": {
"rubric": {
"prompt": "prompty",
"criteria": [
[]
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "not a dictionary"
},
"criteria missing keys": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "keys were missing"
},
"criteria too many keys": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional",
"magic": "indeed"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "extraneous keys"
},
"options not a list": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": "not a list",
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "must be a list"
},
"option not a dictionary": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
[]
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "not a dictionary"
},
"option missing keys": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "keys were missing"
},
"option extraneous keys": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"kittens": "ADORABLE",
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "extraneous keys"
},
"option points must be int": {
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": "a million stanley nickels",
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": "One Shrutebuck",
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "optional"
}
]
},
"prompt": "My new prompt.",
"title": "My new title.",
"assessments": [
{
"name": "self-assessment",
"start": "",
"due": ""
}
],
"submission_due": "",
"submission_start": "",
"expected_error": "must be integers"
}
}
\ No newline at end of file
{
"simple": {
"rubric": [
"<rubric>",
"<prompt>Test prompt</prompt>",
"<criterion>",
"<name>Test criterion</name>",
"<prompt>Test criterion prompt</prompt>",
"<option points=\"0\"><name>No</name><explanation>No explanation</explanation></option>",
"<option points=\"2\"><name>Yes</name><explanation>Yes explanation</explanation></option>",
"</criterion>",
"</rubric>"
],
"rubric": {
"prompt": "Test Prompt",
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
],
"feedback": "required"
}
]
},
"prompt": "My new prompt.",
"submission_due": "4014-02-27T09:46:28",
"submission_start": "4014-02-10T09:46:28",
......
......@@ -29,9 +29,9 @@ class StudioViewTest(XBlockHandlerTestCase):
self.assertTrue(resp['success'])
self.assertEqual(resp['msg'], u'')
# Verify that the Rubric XML is parse-able and the root is <rubric>
rubric = etree.fromstring(resp['rubric'])
self.assertEqual(rubric.tag, 'rubric')
# Verify that the Rubric has criteria, and that they are a list of dictionaries
self.assertTrue(isinstance(resp['rubric']['criteria'], list))
self.assertTrue(isinstance(resp['rubric']['criteria'][0], dict))
# Verify that every assessment in the list of assessments has a name.
for assessment_dict in resp['assessments']:
......@@ -40,7 +40,8 @@ class StudioViewTest(XBlockHandlerTestCase):
examples = etree.fromstring(assessment_dict['examples'])
self.assertEqual(examples.tag, 'examples')
@mock.patch('openassessment.xblock.xml.serialize_rubric_to_xml_str')
# WEDALY!!! I cannot figure out how to mock out this call correctly, so it is consequently failing.
@mock.patch('studio_mixin.verify_rubric_format')
@scenario('data/basic_scenario.xml')
def test_get_editor_context_error(self, xblock, mock_rubric_serializer):
# Simulate an unexpected error while serializing the XBlock
......@@ -51,28 +52,30 @@ class StudioViewTest(XBlockHandlerTestCase):
self.assertFalse(resp['success'])
self.assertIn(u'unexpected error', resp['msg'].lower())
@file_data('data/update_xblock.json')
@scenario('data/basic_scenario.xml')
def test_update_xblock(self, xblock, data):
# First, parse XML data into a single string.
data['rubric'] = "".join(data['rubric'])
xblock.published_date = None
# Test that we can update the xblock with the expected configuration.
request = json.dumps(data)
# Verify the response is successfully
resp = self.request(xblock, 'update_editor_context', request, response_format='json')
print "ERROR IS {}".format(resp['msg'])
self.assertTrue(resp['success'])
self.assertIn('success', resp['msg'].lower())
# Check that the XBlock fields were updated
# We don't need to be exhaustive here, because we have other unit tests
# that verify this extensively.
self.assertEqual(xblock.title, data['title'])
self.assertEqual(xblock.prompt, data['prompt'])
self.assertEqual(xblock.rubric_assessments[0]['name'], data['expected-assessment'])
self.assertEqual(xblock.rubric_criteria[0]['prompt'], data['expected-criterion-prompt'])
# WEDALY!!! I don't know if this test is relevant any more (using update editor context with
# XML is so OVER am-i-right? Rather, we now test teh same behavior a million times with the
# Dictionary/List structures.
# Thoughts?
# @file_data('data/update_xblock.json')
# @scenario('data/basic_scenario.xml')
# def test_update_xblock(self, xblock, data):
# xblock.published_date = None
# # Test that we can update the xblock with the expected configuration.
# request = json.dumps(data)
#
# # Verify the response is successfully
# resp = self.request(xblock, 'update_editor_context', request, response_format='json')
# print "ERROR IS {}".format(resp['msg'])
# self.assertTrue(resp['success'])
# self.assertIn('success', resp['msg'].lower())
#
# # Check that the XBlock fields were updated
# # We don't need to be exhaustive here, because we have other unit tests
# # that verify this extensively.
# self.assertEqual(xblock.title, data['title'])
# self.assertEqual(xblock.prompt, data['prompt'])
# self.assertEqual(xblock.rubric_assessments[0]['name'], data['expected-assessment'])
# self.assertEqual(xblock.rubric_criteria[0]['prompt'], data['expected-criterion-prompt'])
@file_data('data/update_xblock.json')
@scenario('data/basic_scenario.xml')
......@@ -91,9 +94,6 @@ class StudioViewTest(XBlockHandlerTestCase):
@file_data('data/invalid_update_xblock.json')
@scenario('data/basic_scenario.xml')
def test_update_context_invalid_request_data(self, xblock, data):
# First, parse XML data into a single string.
if 'rubric' in data:
data['rubric'] = "".join(data['rubric'])
xblock.published_date = None
......@@ -104,8 +104,6 @@ class StudioViewTest(XBlockHandlerTestCase):
@file_data('data/invalid_rubric.json')
@scenario('data/basic_scenario.xml')
def test_update_rubric_invalid(self, xblock, data):
# First, parse XML data into a single string.
data['rubric'] = "".join(data['rubric'])
request = json.dumps(data)
......@@ -118,7 +116,7 @@ class StudioViewTest(XBlockHandlerTestCase):
# Verify the response fails
resp = self.request(xblock, 'update_editor_context', request, response_format='json')
self.assertFalse(resp['success'])
self.assertIn("not valid", resp['msg'].lower())
self.assertIn("the following keys were missing", resp['msg'].lower())
# Check that the XBlock fields were NOT updated
# We don't need to be exhaustive here, because we have other unit tests
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment