Commit 496843db by Usman Khalid

Merge branch 'usman/tnl708-multiple-prompts-training' into usman/tnl708-multiple-prompts-integration

Conflicts:
	openassessment/xblock/data_conversion.py
	openassessment/xblock/static/css/openassessment-ltr.css
	openassessment/xblock/static/css/openassessment-rtl.css
	openassessment/xblock/static/js/openassessment-lms.min.js
parents 33445577 14145825
@@ -25,9 +25,7 @@
 </a>
 </div>
-<div id="oa_prompt_editor_wrapper" class="oa_editor_content_wrapper">
-<textarea id="openassessment_prompt_editor" maxlength="10000">{{ prompt }}</textarea>
-</div>
+{% include "openassessmentblock/edit/oa_edit_prompts.html" %}
 {% include "openassessmentblock/edit/oa_edit_rubric.html" %}
...
 {% load i18n %}
 {% spaceless %}
 <li class="openassessment_criterion is-collapsible" data-criterion="{{ criterion_name }}">
-<div class="openassessment_criterion_header view-outline">
+<div class="openassessment_container_header openassessment_criterion_header view-outline">
 <a class="action expand-collapse collapse"><i class="icon-caret-down ui-toggle-expansion"></i></a>
-<div class="openassessment_criterion_header_title_box">
-<h6 class="openassessment_criterion_header_title">{% trans "Criterion" %}</h6>
-<p class="openassessment_criterion_guide">{% trans "You cannot delete a criterion after the assignment has been released." %}</p>
+<div class="openassessment_container_header_title_box openassessment_criterion_header_title_box">
+<h6 class="openassessment_container_header_title openassessment_criterion_header_title">{% trans "Criterion" %}</h6>
+<p class="openassessment_container_guide openassessment_criterion_guide">{% trans "You cannot delete a criterion after the assignment has been released." %}</p>
 </div>
-<div class="openassessment_criterion_remove_button"><h2>{% trans "Remove" %}</h2></div>
+<div class="openassessment_container_remove_button openassessment_criterion_remove_button"><h2>{% trans "Remove" %}</h2></div>
 </div>
 <div class="openassessment_criterion_body wrapper-comp-settings">
 <input type="hidden" class="openassessment_criterion_name" value="{{ criterion_name }}" />
...
{% load i18n %}
{% spaceless %}
<li class="openassessment_prompt is-collapsible" data-prompt="{{ prompt_uuid }}">
<div class="openassessment_container_header openassessment_prompt_header view-outline">
<a class="action expand-collapse collapse"><i class="icon-caret-down ui-toggle-expansion"></i></a>
<div class="openassessment_container_header_title_box openassessment_prompt_header_title_box">
<h6 class="openassessment_container_header_title openassessment_prompt_header_title">{% trans "Prompt" %}</h6>
<p class="openassessment_container_guide openassessment_prompt_guide">{% trans "You cannot delete a prompt after the assignment has been released." %}</p>
</div>
<div class="openassessment_container_remove_button openassessment_prompt_remove_button"><h2>{% trans "Remove" %}</h2></div>
</div>
<div class="openassessment_prompt_body wrapper-comp-settings">
<input type="hidden" class="openassessment_prompt_uuid" value="{{ prompt_uuid }}" />
<ul class="list-input settings-list">
<li class="field comp-setting-entry openassessment_prompt_description_wrapper">
<div class="wrapper-comp-settings">
<textarea class="openassessment_prompt_description setting-input" maxlength="10000">{{ prompt_description }}</textarea>
</div>
</li>
</ul>
</div>
</li>
{% endspaceless %}
{% load i18n %}
{% spaceless %}
<div id="oa_prompts_editor_wrapper" class="oa_editor_content_wrapper">
<div id="openassessment_prompt_template" class="is--hidden">
{% include "openassessmentblock/edit/oa_edit_prompt.html" with prompt_uuid="" prompt_description="" %}
</div>
<div id="openassessment_prompts_instructions" class="openassessment_tab_instructions">
<p class="openassessment_description">
{% trans "Prompts. Replace the sample text with your own text. For more information, see the ORA documentation." %}
</p>
</div>
<ul id="openassessment_prompts_list">
{% for prompt in prompts %}
{% include "openassessmentblock/edit/oa_edit_prompt.html" with prompt_uuid=prompt.uuid prompt_description=prompt.description %}
{% endfor %}
</ul>
<div id="openassessment_prompts_add_prompt" class="openassessment_container_add_button">
<h6>
{% trans "Add Prompt" %}
</h6>
</div>
</div>
{% endspaceless %}
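For orientation, a rough sketch of the context these two prompt templates are rendered with: a list of prompt dicts, each carrying a `uuid` (used for the `data-prompt` attribute) and a `description`. Only the field names come from the include arguments above; the values here are illustrative.

```python
# Illustrative context for oa_edit_prompts.html / oa_edit_prompt.html.
# Field names match the template includes; uuids and text are made up.
context = {
    'prompts': [
        {'uuid': 'cfvgbh657', 'description': 'Write a nice long essay about anything.'},
        {'uuid': 'd9e8f7a6', 'description': 'Now summarize your essay in one paragraph.'},
    ],
}

for prompt in context['prompts']:
    print(prompt['uuid'], prompt['description'])
```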
@@ -9,7 +9,7 @@
 {% include "openassessmentblock/edit/oa_edit_option.html" with option_name="" option_label="" option_points=1 option_explanation="" %}
 </div>
-<div id="openassessment_rubric_instructions">
+<div id="openassessment_rubric_instructions" class="openassessment_tab_instructions">
 <p class="openassessment_description">
 {% trans "Rubrics are made up of criteria, which usually contain one or more options. Each option has a point value. This template contains two sample criteria and their options. Replace the sample text with your own text. For more information, see the ORA documentation." %}
 </p>
@@ -21,7 +21,7 @@
 {% endfor %}
 </ul>
-<div id="openassessment_rubric_add_criterion">
+<div id="openassessment_rubric_add_criterion" class="openassessment_container_add_button">
 <h6>
 {% trans "Add Criterion" %}
 </h6>
...
@@ -33,6 +33,11 @@
 <ol id="openassessment_training_example_criterion_template" class="is--hidden">
 {% include "openassessmentblock/edit/oa_training_example_criterion.html" %}
 </ol>
+<ol id="openassessment_training_example_part_template" class="is--hidden">
+<li class="openassessment_training_example_essay_part">
+<textarea maxlength="100000"></textarea>
+</li>
+</ol>
 </div>
 </li>
...
@@ -28,7 +28,13 @@
 <div class="openassessment_training_example_essay_wrapper">
 <h2>{% trans "Response" %}</h2>
-<textarea class="openassessment_training_example_essay" maxlength="100000">{{ example.answer }}</textarea>
+<ol class="openassessment_training_example_essay">
+{% for part in example.answer.parts %}
+<li class="openassessment_training_example_essay_part">
+<textarea maxlength="100000">{{ part.text }}</textarea>
+</li>
+{% endfor %}
+</ol>
 </div>
 </div>
 </li>
...
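The training-example template now loops over `example.answer.parts` rather than rendering a single answer string. A minimal sketch of the data shape it expects (illustrative values only):

```python
# Illustrative multi-part training-example answer: one part per prompt.
example = {
    'answer': {
        'parts': [
            {'text': 'Response to the first prompt.'},
            {'text': 'Response to the second prompt.'},
        ]
    }
}

# Each part becomes its own <textarea> in the template above.
for part in example['answer']['parts']:
    print(part['text'])
```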
@@ -85,7 +85,7 @@ def update_assessments_format(assessments):
     for assessment in assessments:
         if 'examples' in assessment:
             for example in assessment['examples']:
-                if isinstance(example['answer'], unicode):
+                if isinstance(example['answer'], unicode) or isinstance(example['answer'], str):
                     example['answer'] = {
                         'parts': [
                             {'text': example['answer']}
...
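In effect, `update_assessments_format` upgrades legacy single-string example answers to the new multi-part format. A self-contained sketch of that conversion (simplified; the real helper walks whole assessment dicts and also accepts `unicode` on Python 2):

```python
# Simplified version of the conversion shown above: a plain-string example
# answer is wrapped into the newer {'parts': [{'text': ...}]} format.
def to_parts_format(example):
    if isinstance(example['answer'], str):
        example['answer'] = {'parts': [{'text': example['answer']}]}
    return example

assert to_parts_format({'answer': 'I love pokemon'}) == {
    'answer': {'parts': [{'text': 'I love pokemon'}]}
}
```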
@@ -66,7 +66,11 @@ VALID_ASSESSMENT_TYPES = [
 # Schema definition for an update from the Studio JavaScript editor.
 EDITOR_UPDATE_SCHEMA = Schema({
-    Required('prompt'): utf8_validator,
+    Required('prompts'): [
+        Schema({
+            Required('description'): utf8_validator,
+        })
+    ],
     Required('title'): utf8_validator,
     Required('feedback_prompt'): utf8_validator,
     Required('feedback_default_text'): utf8_validator,
@@ -84,7 +88,7 @@ EDITOR_UPDATE_SCHEMA = Schema({
     'must_be_graded_by': All(int, Range(min=0)),
     'examples': [
         Schema({
-            Required('answer'): utf8_validator,
+            Required('answer'): [utf8_validator],
             Required('options_selected'): [
                 Schema({
                     Required('criterion'): utf8_validator,
...
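A standalone sketch of how the reshaped `prompts` and `answer` entries validate with voluptuous. `str` stands in for the project's `utf8_validator`, and only the keys visible in the hunk above are included; everything else here is illustrative.

```python
from voluptuous import MultipleInvalid, Required, Schema

# Reduced stand-in for EDITOR_UPDATE_SCHEMA: prompts must now be a list of
# {'description': ...} dicts, and example answers a list of strings.
REDUCED_SCHEMA = Schema({
    Required('prompts'): [Schema({Required('description'): str})],
    Required('examples'): [Schema({Required('answer'): [str]})],
})

REDUCED_SCHEMA({
    'prompts': [{'description': 'How much do you like waffles?'}],
    'examples': [{'answer': ['I love pokemon 1', 'I love pokemon 2']}],
})  # passes

try:
    REDUCED_SCHEMA({'prompts': 'a bare string', 'examples': []})  # old flat format
except MultipleInvalid as exc:
    print('rejected:', exc)
```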
This source diff could not be displayed because it is too large. You can view the blob instead.
@@ -403,7 +403,7 @@
 {
 "template": "openassessmentblock/edit/oa_edit.html",
 "context": {
-"prompt": "How much do you like waffles?",
+"prompts": [{ "description": "How much do you like waffles?" }, { "description": "How much do you like waffles 2?" }],
 "title": "The most important of all questions.",
 "submission_start": "2014-01-02T12:15",
 "submission_due": "2014-10-01T04:53",
@@ -484,7 +484,7 @@
 {
 "template": "openassessmentblock/edit/oa_edit.html",
 "context": {
-"prompt": "Test prompt",
+"prompts": [{ "description": "How much do you like waffles?" }, { "description": "How much do you like waffles 2?" }],
 "title": "Test title",
 "submission_start": "2014-01-1T10:00:00",
 "submission_due": "2014-10-1T10:00:00",
...
@@ -30,7 +30,7 @@ describe("OpenAssessment.Server", function() {
 );
 };
-var PROMPT = "Hello this is the prompt yes.";
+var PROMPTS = [{"description": "Hello this is the prompt yes."}];
 var FEEDBACK_PROMPT = "Prompt for feedback";
 var FEEDBACK_DEFAULT_TEXT = "Default feedback response text";
@@ -253,7 +253,7 @@ describe("OpenAssessment.Server", function() {
 it("updates the XBlock's editor context definition", function() {
 stubAjax(true, { success: true });
 server.updateEditorContext({
-prompt: PROMPT,
+prompts: PROMPTS,
 feedbackPrompt: FEEDBACK_PROMPT,
 feedback_default_text: FEEDBACK_DEFAULT_TEXT,
 title: TITLE,
@@ -268,7 +268,7 @@ describe("OpenAssessment.Server", function() {
 expect($.ajax).toHaveBeenCalledWith({
 type: "POST", url: '/update_editor_context',
 data: JSON.stringify({
-prompt: PROMPT,
+prompts: PROMPTS,
 feedback_prompt: FEEDBACK_PROMPT,
 feedback_default_text: FEEDBACK_DEFAULT_TEXT,
 title: TITLE,
...
@@ -43,7 +43,7 @@ describe("OpenAssessment.StudioView", function() {
 var EXPECTED_SERVER_DATA = {
 title: "The most important of all questions.",
-prompt: "How much do you like waffles?",
+prompts: [{"description": "How much do you like waffles?"}, {description : 'How much do you like waffles 2?'}],
 feedbackPrompt: "",
 submissionStart: "2014-01-02T12:15",
 submissionDue: "2014-10-01T04:53",
@@ -145,7 +145,7 @@ describe("OpenAssessment.StudioView", function() {
 // Top-level attributes
 expect(server.receivedData.title).toEqual(EXPECTED_SERVER_DATA.title);
-expect(server.receivedData.prompt).toEqual(EXPECTED_SERVER_DATA.prompt);
+expect(server.receivedData.prompts).toEqual(EXPECTED_SERVER_DATA.prompts);
 expect(server.receivedData.feedbackPrompt).toEqual(EXPECTED_SERVER_DATA.feedbackPrompt);
 expect(server.receivedData.submissionStart).toEqual(EXPECTED_SERVER_DATA.submissionStart);
 expect(server.receivedData.submissionDue).toEqual(EXPECTED_SERVER_DATA.submissionDue);
...
@@ -2,7 +2,7 @@
 Tests for OpenAssessment prompt editing view.
 **/
-describe("OpenAssessment.EditPromptView", function() {
+describe("OpenAssessment.EditPromptViews", function() {
 var view = null;
@@ -12,13 +12,38 @@ describe("OpenAssessment.EditPromptView", function() {
 // Create the view
 var element = $("#oa_prompt_editor_wrapper").get(0);
-view = new OpenAssessment.EditPromptView(element);
+view = new OpenAssessment.EditPromptsView(element);
 });
-it("sets and loads prompt text", function() {
-view.promptText("");
-expect(view.promptText()).toEqual("");
-view.promptText("This is a test prompt!");
-expect(view.promptText()).toEqual("This is a test prompt!");
+it("reads prompts from the editor", function() {
+// This assumes a particular structure of the DOM,
+// which is set by the HTML fixture.
+var prompts = view.promptsDefinition();
+expect(prompts.length).toEqual(2);
+expect(prompts[0]).toEqual({
+"description": "How much do you like waffles?"
+});
+});
+it("creates new prompts", function() {
+// Delete all existing prompts
+// Then add new prompts (created from a client-side template)
+$.each(view.getAllPrompts(), function() { view.removePrompt(this); });
+view.addPrompt();
+view.addPrompt();
+view.addPrompt();
+var prompts = view.promptsDefinition();
+expect(prompts.length).toEqual(3);
+expect(prompts[0]).toEqual({
+description: ""
+});
+expect(prompts[1]).toEqual({
+description: ""
 });
+});
 });
...
@@ -449,7 +449,7 @@ if (typeof OpenAssessment.Server == "undefined" || !OpenAssessment.Server) {
 updateEditorContext: function(kwargs) {
 var url = this.url('update_editor_context');
 var payload = JSON.stringify({
-prompt: kwargs.prompt,
+prompts: kwargs.prompts,
 feedback_prompt: kwargs.feedbackPrompt,
 feedback_default_text: kwargs.feedback_default_text,
 title: kwargs.title,
...
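For reference, a sketch of the JSON body `updateEditorContext` now posts to the `update_editor_context` handler. Only the keys visible in this hunk are shown; the values are made up.

```python
import json

# Illustrative payload: `prompts` replaces the old single `prompt` string.
payload = json.dumps({
    'prompts': [{'description': 'How much do you like waffles?'}],
    'feedback_prompt': 'Prompt for feedback',
    'feedback_default_text': 'Default feedback response text',
    'title': 'The most important of all questions.',
})
print(payload)
```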
@@ -65,6 +65,113 @@ OpenAssessment.ItemUtilities = {
 };
 /**
The Prompt Class is used to construct and maintain references to prompts from within a prompts
container object. Constructs a new Prompt element.
Args:
element (OpenAssessment.Container): The container that the prompt is a member of.
notifier (OpenAssessment.Notifier): Used to send notifications of updates to prompts.
Returns:
OpenAssessment.Prompt
**/
OpenAssessment.Prompt = function(element, notifier) {
this.element = element;
this.notifier = notifier;
};
OpenAssessment.Prompt.prototype = {
/**
Finds the values currently entered in the Prompts's fields, and returns them.
Returns:
object literal of the form:
{
'description': 'Write a nice long essay about anything.'
}
**/
getFieldValues: function () {
var fields = {
description: this.description()
};
return fields;
},
/**
Get or set the description of the prompt.
Args:
text (string, optional): If provided, set the description of the prompt.
Returns:
string
**/
description: function(text) {
var sel = $('.openassessment_prompt_description', this.element);
return OpenAssessment.Fields.stringField(sel, text);
},
addEventListeners: function() {},
/**
Hook into the event handler for addition of a prompt.
*/
addHandler: function (){
this.notifier.notificationFired(
"promptAdd",
{
"index": this.element.index()
}
);
},
/**
Hook into the event handler for removal of a prompt.
*/
removeHandler: function (){
this.notifier.notificationFired(
"promptRemove",
{
"index": this.element.index()
}
);
},
updateHandler: function() {},
/**
Mark validation errors.
Returns:
Boolean indicating whether the option is valid.
**/
validate: function() {
return true;
},
/**
Return a list of validation errors visible in the UI.
Mainly useful for testing.
Returns:
list of strings
**/
validationErrors: function() {
return [];
},
/**
Clear all validation errors from the UI.
**/
clearValidationErrors: function() {}
};
/**
 The RubricOption Class used to construct and maintain references to rubric options from within an options
 container object. Constructs a new RubricOption element.
@@ -506,7 +613,7 @@ OpenAssessment.RubricCriterion.prototype = {
 OpenAssessment.TrainingExample = function(element){
 this.element = element;
 this.criteria = $(".openassessment_training_example_criterion_option", this.element);
-this.answer = $('.openassessment_training_example_essay', this.element).first();
+this.answer = $('.openassessment_training_example_essay_part textarea', this.element)
 };
 OpenAssessment.TrainingExample.prototype = {
@@ -527,7 +634,9 @@ OpenAssessment.TrainingExample.prototype = {
 ).get();
 return {
-answer: this.answer.prop('value'),
+answer: this.answer.map(function() {
+return $(this).prop('value');
+}).get(),
 options_selected: optionsSelected
 };
 },
...
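So a training example now serializes its answer as an array of strings, one per prompt textarea, alongside the selected options. A rough sketch of the resulting structure (values and option names are illustrative):

```python
# Illustrative client-side field values for one training example after this
# change: `answer` is a list with one entry per prompt.
example_field_values = {
    'answer': ['Response to prompt 1', 'Response to prompt 2'],
    'options_selected': [
        {'criterion': 'brevity', 'option': 'Yes'},
    ],
}
assert len(example_field_values['answer']) == 2  # one entry per prompt
```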
@@ -25,9 +25,14 @@ OpenAssessment.StudioView = function(runtime, element, server) {
 // Initialize the validation alert
 this.alert = new OpenAssessment.ValidationAlert().install();
+var studentTrainingListener = new OpenAssessment.StudentTrainingListener();
 // Initialize the prompt tab view
-this.promptView = new OpenAssessment.EditPromptView(
-$("#oa_prompt_editor_wrapper", this.element).get(0)
+this.promptsView = new OpenAssessment.EditPromptsView(
+$("#oa_prompts_editor_wrapper", this.element).get(0),
+new OpenAssessment.Notifier([
+studentTrainingListener
+])
 );
 // Initialize the settings tab view
@@ -57,7 +62,7 @@ OpenAssessment.StudioView = function(runtime, element, server) {
 this.rubricView = new OpenAssessment.EditRubricView(
 $("#oa_rubric_editor_wrapper", this.element).get(0),
 new OpenAssessment.Notifier([
-new OpenAssessment.StudentTrainingListener()
+studentTrainingListener
 ])
 );
@@ -185,7 +190,7 @@ OpenAssessment.StudioView.prototype = {
 var view = this;
 this.server.updateEditorContext({
-prompt: view.promptView.promptText(),
+prompts: view.promptsView.promptsDefinition(),
 feedbackPrompt: view.rubricView.feedbackPrompt(),
 feedback_default_text: view.rubricView.feedback_default_text(),
 criteria: view.rubricView.criteriaDefinition(),
@@ -236,7 +241,8 @@ OpenAssessment.StudioView.prototype = {
 validate: function() {
 var settingsValid = this.settingsView.validate();
 var rubricValid = this.rubricView.validate();
-return settingsValid && rubricValid;
+var promptsValid = this.promptsView.validate();
+return settingsValid && rubricValid && promptsValid;
 },
 /**
@@ -249,7 +255,9 @@ OpenAssessment.StudioView.prototype = {
 **/
 validationErrors: function() {
 return this.settingsView.validationErrors().concat(
-this.rubricView.validationErrors()
+this.rubricView.validationErrors().concat(
+this.promptsView.validationErrors()
+)
 );
 },
@@ -259,6 +267,7 @@ OpenAssessment.StudioView.prototype = {
 clearValidationErrors: function() {
 this.settingsView.clearValidationErrors();
 this.rubricView.clearValidationErrors();
+this.promptsView.clearValidationErrors();
 },
 };
...
@@ -421,7 +421,7 @@ OpenAssessment.EditStudentTrainingView.prototype = {
 {
 examples: [
 {
-answer: "I love pokemon",
+answer: ("I love pokemon 1", "I love pokemon 2"),
 options_selected: [
 {
 criterion: "brevity",
...
 /**
 Dynamically update student training examples based on
-changes to the rubric.
+changes to the prompts or the rubric.
 **/
 OpenAssessment.StudentTrainingListener = function() {
 this.element = $('#oa_student_training_editor');
@@ -8,6 +8,28 @@ OpenAssessment.StudentTrainingListener = function() {
 };
 OpenAssessment.StudentTrainingListener.prototype = {
/**
Add a answer part in the training examples when a prompt is added.
*/
promptAdd: function(data) {
var view = this.element;
var essay_part = $("#openassessment_training_example_part_template")
.children().first()
.clone()
.removeAttr('id')
.toggleClass('is--hidden', false)
.appendTo(".openassessment_training_example_essay", view);
},
/**
Remove the answer part in the training examples when a prompt is removed.
*/
promptRemove: function(data) {
var view = this.element;
$(".openassessment_training_example_essay li:nth-child(" + (data.index + 1) + ")", view).remove();
},
 /**
 Event handler for updating training examples when a criterion option has
 been updated.
...
/**
Editing interface for the rubric prompt.
Args:
element (DOM element): The DOM element representing this view.
Returns:
OpenAssessment.EditPromptView
**/
OpenAssessment.EditPromptView = function(element) {
this.element = element;
};
OpenAssessment.EditPromptView.prototype = {
/**
Get or set the text of the prompt.
Args:
text (string, optional): If provided, set the text of the prompt.
Returns:
string
**/
promptText: function(text) {
var sel = $('#openassessment_prompt_editor', this.element);
return OpenAssessment.Fields.stringField(sel, text);
},
};
\ No newline at end of file
/**
Editing interface for the prompts.
Args:
element (DOM element): The DOM element representing this view.
Returns:
OpenAssessment.EditPromptsView
**/
OpenAssessment.EditPromptsView = function(element, notifier) {
this.element = element;
this.promptsContainer = new OpenAssessment.Container(
OpenAssessment.Prompt, {
containerElement: $("#openassessment_prompts_list", this.element).get(0),
templateElement: $("#openassessment_prompt_template", this.element).get(0),
addButtonElement: $("#openassessment_prompts_add_prompt", this.element).get(0),
removeButtonClass: "openassessment_prompt_remove_button",
containerItemClass: "openassessment_prompt",
notifier: notifier
}
);
this.promptsContainer.addEventListeners();
};
OpenAssessment.EditPromptsView.prototype = {
/**
Construct a list of prompts definitions from the editor UI.
Returns:
list of prompt objects
Example usage:
>>> editPromptsView.promptsDefinition();
[
{
uuid: "cfvgbh657",
description: "Description",
order_num: 0,
},
...
]
**/
promptsDefinition: function() {
var prompts = this.promptsContainer.getItemValues();
return prompts;
},
/**
Add a new prompt.
Uses a client-side template to create the new prompt.
**/
addPrompt: function() {
this.promptsContainer.add();
},
/**
Remove a prompt.
Args:
item (OpenAssessment.RubricCriterion): The criterion item to remove.
**/
removePrompt: function(item) {
this.promptsContainer.remove(item);
},
/**
Retrieve all prompts.
Returns:
Array of OpenAssessment.Prompt objects.
**/
getAllPrompts: function() {
return this.promptsContainer.getAllItems();
},
/**
Retrieve a prompt item from the prompts.
Args:
index (int): The index of the prompt, starting from 0.
Returns:
OpenAssessment.Prompt or null
**/
getPromptItem: function(index) {
return this.promptsContainer.getItem(index);
},
/**
Mark validation errors.
Returns:
Boolean indicating whether the view is valid.
**/
validate: function() {
return true;
},
/**
Return a list of validation errors visible in the UI.
Mainly useful for testing.
Returns:
list of string
**/
validationErrors: function() {
var errors = [];
return errors;
},
/**
Clear all validation errors from the UI.
**/
clearValidationErrors: function() {}
};
\ No newline at end of file
@@ -331,26 +331,6 @@
 -moz-transition: height 1s ease-in-out 0;
 }
#openassessment_prompt_editor {
width: 100%;
height: 100%;
resize: none;
border: none;
@include border-radius(4px);
padding: 10px;
font-family: $f-sans-serif;
textarea{
font-size: 14px;
border: none;
overflow: auto;
outline: none;
-webkit-box-shadow: none;
-moz-box-shadow: none;
box-shadow: none;
}
}
 #openassessment_rubric_editor {
 width: 100%;
 height: 100%;
@@ -493,33 +473,22 @@
 bottom: 0;
 }
-#oa_rubric_editor_wrapper{
-.wrapper-comp-settings{
-display: block;
-}
-#openassessment_rubric_instructions{
+.openassessment_tab_instructions {
 background-color: $edx-gray-t1;
 border-bottom: 1px solid $edx-gray-d1;
 padding: 10px;
 }
-.openassessment_criterion {
-.openassessment_criterion_remove_button{
-@extend .openassessment_rubric_remove_button;
-}
-.openassessment_criterion_header {
+.openassessment_container_header {
 margin: 10px;
 padding-bottom: 5px;
 border-bottom: 1px solid;
 overflow: auto;
 color: $edx-gray-d2;
-}
 .action.expand-collapse {
-@include float(left);
+float: left;
 .ui-toggle-expansion {
 color: $edx-gray-d2;
 cursor: pointer;
@@ -530,30 +499,112 @@
 }
 }
-.openassessment_criterion_header_title_box {
-@include float(left);
+.openassessment_container_header_title_box {
+float: left;
 width: 80%;
 display: inline-block;
 }
-.openassessment_criterion_header_title {
-@include float(left);
+.openassessment_container_header_title {
 text-transform: uppercase;
-width: 50%;
-display: inline-block;
 cursor: default;
 padding-top: 2px;
 }
-.openassessment_criterion_guide {
+.openassessment_container_guide {
 @extend %t-small;
 }
-.openassessment_criterion_header_remove {
-@extend .openassessment_rubric_remove_button;
+.openassessment_container_remove_button{
+cursor: pointer;
h2:after{
font-family: FontAwesome;
content: "\f00d";
display: inline-block;
color: inherit;
margin: 0 5px;
} }
h2{
text-transform: uppercase;
font-size: 80%;
float: right;
display: inline-block;
padding: 3px 8px 3px 13px;
}
float: right;
}
.openassessment_container_remove_button:hover{
background-color: $edx-gray-d2;
border-radius: 4px;
color: white;
}
.openassessment_container_add_button {
h6:before{
font-family: "FontAwesome";
display: inline-block;
margin-left: 5px;
margin-right: 10px;
width: auto;
height: auto;
content: "\f067";
}
background-color: white;
border: 1px solid;
border-radius: 4px;
text-align: center;
color: #009fe6;
padding: 10px;
margin: 15px 10px;
}
.openassessment_container_add_button.openassessment_highlighted_field {
color: red;
border-width: 2px;
}
.openassessment_container_add_button:hover {
color: white;
background-color: #009fe6;
cursor: pointer;
}
#oa_prompts_editor_wrapper {
.wrapper-comp-settings {
display: block;
} }
.openassessment_prompt_description {
width: 100%;
min-height: 100px;
resize: none;
border: 1px solid #b2b2b2;
border-radius: 4px;
padding: 10px;
font-family: $f-sans-serif;
font-size: 14px;
overflow: auto;
outline: none;
-webkit-box-shadow: none;
-moz-box-shadow: none;
box-shadow: none;
}
.openassessment_prompt_description.openassessment_highlighted_field {
border: 2px solid red;
}
}
#oa_rubric_editor_wrapper{
.wrapper-comp-settings{
display: block;
}
.openassessment_criterion {
 .openassessment_criterion_add_option {
 h2:before {
 font-family: FontAwesome;
@@ -653,7 +704,7 @@
 @include padding(5px, 5px, 5px, 15px);
 .openassessment_criterion_option_remove_button{
-@extend .openassessment_rubric_remove_button;
+@extend .openassessment_container_remove_button;
 }
 .openassessment_option_header{
@@ -670,7 +721,7 @@
 }
 .openassessment_option_header_remove{
-@extend .openassessment_rubric_remove_button;
+@extend .openassessment_container_remove_button;
 }
 }
@@ -767,30 +818,6 @@
 outline: 0;
 }
.openassessment_rubric_remove_button{
cursor: pointer;
h2:after{
font-family: FontAwesome;
content: "\f00d";
display: inline-block;
color: inherit;
margin: 0 5px;
}
h2{
text-transform: uppercase;
font-size: 80%;
@include float(right);
display: inline-block;
@include padding(3px, 8px, 3px, 13px);
}
@include float(right);
}
.openassessment_rubric_remove_button:hover{
background-color: $edx-gray-d2;
border-radius: 4px;
color: white;
}
 #openassessment_rubric_feedback_wrapper{
 padding: 0;
@@ -813,39 +840,6 @@
 @include float(right);
 }
 }
#openassessment_rubric_add_criterion{
h6:before{
font-family: "FontAwesome";
display: inline-block;
@include margin-left(5px);
@include margin-right(10px);
width: auto;
height: auto;
content: "\f067";
}
background-color: white;
border: 1px solid;
border-radius: 4px;
@include text-align(center);
color: #009fe6;
padding: 10px;
margin: 15px 10px;
}
#openassessment_rubric_add_criterion.openassessment_highlighted_field{
color: red;
border-width: 2px;
}
#openassessment_rubric_add_criterion:hover{
color: white;
background-color: #009fe6;
cursor: pointer;
}
 }
@@ -940,7 +934,7 @@
 .openassessment_training_example_body {
 @include padding(0, 15px, 15px, 15px);
 position: relative;
-overflow: hidden;
+overflow: scroll;
 .openassessment_training_example_essay_wrapper {
 width: 58%;
 display: inline-block;
...
""" """
Studio editing view for OpenAssessment XBlock. Studio editing view for OpenAssessment XBlock.
""" """
import pkg_resources
import copy import copy
import logging import logging
import pkg_resources
from uuid import uuid4
from django.template import Context from django.template import Context
from django.template.loader import get_template from django.template.loader import get_template
from voluptuous import MultipleInvalid from voluptuous import MultipleInvalid
...@@ -12,7 +14,7 @@ from xblock.fields import List, Scope ...@@ -12,7 +14,7 @@ from xblock.fields import List, Scope
from xblock.fragment import Fragment from xblock.fragment import Fragment
from openassessment.xblock.defaults import DEFAULT_EDITOR_ASSESSMENTS_ORDER, DEFAULT_RUBRIC_FEEDBACK_TEXT from openassessment.xblock.defaults import DEFAULT_EDITOR_ASSESSMENTS_ORDER, DEFAULT_RUBRIC_FEEDBACK_TEXT
from openassessment.xblock.validation import validator from openassessment.xblock.validation import validator
from openassessment.xblock.data_conversion import create_rubric_dict, make_django_template_key from openassessment.xblock.data_conversion import create_rubric_dict, make_django_template_key, update_assessments_format
from openassessment.xblock.schema import EDITOR_UPDATE_SCHEMA from openassessment.xblock.schema import EDITOR_UPDATE_SCHEMA
from openassessment.xblock.resolve_dates import resolve_dates from openassessment.xblock.resolve_dates import resolve_dates
from openassessment.xblock.xml import serialize_examples_to_xml_str, parse_examples_from_xml_str from openassessment.xblock.xml import serialize_examples_to_xml_str, parse_examples_from_xml_str
...@@ -112,7 +114,7 @@ class StudioMixin(object): ...@@ -112,7 +114,7 @@ class StudioMixin(object):
feedback_default_text = DEFAULT_RUBRIC_FEEDBACK_TEXT feedback_default_text = DEFAULT_RUBRIC_FEEDBACK_TEXT
return { return {
'prompt': self.prompt, 'prompts': self.prompts,
'title': self.title, 'title': self.title,
'submission_due': submission_due, 'submission_due': submission_due,
'submission_start': submission_start, 'submission_start': submission_start,
...@@ -189,10 +191,14 @@ class StudioMixin(object): ...@@ -189,10 +191,14 @@ class StudioMixin(object):
)} )}
# This is where we default to EASE for problems which are edited in the GUI # This is where we default to EASE for problems which are edited in the GUI
assessment['algorithm_id'] = 'ease' assessment['algorithm_id'] = 'ease'
if assessment['name'] == 'student-training':
for example in assessment['examples']:
example['answer'] = {'parts': [{'text': text} for text in example['answer']]}
xblock_validator = validator(self, self._) xblock_validator = validator(self, self._)
success, msg = xblock_validator( success, msg = xblock_validator(
create_rubric_dict(data['prompt'], data['criteria']), create_rubric_dict(data['prompts'], data['criteria']),
data['assessments'], data['assessments'],
submission_start=data['submission_start'], submission_start=data['submission_start'],
submission_due=data['submission_due'], submission_due=data['submission_due'],
...@@ -205,7 +211,7 @@ class StudioMixin(object): ...@@ -205,7 +211,7 @@ class StudioMixin(object):
# so we can safely modify the XBlock fields. # so we can safely modify the XBlock fields.
self.title = data['title'] self.title = data['title']
self.display_name = data['title'] self.display_name = data['title']
self.prompt = data['prompt'] self.prompts = data['prompts']
self.rubric_criteria = data['criteria'] self.rubric_criteria = data['criteria']
self.rubric_assessments = data['assessments'] self.rubric_assessments = data['assessments']
self.editor_assessments_order = data['editor_assessments_order'] self.editor_assessments_order = data['editor_assessments_order']
...@@ -267,13 +273,20 @@ class StudioMixin(object): ...@@ -267,13 +273,20 @@ class StudioMixin(object):
# could be accomplished within the template, we are opting to remove logic from the template. # could be accomplished within the template, we are opting to remove logic from the template.
student_training_module = self.get_assessment_module('student-training') student_training_module = self.get_assessment_module('student-training')
student_training_template = {'answer': ""} student_training_template = {
'answer': {
'parts': [
{'text': ''} for prompt in self.prompts
]
}
}
criteria_list = copy.deepcopy(self.rubric_criteria_with_labels) criteria_list = copy.deepcopy(self.rubric_criteria_with_labels)
for criterion in criteria_list: for criterion in criteria_list:
criterion['option_selected'] = "" criterion['option_selected'] = ""
student_training_template['criteria'] = criteria_list student_training_template['criteria'] = criteria_list
if student_training_module: if student_training_module:
student_training_module = update_assessments_format([student_training_module])[0]
example_list = [] example_list = []
# Adds each example to a modified version of the student training module dictionary. # Adds each example to a modified version of the student training module dictionary.
for example in student_training_module['examples']: for example in student_training_module['examples']:
......
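One consequence of the hunk above: training-example answers arrive from the editor as flat lists of strings (one per prompt) and are converted to the block's parts format before validation and storage. A minimal sketch of that conversion (not project code; values illustrative):

```python
# The editor posts each student-training example answer as a list of strings,
# one per prompt; the handler stores it in the {'parts': [...]} format.
posted_example = {'answer': ['Answer to prompt 1', 'Answer to prompt 2']}
posted_example['answer'] = {
    'parts': [{'text': text} for text in posted_example['answer']]
}
assert posted_example['answer'] == {
    'parts': [{'text': 'Answer to prompt 1'}, {'text': 'Answer to prompt 2'}]
}
```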
 {
 "missing_feedback": {
-"rubric": {
-"prompt": "Test Prompt",
 "criteria": [
 {
 "order_num": 0,
 "name": "Test criterion",
+"label": "Test criterion label",
 "prompt": "Test criterion prompt",
 "options": [
 {
 "order_num": 0,
 "points": 0,
 "name": "No",
+"label": "No label",
 "explanation": "No explanation"
 }
 ]
 }
-]
-},
-"prompt": "My new prompt.",
+],
+"prompts": [{"description": "My new prompt."}],
 "submission_due": "4014-02-27T09:46:28",
 "submission_start": "4014-02-10T09:46:28",
 "title": "My new title.",
@@ -36,9 +35,59 @@
 "start": "",
 "due": ""
 }
]
},
"prompts_is_string": {
"title": "Foo",
"prompts": "My new prompt.",
"feedback_prompt": "Test Feedback Prompt",
"feedback_default_text": "Test default text...",
"submission_start": null,
"submission_due": null,
"allow_file_upload": true,
"leaderboard_show": true,
"allow_latex": false,
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"label": "Test criterion label",
"prompt": "Test criterion prompt",
"feedback": "optional",
"options": [
{
"order_num": 0,
"points": 0,
"label": "No label",
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"label": "Yes label",
"name": "Yes",
"explanation": "Yes explanation"
}
]
}
],
"assessments": [
{
"name": "peer-assessment",
"start": "2014-02-27T09:46:28",
"due": "2014-03-01T00:00:00",
"must_grade": 5,
"must_be_graded_by": 3
},
{
"name": "self-assessment",
"start": "2014-04-01T00:00:00",
"due": "2014-06-01T00:00:00"
}
 ],
-"expected-assessment": "peer-assessment",
-"expected-criterion-prompt": "Test criterion prompt"
+"editor_assessments_order": ["student-training", "peer-assessment", "self-assessment"]
 }
 }
 {
 "zero_criteria": {
 "rubric": {
-"prompt": "Test Prompt",
+"prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
 "criteria": []
 }
 },
 "negative_points": {
 "rubric": {
-"prompt": "Test Prompt",
+"prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
 "criteria": [
 {
 "order_num": 0,
@@ -29,7 +29,7 @@
 "duplicate_criteria_names": {
 "rubric": {
-"prompt": "Test Prompt",
+"prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
 "criteria": [
 {
 "order_num": 0,
@@ -63,7 +63,7 @@
 "duplicate_option_names": {
 "rubric": {
-"prompt": "Test Prompt",
+"prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
 "criteria": [
 {
 "order_num": 0,
@@ -88,9 +88,62 @@
 }
 },
"change_prompts_number_after_release": {
"rubric": {
"prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}, {"description": "Test Prompt 3."}],
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
]
}
]
},
"current_rubric": {
"prompts": [{"description": "Test Prompt 3."}, {"description": "Test Prompt 4."}],
"criteria": [
{
"order_num": 0,
"name": "Test criterion",
"prompt": "Test criterion prompt",
"options": [
{
"order_num": 0,
"points": 0,
"name": "No",
"explanation": "No explanation"
},
{
"order_num": 1,
"points": 2,
"name": "Yes",
"explanation": "Yes explanation"
}
]
}
]
},
"is_released": true
},
"change_points_after_release": { "change_points_after_release": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -114,7 +167,7 @@ ...@@ -114,7 +167,7 @@
] ]
}, },
"current_rubric": { "current_rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -142,7 +195,7 @@ ...@@ -142,7 +195,7 @@
"add_criteria_after_release": { "add_criteria_after_release": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -166,7 +219,7 @@ ...@@ -166,7 +219,7 @@
] ]
}, },
"current_rubric": { "current_rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -213,7 +266,7 @@ ...@@ -213,7 +266,7 @@
"remove_criteria_after_release": { "remove_criteria_after_release": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -256,7 +309,7 @@ ...@@ -256,7 +309,7 @@
] ]
}, },
"current_rubric": { "current_rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -284,7 +337,7 @@ ...@@ -284,7 +337,7 @@
"add_options_after_release": { "add_options_after_release": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -308,7 +361,7 @@ ...@@ -308,7 +361,7 @@
] ]
}, },
"current_rubric": { "current_rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -330,7 +383,7 @@ ...@@ -330,7 +383,7 @@
"remove_options_after_release": { "remove_options_after_release": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -348,7 +401,7 @@ ...@@ -348,7 +401,7 @@
] ]
}, },
"current_rubric": { "current_rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -376,7 +429,7 @@ ...@@ -376,7 +429,7 @@
"rename_criterion_name_after_release": { "rename_criterion_name_after_release": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -400,7 +453,7 @@ ...@@ -400,7 +453,7 @@
] ]
}, },
"current_rubric": { "current_rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -428,7 +481,7 @@ ...@@ -428,7 +481,7 @@
"rename_multiple_criteria_after_release": { "rename_multiple_criteria_after_release": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -465,7 +518,7 @@ ...@@ -465,7 +518,7 @@
] ]
}, },
"current_rubric": { "current_rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -507,7 +560,7 @@ ...@@ -507,7 +560,7 @@
"example_based_duplicate_option_points": { "example_based_duplicate_option_points": {
"is_example_based": true, "is_example_based": true,
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -534,7 +587,7 @@ ...@@ -534,7 +587,7 @@
"zero_options_feedback_optional": { "zero_options_feedback_optional": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -549,7 +602,7 @@ ...@@ -549,7 +602,7 @@
"zero_options_feedback_disabled": { "zero_options_feedback_disabled": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
...@@ -564,7 +617,7 @@ ...@@ -564,7 +617,7 @@
"zero_options_no_feedback": { "zero_options_no_feedback": {
"rubric": { "rubric": {
"prompt": "Test Prompt", "prompts": [{"description": "Test Prompt 1."}, {"description": "Test Prompt 2."}],
"criteria": [ "criteria": [
{ {
"order_num": 0, "order_num": 0,
......
@@ -25,7 +25,7 @@
 "feedback": "required"
 }
 ],
-"prompt": "My new prompt.",
+"prompts": [{"description": "My new prompt 1."}, {"description": "My new prompt 2."}],
 "feedback_prompt": "Feedback prompt",
 "feedback_default_text": "Feedback default text",
 "submission_due": "4014-02-27T09:46",
@@ -77,7 +77,7 @@
 "feedback": "required"
 }
 ],
-"prompt": "Ṁÿ ṅëẅ ṗṛöṁṗẗ.",
+"prompts": [{"description": "Ṁÿ ṅëẅ ṗṛöṁṗẗ 1."}, {"description": "Ṁÿ ṅëẅ ṗṛöṁṗẗ 2."}],
 "feedback_prompt": "ḟëëḋḅäċḳ ṗṛöṁṗẗ",
 "feedback_default_text": "Ṫëṡẗ ḋëḟäüḷẗ ẗëẍẗ",
 "submission_due": "4014-02-27T09:46",
@@ -129,7 +129,7 @@
 "feedback": "required"
 }
 ],
-"prompt": "My new prompt.",
+"prompts": [{"description": "My new prompt 1."}, {"description": "My new prompt 2."}],
 "feedback_prompt": "Feedback prompt",
 "feedback_default_text": "Feedback default text",
 "submission_due": "4014-02-27T09:46",
@@ -193,7 +193,7 @@
 "feedback": "required"
 }
 ],
-"prompt": "My new prompt.",
+"prompts": [{"description": "My new prompt 1."}, {"description": "My new prompt 2."}],
 "feedback_prompt": "Feedback prompt",
 "feedback_default_text": "Feedback default text",
 "submission_due": "4014-02-27T09:46",
...
@@ -18,7 +18,7 @@ class StudioViewTest(XBlockHandlerTestCase):
 """
 UPDATE_EDITOR_DATA = {
 "title": "Test title",
-"prompt": "Test prompt",
+"prompts": [{"description": "Test prompt"}],
 "feedback_prompt": "Test feedback prompt",
 "feedback_default_text": "Test feedback default text",
 "submission_start": "4014-02-10T09:46",
@@ -205,10 +205,13 @@ class StudioViewTest(XBlockHandlerTestCase):
 # Store old XBlock fields for later verification
 old_title = xblock.title
-old_prompt = xblock.prompt
+old_prompts = xblock.prompts
 old_assessments = xblock.rubric_assessments
 old_criteria = xblock.rubric_criteria
+xblock.runtime.modulestore = MagicMock()
+xblock.runtime.modulestore.has_published_version.return_value = False
 # Verify the response fails
 resp = self.request(xblock, 'update_editor_context', request, response_format='json')
 self.assertFalse(resp['success'])
@@ -218,7 +221,7 @@ class StudioViewTest(XBlockHandlerTestCase):
 # We don't need to be exhaustive here, because we have other unit tests
 # that verify this extensively.
 self.assertEqual(xblock.title, old_title)
-self.assertEqual(xblock.prompt, old_prompt)
+self.assertEqual(xblock.prompts, old_prompts)
 self.assertItemsEqual(xblock.rubric_assessments, old_assessments)
 self.assertItemsEqual(xblock.rubric_criteria, old_criteria)
...
@@ -81,7 +81,7 @@ class AssessmentValidationTest(TestCase):
 class RubricValidationTest(TestCase):
 @ddt.file_data('data/valid_rubrics.json')
-def test_valid_assessment(self, data):
+def test_valid_rubric(self, data):
 current_rubric = data.get('current_rubric')
 is_released = data.get('is_released', False)
 is_example_based = data.get('is_example_based', False)
@@ -92,7 +92,7 @@ class RubricValidationTest(TestCase):
 self.assertEqual(msg, u'')
 @ddt.file_data('data/invalid_rubrics.json')
-def test_invalid_assessment(self, data):
+def test_invalid_rubric(self, data):
 current_rubric = data.get('current_rubric')
 is_released = data.get('is_released', False)
 is_example_based = data.get('is_example_based', False)
...
@@ -214,6 +214,10 @@ def validate_rubric(rubric_dict, current_rubric, is_released, is_example_based,
     # but nothing that would change the point value of a rubric.
     if is_released:
+        # Number of prompts must be the same
+        if len(rubric_dict['prompts']) != len(current_rubric['prompts']):
+            return (False, _(u'Prompts cannot be created or deleted after a problem is released.'))
         # Number of criteria must be the same
         if len(rubric_dict['criteria']) != len(current_rubric['criteria']):
             return (False, _(u'The number of criteria cannot be changed after a problem is released.'))
@@ -330,7 +334,7 @@ def validator(oa_block, _, strict_post_release=True):
     # Rubric
     is_example_based = 'example-based-assessment' in [asmnt.get('name') for asmnt in assessments]
     current_rubric = {
-        'prompt': oa_block.prompt,
+        'prompts': oa_block.prompts,
         'criteria': oa_block.rubric_criteria
     }
     success, msg = validate_rubric(rubric_dict, current_rubric, is_released, is_example_based, _)
...
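A minimal sketch of the released-rubric check added above: once a problem is released, the number of prompts may not change. This is a simplified stand-in (the real function takes the full rubric dicts and uses gettext for the message).

```python
# Simplified stand-in for the prompt-count check in validate_rubric.
def check_prompt_count(new_rubric, current_rubric, is_released):
    if is_released and len(new_rubric['prompts']) != len(current_rubric['prompts']):
        return (False, u'Prompts cannot be created or deleted after a problem is released.')
    return (True, u'')

current = {'prompts': [{'description': 'Test Prompt 1.'}, {'description': 'Test Prompt 2.'}]}
updated = {'prompts': [{'description': 'Only one prompt now.'}]}
assert check_prompt_count(updated, current, is_released=True)[0] is False
assert check_prompt_count(updated, current, is_released=False)[0] is True
```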