Commit d8988b45 by Usman Khalid

Added prompts field to the xblock.

TNL-708
parent 026680cc
@@ -56,13 +56,13 @@ def convert_training_examples_list_to_dict(examples_list):
]
def create_rubric_dict(prompt, criteria):
def create_rubric_dict(prompts, criteria):
"""
Construct a serialized rubric model in the format expected
by the assessments app.
Args:
prompt (unicode): The rubric prompt.
prompts (list of dict): The serialized rubric prompts.
criteria (list of dict): The serialized rubric criteria.
Returns:
@@ -70,7 +70,7 @@ def create_rubric_dict(prompt, criteria):
"""
return {
"prompt": prompt,
"prompts": prompts,
"criteria": criteria
}
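For illustration, a minimal sketch of the new calling convention (the prompt texts and criteria below are made up, not from the commit):

criteria = []  # serialized criteria, elided here
prompts = [
    {'description': u'Summarize the passage in your own words.', 'order_num': 0},
    {'description': u'Critique the strongest counter-argument.', 'order_num': 1},
]
create_rubric_dict(prompts, criteria)
# -> {'prompts': prompts, 'criteria': criteria}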
......
@@ -12,6 +12,13 @@ DEFAULT_PROMPT = """
Read for conciseness, clarity of thought, and form.
"""
DEFAULT_PROMPTS = [
{
'description': DEFAULT_PROMPT,
'order_num': 0,
}
]
DEFAULT_RUBRIC_CRITERIA = [
{
'name': "Ideas",
......
@@ -140,10 +140,10 @@ class OpenAssessmentBlock(
help="The number of leaderboard results to display (0 if none)"
)
prompt = String(
default=DEFAULT_PROMPT,
prompts = List(
default=DEFAULT_PROMPTS,
scope=Scope.content,
help="A prompt to display to a student (plain text)."
help="The prompts to display to a student."
)
rubric_criteria = List(
@@ -395,7 +395,7 @@ class OpenAssessmentBlock(
xblock_validator = validator(block, block._, strict_post_release=False)
xblock_validator(
create_rubric_dict(config['prompt'], config['rubric_criteria']),
create_rubric_dict(config['prompts'], config['rubric_criteria']),
config['rubric_assessments'],
submission_start=config['submission_start'],
submission_due=config['submission_due'],
@@ -409,7 +409,7 @@ class OpenAssessmentBlock(
block.submission_start = config['submission_start']
block.submission_due = config['submission_due']
block.title = config['title']
block.prompt = config['prompt']
block.prompts = config['prompts']
block.allow_file_upload = config['allow_file_upload']
block.allow_latex = config['allow_latex']
block.leaderboard_show = config['leaderboard_show']
@@ -422,6 +422,16 @@ class OpenAssessmentBlock(
return i18nService.ugettext
@property
def prompt(self):
"""
For backwards compatibility during development; to be removed.

Returns:
    unicode: The description of the first prompt.
"""
return self.prompts[0]['description']
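A hedged sketch of how this shim keeps legacy callers working (the block instance and field values are hypothetical):

# Legacy code that reads block.prompt still works against the new list field:
block.prompts = [{'description': u'Read for conciseness.', 'order_num': 0}]
assert block.prompt == u'Read for conciseness.'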
@property
def valid_assessments(self):
"""
Return a list of assessment dictionaries that we recognize.
......
@@ -4,7 +4,7 @@ Schema for validating and sanitizing data received from the JavaScript client.
import dateutil
from pytz import utc
from voluptuous import Schema, Required, All, Any, Range, In, Invalid
from voluptuous import Schema, Optional, Required, All, Any, Range, In, Invalid
def utf8_validator(value):
@@ -66,7 +66,13 @@ VALID_ASSESSMENT_TYPES = [
# Schema definition for an update from the Studio JavaScript editor.
EDITOR_UPDATE_SCHEMA = Schema({
Required('prompt'): utf8_validator,
Required('prompts'): [
Schema({
Required('order_num'): All(int, Range(min=0)),
Optional('uuid'): utf8_validator,
Required('description'): utf8_validator,
})
],
Required('title'): utf8_validator,
Required('feedback_prompt'): utf8_validator,
Required('feedback_default_text'): utf8_validator,
......
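As a sanity check on the new rules, this sketch re-declares just the prompts fragment of the schema and exercises it with made-up payloads; it reuses the module's utf8_validator and is not part of the commit:

from voluptuous import All, MultipleInvalid, Optional, Range, Required, Schema

prompts_schema = Schema([
    Schema({
        Required('order_num'): All(int, Range(min=0)),
        Optional('uuid'): utf8_validator,
        Required('description'): utf8_validator,
    })
])

prompts_schema([{'order_num': 0, 'description': u'First prompt'}])  # passes; uuid is optional
try:
    prompts_schema([{'order_num': -1, 'description': u'Oops'}])     # rejected: order_num below 0
except MultipleInvalid:
    pass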
"""
Studio editing view for OpenAssessment XBlock.
"""
import pkg_resources
import copy
import logging
import pkg_resources
from uuid import uuid4
from django.template import Context
from django.template.loader import get_template
from voluptuous import MultipleInvalid
@@ -112,7 +114,7 @@ class StudioMixin(object):
feedback_default_text = DEFAULT_RUBRIC_FEEDBACK_TEXT
return {
'prompt': self.prompt,
'prompts': self.prompts,
'title': self.title,
'submission_due': submission_due,
'submission_start': submission_start,
@@ -159,6 +161,11 @@ class StudioMixin(object):
logger.exception('editor_assessments_order does not contain all expected assessment types')
return {'success': False, 'msg': self._('Error updating XBlock configuration')}
# For new prompts, set the uuid
for prompt in data['prompts']:
    if 'uuid' not in prompt:
        prompt['uuid'] = uuid4().hex
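For reference, uuid4().hex yields a compact 32-character hex identifier (the value shown is made up):

from uuid import uuid4

uuid4().hex  # e.g. '4f7f4a86a2cd4c3a8a78d0e4e5c2b9d1'; random on every call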
# Backwards compatibility: We used to treat "name" as both a user-facing label
# and a unique identifier for criteria and options.
# Now we treat "name" as a unique identifier, and we've added an additional "label"
@@ -192,7 +199,7 @@ class StudioMixin(object):
xblock_validator = validator(self, self._)
success, msg = xblock_validator(
create_rubric_dict(data['prompt'], data['criteria']),
create_rubric_dict(data['prompts'], data['criteria']),
data['assessments'],
submission_start=data['submission_start'],
submission_due=data['submission_due'],
@@ -205,7 +212,7 @@ class StudioMixin(object):
# so we can safely modify the XBlock fields.
self.title = data['title']
self.display_name = data['title']
self.prompt = data['prompt']
self.prompts = data['prompts']
self.rubric_criteria = data['criteria']
self.rubric_assessments = data['assessments']
self.editor_assessments_order = data['editor_assessments_order']
......
@@ -214,6 +214,17 @@ def validate_rubric(rubric_dict, current_rubric, is_released, is_example_based,
# but nothing that would change the point value of a rubric.
if is_released:
# Number of prompts must be the same
if len(rubric_dict['prompts']) != len(current_rubric['prompts']):
    return (False, _(u'Prompts cannot be deleted or created after a problem is released.'))

# Prompt order must be the same
for current_prompt, updated_prompt in zip(current_rubric['prompts'], rubric_dict['prompts']):
    # current_prompt will not have a "uuid" key if it is the DEFAULT_PROMPT, so it is
    # okay to override it. Prompts are plain dicts, so key membership is checked with
    # "in" (hasattr() would always be False for a dict key).
    if 'uuid' in current_prompt and current_prompt['uuid'] != updated_prompt['uuid']:
        return (False, _(u'Prompts cannot be reordered after a problem is released.'))

# Number of criteria must be the same
if len(rubric_dict['criteria']) != len(current_rubric['criteria']):
    return (False, _(u'The number of criteria cannot be changed after a problem is released.'))
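A hedged sketch of what the uuid comparison above rejects, with made-up prompt data:

# Reordering released prompts swaps their uuids position-wise, so the pairwise
# comparison in the loop above trips:
current = [{'uuid': 'a1', 'description': u'One'}, {'uuid': 'b2', 'description': u'Two'}]
updated = [current[1], current[0]]
assert any(c['uuid'] != u['uuid'] for c, u in zip(current, updated))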
@@ -330,7 +341,7 @@ def validator(oa_block, _, strict_post_release=True):
# Rubric
is_example_based = 'example-based-assessment' in [asmnt.get('name') for asmnt in assessments]
current_rubric = {
'prompt': oa_block.prompt,
'prompts': oa_block.prompts,
'criteria': oa_block.rubric_criteria
}
success, msg = validate_rubric(rubric_dict, current_rubric, is_released, is_example_based, _)
......
@@ -55,6 +55,35 @@ def _safe_get_text(element):
return unicode(element.text) if element.text is not None else u""
def _serialize_prompts(prompts_root, prompts_list):
"""
Serialize prompts as XML, adding children to the XML with root
node `prompts_root`.
We don't make any assumptions about the contents of `prompts_list`,
and we handle unexpected inputs gracefully.
Args:
prompts_root (lxml.etree.Element): The root node of the tree.
prompts_list (list): List of prompt dictionaries.
Returns:
None
"""
# Sort the prompts by order number, then serialize as XML
for prompt in _sort_by_order_num(prompts_list):
    prompt_el = etree.SubElement(prompts_root, 'prompt')

    # Prompt uuid (default to empty text, per the graceful-handling contract above)
    prompt_uuid = etree.SubElement(prompt_el, u'uuid')
    prompt_uuid.text = unicode(prompt.get('uuid', u''))

    # Prompt description
    prompt_description = etree.SubElement(prompt_el, 'description')
    prompt_description.text = unicode(prompt.get('description', u''))
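Assuming the module helpers above (including _sort_by_order_num), a quick round trip shows the XML this emits; the uuid value is made up:

from lxml import etree

root = etree.Element('prompts')
_serialize_prompts(root, [
    {'uuid': 'abc123', 'description': u'Summarize the passage.', 'order_num': 0},
])
print(etree.tostring(root, pretty_print=True))
# <prompts>
#   <prompt>
#     <uuid>abc123</uuid>
#     <description>Summarize the passage.</description>
#   </prompt>
# </prompts>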
def _serialize_options(options_root, options_list):
"""
Serialize rubric criterion options as XML, adding children to the XML
@@ -139,7 +168,7 @@ def _serialize_criteria(criteria_root, criteria_list):
_serialize_options(criterion_el, options_list)
def serialize_rubric(rubric_root, oa_block, include_prompt=True):
def serialize_rubric(rubric_root, oa_block):
"""
Serialize a rubric dictionary as XML, adding children to the XML
with root node `rubric_root`.
@@ -153,17 +182,9 @@ def serialize_rubric(rubric_root, oa_block, include_prompt=True):
rubric_dict (dict): A dictionary representation of the rubric, of the form
described in the serialized Rubric model (peer grading serializers).
Kwargs:
include_prompt (bool): Whether or not to include the prompt in the
serialized format for a rubric. Defaults to True.
Returns:
None
"""
# Rubric prompt (default to empty text); None indicates no input element
if include_prompt and oa_block.prompt is not None:
prompt = etree.SubElement(rubric_root, 'prompt')
prompt.text = unicode(oa_block.prompt)
# Criteria
criteria_list = oa_block.rubric_criteria
@@ -227,6 +248,44 @@ def _parse_boolean(boolean_str):
return boolean_str in ['True', 'true']
def _parse_prompts_xml(prompts_root):
"""
Parse <prompts> element in the OpenAssessment XBlock's content XML.
Args:
prompts_root (lxml.etree.Element): The <prompts> element of the content XML.
Returns:
list of prompt dictionaries.
Raises:
UpdateFromXmlError: The XML definition is invalid or the XBlock could not be updated.
"""
prompts_list = []

order_num = 0

for prompt in prompts_root.findall('prompt'):
    prompt_dict = dict()

    # Prompt order number (sequential)
    prompt_dict['order_num'] = order_num
    order_num += 1

    # Prompt uuid
    prompt_uuid = prompt.find('uuid')
    if prompt_uuid is not None:
        prompt_dict['uuid'] = _safe_get_text(prompt_uuid)
    else:
        raise UpdateFromXmlError('Every "prompt" element must contain a "uuid" element.')

    # Prompt description
    prompt_description = prompt.find('description')
    if prompt_description is not None:
        prompt_dict['description'] = _safe_get_text(prompt_description)
    else:
        raise UpdateFromXmlError('Every "prompt" element must contain a "description" element.')

    prompts_list.append(prompt_dict)

return prompts_list
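And the inverse direction through the parser above, again with a made-up uuid:

prompts_el = etree.fromstring(
    '<prompts>'
    '<prompt><uuid>abc123</uuid><description>Summarize the passage.</description></prompt>'
    '</prompts>'
)
_parse_prompts_xml(prompts_el)
# [{'order_num': 0, 'uuid': u'abc123', 'description': u'Summarize the passage.'}]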
def _parse_options_xml(options_root):
"""
Parse <options> element in the OpenAssessment XBlock's content XML.
@@ -635,6 +694,10 @@ def serialize_content_to_xml(oa_block, root):
assessments_root = etree.SubElement(root, 'assessments')
serialize_assessments(assessments_root, oa_block)
# Prompts
prompts_root = etree.SubElement(root, 'prompts')
_serialize_prompts(prompts_root, oa_block.prompts)
# Rubric
rubric_root = etree.SubElement(root, 'rubric')
serialize_rubric(rubric_root, oa_block)
@@ -673,7 +736,7 @@ def serialize_rubric_to_xml_str(oa_block):
"""
rubric_root = etree.Element('rubric')
serialize_rubric(rubric_root, oa_block, include_prompt=False)
serialize_rubric(rubric_root, oa_block)
return etree.tostring(rubric_root, pretty_print=True, encoding='unicode')
@@ -782,9 +845,20 @@ def parse_from_xml(root):
else:
assessments = parse_assessments_xml(assessments_el)
prompts_el = root.find('prompts')
# lxml elements with no children are falsey, so test against None explicitly
if prompts_el is not None:
    prompts = _parse_prompts_xml(prompts_el)
else:
    # Backwards compatibility: build a single prompt from the legacy rubric-level prompt
    prompts = [
        {
            'description': rubric['prompt'],
            'order_num': 0,
        }
    ]
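Illustrative trace of the legacy fallback (values made up): a document with only a rubric-level prompt parses to a single uuid-less prompt dict, and a uuid is attached on the next Studio save, per the StudioMixin change above.

# e.g. rubric == {'prompt': u'Old prompt text', ...} and no <prompts> element gives
# prompts == [{'description': u'Old prompt text', 'order_num': 0}]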
return {
'title': title,
'prompt': rubric['prompt'],
'prompts': prompts,
'rubric_criteria': rubric['criteria'],
'rubric_assessments': assessments,
'rubric_feedback_prompt': rubric['feedbackprompt'],
......