Commit 7b3376f9 by Adam

Merge pull request #2575 from edx/adam/i18n-self-assessment-2

Adam/i18n self assessment 2
parents 2cea81b0 97f5b25e
......@@ -812,7 +812,7 @@ class ChemicalEquationTest(unittest.TestCase):
def test_ajax_bad_method(self):
"""
With a bad dispatch, we shouldn't recieve anything
With a bad dispatch, we shouldn't receive anything
"""
response = self.the_input.handle_ajax("obviously_not_real", {})
self.assertEqual(response, {})
......@@ -923,7 +923,7 @@ class FormulaEquationTest(unittest.TestCase):
def test_ajax_bad_method(self):
"""
With a bad dispatch, we shouldn't recieve anything
With a bad dispatch, we shouldn't receive anything
"""
response = self.the_input.handle_ajax("obviously_not_real", {})
self.assertEqual(response, {})
......
......@@ -49,7 +49,7 @@ def segment_to_fraction(distance):
with nice fractions (which produce small Miller indices), he may want shift
to new origin if segments are like S = (0.015, > 0.05, >0.05) - close to zero
in one coordinate. He may update S to (0, >0.05, >0.05) and shift origin.
In this way he can recieve nice small fractions. Also there is can be
In this way he can receive nice small fractions. Also there is can be
degenerated case when S = (0.015, 0.012, >0.05) - if update S to (0, 0, >0.05) -
it is a line. This case should be considered separately. Small nice Miller
numbers and possibility to create very small segments can not be implemented
......
......@@ -249,7 +249,11 @@ class @CombinedOpenEnded
fd.append('submission_id', submission_id)
fd.append('grader_id', grader_id)
if(!score)
@gentle_alert "You need to pick a rating before you can submit."
###
Translators: A "rating" is a score a student gives to indicate how well
they feel they were graded on this problem
###
@gentle_alert gettext "You need to pick a rating before you can submit."
return
else
fd.append('score', score)
......@@ -293,10 +297,15 @@ class @CombinedOpenEnded
@replace_text_inputs()
@hint_area.attr('disabled', true)
if @task_number<@task_count
@gentle_alert "Your score did not meet the criteria to move to the next step."
###
Translators: this message appears when transitioning between openended grading
types (i.e. self assessment to peer assessment). Sometimes, if a student
did not perform well at one step, they cannot move on to the next one.
###
@gentle_alert gettext "Your score did not meet the criteria to move to the next step."
else if @child_state == 'initial'
@answer_area.attr("disabled", false)
@submit_button.prop('value', 'Submit')
@submit_button.prop('value', gettext 'Submit')
@submit_button.click @confirm_save_answer
@setup_file_upload()
@save_button.click @store_answer
......@@ -305,14 +314,18 @@ class @CombinedOpenEnded
@answer_area.attr("disabled", true)
@replace_text_inputs()
@hide_file_upload()
@submit_button.prop('value', 'Submit assessment')
###
Translators: one clicks this button after one has finished filling out the grading
form for an openended assessment
###
@submit_button.prop('value', gettext 'Submit assessment')
@submit_button.click @save_assessment
@submit_button.attr("disabled",true)
if @child_type == "openended"
@submit_button.hide()
@queueing()
@grader_status = @$(@grader_status_sel)
@grader_status.html("<span class='grading'>Your response has been submitted. Please check back later for your grade.</span>")
@grader_status.html("<span class='grading'>" + gettext "Your response has been submitted. Please check back later for your grade." + "</span>")
else if @child_type == "selfassessment"
@setup_score_selection()
else if @child_state == 'post_assessment'
......@@ -321,7 +334,11 @@ class @CombinedOpenEnded
@skip_post_assessment()
@answer_area.attr("disabled", true)
@replace_text_inputs()
@submit_button.prop('value', 'Submit post-assessment')
###
Translators: this button is clicked to submit a student's rating of
an evaluator's assessment
###
@submit_button.prop('value', gettext 'Submit post-assessment')
if @child_type=="selfassessment"
@submit_button.click @save_hint
else
......@@ -353,7 +370,7 @@ class @CombinedOpenEnded
@save_button.attr("disabled",true)
$.postWithPrefix "#{@ajax_url}/store_answer", data, (response) =>
if response.success
@gentle_alert("Answer saved, but not yet submitted.")
@gentle_alert(gettext "Answer saved, but not yet submitted.")
else
@errors_area.html(response.error)
@save_button.attr("disabled",false)
......@@ -378,7 +395,12 @@ class @CombinedOpenEnded
@gentle_alert response.error
confirm_save_answer: (event) =>
@save_answer(event) if confirm('Please confirm that you wish to submit your work. You will not be able to make any changes after submitting.')
###
Translators: This string appears in a confirmation box after one tries to submit
an openended problem
###
confirmation_text = gettext 'Please confirm that you wish to submit your work. You will not be able to make any changes after submitting.'
@save_answer(event) if confirm(confirmation_text)
save_answer: (event) =>
@$el.find(@oe_alert_sel).remove()
......@@ -399,7 +421,7 @@ class @CombinedOpenEnded
# Don't submit the file in the case of it being too large, deal with the error locally.
@submit_button.show()
@submit_button.attr('disabled', false)
@gentle_alert "You are trying to upload a file that is too large for our system. Please choose a file under 2MB or paste a link to it into the answer box."
@gentle_alert gettext "You are trying to upload a file that is too large for our system. Please choose a file under 2MB or paste a link to it into the answer box."
return
fd = new FormData()
......@@ -422,14 +444,14 @@ class @CombinedOpenEnded
@errors_area.html(@out_of_sync_message)
keydown_handler: (event) =>
#Previously, responses were submitted when hitting enter. Add in a modifier that ensures that ctrl+enter is needed.
# Previously, responses were submitted when hitting enter. Add in a modifier that ensures that ctrl+enter is needed.
if event.which == 17 && @is_ctrl==false
@is_ctrl=true
else if @is_ctrl==true && event.which == 13 && @child_state == 'assessing' && @rub.check_complete()
@save_assessment(event)
keyup_handler: (event) =>
#Handle keyup event when ctrl key is released
# Handle keyup event when ctrl key is released
if event.which == 17 && @is_ctrl==true
@is_ctrl=false
......@@ -485,7 +507,7 @@ class @CombinedOpenEnded
@errors_area.html(@out_of_sync_message)
confirm_reset: (event) =>
@reset(event) if confirm('Are you sure you want to remove your previous response to this question?')
@reset(event) if confirm(gettext 'Are you sure you want to remove your previous response to this question?')
reset: (event) =>
event.preventDefault()
......@@ -522,9 +544,14 @@ class @CombinedOpenEnded
@rebind()
@next_problem_button.hide()
if !response.allow_reset
@gentle_alert "Moved to next step."
@gentle_alert gettext "Moved to next step."
else
@gentle_alert "Your score did not meet the criteria to move to the next step."
###
Translators: this message appears when transitioning between openended grading
types (i.e. self assessment to peer assessment). Sometimes, if a student
did not perform well at one step, they cannot move on to the next one.
###
@gentle_alert gettext "Your score did not meet the criteria to move to the next step."
@show_combined_rubric_current()
else
@errors_area.html(response.error)
......@@ -561,7 +588,7 @@ class @CombinedOpenEnded
@$(@file_upload_preview_sel).hide()
@$(@file_upload_box_sel).change @preview_image
else
@gentle_alert 'File uploads are required for this question, but are not supported in this browser. Try the newest version of google chrome. Alternatively, if you have uploaded the image to the web, you can paste a link to it into the answer box.'
@gentle_alert gettext 'File uploads are required for this question, but are not supported in your browser. Try the newest version of Google Chrome. Alternatively, if you have uploaded the image to another website, you can paste a link to it into the answer box.'
hide_file_upload: =>
if @accept_file_upload == "True"
......@@ -582,12 +609,20 @@ class @CombinedOpenEnded
collapse_question: (event) =>
@prompt_container.slideToggle()
@prompt_container.toggleClass('open')
if @question_header.text() == "Hide Question"
new_text = "Show Question"
Logger.log 'oe_hide_question', {location: @location}
else
if @prompt_container.hasClass('open')
###
Translators: "Show Question" is some text that, when clicked, shows a question's
content that had been hidden
###
new_text = gettext "Show Question"
Logger.log 'oe_show_question', {location: @location}
new_text = "Hide Question"
else
###
Translators: "Hide Question" is some text that, when clicked, hides a question's
content
###
Logger.log 'oe_hide_question', {location: @location}
new_text = gettext "Hide Question"
@question_header.text(new_text)
return false
......@@ -627,19 +662,19 @@ class @CombinedOpenEnded
if @prompt_container.is(":hidden")==true
@prompt_container.slideToggle()
@prompt_container.toggleClass('open')
@question_header.text("Hide Question")
@question_header.text(gettext "Hide Question")
prompt_hide: () =>
if @prompt_container.is(":visible")==true
@prompt_container.slideToggle()
@prompt_container.toggleClass('open')
@question_header.text("Show Question")
@question_header.text(gettext "Show Question")
log_feedback_click: (event) ->
link_text = @$(event.target).html()
if link_text == 'See full feedback'
target = @$(event.target)
if target.hasClass('see-full-feedback')
Logger.log 'oe_show_full_feedback', {}
else if link_text == 'Respond to Feedback'
else if target.hasClass('respond-to-feedback')
Logger.log 'oe_show_respond_to_feedback', {}
else
generated_event_type = link_text.toLowerCase().replace(" ","_")
......
......@@ -11,12 +11,19 @@ GRADER_TYPE_IMAGE_DICT = {
'BC': '/static/images/ml_grading_icon.png',
}
_ = lambda text: text
HUMAN_GRADER_TYPE = {
'SA': 'Self-Assessment',
'PE': 'Peer-Assessment',
'IN': 'Instructor-Assessment',
'ML': 'AI-Assessment',
'BC': 'AI-Assessment',
# Translators: "Self-Assessment" refers to the self-assessed mode of openended evaluation
'SA': _('Self-Assessment'),
# Translators: "Peer-Assessment" refers to the peer-assessed mode of openended evaluation
'PE': _('Peer-Assessment'),
# Translators: "Instructor-Assessment" refers to the instructor-assessed mode of openended evaluation
'IN': _('Instructor-Assessment'),
# Translators: "AI-Assessment" refers to the machine-graded mode of openended evaluation
'ML': _('AI-Assessment'),
# Translators: "AI-Assessment" refers to the machine-graded mode of openended evaluation
'BC': _('AI-Assessment'),
}
DO_NOT_DISPLAY = ['BC', 'IN']
......@@ -63,13 +70,16 @@ class CombinedOpenEndedRubric(object):
rubric_template = '{0}/open_ended_rubric.html'.format(self.TEMPLATE_DIR)
if self.view_only:
rubric_template = '{0}/open_ended_view_only_rubric.html'.format(self.TEMPLATE_DIR)
html = self.system.render_template(rubric_template,
{'categories': rubric_categories,
'has_score': self.has_score,
'view_only': self.view_only,
'max_score': max_score,
'combined_rubric': False
})
html = self.system.render_template(
rubric_template,
{
'categories': rubric_categories,
'has_score': self.has_score,
'view_only': self.view_only,
'max_score': max_score,
'combined_rubric': False,
}
)
success = True
except:
#This is a staff_facing_error
......@@ -153,7 +163,6 @@ class CombinedOpenEndedRubric(object):
"[extract_category] Category {0} is missing a score. Contact the learning sciences group for assistance.".format(
descriptionxml.text))
# parse description
if descriptionxml.tag != 'description':
#This is a staff_facing_error
......@@ -245,17 +254,20 @@ class CombinedOpenEndedRubric(object):
else:
correct.append(.5)
html = self.system.render_template('{0}/open_ended_combined_rubric.html'.format(self.TEMPLATE_DIR),
{'categories': rubric_categories,
'max_scores': max_scores,
'correct' : correct,
'has_score': True,
'view_only': True,
'max_score': max_score,
'combined_rubric': True,
'grader_type_image_dict': GRADER_TYPE_IMAGE_DICT,
'human_grader_types': HUMAN_GRADER_TYPE,
})
html = self.system.render_template(
'{0}/open_ended_combined_rubric.html'.format(self.TEMPLATE_DIR),
{
'categories': rubric_categories,
'max_scores': max_scores,
'correct': correct,
'has_score': True,
'view_only': True,
'max_score': max_score,
'combined_rubric': True,
'grader_type_image_dict': GRADER_TYPE_IMAGE_DICT,
'human_grader_types': HUMAN_GRADER_TYPE,
}
)
return html
@staticmethod
......
......@@ -149,14 +149,22 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
event_info['problem_id'] = self.location_string
event_info['student_id'] = system.anonymous_student_id
event_info['survey_responses'] = data
_ = self.system.service(self, "i18n").ugettext
survey_responses = event_info['survey_responses']
for tag in ['feedback', 'submission_id', 'grader_id', 'score']:
if tag not in survey_responses:
# This is a student_facing_error
return {'success': False,
'msg': "Could not find needed tag {0} in the survey responses. Please try submitting again.".format(
tag)}
return {
'success': False,
# Translators: 'tag' is one of 'feedback', 'submission_id',
# 'grader_id', or 'score'. They are categories that a student
# responds to when filling out a post-assessment survey
# of his or her grade from an openended problem.
'msg': _("Could not find needed tag {tag_name} in the "
"survey responses. Please try submitting "
"again.").format(tag_name=tag)
}
try:
submission_id = int(survey_responses['submission_id'])
grader_id = int(survey_responses['grader_id'])
......@@ -171,11 +179,17 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
)
log.exception(error_message)
# This is a student_facing_error
return {'success': False, 'msg': "There was an error saving your feedback. Please contact course staff."}
return {
'success': False,
'msg': _(
"There was an error saving your feedback. Please "
"contact course staff."
)
}
xqueue = system.get('xqueue')
if xqueue is None:
return {'success': False, 'msg': "Couldn't submit feedback."}
return {'success': False, 'msg': _("Couldn't submit feedback.")}
qinterface = xqueue['interface']
qtime = datetime.strftime(datetime.now(UTC), xqueue_interface.dateformat)
anonymous_student_id = system.anonymous_student_id
......@@ -208,10 +222,10 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
# Convert error to a success value
success = True
message = "Successfully saved your feedback."
message = _("Successfully saved your feedback.")
if error:
success = False
message = "Unable to save your feedback. Please try again later."
message = _("Unable to save your feedback. Please try again later.")
log.error("Unable to send feedback to grader. location: {0}, error_message: {1}".format(
self.location_string, error_message
))
......@@ -277,12 +291,14 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'key': queuekey,
'time': qtime,
}
_ = self.system.service(self, "i18n").ugettext
success = True
message = "Successfully saved your submission."
message = _("Successfully saved your submission.")
if error:
success = False
message = 'Unable to submit your submission to grader. Please try again later.'
# Translators: the `grader` refers to the grading service open response problems
# are sent to, either to be machine-graded, peer-graded, or instructor-graded.
message = _('Unable to submit your submission to the grader. Please try again later.')
log.error("Unable to submit to grader. location: {0}, error_message: {1}".format(
self.location_string, error_message
))
......@@ -297,9 +313,12 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
@param system: Modulesystem
@return: Boolean True (not useful currently)
"""
_ = self.system.service(self, "i18n").ugettext
new_score_msg = self._parse_score_msg(score_msg, system)
if not new_score_msg['valid']:
new_score_msg['feedback'] = 'Invalid grader reply. Please contact the course staff.'
# Translators: the `grader` refers to the grading service open response problems
# are sent to, either to be machine-graded, peer-graded, or instructor-graded.
new_score_msg['feedback'] = _('Invalid grader reply. Please contact the course staff.')
# self.child_history is initialized as []. record_latest_score() and record_latest_post_assessment()
# operate on self.child_history[-1]. Thus we have to make sure child_history is not [].
......@@ -387,7 +406,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
def format_feedback(feedback_type, value):
feedback_type, value = encode_values(feedback_type, value)
feedback = """
feedback = u"""
<div class="{feedback_type}">
{value}
</div>
......@@ -405,10 +424,15 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
# that we can do proper escaping here (e.g. are the graders allowed to
# include HTML?)
_ = self.system.service(self, "i18n").ugettext
for tag in ['success', 'feedback', 'submission_id', 'grader_id']:
if tag not in response_items:
# This is a student_facing_error
return format_feedback('errors', 'Error getting feedback from grader.')
return format_feedback(
# Translators: the `grader` refers to the grading service open response problems
# are sent to, either to be machine-graded, peer-graded, or instructor-graded.
'errors', _('Error getting feedback from grader.')
)
feedback_items = response_items['feedback']
try:
......@@ -417,12 +441,20 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
# This is a dev_facing_error
log.exception("feedback_items from external open ended grader have invalid json {0}".format(feedback_items))
# This is a student_facing_error
return format_feedback('errors', 'Error getting feedback from grader.')
return format_feedback(
# Translators: the `grader` refers to the grading service open response problems
# are sent to, either to be machine-graded, peer-graded, or instructor-graded.
'errors', _('Error getting feedback from grader.')
)
if response_items['success']:
if len(feedback) == 0:
# This is a student_facing_error
return format_feedback('errors', 'No feedback available from grader.')
return format_feedback(
# Translators: the `grader` refers to the grading service open response problems
# are sent to, either to be machine-graded, peer-graded, or instructor-graded.
'errors', _('No feedback available from grader.')
)
for tag in do_not_render:
if tag in feedback:
......@@ -648,12 +680,14 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'check_for_score': self.check_for_score,
'store_answer': self.store_answer,
}
_ = self.system.service(self, "i18n").ugettext
if dispatch not in handlers:
# This is a dev_facing_error
log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch))
# This is a dev_facing_error
return json.dumps({'error': 'Error handling action. Please try again.', 'success': False})
return json.dumps(
{'error': _('Error handling action. Please try again.'), 'success': False}
)
before = self.get_progress()
d = handlers[dispatch](data, system)
......@@ -733,6 +767,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
Input: Modulesystem object
Output: Rendered HTML
"""
_ = self.system.service(self, "i18n").ugettext
# set context variables and render template
eta_string = None
if self.child_state != self.INITIAL:
......@@ -740,7 +775,9 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
score = self.latest_score()
correct = 'correct' if self.is_submission_correct(score) else 'incorrect'
if self.child_state == self.ASSESSING:
eta_string = "Your response has been submitted. Please check back later for your grade."
# Translators: this string appears once an openended response
# is submitted but before it has been graded
eta_string = _("Your response has been submitted. Please check back later for your grade.")
else:
post_assessment = ""
correct = ""
......
......@@ -25,9 +25,6 @@ MAX_ATTEMPTS = 1
# Overriden by max_score specified in xml.
MAX_SCORE = 1
FILE_NOT_FOUND_IN_RESPONSE_MESSAGE = "We could not find a file in your submission. Please try choosing a file or pasting a link to your file into the answer box."
ERROR_SAVING_FILE_MESSAGE = "We are having trouble saving your file. Please try another file or paste a link to your file into the answer box."
def upload_to_s3(file_to_upload, keyname, s3_interface):
'''
......@@ -91,12 +88,23 @@ class OpenEndedChild(object):
DONE = 'done'
# This is used to tell students where they are at in the module
_ = lambda text: text
HUMAN_NAMES = {
'initial': 'Not started',
'assessing': 'In progress',
'post_assessment': 'Done',
'done': 'Done',
}
# Translators: "Not started" communicates to a student that their response
# has not yet been graded
'initial': _('Not started'),
# Translators: "In progress" communicates to a student that their response
# is currently in the grading process
'assessing': _('In progress'),
# Translators: "Done" communicates to a student that their response
# has been fully graded
'post_assessment': _('Done'),
'done': _('Done'),
}
# included to make this act enough like an xblock to get i18n
_services_requested = {"i18n": "need"}
_combined_services = _services_requested
def __init__(self, system, location, definition, descriptor, static_data,
instance_state=None, shared_state=None, **kwargs):
......@@ -463,6 +471,8 @@ class OpenEndedChild(object):
@return: Boolean success, and updated AJAX data dictionary.
"""
_ = self.system.service(self, "i18n").ugettext
error_message = ""
if not self.accept_file_upload:
......@@ -481,7 +491,11 @@ class OpenEndedChild(object):
# If success is False, we have not found a link, and no file was attached.
# Show error to student.
if success is False:
error_message = FILE_NOT_FOUND_IN_RESPONSE_MESSAGE
error_message = _(
"We could not find a file in your submission. "
"Please try choosing a file or pasting a URL to your "
"file into the answer box."
)
except Exception:
# In this case, an image was submitted by the student, but the image could not be uploaded to S3. Likely
......@@ -490,7 +504,10 @@ class OpenEndedChild(object):
"but the image was not able to be uploaded to S3. This could indicate a configuration "
"issue with this deployment and the S3_INTERFACE setting.")
success = False
error_message = ERROR_SAVING_FILE_MESSAGE
error_message = _(
"We are having trouble saving your file. Please try another "
"file or paste a URL to your file into the answer box."
)
return success, error_message, data
......@@ -534,3 +551,23 @@ class OpenEndedChild(object):
eta_string = ""
return eta_string
@classmethod
def service_declaration(cls, service_name):
"""
This classmethod is copied from XBlock's service_declaration.
It is included to make this class act enough like an XBlock
to get i18n working on it.
This is currently only used for i18n, and will return "need"
in that case.
Arguments:
service_name (string): the name of the service requested.
Returns:
One of "need", "want", or None.
"""
declaration = cls._combined_services.get(service_name)
return declaration
......@@ -125,8 +125,9 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
rubric_html = rubric_dict['html']
# we'll render it
context = {'rubric': rubric_html,
'max_score': self._max_score,
context = {
'rubric': rubric_html,
'max_score': self._max_score,
}
if self.child_state == self.ASSESSING:
......@@ -233,7 +234,11 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
# This is a dev_facing_error
log.error("Non-integer score value passed to save_assessment, or no score list present.")
# This is a student_facing_error
return {'success': False, 'error': "Error saving your score. Please notify course staff."}
_ = self.system.service(self, "i18n").ugettext
return {
'success': False,
'error': _("Error saving your score. Please notify course staff.")
}
# Record score as assessment and rubric scores as post assessment
self.record_latest_score(score)
......@@ -266,9 +271,11 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
self.record_latest_post_assessment(data['hint'])
self.change_state(self.DONE)
return {'success': True,
'message_html': '',
'allow_reset': self._allow_reset()}
return {
'success': True,
'message_html': '',
'allow_reset': self._allow_reset(),
}
def latest_post_assessment(self, system):
latest_post_assessment = super(SelfAssessmentModule, self).latest_post_assessment(system)
......
......@@ -55,7 +55,7 @@ class RubricPage(PageObject):
num_categories = self.categories
if len(scores) != len(num_categories):
raise ScoreMismatchError(
"Recieved {0} scores but there are {1} rubric categories".format(
"Received {0} scores but there are {1} rubric categories".format(
len(scores), num_categories))
# Set the score for each category
......
......@@ -7,8 +7,8 @@ msgid ""
msgstr ""
"Project-Id-Version: 0.1a\n"
"Report-Msgid-Bugs-To: openedx-translation@googlegroups.com\n"
"POT-Creation-Date: 2014-02-11 18:19-0500\n"
"PO-Revision-Date: 2014-02-11 23:20:19.199916\n"
"POT-Creation-Date: 2014-02-14 16:00-0500\n"
"PO-Revision-Date: 2014-02-14 21:00:58.182145\n"
"Last-Translator: \n"
"Language-Team: openedx-translation <openedx-translation@googlegroups.com>\n"
"MIME-Version: 1.0\n"
......@@ -101,6 +101,106 @@ msgstr "ünänswéréd Ⱡ#"
msgid "Status: unsubmitted"
msgstr "Stätüs: ünsüßmïttéd Ⱡ'σя#"
#. Translators: A "rating" is a score a student gives to indicate how well
#. they feel they were graded on this problem
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid "You need to pick a rating before you can submit."
msgstr "Ýöü nééd tö pïçk ä rätïng ßéföré ýöü çän süßmït. Ⱡ'σяєм ιρѕυм #"
#. Translators: this message appears when transitioning between openended
#. grading
#. types (i.e. self assessment to peer assessment). Sometimes, if a student
#. did not perform well at one step, they cannot move on to the next one.
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid "Your score did not meet the criteria to move to the next step."
msgstr ""
"Ýöür sçöré dïd nöt méét thé çrïtérïä tö mövé tö thé néxt stép. Ⱡ'σяєм ιρѕυм "
"∂σłσя#"
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
#: lms/static/coffee/src/staff_grading/staff_grading.js
msgid "Submit"
msgstr "Süßmït Ⱡ'σяєм ιρѕ#"
#. Translators: one clicks this button after one has finished filling out the
#. grading
#. form for an openended assessment
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid "Submit assessment"
msgstr "Süßmït ässéssmént Ⱡ'σ#"
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid ""
"Your response has been submitted. Please check back later for your grade."
msgstr ""
"Ýöür réspönsé häs ßéén süßmïttéd. Pléäsé çhéçk ßäçk lätér för ýöür grädé. "
"Ⱡ'σяєм ιρѕυм ∂σłσя ѕιт#"
#. Translators: this button is clicked to submit a student's rating of
#. an evaluator's assessment
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid "Submit post-assessment"
msgstr "Süßmït pöst-ässéssmént Ⱡ'σяє#"
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid "Answer saved, but not yet submitted."
msgstr "Ànswér sävéd, ßüt nöt ýét süßmïttéd. Ⱡ'σяєм ιρ#"
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid ""
"Please confirm that you wish to submit your work. You will not be able to "
"make any changes after submitting."
msgstr ""
"Pléäsé çönfïrm thät ýöü wïsh tö süßmït ýöür wörk. Ýöü wïll nöt ßé äßlé tö "
"mäké äný çhängés äftér süßmïttïng. Ⱡ'σяєм ιρѕυм ∂σłσя ѕιт αмєт, ¢σηѕ#"
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid ""
"You are trying to upload a file that is too large for our system. Please "
"choose a file under 2MB or paste a link to it into the answer box."
msgstr ""
"Ýöü äré trýïng tö üplöäd ä fïlé thät ïs töö lärgé för öür sýstém. Pléäsé "
"çhöösé ä fïlé ündér 2MB ör pästé ä lïnk tö ït ïntö thé änswér ßöx. Ⱡ'σяєм "
"ιρѕυм ∂σłσя ѕιт αмєт, ¢σηѕє¢тєтυя α∂ι#"
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid ""
"Are you sure you want to remove your previous response to this question?"
msgstr ""
"Àré ýöü süré ýöü wänt tö rémövé ýöür prévïöüs réspönsé tö thïs qüéstïön? "
"Ⱡ'σяєм ιρѕυм ∂σłσя ѕι#"
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid "Moved to next step."
msgstr "Mövéd tö néxt stép. Ⱡ'σя#"
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid ""
"File uploads are required for this question, but are not supported in your "
"browser. Try the newest version of Google Chrome. Alternatively, if you have"
" uploaded the image to another website, you can paste a link to it into the "
"answer box."
msgstr ""
"Fïlé üplöäds äré réqüïréd för thïs qüéstïön, ßüt äré nöt süppörtéd ïn ýöür "
"ßröwsér. Trý thé néwést vérsïön öf Gööglé Çhrömé. Àltérnätïvélý, ïf ýöü hävé"
" üplöädéd thé ïmägé tö änöthér wéßsïté, ýöü çän pästé ä lïnk tö ït ïntö thé "
"änswér ßöx. Ⱡ'σяєм ιρѕυм ∂σłσя ѕιт αмєт, ¢σηѕє¢тєтυя α∂ιριѕι¢ιηg єłιт, ѕє∂ "
"∂σ єιυѕмσ∂ тє#"
#. Translators: "Show Question" is some text that, when clicked, shows a
#. question's
#. content that had been hidden
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid "Show Question"
msgstr "Shöw Qüéstïön Ⱡ'#"
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
#: common/lib/xmodule/xmodule/js/src/combinedopenended/display.js
msgid "Hide Question"
msgstr "Hïdé Qüéstïön Ⱡ'#"
#: common/lib/xmodule/xmodule/js/src/sequence/display.js
msgid ""
"Sequence error! Cannot navigate to tab %(tab_name)s in the current "
......@@ -1009,22 +1109,18 @@ msgid "Try loading again"
msgstr "Trý löädïng ägäïn Ⱡ'σ#"
#: lms/static/coffee/src/staff_grading/staff_grading.js
msgid "<%= num %> available"
msgstr "<%= num %> äväïläßlé Ⱡ'#"
msgid "<%= num %> available "
msgstr "<%= num %> äväïläßlé Ⱡ'#"
#: lms/static/coffee/src/staff_grading/staff_grading.js
msgid "<%= num %> graded"
msgstr "<%= num %> grädéd Ⱡ#"
msgid "<%= num %> graded "
msgstr "<%= num %> grädéd Ⱡ#"
#: lms/static/coffee/src/staff_grading/staff_grading.js
msgid "<%= num %> more needed to start ML"
msgstr "<%= num %> möré néédéd tö stärt ML Ⱡ'σяєм#"
#: lms/static/coffee/src/staff_grading/staff_grading.js
msgid "Submit"
msgstr "Süßmït Ⱡ'σяєм ιρѕ#"
#: lms/static/coffee/src/staff_grading/staff_grading.js
msgid "Re-check for submissions"
msgstr "Ré-çhéçk för süßmïssïöns Ⱡ'σяє#"
......
......@@ -530,3 +530,24 @@ defined::
Here we define ``_()`` as a pass-through function, so the string will be found
during extraction, but won't be translated too early. Then we use the real
translation function at runtime to get the localized string.
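For instance, a module-level lookup table can mark its strings with the
pass-through ``_()`` at import time and apply the real ``ugettext`` only when
the value is displayed (a minimal sketch; the names ``STATUS_MESSAGES`` and
``render_status`` are only illustrative)::

    _ = lambda text: text  # pass-through: marks strings for extraction only

    STATUS_MESSAGES = {
        # Translators: shown while a response is waiting to be graded
        'assessing': _('In progress'),
        # Translators: shown once a response has been graded
        'done': _('Done'),
    }

    def render_status(state, ugettext):
        # The real translation function is applied at runtime, so the
        # message is localized for the current user's language.
        return ugettext(STATUS_MESSAGES[state])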
Multiline Strings
=================
Translator notes must directly precede the string literals to which they refer.
For example, the translator note here will not be passed along to translators::
# Translators: you will not be able to see this note because
# it does not directly precede the line on which the translated string literal begins.
# See how the line directly below this one does not contain part of the string?
long_translated_string = _(
"I am a long string, with many, many words. So many words that it is "
"advisable that I be split over this line."
)
In such a case, make sure you format your code so that the string begins on
a line directly below the translator note::
# Translators: you will be able to see this note.
# See how the line directly below this one contains the start of the string?
long_translated_string = _("I am a long string, with many, many words. "
"So many words that it is advisable that I "
"be split over this line.")
......@@ -15,7 +15,7 @@ Feature: LMS.Annotatable Component
And I click "Reply to annotation" on passage <problem>
Then I am scrolled to that annotation problem
When I answer that annotation problem
Then I recieve feedback on that annotation problem
Then I receive feedback on that annotation problem
When I click "Return to annotation" on that problem
Then I am scrolled to the annotatable component
......
......@@ -156,7 +156,7 @@ class AnnotatableSteps(object):
world.css_click(self.active_problem_selector('.check'))
def check_feedback(self, step):
r"""I recieve feedback on that annotation problem$"""
r"""I receive feedback on that annotation problem$"""
world.wait_for_visible(self.active_problem_selector('.tag-status.correct'))
assert_equals(len(world.css_find(self.active_problem_selector('.tag-status.correct'))), 1)
assert_equals(len(world.css_find(self.active_problem_selector('.show'))), 1)
......
......@@ -25,7 +25,15 @@
% endif
><i class="icon-chevron-left"></i></button>
% endif
${result['task_name']} from grader ${i+1}
## Translators: an example of what this string will look
## like is: "Scored rubric from grader 1", where
## "Scored rubric" replaces {result_of_task} and
## "1" replaces {number}.
## This string appears when a user is viewing one of
## their graded rubrics for an openended response problem.
## the number distinguishes between the different
## graded rubrics the user might have received
${_("{result_of_task} from grader {number}").format(result_of_task = result['task_name'], number = i + 1)}
% if num_results>1:
<button href="#" alt="Next" class="rubric-button rubric-next-button"
% if (i+1) >= num_results:
......@@ -45,4 +53,3 @@
%endif
% endfor
......@@ -19,7 +19,11 @@
%if len(co['feedback'])>2:
<div class="collapsible evaluation-response">
<header>
<a href="#">${_("See full feedback")}</a>
## Translators: "See full feedback" is the text of
## a link that allows a user to see more detailed
## feedback from a self, peer, or instructor
## graded openended problem
<a href="#" class="see-full-feedback">${_("See full feedback")}</a>
</header>
<section class="feedback-full">
${co['feedback']}
......@@ -33,7 +37,10 @@
<input type="hidden" value="${co['submission_id']}" class="submission_id" />
<div class="collapsible evaluation-response">
<header>
<a href="#">${_("Respond to Feedback")}</a>
## Translators: this text forms a link that, when
## clicked, allows a user to respond to the feedback
## the user received on his or her openended problem
<a href="#" class="respond-to-feedback">${_("Respond to Feedback")}</a>
</header>
<section id="evaluation" class="evaluation">
<p>${_("How accurate do you find this feedback?")}</p>
......
......@@ -7,7 +7,10 @@
<div class="visibility-control visibility-control-response">
<div class="inner">
</div>
<label class="section-header section-header-response" for="answer_${module_id}">Response</label>
<label class="section-header section-header-response" for="answer_${module_id}">
## Translators: "Response" labels an area that contains the user's
## response to an openended problem. It is a noun.
${_("Response")}</label>
</div>
<textarea rows="${rows}" cols="${cols}" name="answer" class="answer short-form-response" id="answer_${module_id}">${previous_answer|h}</textarea>
......
......@@ -3,7 +3,10 @@
${msg|n}
<div class="collapsible evaluation-response">
<header>
<a href="#">${_("Respond to Feedback")}</a>
## Translators: when "Respond to Feedback" is clicked, a survey
## appears on which a user can respond to the feedback the user
## received on an openended problem
<a href="#" class="respond-to-feedback">${_("Respond to Feedback")}</a>
</header>
<section id="evaluation_${id}" class="evaluation">
<p>${_("How accurate do you find this feedback?")}</p>
......
......@@ -8,7 +8,10 @@
<div class="visibility-control visibility-control-response">
<div class="inner">
</div>
<label class="section-header section-header-response" for="answer_${module_id}">Response</label>
<label class="section-header section-header-response" for="answer_${module_id}">
## Translators: "Response" labels a text area into which a user enters
## his or her response to a prompt from an openended problem.
${_("Response")}</label>
</div>
<div>
<textarea name="answer" class="answer short-form-response" cols="70" rows="20" id="answer_${module_id}">${previous_answer|n}</textarea>
......