Commit 6f4ea850 by Xavier Antoviaque

Merge pull request #88 from open-craft/dragonfi/mrq-partial-completion-checkmark

Dragonfi/mrq partial completion checkmark
parents 7b5a900c bba425ec
......@@ -121,18 +121,18 @@ class AnswerBlock(LightChild, StepMixin):
log.info(u'Answer submitted for`{}`: "{}"'.format(self.name, self.student_input))
return {
'student_input': self.student_input,
'completed': self.completed,
'status': self.status,
'weight': self.weight,
'score': 1 if self.completed else 0,
'score': 1 if self.status == 'correct' else 0,
}
@property
def completed(self):
def status(self):
answer_length_ok = self.student_input
if self.min_characters > 0:
answer_length_ok = len(self.student_input.strip()) >= self.min_characters
return bool(self.read_only or answer_length_ok)
return 'correct' if (self.read_only or answer_length_ok) else 'incorrect'
def save(self):
"""
......
......@@ -52,31 +52,31 @@ class MCQBlock(QuestionnaireAbstractBlock):
def submit(self, submission):
log.debug(u'Received MCQ submission: "%s"', submission)
completed = True
correct = True
tips_fragments = []
for tip in self.get_tips():
completed = completed and self.is_tip_completed(tip, submission)
correct = correct and self.is_tip_correct(tip, submission)
if submission in tip.display_with_defaults:
tips_fragments.append(tip.render())
formatted_tips = render_template('templates/html/tip_choice_group.html', {
'self': self,
'tips_fragments': tips_fragments,
'completed': completed,
'completed': correct,
})
self.student_choice = submission
result = {
'submission': submission,
'completed': completed,
'status': 'correct' if correct else 'incorrect',
'tips': formatted_tips,
'weight': self.weight,
'score': 1 if completed else 0,
'score': 1 if correct else 0,
}
log.debug(u'MCQ submission result: %s', result)
return result
def is_tip_completed(self, tip, submission):
def is_tip_correct(self, tip, submission):
if not submission:
return False
......
......@@ -25,6 +25,7 @@
import logging
import uuid
from collections import namedtuple
from lxml import etree
from StringIO import StringIO
......@@ -54,6 +55,7 @@ def default_xml_content():
# Classes ###########################################################
Score = namedtuple("Score", ["raw", "percentage", "correct", "incorrect", "partially_correct"])
class MentoringBlock(XBlockWithLightChildren, StepParentMixin):
"""
......@@ -113,10 +115,11 @@ class MentoringBlock(XBlockWithLightChildren, StepParentMixin):
if total_child_weight == 0:
return (0, 0, 0, 0)
score = sum(r[1]['score'] * r[1]['weight'] for r in self.student_results) / total_child_weight
correct = sum(1 for r in self.student_results if r[1]['completed'] is True)
incorrect = sum(1 for r in self.student_results if r[1]['completed'] is False)
correct = sum(1 for r in self.student_results if r[1]['status'] == 'correct')
incorrect = sum(1 for r in self.student_results if r[1]['status'] == 'incorrect')
partially_correct = sum(1 for r in self.student_results if r[1]['status'] == 'partial')
return (score, int(round(score * 100)), correct, incorrect)
return Score(score, int(round(score * 100)), correct, incorrect, partially_correct)
def student_view(self, context):
fragment, named_children = self.get_children_fragment(
......@@ -216,7 +219,7 @@ class MentoringBlock(XBlockWithLightChildren, StepParentMixin):
child_result = child.submit(submission)
submit_results.append([child.name, child_result])
child.save()
completed = completed and child_result['completed']
completed = completed and (child_result['status'] == 'correct')
if self.max_attempts_reached:
message = self.get_message_html('max_attempts_reached')
......@@ -247,18 +250,17 @@ class MentoringBlock(XBlockWithLightChildren, StepParentMixin):
for result in submit_results:
self.student_results.append(result)
(raw_score, score, correct, incorrect) = self.score
self.runtime.publish(self, 'grade', {
'value': raw_score,
'value': self.score.raw,
'max_value': 1,
})
if not self.completed and self.max_attempts > 0:
self.num_attempts += 1
self.completed = bool(completed)
self.completed = completed is True
raw_score = self.score[0]
raw_score = self.score.raw
self._publish_event('xblock.mentoring.submitted', {
'num_attempts': self.num_attempts,
......@@ -302,20 +304,21 @@ class MentoringBlock(XBlockWithLightChildren, StepParentMixin):
del child_result['tips']
self.student_results.append([child.name, child_result])
child.save()
completed = child_result['completed']
completed = child_result['status']
event_data = {}
(raw_score, score, correct, incorrect) = self.score
score = self.score
if current_child == self.steps[-1]:
log.info(u'Last assessment step submitted: {}'.format(submissions))
if not self.max_attempts_reached:
self.runtime.publish(self, 'grade', {
'value': raw_score,
'value': score.raw,
'max_value': 1,
'score_type': 'proficiency',
})
event_data['final_grade'] = raw_score
event_data['final_grade'] = score.raw
self.num_attempts += 1
self.completed = True
......@@ -332,9 +335,10 @@ class MentoringBlock(XBlockWithLightChildren, StepParentMixin):
'max_attempts': self.max_attempts,
'num_attempts': self.num_attempts,
'step': self.step,
'score': score,
'correct_answer': correct,
'incorrect_answer': incorrect
'score': score.percentage,
'correct_answer': score.correct,
'incorrect_answer': score.incorrect,
'partially_correct_answer': score.partially_correct,
}
@XBlock.json_handler
......
......@@ -48,7 +48,6 @@ class MRQBlock(QuestionnaireAbstractBlock):
def submit(self, submissions):
log.debug(u'Received MRQ submissions: "%s"', submissions)
completed = True
score = 0
results = []
......@@ -64,7 +63,6 @@ class MRQBlock(QuestionnaireAbstractBlock):
(choice_selected and choice.value in tip.reject_with_defaults)):
choice_completed = False
completed = completed and choice_completed
if choice_completed:
score += 1
......@@ -85,9 +83,11 @@ class MRQBlock(QuestionnaireAbstractBlock):
self.student_choices = submissions
status = 'incorrect' if score <= 0 else 'correct' if score >= len(results) else 'partial'
result = {
'submissions': submissions,
'completed': completed,
'status': status,
'choices': results,
'message': self.message,
'weight': self.weight,
......
......@@ -78,11 +78,16 @@
.mentoring .checkmark-correct {
font-size: 22pt;
color: #006600;
color: #629b2b;
float: left;
}
.mentoring .checkmark-partially-correct {
font-size: 22pt;
color: #e37222;
float: left;
}
.mentoring .checkmark-incorrect {
font-size: 22pt;
color: #ff0000;
......
......@@ -17,6 +17,7 @@
}
.mentoring .questionnaire .choice-result.checkmark-correct,
.mentoring .questionnaire .choice-result.checkmark-partially-correct,
.mentoring .questionnaire .choice-result.checkmark-incorrect {
text-align:center;
}
......
......@@ -26,7 +26,7 @@ function AnswerBlock(runtime, element) {
this.clearResult();
if (result.completed) {
if (result.status === "correct") {
checkmark.addClass('checkmark-correct icon-ok fa-check');
}
else {
......
......@@ -128,15 +128,17 @@ function MentoringAssessmentView(runtime, element, mentoring) {
$('.grade', element).data('score', result.score);
$('.grade', element).data('correct_answer', result.correct_answer);
$('.grade', element).data('incorrect_answer', result.incorrect_answer);
$('.grade', element).data('partially_correct_answer', result.partially_correct_answer);
$('.grade', element).data('max_attempts', result.max_attempts);
$('.grade', element).data('num_attempts', result.num_attempts);
$('.attempts', element).data('max_attempts', result.max_attempts);
$('.attempts', element).data('num_attempts', result.num_attempts);
if (result.completed) {
if (result.completed === 'partial') {
checkmark.addClass('checkmark-partially-correct icon-ok fa-check');
} else if (result.completed === 'correct') {
checkmark.addClass('checkmark-correct icon-ok fa-check');
}
else {
} else {
checkmark.addClass('checkmark-incorrect icon-exclamation fa-exclamation');
}
......
......@@ -94,7 +94,7 @@ function MCQBlock(runtime, element, mentoring) {
var choiceTipsDOM = $('.choice-tips', choiceDOM);
var choiceTipsCloseDOM;
if (result.completed && choiceInputDOM.val() === result.submission) {
if (result.status === "correct" && choiceInputDOM.val() === result.submission) {
choiceResultDOM.addClass('checkmark-correct icon-ok fa-check');
}
else if (choiceInputDOM.val() === result.submission || _.isNull(result.submission)) {
......
......@@ -5,5 +5,5 @@
class="answer editable" cols="50" rows="10" name="input"
data-min_characters="{{ self.min_characters }}"
>{{ self.student_input }}</textarea>
<span class="answer-checkmark icon-2x"></span>
<span class="answer-checkmark fa icon-2x"></span>
</div>
......@@ -6,7 +6,7 @@
<div class="choices-list">
{% for choice in custom_choices %}
<div class="choice">
<div class="choice-result icon-2x"></div>
<div class="choice-result fa icon-2x"></div>
<label class="choice-label">
<input class="choice-selector" type="radio" name="{{ self.name }}" value="{{ choice.value }}"{% if self.student_choice == choice.value %} checked{% endif %} />{{ choice.content }}
</label>
......
......@@ -5,35 +5,35 @@
</legend>
<div class="choices-list">
<div class="choice">
<div class="choice-result icon-2x"></div>
<div class="choice-result fa icon-2x"></div>
<label><input class="choice-selector" type="radio" name="{{ self.name }}" value="1"{% if self.student_choice == '1' %} checked{% endif %} />1</label>
<span class="low"> - {{ self.low }}</span>
<div class="choice-tips"></div>
</div>
<div class="choice">
<div class="choice-result icon-2x"></div>
<div class="choice-result fa icon-2x"></div>
<label><input class="choice-selector" type="radio" name="{{ self.name }}" value="2"{% if self.student_choice == '2' %} checked{% endif %} />2</label>
<div class="choice-tips"></div>
</div>
<div class="choice">
<div class="choice-result icon-2x"></div>
<div class="choice-result fa icon-2x"></div>
<label><input class="choice-selector" type="radio" name="{{ self.name }}" value="3"{% if self.student_choice == '3' %} checked{% endif %} />3</label>
<div class="choice-tips"></div>
</div>
<div class="choice">
<div class="choice-result icon-2x"></div>
<div class="choice-result fa icon-2x"></div>
<label><input class="choice-selector" type="radio" name="{{ self.name }}" value="4"{% if self.student_choice == '4' %} checked{% endif %} />4</label>
<div class="choice-tips"></div>
</div>
<div class="choice">
<div class="choice-result icon-2x"></div>
<div class="choice-result fa icon-2x"></div>
<label><input class="choice-selector" type="radio" name="{{ self.name }}" value="5"{% if self.student_choice == '5' %} checked{% endif %} />5</label>
<span class="low"> - {{ self.high }}</span>
<div class="choice-tips"></div>
</div>
{% for choice in custom_choices %}
<div class="choice">
<div class="choice-result icon-2x"></div>
<div class="choice-result fa icon-2x"></div>
<label><input type="radio" name="{{ self.name }}" value="{{ choice.value }}"{% if self.student_choice == '{{ choice.value }}' %} checked{% endif %} />{{ choice.content }}</label>
<div class="choice-tips"></div>
</div>
......
......@@ -21,13 +21,14 @@
<div class="grade" data-score="{{ self.score.1 }}"
data-correct_answer="{{ self.score.2 }}"
data-incorrect_answer="{{ self.score.3 }}"
data-partially_correct_answer="{{ self.score.4 }}"
data-max_attempts="{{ self.max_attempts }}"
data-num_attempts="{{ self.num_attempts }}">
</div>
<div class="submit">
{% if self.mode == 'assessment' %}
<span class="assessment-checkmark icon-2x"></span>
<span class="assessment-checkmark fa icon-2x"></span>
{% endif %}
<input type="button" class="input-main" value="Submit" disabled="disabled" />
......
......@@ -7,8 +7,10 @@
<h2>You scored <%= score %>% on this assessment.</h2>
<hr/>
<span class="assessment-checkmark icon-2x checkmark-correct icon-ok fa-check"></span>
<span class="assessment-checkmark icon-2x checkmark-correct icon-ok fa fa-check"></span>
<p>You answered <%= correct_answer %> questions correctly.</p>
<span class="assessment-checkmark icon-2x checkmark-incorrect icon-exclamation fa-exclamation"></span>
<span class="assessment-checkmark icon-2x checkmark-partially-correct icon-ok fa fa-check"></span>
<p>You answered <%= partially_correct_answer %> questions partially correct.</p>
<span class="assessment-checkmark icon-2x checkmark-incorrect icon-exclamation fa fa-exclamation"></span>
<p>You answered <%= incorrect_answer %> questions incorrectly.</p>
</script>
......@@ -6,7 +6,7 @@
<div class="choices-list">
{% for choice in custom_choices %}
<div class="choice">
<div class="choice-result icon-2x"></div>
<div class="choice-result fa icon-2x"></div>
<label class="choice-label">
<input class="choice-selector" type="checkbox" name="{{ self.name }}"
value="{{ choice.value }}"
......
......@@ -3,4 +3,4 @@
{{ tip_fragment.body_html|safe }}
{% endfor %}
</div>
<div class="close icon-remove-sign fa-times-circle"></div>
<div class="close icon-remove-sign fa fa-times-circle"></div>
......@@ -17,11 +17,19 @@
<choice value="yes">Yes</choice>
<choice value="maybenot">Maybe not</choice>
<choice value="understand">I don't understand</choice>
<tip display="yes">Great!</tip>
<tip reject="maybenot">Ah, damn.</tip>
<tip reject="understand"><html><div id="test-custom-html">Really?</div></html></tip>
</mcq>
<mcq name="mcq_1_2" type="rating" low="Not good at all" high="Extremely good">
<question>How much do you rate this MCQ?</question>
<choice value="notwant">I don't want to rate it</choice>
<tip display="4,5">I love good grades.</tip>
<tip reject="1,2,3">Will do better next time...</tip>
<tip reject="notwant">Your loss!</tip>
</mcq>
<mrq name="mrq_1_1" type="choices">
......@@ -30,5 +38,9 @@
<choice value="beauty">Its beauty</choice>
<choice value="gracefulness">Its gracefulness</choice>
<choice value="bugs">Its bugs</choice>
<tip require="gracefulness">This MRQ is indeed very graceful</tip>
<tip require="elegance,beauty">This is something everyone has to like about this MRQ</tip>
<tip reject="bugs">Nah, there isn't any!</tip>
</mrq>
</mentoring>
......@@ -224,6 +224,7 @@ class MentoringAssessmentTest(MentoringBaseTest):
self.assert_persistent_elements_present(mentoring)
self.assertIn("Note: if you retake this assessment, only your final score counts.", mentoring.text)
self.assertIn("You answered 4 questions correctly.", mentoring.text)
self.assertIn("You answered 0 questions partially correct.", mentoring.text)
self.assertIn("You answered 0 questions incorrectly.", mentoring.text)
self.assert_hidden(submit)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment