Commit e03b3914 by Eugeny Kolpakov Committed by GitHub

Merge pull request #14737 from open-craft/mit-capa-hide-correct

MIT CAPA improvements: Add show_correctness field
parents 529df7c6 f18a2be8
......@@ -1163,6 +1163,7 @@ def create_xblock_info(xblock, data=None, metadata=None, include_ancestor_info=F
'explanatory_message': explanatory_message,
'group_access': xblock.group_access,
'user_partitions': user_partitions,
'show_correctness': xblock.show_correctness,
})
if xblock.category == 'sequential':
......
......@@ -54,6 +54,7 @@ class CourseMetadata(object):
'exam_review_rules',
'hide_after_due',
'self_paced',
'show_correctness',
'chrome',
'default_tab',
]
......
......@@ -4,12 +4,12 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
function($, AjaxHelpers, ViewUtils, CourseOutlinePage, XBlockOutlineInfo, DateUtils,
EditHelpers, TemplateHelpers, Course) {
describe('CourseOutlinePage', function() {
var createCourseOutlinePage, displayNameInput, model, outlinePage, requests,
getItemsOfType, getItemHeaders, verifyItemsExpanded, expandItemsAndVerifyState,
collapseItemsAndVerifyState, selectBasicSettings, selectAdvancedSettings, createMockCourseJSON,
createMockSectionJSON, createMockSubsectionJSON, verifyTypePublishable, mockCourseJSON,
mockEmptyCourseJSON, mockSingleSectionCourseJSON, createMockVerticalJSON, createMockIndexJSON,
mockCourseEntranceExamJSON, mockOutlinePage = readFixtures('mock/mock-course-outline-page.underscore'),
var createCourseOutlinePage, displayNameInput, model, outlinePage, requests, getItemsOfType, getItemHeaders,
verifyItemsExpanded, expandItemsAndVerifyState, collapseItemsAndVerifyState, selectBasicSettings,
selectVisibilitySettings, selectAdvancedSettings, createMockCourseJSON, createMockSectionJSON,
createMockSubsectionJSON, verifyTypePublishable, mockCourseJSON, mockEmptyCourseJSON,
mockSingleSectionCourseJSON, createMockVerticalJSON, createMockIndexJSON, mockCourseEntranceExamJSON,
mockOutlinePage = readFixtures('mock/mock-course-outline-page.underscore'),
mockRerunNotification = readFixtures('mock/mock-course-rerun-notification.underscore');
createMockCourseJSON = function(options, children) {
......@@ -71,6 +71,7 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
prereqs: [],
prereq: '',
prereq_min_score: '',
show_correctness: 'always',
child_info: {
category: 'vertical',
display_name: 'Unit',
......@@ -140,6 +141,10 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
this.$(".modal-section .settings-tab-button[data-tab='basic']").click();
};
selectVisibilitySettings = function() {
this.$(".modal-section .settings-tab-button[data-tab='visibility']").click();
};
selectAdvancedSettings = function() {
this.$(".modal-section .settings-tab-button[data-tab='advanced']").click();
};
......@@ -238,7 +243,7 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
'basic-modal', 'course-outline-modal', 'release-date-editor',
'due-date-editor', 'grading-editor', 'publish-editor',
'staff-lock-editor', 'content-visibility-editor', 'settings-modal-tabs',
'timed-examination-preference-editor', 'access-editor'
'timed-examination-preference-editor', 'access-editor', 'show-correctness-editor'
]);
appendSetFixtures(mockOutlinePage);
mockCourseJSON = createMockCourseJSON({}, [
......@@ -604,8 +609,8 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
expect($('due_date')).not.toExist();
expect($('grading_format')).not.toExist();
// Staff lock controls are always visible on the advanced tab
selectAdvancedSettings();
// Staff lock controls are always visible on the visibility tab
selectVisibilitySettings();
expect($('#staff_lock')).toExist();
selectBasicSettings();
$('.wrapper-modal-window .action-save').click();
......@@ -678,7 +683,8 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
describe('Subsection', function() {
var getDisplayNameWrapper, setEditModalValues, setContentVisibility, mockServerValuesJson,
selectDisableSpecialExams, selectTimedExam, selectProctoredExam, selectPracticeExam,
selectPrerequisite, selectLastPrerequisiteSubsection, checkOptionFieldVisibility;
selectPrerequisite, selectLastPrerequisiteSubsection, checkOptionFieldVisibility,
defaultModalSettings, getMockNoPrereqOrExamsCourseJSON, expectShowCorrectness;
getDisplayNameWrapper = function() {
return getItemHeaders('subsection').find('.wrapper-xblock-field');
......@@ -732,6 +738,38 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
expect($('.field-exam-review-rules').is(':visible')).toBe(review_rules);
};
// Assert that the show-correctness radio button with the given value is checked.
expectShowCorrectness = function(showCorrectness) {
    var radioSelector = 'input[name=show-correctness][value=' + showCorrectness + ']';
    expect($(radioSelector).is(':checked')).toBe(true);
};
// Build mock course JSON (one section, one subsection, one unit) with timed
// and proctored exams disabled and every prerequisite field removed, so the
// outline modal has nothing to show on an "Advanced" settings tab.
getMockNoPrereqOrExamsCourseJSON = function() {
    var subsectionJSON = createMockSubsectionJSON({}, [createMockVerticalJSON({}, [])]);
    $.each(['is_prereq', 'prereqs', 'prereq', 'prereq_min_score'], function(index, fieldName) {
        delete subsectionJSON[fieldName];
    });
    return createMockCourseJSON({
        enable_proctored_exams: false,
        enable_timed_exams: false
    }, [createMockSectionJSON({}, [subsectionJSON])]);
};
// Request payload the outline modal POSTs on save when the user has changed
// nothing: ungraded, no prerequisite, and all special-exam metadata disabled.
// Tests extend a copy of this object to build expected payloads after edits.
defaultModalSettings = {
graderType: 'notgraded',
isPrereq: false,
metadata: {
due: null,
is_practice_exam: false,
is_time_limited: false,
exam_review_rules: '',
is_proctored_enabled: false,
default_time_limit_minutes: null
}
};
// Contains hard-coded dates because dates are presented in different formats.
mockServerValuesJson = createMockSectionJSON({
release_date: 'Jan 01, 2970 at 05:00 UTC'
......@@ -746,6 +784,7 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
has_explicit_staff_lock: true,
staff_only_message: true,
is_prereq: false,
show_correctness: 'never',
'is_time_limited': true,
'is_practice_exam': false,
'is_proctored_exam': false,
......@@ -821,36 +860,45 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
expandItemsAndVerifyState('subsection');
});
it('can show basic settings', function() {
it('subsection can show basic settings', function() {
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
selectBasicSettings();
expect($('.modal-section .settings-tab-button[data-tab="basic"]')).toHaveClass('active');
expect($('.modal-section .settings-tab-button[data-tab="visibility"]')).not.toHaveClass('active');
expect($('.modal-section .settings-tab-button[data-tab="advanced"]')).not.toHaveClass('active');
});
it('can show advanced settings', function() {
it('subsection can show visibility settings', function() {
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
selectVisibilitySettings();
expect($('.modal-section .settings-tab-button[data-tab="basic"]')).not.toHaveClass('active');
expect($('.modal-section .settings-tab-button[data-tab="visibility"]')).toHaveClass('active');
expect($('.modal-section .settings-tab-button[data-tab="advanced"]')).not.toHaveClass('active');
});
it('subsection can show advanced settings', function() {
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
selectAdvancedSettings();
expect($('.modal-section .settings-tab-button[data-tab="basic"]')).not.toHaveClass('active');
expect($('.modal-section .settings-tab-button[data-tab="visibility"]')).not.toHaveClass('active');
expect($('.modal-section .settings-tab-button[data-tab="advanced"]')).toHaveClass('active');
});
it('does not show settings tab headers if there is only one tab to show', function() {
var mockVerticalJSON = createMockVerticalJSON({}, []);
var mockSubsectionJSON = createMockSubsectionJSON({}, [mockVerticalJSON]);
delete mockSubsectionJSON.is_prereq;
delete mockSubsectionJSON.prereqs;
delete mockSubsectionJSON.prereq;
delete mockSubsectionJSON.prereq_min_score;
var mockCourseJSON = createMockCourseJSON({
enable_proctored_exams: false,
enable_timed_exams: false
}, [
createMockSectionJSON({}, [mockSubsectionJSON])
]);
createCourseOutlinePage(this, mockCourseJSON, false);
it('subsection does not show advanced settings tab if no special exams or prerequisites', function() {
var mockNoPrereqCourseJSON = getMockNoPrereqOrExamsCourseJSON();
createCourseOutlinePage(this, mockNoPrereqCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
expect($('.modal-section .settings-tab-button[data-tab="basic"]')).toExist();
expect($('.modal-section .settings-tab-button[data-tab="visibility"]')).toExist();
expect($('.modal-section .settings-tab-button[data-tab="advanced"]')).not.toExist();
});
it('unit does not show settings tab headers if there is only one tab to show', function() {
var mockNoPrereqCourseJSON = getMockNoPrereqOrExamsCourseJSON();
createCourseOutlinePage(this, mockNoPrereqCourseJSON, false);
outlinePage.$('.outline-unit .configure-button').click();
expect($('.settings-tabs-header').length).toBe(0);
});
......@@ -869,6 +917,7 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
expect($('.grading-due-date').length).toBe(0);
expect($('.edit-settings-grading').length).toBe(1);
expect($('.edit-content-visibility').length).toBe(1);
expect($('.edit-show-correctness').length).toBe(1);
});
it('can select valid time', function() {
......@@ -894,6 +943,14 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
}
});
it('can be saved', function() {
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
$('.wrapper-modal-window .action-save').click();
AjaxHelpers.expectJsonRequest(requests, 'POST', '/xblock/mock-subsection', defaultModalSettings);
expect(requests[0].requestHeaders['X-HTTP-Method-Override']).toBe('PATCH');
});
it('can be edited', function() {
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
......@@ -948,15 +1005,17 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
expect($('input.no_special_exam').is(':checked')).toBe(false);
expect($('input.practice_exam').is(':checked')).toBe(false);
expect($('.field-time-limit input').val()).toBe('02:30');
expectShowCorrectness('never');
});
it('can hide time limit and hide after due fields when the None radio box is selected', function() {
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
setEditModalValues('7/9/2014', '7/10/2014', 'Lab');
selectVisibilitySettings();
setContentVisibility('staff_only');
selectAdvancedSettings();
selectDisableSpecialExams();
setContentVisibility('staff_only');
// all additional options should be hidden
expect($('.exam-options').is(':hidden')).toBe(true);
......@@ -966,9 +1025,10 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
setEditModalValues('7/9/2014', '7/10/2014', 'Lab');
selectVisibilitySettings();
setContentVisibility('staff_only');
selectAdvancedSettings();
selectPracticeExam('00:30');
setContentVisibility('staff_only');
// time limit should be visible, review rules should be hidden
checkOptionFieldVisibility(true, false);
......@@ -993,9 +1053,10 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
setEditModalValues('7/9/2014', '7/10/2014', 'Lab');
selectVisibilitySettings();
setContentVisibility('staff_only');
selectAdvancedSettings();
selectProctoredExam('00:30');
setContentVisibility('staff_only');
// time limit and review rules should be visible
checkOptionFieldVisibility(true, true);
......@@ -1007,9 +1068,10 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
setEditModalValues('7/9/2014', '7/10/2014', 'Lab');
selectVisibilitySettings();
setContentVisibility('staff_only');
selectAdvancedSettings();
selectProctoredExam('abcd');
setContentVisibility('staff_only');
// time limit field should be visible and have the correct value
expect($('.field-time-limit').is(':visible')).toBe(true);
......@@ -1414,6 +1476,57 @@ define(['jquery', 'edx-ui-toolkit/js/utils/spec-helpers/ajax-helpers', 'common/j
);
});
describe('Show correctness setting set as expected.', function() {
var setShowCorrectness;
// Click the show-correctness radio button matching the given value.
setShowCorrectness = function(showCorrectness) {
    var radioSelector = 'input[name=show-correctness][value=' + showCorrectness + ']';
    $(radioSelector).click();
};
describe('Show correctness set by subsection metadata.', function() {
$.each(['always', 'never', 'past_due'], function(index, showCorrectness) {
it('show_correctness="' + showCorrectness + '"', function() {
var mockCourseJSONCorrectness = createMockCourseJSON({}, [
createMockSectionJSON({}, [
createMockSubsectionJSON({show_correctness: showCorrectness}, [])
])
]);
createCourseOutlinePage(this, mockCourseJSONCorrectness, false);
outlinePage.$('.outline-subsection .configure-button').click();
selectVisibilitySettings();
expectShowCorrectness(showCorrectness);
});
});
});
describe('Show correctness editor works as expected.', function() {
beforeEach(function() {
createCourseOutlinePage(this, mockCourseJSON, false);
outlinePage.$('.outline-subsection .configure-button').click();
selectVisibilitySettings();
});
it('show_correctness="always" (default, unchanged metadata)', function() {
setShowCorrectness('always');
$('.wrapper-modal-window .action-save').click();
AjaxHelpers.expectJsonRequest(requests, 'POST', '/xblock/mock-subsection',
defaultModalSettings);
});
$.each(['never', 'past_due'], function(index, showCorrectness) {
it('show_correctness="' + showCorrectness + '" updates settings, republishes', function() {
var expectedSettings = $.extend(true, {}, defaultModalSettings, {publish: 'republish'});
expectedSettings.metadata.show_correctness = showCorrectness;
setShowCorrectness(showCorrectness);
$('.wrapper-modal-window .action-save').click();
AjaxHelpers.expectJsonRequest(requests, 'POST', '/xblock/mock-subsection',
expectedSettings);
});
});
});
});
verifyTypePublishable('subsection', function(options) {
return createMockCourseJSON({}, [
createMockSectionJSON({}, [
......
......@@ -15,7 +15,7 @@ define(['jquery', 'backbone', 'underscore', 'gettext', 'js/views/baseview',
'use strict';
var CourseOutlineXBlockModal, SettingsXBlockModal, PublishXBlockModal, AbstractEditor, BaseDateEditor,
ReleaseDateEditor, DueDateEditor, GradingEditor, PublishEditor, AbstractVisibilityEditor, StaffLockEditor,
ContentVisibilityEditor, TimedExaminationPreferenceEditor, AccessEditor;
ContentVisibilityEditor, TimedExaminationPreferenceEditor, AccessEditor, ShowCorrectnessEditor;
CourseOutlineXBlockModal = BaseModal.extend({
events: _.extend({}, BaseModal.prototype.events, {
......@@ -714,7 +714,51 @@ define(['jquery', 'backbone', 'underscore', 'gettext', 'js/views/baseview',
AbstractVisibilityEditor.prototype.getContext.call(this),
{
hide_after_due: this.modelVisibility() === 'hide_after_due',
self_paced: this.model.get('self_paced') === true
self_paced: course.get('self_paced') === true
}
);
}
});
ShowCorrectnessEditor = AbstractEditor.extend({
templateName: 'show-correctness-editor',
className: 'edit-show-correctness',
afterRender: function() {
AbstractEditor.prototype.afterRender.call(this);
this.setValue(this.model.get('show_correctness') || 'always');
},
setValue: function(value) {
this.$('input[name=show-correctness][value=' + value + ']').prop('checked', true);
},
currentValue: function() {
return this.$('input[name=show-correctness]:checked').val();
},
hasChanges: function() {
return this.model.get('show_correctness') !== this.currentValue();
},
getRequestData: function() {
if (this.hasChanges()) {
return {
publish: 'republish',
metadata: {
show_correctness: this.currentValue()
}
};
} else {
return {};
}
},
getContext: function() {
return $.extend(
{},
AbstractEditor.prototype.getContext.call(this),
{
self_paced: course.get('self_paced') === true
}
);
}
......@@ -732,6 +776,11 @@ define(['jquery', 'backbone', 'underscore', 'gettext', 'js/views/baseview',
getEditModal: function(xblockInfo, options) {
var tabs = [];
var editors = [];
var advancedTab = {
name: 'advanced',
displayName: gettext('Advanced'),
editors: []
};
if (xblockInfo.isVertical()) {
editors = [StaffLockEditor];
} else {
......@@ -742,8 +791,8 @@ define(['jquery', 'backbone', 'underscore', 'gettext', 'js/views/baseview',
editors: []
},
{
name: 'advanced',
displayName: gettext('Advanced'),
name: 'visibility',
displayName: gettext('Visibility'),
editors: []
}
];
......@@ -752,14 +801,19 @@ define(['jquery', 'backbone', 'underscore', 'gettext', 'js/views/baseview',
tabs[1].editors = [StaffLockEditor];
} else if (xblockInfo.isSequential()) {
tabs[0].editors = [ReleaseDateEditor, GradingEditor, DueDateEditor];
tabs[1].editors = [ContentVisibilityEditor];
tabs[1].editors = [ContentVisibilityEditor, ShowCorrectnessEditor];
if (options.enable_proctored_exams || options.enable_timed_exams) {
tabs[1].editors.push(TimedExaminationPreferenceEditor);
advancedTab.editors.push(TimedExaminationPreferenceEditor);
}
if (typeof(xblockInfo.get('is_prereq')) !== 'undefined') {
tabs[1].editors.push(AccessEditor);
advancedTab.editors.push(AccessEditor);
}
// Show the Advanced tab iff it has editors to display
if (advancedTab.editors.length > 0) {
tabs.push(advancedTab);
}
}
}
......
......@@ -585,8 +585,11 @@
}
.list-fields {
.field-message {
@extend %t-copy-sub2;
color: $gray-d1;
font-size: ($baseline/2);
}
label {
@extend %t-title7;
}
.field {
display: inline-block;
......@@ -595,7 +598,6 @@
margin-bottom: ($baseline/4);
label {
@extend %t-copy-sub1;
@extend %t-strong;
@include transition(color $tmg-f3 ease-in-out 0s);
margin: 0 0 ($baseline/4) 0;
......
......@@ -26,7 +26,7 @@ from openedx.core.djangolib.markup import HTML, Text
<%block name="header_extras">
<link rel="stylesheet" type="text/css" href="${static.url('js/vendor/timepicker/jquery.timepicker.css')}" />
% for template_name in ['course-outline', 'xblock-string-field-editor', 'basic-modal', 'modal-button', 'course-outline-modal', 'due-date-editor', 'release-date-editor', 'grading-editor', 'publish-editor', 'staff-lock-editor', 'content-visibility-editor', 'verification-access-editor', 'timed-examination-preference-editor', 'access-editor', 'settings-modal-tabs']:
% for template_name in ['course-outline', 'xblock-string-field-editor', 'basic-modal', 'modal-button', 'course-outline-modal', 'due-date-editor', 'release-date-editor', 'grading-editor', 'publish-editor', 'staff-lock-editor', 'content-visibility-editor', 'verification-access-editor', 'timed-examination-preference-editor', 'access-editor', 'settings-modal-tabs', 'show-correctness-editor']:
<script type="text/template" id="${template_name}-tpl">
<%static:include path="js/${template_name}.underscore" />
</script>
......
<form>
<h3 class="modal-section-title" id="show_correctness_label"><%- gettext('Assessment Results Visibility') %></h3>
<div class="modal-section-content show-correctness">
<div role="group" class="list-fields" aria-labelledby="show_correctness_label">
<label class="label">
<input class="input input-radio" name="show-correctness" type="radio" value="always" aria-describedby="always_show_correctness_description" />
<%- gettext('Always show assessment results') %>
</label>
<p class='field-message' id='always_show_correctness_description'>
<%- gettext('When learners submit an answer to an assessment, they immediately see whether the answer is correct or incorrect, and the score received.') %>
</p>
<label class="label">
<input class="input input-radio" name="show-correctness" type="radio" value="never" aria-describedby="never_show_correctness_description" />
<%- gettext('Never show assessment results') %>
</label>
<p class='field-message' id='never_show_correctness_description'>
<%- gettext('Learners never see whether their answers to assessments are correct or incorrect, nor the score received.') %>
</p>
<label class="label">
<input class="input input-radio" name="show-correctness" type="radio" value="past_due" aria-describedby="show_correctness_past_due_description" />
<%- gettext('Show assessment results when subsection is past due') %>
</label>
<p class='field-message' id='show_correctness_past_due_description'>
<% if (self_paced) { %>
<%- gettext('Learners do not see whether their answers to assessments were correct or incorrect, nor the score received, until after the course end date has passed.') %>
<%- gettext('If the course does not have an end date, learners always see their scores when they submit answers to assessments.') %>
<% } else { %>
<%- gettext('Learners do not see whether their answers to assessments were correct or incorrect, nor the score received, until after the due date for the subsection has passed.') %>
<%- gettext('If the subsection does not have a due date, learners always see their scores when they submit answers to assessments.') %>
<% } %>
</p>
</div>
</div>
</form>
......@@ -798,16 +798,21 @@ class LoncapaProblem(object):
if problemid in self.correct_map:
pid = input_id
# If the problem has not been saved since the last submit, set the status to the
# current correctness value and set the message as expected. Otherwise we do not want to
# display correctness because the answer may have changed since the problem was graded.
if not self.has_saved_answers:
status = self.correct_map.get_correctness(pid)
msg = self.correct_map.get_msg(pid)
hint = self.correct_map.get_hint(pid)
hintmode = self.correct_map.get_hintmode(pid)
answervariable = self.correct_map.get_property(pid, 'answervariable')
# If we're withholding correctness, don't show adaptive hints either.
# Note that regular, "demand" hints will be shown, if the course author has added them to the problem.
if not self.capa_module.correctness_available():
status = 'submitted'
else:
# If the problem has not been saved since the last submit, set the status to the
# current correctness value and set the message as expected. Otherwise we do not want to
# display correctness because the answer may have changed since the problem was graded.
if not self.has_saved_answers:
status = self.correct_map.get_correctness(pid)
msg = self.correct_map.get_msg(pid)
hint = self.correct_map.get_hint(pid)
hintmode = self.correct_map.get_hintmode(pid)
answervariable = self.correct_map.get_property(pid, 'answervariable')
value = ''
if self.student_answers and problemid in self.student_answers:
......
......@@ -90,6 +90,7 @@ class Status(object):
'incomplete': _('incomplete'),
'unanswered': _('unanswered'),
'unsubmitted': _('unanswered'),
'submitted': _('submitted'),
'queued': _('processing'),
}
tooltips = {
......@@ -197,7 +198,7 @@ class InputTypeBase(object):
(what the student entered last time)
* 'id' -- the id of this input, typically
"{problem-location}_{response-num}_{input-num}"
* 'status' (answered, unanswered, unsubmitted)
* 'status' (submitted, unanswered, unsubmitted)
* 'input_state' -- dictionary containing any inputtype-specific state
that has been preserved
* 'feedback' (dictionary containing keys for hints, errors, or other
......
......@@ -22,7 +22,7 @@
<div class="script_placeholder" data-src="${jschannel_loader}"/>
<div class="script_placeholder" data-src="${jsinput_loader}"/>
% if status in ['unsubmitted', 'correct', 'incorrect', 'partially-correct', 'incomplete']:
% if status in ['unsubmitted', 'submitted', 'correct', 'incorrect', 'partially-correct', 'incomplete']:
<div class="${status.classname}">
% endif
......@@ -49,7 +49,7 @@
<div class="error_message" style="padding: 5px 5px 5px 5px; background-color:#FA6666; height:60px;width:400px; display: none"></div>
% if status in ['unsubmitted', 'correct', 'incorrect', 'partially-correct', 'incomplete']:
% if status in ['unsubmitted', 'submitted', 'correct', 'incorrect', 'partially-correct', 'incomplete']:
</div>
% endif
......
......@@ -8,7 +8,7 @@
<div class="script_placeholder" data-src="${preprocessor['script_src']}"/>
% endif
% if status in ('unsubmitted', 'correct', 'incorrect', 'partially-correct', 'incomplete'):
% if status in ('unsubmitted', 'submitted', 'correct', 'incorrect', 'partially-correct', 'incomplete'):
<div class="${status.classname} ${doinline}">
% endif
......@@ -45,7 +45,7 @@
<textarea style="display:none" id="input_${id}_dynamath" name="input_${id}_dynamath"></textarea>
% endif
% if status in ('unsubmitted', 'correct', 'incorrect', 'partially-correct', 'incomplete'):
% if status in ('unsubmitted', 'submitted', 'correct', 'incorrect', 'partially-correct', 'incomplete'):
</div>
% endif
......
......@@ -173,6 +173,7 @@ class TemplateTestCase(unittest.TestCase):
cases = [
('correct', 'correct'),
('unsubmitted', 'unanswered'),
('submitted', 'submitted'),
('incorrect', 'incorrect'),
('incomplete', 'incorrect')
]
......
......@@ -24,7 +24,7 @@ from capa.inputtypes import Status
from capa.responsetypes import StudentInputError, ResponseError, LoncapaProblemError
from capa.util import convert_files_to_filenames, get_inner_html_from_xpath
from xblock.fields import Boolean, Dict, Float, Integer, Scope, String, XMLString
from xmodule.capa_base_constants import RANDOMIZATION, SHOWANSWER
from xmodule.capa_base_constants import RANDOMIZATION, SHOWANSWER, SHOW_CORRECTNESS
from xmodule.exceptions import NotFoundError
from .fields import Date, Timedelta
from .progress import Progress
......@@ -114,6 +114,18 @@ class CapaFields(object):
help=_("Amount of time after the due date that submissions will be accepted"),
scope=Scope.settings
)
show_correctness = String(
display_name=_("Show Results"),
help=_("Defines when to show whether a learner's answer to the problem is correct. "
"Configured on the subsection."),
scope=Scope.settings,
default=SHOW_CORRECTNESS.ALWAYS,
values=[
{"display_name": _("Always"), "value": SHOW_CORRECTNESS.ALWAYS},
{"display_name": _("Never"), "value": SHOW_CORRECTNESS.NEVER},
{"display_name": _("Past Due"), "value": SHOW_CORRECTNESS.PAST_DUE},
],
)
showanswer = String(
display_name=_("Show Answer"),
help=_("Defines when to show the answer to the problem. "
......@@ -391,12 +403,25 @@ class CapaMixin(CapaFields):
return None
return None
def get_display_progress(self):
    """
    Return a ``(score, total)`` pair to be displayed to the learner.

    When correctness is being withheld (see ``correctness_available``),
    the earned score is replaced with ``None`` so that only the total
    points possible are shown.
    """
    progress = self.get_progress()
    if progress:
        score, total = progress.frac()
    else:
        score, total = 0, 0
    if not self.correctness_available():
        # Withhold the earned score while still exposing the total.
        return None, total
    return score, total
def get_html(self):
"""
Return some html with data about the module
"""
progress = self.get_progress()
curr_score, total_possible = (progress.frac() if progress else (0, 0))
curr_score, total_possible = self.get_display_progress()
return self.runtime.render_template('problem_ajax.html', {
'element_id': self.location.html_id(),
'id': self.location.to_deprecated_string(),
......@@ -739,7 +764,11 @@ class CapaMixin(CapaFields):
if render_notifications:
progress = self.get_progress()
id_list = self.lcp.correct_map.keys()
if len(id_list) == 1:
# Show only a generic message if hiding correctness
if not self.correctness_available():
answer_notification_type = 'submitted'
elif len(id_list) == 1:
# Only one answer available
answer_notification_type = self.lcp.correct_map.get_correctness(id_list[0])
elif len(id_list) > 1:
......@@ -782,6 +811,8 @@ class CapaMixin(CapaFields):
).format(progress=str(progress))
else:
answer_notification_message = _('Partially Correct')
elif answer_notification_type == 'submitted':
answer_notification_message = _("Answer submitted.")
return answer_notification_type, answer_notification_message
......@@ -855,7 +886,10 @@ class CapaMixin(CapaFields):
"""
Is the user allowed to see an answer?
"""
if self.showanswer == '':
if not self.correctness_available():
# If correctness is being withheld, then don't show answers either.
return False
elif self.showanswer == '':
return False
elif self.showanswer == SHOWANSWER.NEVER:
return False
......@@ -883,6 +917,24 @@ class CapaMixin(CapaFields):
return False
def correctness_available(self):
    """
    Is the user allowed to see whether she's answered correctly?

    Limits access to the correct/incorrect flags, messages, and problem score.
    """
    if self.show_correctness == SHOW_CORRECTNESS.NEVER:
        return False
    if self.runtime.user_is_staff:
        # Checked after 'never': staff see correctness unless the problem
        # explicitly withholds it from everyone.
        return True
    if self.show_correctness == SHOW_CORRECTNESS.PAST_DUE:
        return self.is_past_due()
    # Remaining case: SHOW_CORRECTNESS.ALWAYS (the field default).
    return True
def update_score(self, data):
"""
Delivers grading response (e.g. from asynchronous code checking) to
......@@ -1233,6 +1285,10 @@ class CapaMixin(CapaFields):
# render problem into HTML
html = self.get_problem_html(encapsulate=False, submit_notification=True)
# Withhold success indicator if hiding correctness
if not self.correctness_available():
success = 'submitted'
return {
'success': success,
'contents': html
......
......@@ -4,6 +4,15 @@ Constants for capa_base problems
"""
class SHOW_CORRECTNESS(object): # pylint: disable=invalid-name
    """
    Constants for when to show correctness
    """
    # Correctness and scores are shown as soon as the learner submits.
    ALWAYS = "always"
    # Correctness and scores are withheld until the due date has passed
    # (for self-paced courses, the course end date — see the editor template).
    PAST_DUE = "past_due"
    # Correctness and scores are never shown to learners.
    NEVER = "never"
class SHOWANSWER(object):
"""
Constants for when to show answer
......
......@@ -120,7 +120,8 @@ class CapaModule(CapaMixin, XModule):
after = self.get_progress()
after_attempts = self.attempts
progress_changed = (after != before) or (after_attempts != before_attempts)
curr_score, total_possible = (after.frac() if after else (0, 0))
curr_score, total_possible = self.get_display_progress()
result.update({
'progress_changed': progress_changed,
'current_score': curr_score,
......@@ -215,6 +216,7 @@ class CapaDescriptor(CapaFields, RawDescriptor):
CapaDescriptor.force_save_button,
CapaDescriptor.markdown,
CapaDescriptor.use_latex_compiler,
CapaDescriptor.show_correctness,
])
return non_editable_fields
......
......@@ -250,6 +250,15 @@ div.problem {
border-color: $incorrect;
}
}
&.choicegroup_submitted {
border: 2px solid $submitted;
// keep blue for submitted answers on hover.
&:hover {
border-color: $submitted;
}
}
}
.indicator-container {
......@@ -325,6 +334,7 @@ div.problem {
@include status-icon($incorrect, $cross-icon);
}
&.submitted,
&.unsubmitted,
&.unanswered {
.status-icon {
......@@ -419,6 +429,12 @@ div.problem {
}
}
&.submitted, &.ui-icon-check {
input {
border-color: $submitted;
}
}
p.answer {
display: inline-block;
margin-top: ($baseline / 2);
......@@ -790,6 +806,18 @@ div.problem {
}
}
// CASE: submitted, correctness withheld
> .submitted {
input {
border: 2px solid $submitted;
}
.status {
content: '';
}
}
// CASE: unanswered and unsubmitted
> .unanswered, > .unsubmitted {
......@@ -824,7 +852,11 @@ div.problem {
.indicator-container {
display: inline-block;
.status.correct:after, .status.partially-correct:after, .status.incorrect:after, .status.unanswered:after {
.status.correct:after,
.status.partially-correct:after,
.status.incorrect:after,
.status.submitted:after,
.status.unanswered:after {
@include margin-left(0);
}
}
......@@ -1531,6 +1563,10 @@ div.problem {
@extend label.choicegroup_incorrect;
}
label.choicetextgroup_submitted, section.choicetextgroup_submitted {
@extend label.choicegroup_submitted;
}
label.choicetextgroup_show_correct, section.choicetextgroup_show_correct {
&:after {
@include margin-left($baseline*.75);
......@@ -1569,6 +1605,10 @@ div.problem .imageinput.capa_inputtype {
.partially-correct {
@include status-icon($partially-correct, $asterisk-icon);
}
.submitted {
content: '';
}
}
// +Problem - Annotation Problem Overrides
......@@ -1596,4 +1636,8 @@ div.problem .annotation-input {
.partially-correct {
@include status-icon($partially-correct, $asterisk-icon);
}
.submitted {
content: '';
}
}
......@@ -138,6 +138,28 @@ describe 'Problem', ->
it 'shows 0 points possible for the detail', ->
testProgessData(@problem, 0, 0, 1, "False", "0 points possible (ungraded)")
describe 'with a score of null (show_correctness == false)', ->
it 'reports the number of points possible and graded, results hidden', ->
testProgessData(@problem, null, 1, 0, "True", "1 point possible (graded, results hidden)")
it 'reports the number of points possible (plural) and graded, results hidden', ->
testProgessData(@problem, null, 2, 0, "True", "2 points possible (graded, results hidden)")
it 'reports the number of points possible and ungraded, results hidden', ->
testProgessData(@problem, null, 1, 0, "False", "1 point possible (ungraded, results hidden)")
it 'displays ungraded if number of points possible is 0, results hidden', ->
testProgessData(@problem, null, 0, 0, "False", "0 points possible (ungraded, results hidden)")
it 'displays ungraded if number of points possible is 0, even if graded value is True, results hidden', ->
testProgessData(@problem, null, 0, 0, "True", "0 points possible (ungraded, results hidden)")
it 'reports the correct score with status none and >0 attempts, results hidden', ->
testProgessData(@problem, null, 1, 1, "True", "1 point possible (graded, results hidden)")
it 'reports the correct score with >1 weight, status none, and >0 attempts, results hidden', ->
testProgessData(@problem, null, 2, 2, "True", "2 points possible (graded, results hidden)")
describe 'render', ->
beforeEach ->
@problem = new Problem($('.xblock-student_view'))
......
......@@ -214,11 +214,36 @@
attemptsUsed = this.el.data('attempts-used');
graded = this.el.data('graded');
// The problem is ungraded if it's explicitly marked as such, or if the total possible score is 0
if (graded === 'True' && totalScore !== 0) {
graded = true;
} else {
graded = false;
}
if (curScore === undefined || totalScore === undefined) {
progress = '';
// Render an empty string.
progressTemplate = '';
} else if (curScore === null || curScore === 'None') {
// Render 'x point(s) possible (un/graded, results hidden)' if no current score provided.
if (graded) {
progressTemplate = ngettext(
// Translators: %(num_points)s is the number of points possible (examples: 1, 3, 10).;
'%(num_points)s point possible (graded, results hidden)',
'%(num_points)s points possible (graded, results hidden)',
totalScore
);
} else {
progressTemplate = ngettext(
// Translators: %(num_points)s is the number of points possible (examples: 1, 3, 10).;
'%(num_points)s point possible (ungraded, results hidden)',
'%(num_points)s points possible (ungraded, results hidden)',
totalScore
);
}
} else if (attemptsUsed === 0 || totalScore === 0) {
// Render 'x point(s) possible' if student has not yet attempted question
if (graded === 'True' && totalScore !== 0) {
if (graded) {
progressTemplate = ngettext(
// Translators: %(num_points)s is the number of points possible (examples: 1, 3, 10).;
'%(num_points)s point possible (graded)', '%(num_points)s points possible (graded)',
......@@ -231,10 +256,9 @@
totalScore
);
}
progress = interpolate(progressTemplate, {num_points: totalScore}, true);
} else {
// Render 'x/y point(s)' if student has attempted question
if (graded === 'True' && totalScore !== 0) {
if (graded) {
progressTemplate = ngettext(
// This comment needs to be on one line to be properly scraped for the translators.
// Translators: %(earned)s is the number of points earned. %(possible)s is the total number of points (examples: 0/1, 1/1, 2/3, 5/10). The total number of points will always be at least 1. We pluralize based on the total number of points (example: 0/1 point; 1/2 points);
......@@ -249,13 +273,14 @@
totalScore
);
}
progress = interpolate(
progressTemplate, {
earned: curScore,
possible: totalScore
}, true
);
}
progress = interpolate(
progressTemplate, {
earned: curScore,
num_points: totalScore,
possible: totalScore
}, true
);
return this.$('.problem-progress').text(progress);
};
......@@ -573,6 +598,7 @@
complete: this.enableSubmitButtonAfterResponse,
success: function(response) {
switch (response.success) {
case 'submitted':
case 'incorrect':
case 'correct':
that.render(response.contents);
......@@ -599,6 +625,7 @@
Logger.log('problem_check', this.answers);
return $.postWithPrefix('' + this.url + '/problem_check', this.answers, function(response) {
switch (response.success) {
case 'submitted':
case 'incorrect':
case 'correct':
window.SR.readTexts(that.get_sr_status(response.contents));
......
......@@ -100,6 +100,19 @@ class InheritanceMixin(XBlockMixin):
scope=Scope.settings,
default="finished",
)
show_correctness = String(
display_name=_("Show Results"),
help=_(
# Translators: DO NOT translate the words in quotes here, they are
# specific words for the acceptable values.
'Specify when to show answer correctness and score to learners. '
'Valid values are "always", "never", and "past_due".'
),
scope=Scope.settings,
default="always",
)
rerandomize = String(
display_name=_("Randomization"),
help=_(
......
......@@ -265,6 +265,46 @@ class CapaModuleTest(unittest.TestCase):
problem.attempts = 1
self.assertTrue(problem.answer_available())
@ddt.data(
    # If show_correctness=always, Answer is visible after attempted
    ({
        'showanswer': 'attempted',
        'max_attempts': '1',
        'show_correctness': 'always',
    }, True),
    # If show_correctness=never, Answer is never visible
    ({
        'showanswer': 'attempted',
        'max_attempts': '1',
        'show_correctness': 'never',
    }, False),
    # If show_correctness=past_due, answer is not visible before due date
    ({
        'showanswer': 'attempted',
        'show_correctness': 'past_due',
        'max_attempts': '1',
        'due': 'tomorrow_str',
    }, False),
    # If show_correctness=past_due, answer is visible after due date
    ({
        'showanswer': 'attempted',
        'show_correctness': 'past_due',
        'max_attempts': '1',
        'due': 'yesterday_str',
    }, True),
)
@ddt.unpack
def test_showanswer_hide_correctness(self, problem_data, answer_available):
    """
    Ensure that the answer will not be shown when correctness is being hidden.

    problem_data: kwargs passed to CapaFactory.create(). A 'due' entry names
        an attribute on this test case (e.g. 'tomorrow_str') which is resolved
        to the real date string before the problem is created.
    answer_available: expected value of answer_available() after one attempt.
    """
    # Resolve the symbolic due-date attribute name into the actual value.
    if 'due' in problem_data:
        problem_data['due'] = getattr(self, problem_data['due'])
    problem = CapaFactory.create(**problem_data)
    # With showanswer='attempted', the answer is always hidden before any attempt.
    self.assertFalse(problem.answer_available())
    problem.attempts = 1
    # After an attempt, visibility is governed by the show_correctness setting.
    self.assertEqual(problem.answer_available(), answer_available)
def test_showanswer_closed(self):
# can see after attempts used up, even with due date in the future
......@@ -414,6 +454,73 @@ class CapaModuleTest(unittest.TestCase):
graceperiod=self.two_day_delta_str)
self.assertTrue(still_in_grace.answer_available())
@ddt.data('', 'other-value')
def test_show_correctness_other(self, show_correctness):
    """
    Values outside the SHOW_CORRECTNESS constant (empty or unrecognized)
    must not hide correctness from the learner.
    """
    problem_with_odd_setting = CapaFactory.create(show_correctness=show_correctness)
    self.assertTrue(problem_with_odd_setting.correctness_available())
def test_show_correctness_default(self):
    """
    A problem created without an explicit show_correctness setting exposes
    correctness to the learner (i.e. the default is visible).
    """
    self.assertTrue(CapaFactory.create().correctness_available())
def test_show_correctness_never(self):
    """
    With show_correctness='never', correctness is withheld from the learner.
    """
    hidden_problem = CapaFactory.create(show_correctness='never')
    self.assertFalse(hidden_problem.correctness_available())
@ddt.data(
    # Correctness not visible if due date in the future, even after using up all attempts
    ({
        'show_correctness': 'past_due',
        'max_attempts': '1',
        'attempts': '1',
        'due': 'tomorrow_str',
    }, False),
    # Correctness visible if due date in the past
    ({
        'show_correctness': 'past_due',
        'max_attempts': '1',
        'attempts': '0',
        'due': 'yesterday_str',
    }, True),
    # Correctness not visible if due date in the future
    ({
        'show_correctness': 'past_due',
        'max_attempts': '1',
        'attempts': '0',
        'due': 'tomorrow_str',
    }, False),
    # Correctness not visible because grace period hasn't expired,
    # even after using up all attempts
    ({
        'show_correctness': 'past_due',
        'max_attempts': '1',
        'attempts': '1',
        'due': 'yesterday_str',
        'graceperiod': 'two_day_delta_str',
    }, False),
)
@ddt.unpack
def test_show_correctness_past_due(self, problem_data, expected_result):
    """
    Test that with show_correctness="past_due", correctness will only be visible
    after the problem is closed for everyone--e.g. after due date + grace period.

    problem_data: kwargs for CapaFactory.create(); 'due' and 'graceperiod'
        entries name attributes on this test case and are resolved below.
    expected_result: expected value of correctness_available().
    """
    # Resolve symbolic date/delta attribute names into their real values.
    problem_data['due'] = getattr(self, problem_data['due'])
    if 'graceperiod' in problem_data:
        problem_data['graceperiod'] = getattr(self, problem_data['graceperiod'])
    problem = CapaFactory.create(**problem_data)
    self.assertEqual(problem.correctness_available(), expected_result)
def test_closed(self):
# Attempts < Max attempts --> NOT closed
......@@ -814,6 +921,36 @@ class CapaModuleTest(unittest.TestCase):
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
@ddt.data(
    # (show_correctness, is_correct, expected_score, expected_success)
    ("never", True, None, 'submitted'),
    ("never", False, None, 'submitted'),
    ("past_due", True, None, 'submitted'),
    ("past_due", False, None, 'submitted'),
    ("always", True, 1, 'correct'),
    ("always", False, 0, 'incorrect'),
)
@ddt.unpack
def test_handle_ajax_show_correctness(self, show_correctness, is_correct, expected_score, expected_success):
    """
    Check the problem_check AJAX response when correctness is hidden.

    When show_correctness is 'never', or 'past_due' with a future due date,
    the response reports success='submitted' and a null score; with 'always'
    the real correctness and score come back.
    """
    # Due date is tomorrow, so 'past_due' problems still hide correctness here.
    module = CapaFactory.create(show_correctness=show_correctness,
                                due=self.tomorrow_str,
                                correct=is_correct)

    # Simulate marking the input correct/incorrect
    with patch('capa.correctmap.CorrectMap.is_correct') as mock_is_correct:
        mock_is_correct.return_value = is_correct

        # Check the problem
        get_request_dict = {CapaFactory.input_key(): '0'}
        json_result = module.handle_ajax('problem_check', get_request_dict)
        result = json.loads(json_result)

    # Expect that the AJAX result withholds correctness and score
    self.assertEqual(result['current_score'], expected_score)
    self.assertEqual(result['success'], expected_success)

    # Expect that the number of attempts is incremented by 1
    self.assertEqual(module.attempts, 1)
def test_reset_problem(self):
module = CapaFactory.create(done=True)
module.new_lcp = Mock(wraps=module.new_lcp)
......@@ -1584,6 +1721,27 @@ class CapaModuleTest(unittest.TestCase):
other_module.get_progress()
mock_progress.assert_called_with(1, 1)
@ddt.data(
    # (show_correctness, is_correct, expected_score)
    ("never", True, None),
    ("never", False, None),
    ("past_due", True, None),
    ("past_due", False, None),
    ("always", True, 1),
    ("always", False, 0),
)
@ddt.unpack
def test_get_display_progress_show_correctness(self, show_correctness, is_correct, expected_score):
    """
    Check that score and total are calculated correctly for the progress fraction.

    When correctness is hidden ('never', or 'past_due' before the due date),
    the displayed score is None while the total is still reported.
    """
    # Due date is tomorrow, so 'past_due' still hides the score here.
    module = CapaFactory.create(correct=is_correct,
                                show_correctness=show_correctness,
                                due=self.tomorrow_str)
    module.weight = 1
    score, total = module.get_display_progress()
    self.assertEqual(score, expected_score)
    self.assertEqual(total, 1)
def test_get_html(self):
"""
Check that get_html() calls get_progress() with no arguments.
......
......@@ -151,6 +151,7 @@ $general-color-accent: $uxpl-blue-base !default
$correct: $success-color !default;
$partially-correct: $success-color !default;
$incorrect: $error-color !default;
$submitted: $general-color !default;
// BUTTONS
......
......@@ -317,6 +317,14 @@ class ProblemPage(PageObject):
self.wait_for_element_visibility('.fa-asterisk', "Waiting for asterisk notification icon")
self.wait_for_focus_on_submit_notification()
def wait_submitted_notification(self):
    """
    Check for visibility of the "answer received" general notification and icon.
    """
    self.wait_for_element_visibility(
        '.notification.general.notification-submit',
        "Wait for submitted notification to be visible",
    )
    self.wait_for_focus_on_submit_notification()
def click_hint(self):
"""
Click the Hint button.
......@@ -418,14 +426,16 @@ class ProblemPage(PageObject):
solution_selector = '.solution-span div.detailed-solution'
return self.q(css=solution_selector).is_present()
def is_correct_choice_highlighted(self, correct_choices):
def is_choice_highlighted(self, choice, choices_list):
"""
Check if correct answer/choice highlighted for choice group.
Check if the given answer/choice is highlighted for choice group.
"""
correct_status_xpath = '//fieldset/div[contains(@class, "field")][{0}]/label[contains(@class, "choicegroup_correct")]/span[contains(@class, "status correct")]' # pylint: disable=line-too-long
choice_status_xpath = ('//fieldset/div[contains(@class, "field")][{{0}}]'
'/label[contains(@class, "choicegroup_{choice}")]'
'/span[contains(@class, "status {choice}")]'.format(choice=choice))
any_status_xpath = '//fieldset/div[contains(@class, "field")][{0}]/label/span'
for choice in correct_choices:
if not self.q(xpath=correct_status_xpath.format(choice)).is_present():
for choice in choices_list:
if not self.q(xpath=choice_status_xpath.format(choice)).is_present():
return False
# Check that there is only a single status span, as there were some bugs with multiple
......@@ -435,6 +445,18 @@ class ProblemPage(PageObject):
return True
def is_correct_choice_highlighted(self, correct_choices):
    """
    Return True when every choice in ``correct_choices`` carries the
    'correct' highlight in the choice group.
    """
    highlighted = self.is_choice_highlighted('correct', correct_choices)
    return highlighted
def is_submitted_choice_highlighted(self, correct_choices):
    """
    Check if submitted answer/choice highlighted for choice group.

    NOTE(review): despite its name, ``correct_choices`` is just the list of
    choice indices expected to show the 'submitted' highlight -- consider
    renaming it at the next interface-breaking opportunity.
    """
    return self.is_choice_highlighted('submitted', correct_choices)
@property
def problem_question(self):
"""
......
......@@ -575,6 +575,13 @@ class CourseOutlinePage(CoursePage, CourseOutlineContainer):
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def select_visibility_tab(self):
    """
    Select the visibility settings tab and wait for its fields to load.
    """
    self.q(css=".settings-tab-button[data-tab='visibility']").first.click()
    self.wait_for_element_presence('input[value=hide_after_due]', 'Visibility fields not present.')
def select_advanced_tab(self, desired_item='special_exam'):
"""
Select the advanced settings tab
......@@ -584,8 +591,6 @@ class CourseOutlinePage(CoursePage, CourseOutlineContainer):
self.wait_for_element_presence('input.no_special_exam', 'Special exam settings fields not present.')
if desired_item == 'gated_content':
self.wait_for_element_visibility('#is_prereq', 'Gating settings fields are present.')
if desired_item == 'hide_after_due_date':
self.wait_for_element_presence('input[value=hide_after_due]', 'Visibility fields not present.')
def make_exam_proctored(self):
"""
......@@ -601,6 +606,7 @@ class CourseOutlinePage(CoursePage, CourseOutlineContainer):
"""
self.q(css="input.timed_exam").first.click()
if hide_after_due:
self.select_visibility_tab()
self.q(css='input[name=content-visibility][value=hide_after_due]').first.click()
self.q(css=".action-save").first.click()
self.wait_for_ajax()
......@@ -1057,7 +1063,7 @@ class CourseOutlineModal(object):
if needed.
"""
if not self.is_staff_lock_visible:
self.find_css(".settings-tab-button[data-tab=advanced]").click()
self.find_css(".settings-tab-button[data-tab=visibility]").click()
EmptyPromise(
lambda: self.is_staff_lock_visible,
"Staff lock option is visible",
......
......@@ -900,7 +900,7 @@ class SubsectionHiddenAfterDueDateTest(UniqueCourseTest):
self.studio_course_outline.visit()
self.studio_course_outline.open_subsection_settings_dialog()
self.studio_course_outline.select_advanced_tab('hide_after_due_date')
self.studio_course_outline.select_visibility_tab()
self.studio_course_outline.make_subsection_hidden_after_due_date()
self.logout_page.visit()
......
......@@ -3,6 +3,7 @@ Bok choy acceptance and a11y tests for problem types in the LMS
See also lettuce tests in lms/djangoapps/courseware/features/problems.feature
"""
import ddt
import random
import textwrap
......@@ -84,12 +85,14 @@ class ProblemTypeTestBase(ProblemsTest, EventsTestMixin):
problem_name = None
problem_type = None
problem_points = 1
factory = None
factory_kwargs = {}
status_indicators = {
'correct': ['span.correct'],
'incorrect': ['span.incorrect'],
'unanswered': ['span.unanswered'],
'submitted': ['span.submitted'],
}
def setUp(self):
......@@ -100,6 +103,10 @@ class ProblemTypeTestBase(ProblemsTest, EventsTestMixin):
self.courseware_page.visit()
self.problem_page = ProblemPage(self.browser)
def get_sequential(self):
    """ Allow any class in the inheritance chain to customize subsection metadata."""
    subsection_metadata = getattr(self, 'sequential_metadata', {})
    return XBlockFixtureDesc('sequential', 'Test Subsection', metadata=subsection_metadata)
def get_problem(self):
"""
Creates a {problem_type} problem
......@@ -117,7 +124,7 @@ class ProblemTypeTestBase(ProblemsTest, EventsTestMixin):
Waits for the expected status indicator.
Args:
status: one of ("correct", "incorrect", "unanswered)
status: one of ("correct", "incorrect", "unanswered", "submitted")
"""
msg = "Wait for status to be {}".format(status)
selector = ', '.join(self.status_indicators[status])
......@@ -381,12 +388,83 @@ class ProblemTypeTestMixin(ProblemTypeA11yTestMixin):
self.problem_page.wait_partial_notification()
class AnnotationProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
@ddt.ddt
class ProblemNeverShowCorrectnessMixin(object):
    """
    Tests the effect of adding `show_correctness: never` to the sequence metadata
    for subclasses of ProblemTypeTestMixin.
    """
    # Picked up by get_sequential() when the subsection fixture is built.
    sequential_metadata = {'show_correctness': 'never'}

    @attr(shard=7)
    @ddt.data('correct', 'incorrect', 'partially-correct')
    def test_answer_says_submitted(self, correctness):
        """
        Scenario: I can answer a problem <Correctness>ly
        Given External graders respond "<Correctness>"
        And I am viewing a "<ProblemType>" problem
        in a subsection with show_correctness set to "never"
        Then I should see a score of "N point(s) possible (ungraded, results hidden)"
        When I answer a "<ProblemType>" problem "<Correctness>ly"
        And the "<ProblemType>" problem displays only a "submitted" notification.
        And I should see a score of "N point(s) possible (ungraded, results hidden)"
        And a "problem_check" server event is emitted
        And a "problem_check" browser event is emitted
        """
        # Not all problems have partially correct solutions configured
        if correctness == 'partially-correct' and not self.partially_correct:
            raise SkipTest("Test incompatible with the current problem type")

        # Problem progress text depends on points possible
        possible = 'possible (ungraded, results hidden)'
        if self.problem_points == 1:
            problem_progress = '1 point {}'.format(possible)
        else:
            problem_progress = '{} points {}'.format(self.problem_points, possible)

        # Make sure we're looking at the right problem
        self.problem_page.wait_for(
            lambda: self.problem_page.problem_name == self.problem_name,
            "Make sure the correct problem is on the page"
        )

        # Learner can see that score will be hidden prior to submitting answer
        self.assertEqual(self.problem_page.problem_progress_graded_value, problem_progress)

        # Answer the problem with the parametrized correctness; only a generic
        # "submitted" status should be shown, never correct/incorrect.
        self.answer_problem(correctness=correctness)
        self.problem_page.click_submit()
        self.wait_for_status('submitted')
        self.problem_page.wait_submitted_notification()

        # Score is still hidden after submitting answer
        self.assertEqual(self.problem_page.problem_progress_graded_value, problem_progress)

        # Check for corresponding tracking event
        expected_events = [
            {
                'event_source': 'server',
                'event_type': 'problem_check',
                'username': self.username,
            }, {
                'event_source': 'browser',
                'event_type': 'problem_check',
                'username': self.username,
            },
        ]
        for event in expected_events:
            self.wait_for_events(event_filter=event, number_of_matches=1)
class AnnotationProblemTypeBase(ProblemTypeTestBase):
"""
TestCase Class for Annotation Problem Type
ProblemTypeTestBase specialization for Annotation Problem Type
"""
problem_name = 'ANNOTATION TEST PROBLEM'
problem_type = 'annotationresponse'
problem_points = 2
factory = AnnotationResponseXMLFactory()
partially_correct = True
......@@ -411,13 +489,14 @@ class AnnotationProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'incorrect': ['span.incorrect'],
'partially-correct': ['span.partially-correct'],
'unanswered': ['span.unanswered'],
'submitted': ['span.submitted'],
}
def setUp(self, *args, **kwargs):
"""
Additional setup for AnnotationProblemTypeTest
Additional setup for AnnotationProblemTypeBase
"""
super(AnnotationProblemTypeTest, self).setUp(*args, **kwargs)
super(AnnotationProblemTypeBase, self).setUp(*args, **kwargs)
self.problem_page.a11y_audit.config.set_rules({
"ignore": [
......@@ -443,9 +522,23 @@ class AnnotationProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
).nth(choice).click()
class CheckboxProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class AnnotationProblemTypeTest(AnnotationProblemTypeBase, ProblemTypeTestMixin):
"""
Standard tests for the Annotation Problem Type
"""
pass
class AnnotationProblemTypeNeverShowCorrectnessTest(AnnotationProblemTypeBase, ProblemNeverShowCorrectnessMixin):
"""
Ensure that correctness can be withheld for Annotation Problem Type problems.
"""
pass
class CheckboxProblemTypeBase(ProblemTypeTestBase):
"""
TestCase Class for Checkbox Problem Type
ProblemTypeTestBase specialization Checkbox Problem Type
"""
problem_name = 'CHECKBOX TEST PROBLEM'
problem_type = 'checkbox'
......@@ -462,12 +555,6 @@ class CheckboxProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'explanation_text': 'This is explanation text'
}
def setUp(self, *args, **kwargs):
"""
Additional setup for CheckboxProblemTypeTest
"""
super(CheckboxProblemTypeTest, self).setUp(*args, **kwargs)
def answer_problem(self, correctness):
"""
Answer checkbox problem.
......@@ -481,6 +568,11 @@ class CheckboxProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
self.problem_page.click_choice("choice_1")
self.problem_page.click_choice("choice_3")
class CheckboxProblemTypeTest(CheckboxProblemTypeBase, ProblemTypeTestMixin):
"""
Standard tests for the Checkbox Problem Type
"""
@attr(shard=7)
def test_can_show_answer(self):
"""
......@@ -498,9 +590,16 @@ class CheckboxProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
self.problem_page.wait_for_show_answer_notification()
class MultipleChoiceProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class CheckboxProblemTypeNeverShowCorrectnessTest(CheckboxProblemTypeBase, ProblemNeverShowCorrectnessMixin):
"""
Ensure that correctness can be withheld for Checkbox Problem Type problems.
"""
pass
class MultipleChoiceProblemTypeBase(ProblemTypeTestBase):
"""
TestCase Class for Multiple Choice Problem Type
ProblemTypeTestBase specialization Multiple Choice Problem Type
"""
problem_name = 'MULTIPLE CHOICE TEST PROBLEM'
problem_type = 'multiple choice'
......@@ -518,14 +617,9 @@ class MultipleChoiceProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'correct': ['label.choicegroup_correct'],
'incorrect': ['label.choicegroup_incorrect', 'span.incorrect'],
'unanswered': ['span.unanswered'],
'submitted': ['label.choicegroup_submitted', 'span.submitted'],
}
def setUp(self, *args, **kwargs):
"""
Additional setup for MultipleChoiceProblemTypeTest
"""
super(MultipleChoiceProblemTypeTest, self).setUp(*args, **kwargs)
def answer_problem(self, correctness):
"""
Answer multiple choice problem.
......@@ -535,6 +629,11 @@ class MultipleChoiceProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
else:
self.problem_page.click_choice("choice_choice_2")
class MultipleChoiceProblemTypeTest(MultipleChoiceProblemTypeBase, ProblemTypeTestMixin):
"""
Standard tests for the Multiple Choice Problem Type
"""
@attr(shard=7)
def test_can_show_answer(self):
"""
......@@ -565,9 +664,17 @@ class MultipleChoiceProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
self.problem_page.wait_for_show_answer_notification()
class RadioProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class MultipleChoiceProblemTypeNeverShowCorrectnessTest(MultipleChoiceProblemTypeBase,
ProblemNeverShowCorrectnessMixin):
"""
Ensure that correctness can be withheld for Multiple Choice Problem Type problems.
"""
pass
class RadioProblemTypeBase(ProblemTypeTestBase):
"""
TestCase Class for Radio Problem Type
ProblemTypeTestBase specialization for Radio Problem Type
"""
problem_name = 'RADIO TEST PROBLEM'
problem_type = 'radio'
......@@ -586,14 +693,9 @@ class RadioProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'correct': ['label.choicegroup_correct'],
'incorrect': ['label.choicegroup_incorrect', 'span.incorrect'],
'unanswered': ['span.unanswered'],
'submitted': ['label.choicegroup_submitted', 'span.submitted'],
}
def setUp(self, *args, **kwargs):
"""
Additional setup for RadioProblemTypeTest
"""
super(RadioProblemTypeTest, self).setUp(*args, **kwargs)
def answer_problem(self, correctness):
"""
Answer radio problem.
......@@ -604,9 +706,23 @@ class RadioProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
self.problem_page.click_choice("choice_1")
class DropDownProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class RadioProblemTypeTest(RadioProblemTypeBase, ProblemTypeTestMixin):
    """
    Standard tests for the Radio Problem Type
    """
    pass
class RadioProblemTypeNeverShowCorrectnessTest(RadioProblemTypeBase, ProblemNeverShowCorrectnessMixin):
"""
Ensure that correctness can be withheld for Radio Problem Type problems.
"""
pass
class DropDownProblemTypeBase(ProblemTypeTestBase):
"""
ProblemTypeTestBase specialization for Drop Down Problem Type
"""
problem_name = 'DROP DOWN TEST PROBLEM'
problem_type = 'drop down'
......@@ -621,12 +737,6 @@ class DropDownProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'correct_option': 'Option 2'
}
def setUp(self, *args, **kwargs):
"""
Additional setup for DropDownProblemTypeTest
"""
super(DropDownProblemTypeTest, self).setUp(*args, **kwargs)
def answer_problem(self, correctness):
"""
Answer drop down problem.
......@@ -637,9 +747,23 @@ class DropDownProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
select_option_by_text(selector_element, answer)
class StringProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class DropDownProblemTypeTest(DropDownProblemTypeBase, ProblemTypeTestMixin):
    """
    Standard tests for the Drop Down Problem Type
    """
    pass
class DropDownProblemTypeNeverShowCorrectnessTest(DropDownProblemTypeBase, ProblemNeverShowCorrectnessMixin):
"""
Ensure that correctness can be withheld for Drop Down Problem Type problems.
"""
pass
class StringProblemTypeBase(ProblemTypeTestBase):
"""
TestCase Class for String Problem Type
ProblemTypeTestBase specialization for String Problem Type
"""
problem_name = 'STRING TEST PROBLEM'
problem_type = 'string'
......@@ -658,14 +782,9 @@ class StringProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'correct': ['div.correct'],
'incorrect': ['div.incorrect'],
'unanswered': ['div.unanswered', 'div.unsubmitted'],
'submitted': ['span.submitted'],
}
def setUp(self, *args, **kwargs):
"""
Additional setup for StringProblemTypeTest
"""
super(StringProblemTypeTest, self).setUp(*args, **kwargs)
def answer_problem(self, correctness):
"""
Answer string problem.
......@@ -674,9 +793,23 @@ class StringProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
self.problem_page.fill_answer(textvalue)
class NumericalProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class StringProblemTypeTest(StringProblemTypeBase, ProblemTypeTestMixin):
"""
TestCase Class for Numerical Problem Type
Standard tests for the String Problem Type
"""
pass
class StringProblemTypeNeverShowCorrectnessTest(StringProblemTypeBase, ProblemNeverShowCorrectnessMixin):
"""
Ensure that correctness can be withheld for String Problem Type problems.
"""
pass
class NumericalProblemTypeBase(ProblemTypeTestBase):
"""
ProblemTypeTestBase specialization for Numerical Problem Type
"""
problem_name = 'NUMERICAL TEST PROBLEM'
problem_type = 'numerical'
......@@ -695,14 +828,9 @@ class NumericalProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'correct': ['div.correct'],
'incorrect': ['div.incorrect'],
'unanswered': ['div.unanswered', 'div.unsubmitted'],
'submitted': ['div.submitted'],
}
def setUp(self, *args, **kwargs):
"""
Additional setup for NumericalProblemTypeTest
"""
super(NumericalProblemTypeTest, self).setUp(*args, **kwargs)
def answer_problem(self, correctness):
"""
Answer numerical problem.
......@@ -716,6 +844,11 @@ class NumericalProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
textvalue = str(random.randint(-2, 2))
self.problem_page.fill_answer(textvalue)
class NumericalProblemTypeTest(NumericalProblemTypeBase, ProblemTypeTestMixin):
"""
Standard tests for the Numerical Problem Type
"""
def test_error_input_gentle_alert(self):
"""
Scenario: I can answer a problem with erroneous input and will see a gentle alert
......@@ -741,9 +874,16 @@ class NumericalProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
self.problem_page.wait_for_focus_on_problem_meta()
class FormulaProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class NumericalProblemTypeNeverShowCorrectnessTest(NumericalProblemTypeBase, ProblemNeverShowCorrectnessMixin):
"""
Ensure that correctness can be withheld for Numerical Problem Type problems.
"""
pass
class FormulaProblemTypeBase(ProblemTypeTestBase):
"""
TestCase Class for Formula Problem Type
ProblemTypeTestBase specialization for Formula Problem Type
"""
problem_name = 'FORMULA TEST PROBLEM'
problem_type = 'formula'
......@@ -764,14 +904,9 @@ class FormulaProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'correct': ['div.correct'],
'incorrect': ['div.incorrect'],
'unanswered': ['div.unanswered', 'div.unsubmitted'],
'submitted': ['div.submitted'],
}
def setUp(self, *args, **kwargs):
"""
Additional setup for FormulaProblemTypeTest
"""
super(FormulaProblemTypeTest, self).setUp(*args, **kwargs)
def answer_problem(self, correctness):
"""
Answer formula problem.
......@@ -780,12 +915,27 @@ class FormulaProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
self.problem_page.fill_answer(textvalue)
class ScriptProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class FormulaProblemTypeTest(FormulaProblemTypeBase, ProblemTypeTestMixin):
"""
TestCase Class for Script Problem Type
Standard tests for the Formula Problem Type
"""
pass
class FormulaProblemTypeNeverShowCorrectnessTest(FormulaProblemTypeBase, ProblemNeverShowCorrectnessMixin):
"""
Ensure that correctness can be withheld for Formula Problem Type problems.
"""
pass
class ScriptProblemTypeBase(ProblemTypeTestBase):
"""
ProblemTypeTestBase specialization for Script Problem Type
"""
problem_name = 'SCRIPT TEST PROBLEM'
problem_type = 'script'
problem_points = 2
partially_correct = False
factory = CustomResponseXMLFactory()
......@@ -811,14 +961,9 @@ class ScriptProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'correct': ['div.correct'],
'incorrect': ['div.incorrect'],
'unanswered': ['div.unanswered', 'div.unsubmitted'],
'submitted': ['div.submitted'],
}
def setUp(self, *args, **kwargs):
"""
Additional setup for ScriptProblemTypeTest
"""
super(ScriptProblemTypeTest, self).setUp(*args, **kwargs)
def answer_problem(self, correctness):
"""
Answer script problem.
......@@ -836,6 +981,20 @@ class ScriptProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
self.problem_page.fill_answer(second_addend, input_num=1)
class ScriptProblemTypeTest(ScriptProblemTypeBase, ProblemTypeTestMixin):
"""
Standard tests for the Script Problem Type
"""
pass
class ScriptProblemTypeNeverShowCorrectnessTest(ScriptProblemTypeBase, ProblemNeverShowCorrectnessMixin):
"""
Ensure that correctness can be withheld for Script Problem Type problems.
"""
pass
class JSInputTypeTest(ProblemTypeTestBase, ProblemTypeA11yTestMixin):
"""
TestCase Class for jsinput (custom JavaScript) problem type.
......@@ -859,9 +1018,9 @@ class JSInputTypeTest(ProblemTypeTestBase, ProblemTypeA11yTestMixin):
raise NotImplementedError()
class CodeProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class CodeProblemTypeBase(ProblemTypeTestBase):
"""
TestCase Class for Code Problem Type
ProblemTypeTestBase specialization for Code Problem Type
"""
problem_name = 'CODE TEST PROBLEM'
problem_type = 'code'
......@@ -879,6 +1038,7 @@ class CodeProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'correct': ['.grader-status .correct ~ .debug'],
'incorrect': ['.grader-status .incorrect ~ .debug'],
'unanswered': ['.grader-status .unanswered ~ .debug'],
'submitted': ['.grader-status .submitted ~ .debug'],
}
def answer_problem(self, correctness):
......@@ -895,6 +1055,11 @@ class CodeProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
# (configured in the problem XML above)
pass
class CodeProblemTypeTest(CodeProblemTypeBase, ProblemTypeTestMixin):
"""
Standard tests for the Code Problem Type
"""
def test_answer_incorrectly(self):
"""
Overridden for script test because the testing grader always responds
......@@ -924,7 +1089,14 @@ class CodeProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
pass
class ChoiceTextProbelmTypeTestBase(ProblemTypeTestBase):
class CodeProblemTypeNeverShowCorrectnessTest(CodeProblemTypeBase, ProblemNeverShowCorrectnessMixin):
    """
    Verify that Code problems can withhold correctness from the learner.

    All behavior comes from the two bases; no code-problem-specific
    overrides are required, so the class body is intentionally empty.
    """
class ChoiceTextProblemTypeTestBase(ProblemTypeTestBase):
"""
Base class for "Choice + Text" Problem Types.
(e.g. RadioText, CheckboxText)
......@@ -961,9 +1133,9 @@ class ChoiceTextProbelmTypeTestBase(ProblemTypeTestBase):
self._fill_input_text(input_value, choice)
class RadioTextProblemTypeTest(ChoiceTextProbelmTypeTestBase, ProblemTypeTestMixin):
class RadioTextProblemTypeBase(ChoiceTextProblemTypeTestBase):
"""
TestCase Class for Radio Text Problem Type
ProblemTypeTestBase specialization for Radio Text Problem Type
"""
problem_name = 'RADIO TEXT TEST PROBLEM'
problem_type = 'radio_text'
......@@ -986,13 +1158,14 @@ class RadioTextProblemTypeTest(ChoiceTextProbelmTypeTestBase, ProblemTypeTestMix
'correct': ['section.choicetextgroup_correct'],
'incorrect': ['section.choicetextgroup_incorrect', 'span.incorrect'],
'unanswered': ['span.unanswered'],
'submitted': ['section.choicetextgroup_submitted', 'span.submitted'],
}
def setUp(self, *args, **kwargs):
"""
Additional setup for RadioTextProblemTypeTest
Additional setup for RadioTextProblemTypeBase
"""
super(RadioTextProblemTypeTest, self).setUp(*args, **kwargs)
super(RadioTextProblemTypeBase, self).setUp(*args, **kwargs)
self.problem_page.a11y_audit.config.set_rules({
"ignore": [
......@@ -1003,9 +1176,23 @@ class RadioTextProblemTypeTest(ChoiceTextProbelmTypeTestBase, ProblemTypeTestMix
})
class CheckboxTextProblemTypeTest(ChoiceTextProbelmTypeTestBase, ProblemTypeTestMixin):
class RadioTextProblemTypeTest(RadioTextProblemTypeBase, ProblemTypeTestMixin):
    """
    Run the standard problem-type test suite against Radio + Text problems.

    Everything is inherited from the base and the mixin; the class body
    is intentionally empty.
    """
class RadioTextProblemTypeNeverShowCorrectnessTest(RadioTextProblemTypeBase, ProblemNeverShowCorrectnessMixin):
    """
    Ensure that correctness can be withheld for Radio + Text Problem Type problems.
    """
    pass
class CheckboxTextProblemTypeBase(ChoiceTextProblemTypeTestBase):
"""
ProblemTypeTestBase specialization for Checkbox Text Problem Type
"""
problem_name = 'CHECKBOX TEXT TEST PROBLEM'
problem_type = 'checkbox_text'
......@@ -1025,9 +1212,9 @@ class CheckboxTextProblemTypeTest(ChoiceTextProbelmTypeTestBase, ProblemTypeTest
def setUp(self, *args, **kwargs):
"""
Additional setup for CheckboxTextProblemTypeTest
Additional setup for CheckboxTextProblemTypeBase
"""
super(CheckboxTextProblemTypeTest, self).setUp(*args, **kwargs)
super(CheckboxTextProblemTypeBase, self).setUp(*args, **kwargs)
self.problem_page.a11y_audit.config.set_rules({
"ignore": [
......@@ -1038,9 +1225,23 @@ class CheckboxTextProblemTypeTest(ChoiceTextProbelmTypeTestBase, ProblemTypeTest
})
class ImageProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class CheckboxTextProblemTypeTest(CheckboxTextProblemTypeBase, ProblemTypeTestMixin):
    """
    Run the standard problem-type test suite against Checkbox + Text problems.

    Everything is inherited from the base and the mixin; the class body
    is intentionally empty.
    """
class CheckboxTextProblemTypeNeverShowCorrectnessTest(CheckboxTextProblemTypeBase, ProblemNeverShowCorrectnessMixin):
    """
    Verify that Checkbox + Text problems can withhold correctness from the learner.

    All behavior comes from the two bases; no checkbox-text-specific
    overrides are required, so the class body is intentionally empty.
    """
class ImageProblemTypeBase(ProblemTypeTestBase):
"""
TestCase Class for Image Problem Type
ProblemTypeTestBase specialization for Image Problem Type
"""
problem_name = 'IMAGE TEST PROBLEM'
problem_type = 'image'
......@@ -1071,9 +1272,23 @@ class ImageProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
chain.perform()
class SymbolicProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
class ImageProblemTypeTest(ImageProblemTypeBase, ProblemTypeTestMixin):
    """
    Run the standard problem-type test suite against Image problems.

    Everything is inherited from the base and the mixin; the class body
    is intentionally empty.
    """
class ImageProblemTypeNeverShowCorrectnessTest(ImageProblemTypeBase, ProblemNeverShowCorrectnessMixin):
    """
    Ensure that correctness can be withheld for Image Problem Type problems.
    """
    pass
class SymbolicProblemTypeBase(ProblemTypeTestBase):
"""
ProblemTypeTestBase specialization for Symbolic Problem Type
"""
problem_name = 'SYMBOLIC TEST PROBLEM'
problem_type = 'symbolicresponse'
......@@ -1090,6 +1305,7 @@ class SymbolicProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
'correct': ['div.capa_inputtype div.correct'],
'incorrect': ['div.capa_inputtype div.incorrect'],
'unanswered': ['div.capa_inputtype div.unanswered'],
'submitted': ['div.capa_inputtype div.submitted'],
}
def answer_problem(self, correctness):
......@@ -1098,3 +1314,17 @@ class SymbolicProblemTypeTest(ProblemTypeTestBase, ProblemTypeTestMixin):
"""
choice = "2*x+3*y" if correctness == 'correct' else "3*a+4*b"
self.problem_page.fill_answer(choice)
class SymbolicProblemTypeTest(SymbolicProblemTypeBase, ProblemTypeTestMixin):
    """
    Run the standard problem-type test suite against Symbolic problems.

    Everything is inherited from the base and the mixin; the class body
    is intentionally empty.
    """
class SymbolicProblemTypeNeverShowCorrectnessTest(SymbolicProblemTypeBase, ProblemNeverShowCorrectnessMixin):
    """
    Verify that Symbolic problems can withhold correctness from the learner.

    All behavior comes from the two bases; no symbolic-specific overrides
    are required, so the class body is intentionally empty.
    """
......@@ -98,6 +98,15 @@ from openedx.core.djangolib.markup import HTML
notification_message=answer_notification_message"
/>
% endif
## Render a neutral "submitted" notification (general type, info icon) that
## acknowledges the submission without revealing correctness.
## NOTE(review): presumably used when the problem's show_correctness setting
## withholds results from the learner — confirm against the view that sets
## answer_notification_type.
% if 'submitted' == answer_notification_type:
<%include file="problem_notifications.html" args="
notification_type='general',
notification_icon='fa-info-circle',
notification_name='submit',
is_hidden=False,
notification_message=answer_notification_message"
/>
% endif
% endif
<%include file="problem_notifications.html" args="
notification_type='warning',
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment