"""
Unit tests for LMS instructor-initiated background tasks.

Runs tasks on answers to course problems to validate that code
paths actually work.

"""
import json
from uuid import uuid4

from mock import Mock, MagicMock, patch

from celery.states import SUCCESS, FAILURE

from xmodule.modulestore.exceptions import ItemNotFoundError
from opaque_keys.edx.locations import i4xEncoder

from courseware.models import StudentModule
from courseware.tests.factories import StudentModuleFactory
from student.tests.factories import UserFactory, CourseEnrollmentFactory

from instructor_task.models import InstructorTask
from instructor_task.tests.test_base import InstructorTaskModuleTestCase
from instructor_task.tests.factories import InstructorTaskFactory
from instructor_task.tasks import (
    rescore_problem,
    reset_problem_attempts,
    delete_problem_state,
    generate_certificates,
)
from instructor_task.tasks_helper import UpdateProblemModuleStateError

PROBLEM_URL_NAME = "test_urlname"


class TestTaskFailure(Exception):
    """Dummy exception used by tests to trigger an artificial task failure."""
    pass


class TestInstructorTasks(InstructorTaskModuleTestCase):
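    """
    Base class for testing instructor-initiated background tasks.

    Provides helpers that create InstructorTask entries and run them
    against a mocked-out celery current_task.
    """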

    def setUp(self):
        super(TestInstructorTasks, self).setUp()
        self.initialize_course()
        self.instructor = self.create_instructor('instructor')
        self.location = self.problem_location(PROBLEM_URL_NAME)

    def _create_input_entry(self, student_ident=None, use_problem_url=True, course_id=None):
        """Creates a InstructorTask entry for testing."""
        task_id = str(uuid4())
        task_input = {}
        if use_problem_url:
            task_input['problem_url'] = self.location
        if student_ident is not None:
            task_input['student'] = student_ident

        course_id = course_id or self.course.id
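        # The task input may include an opaque-keys problem location, so it
        # is serialized with i4xEncoder rather than the default JSON encoder.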
        instructor_task = InstructorTaskFactory.create(course_id=course_id,
                                                       requester=self.instructor,
                                                       task_input=json.dumps(task_input, cls=i4xEncoder),
                                                       task_key='dummy value',
                                                       task_id=task_id)
        return instructor_task

    def _get_xmodule_instance_args(self):
        """
        Calculate dummy values for parameters needed for instantiating xmodule instances.
        """
        return {'xqueue_callback_url_prefix': 'dummy_value',
                'request_info': {},
                }

    def _run_task_with_mock_celery(self, task_class, entry_id, task_id, expected_failure_message=None):
        """Submit a task and mock how celery provides a current_task."""
        self.current_task = Mock()
        self.current_task.request = Mock()
        self.current_task.request.id = task_id
        self.current_task.update_state = Mock()
        if expected_failure_message is not None:
            self.current_task.update_state.side_effect = TestTaskFailure(expected_failure_message)
        task_args = [entry_id, self._get_xmodule_instance_args()]

        with patch('instructor_task.tasks_helper._get_current_task') as mock_get_task:
            mock_get_task.return_value = self.current_task
            return task_class.apply(task_args, task_id=task_id).get()

    def _test_missing_current_task(self, task_class):
        """Check that a task_class fails when celery doesn't provide a current_task."""
        task_entry = self._create_input_entry()
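        # Call the task function directly, rather than through apply(), so
        # that celery never supplies a current task and the task fails to start.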
        with self.assertRaises(ValueError):
            task_class(task_entry.id, self._get_xmodule_instance_args())

    def _test_undefined_course(self, task_class):
        """Run with celery, but with no course defined."""
        task_entry = self._create_input_entry(course_id="bogus/course/id")
        with self.assertRaises(ItemNotFoundError):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)

    def _test_undefined_problem(self, task_class):
        """Run with celery, but no problem defined."""
        task_entry = self._create_input_entry()
        with self.assertRaises(ItemNotFoundError):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)

    def _test_run_with_task(self, task_class, action_name, expected_num_succeeded,
                            expected_num_skipped=0, expected_attempted=0, expected_total=0):
        """Run a task and check the number of StudentModules processed."""
        task_entry = self._create_input_entry()
        status = self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)
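        # Unless given explicitly, 'attempted' and 'total' both default to
        # everything the task touched: succeeded plus skipped.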
        expected_attempted = expected_attempted \
            if expected_attempted else expected_num_succeeded + expected_num_skipped
        expected_total = expected_total \
            if expected_total else expected_num_succeeded + expected_num_skipped
        # check return value
        self.assertEquals(status.get('attempted'), expected_attempted)
        self.assertEquals(status.get('succeeded'), expected_num_succeeded)
        self.assertEquals(status.get('skipped'), expected_num_skipped)
        self.assertEquals(status.get('total'), expected_total)
        self.assertEquals(status.get('action_name'), action_name)
        self.assertGreater(status.get('duration_ms'), 0)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(json.loads(entry.task_output), status)
        self.assertEquals(entry.task_state, SUCCESS)

    def _test_run_with_no_state(self, task_class, action_name):
        """Run with no StudentModules defined for the current problem."""
        self.define_option_problem(PROBLEM_URL_NAME)
        self._test_run_with_task(task_class, action_name, 0)

    def _create_students_with_state(self, num_students, state=None, grade=0, max_grade=1):
        """Create students, a problem, and StudentModule objects for testing"""
        self.define_option_problem(PROBLEM_URL_NAME)
        students = [
            UserFactory.create(username='robot%d' % i, email='robot+test+%d@edx.org' % i)
            for i in xrange(num_students)
        ]
        for student in students:
            CourseEnrollmentFactory.create(course_id=self.course.id, user=student)
            StudentModuleFactory.create(course_id=self.course.id,
                                        module_state_key=self.location,
                                        student=student,
                                        grade=grade,
                                        max_grade=max_grade,
                                        state=state)
        return students

    def _assert_num_attempts(self, students, num_attempts):
        """Check the number attempts for all students is the same"""
        for student in students:
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.location)
            state = json.loads(module.state)
            self.assertEquals(state['attempts'], num_attempts)

    def _test_run_with_failure(self, task_class, expected_message):
        """Run a task and trigger an artificial failure with the given message."""
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'TestTaskFailure')
        self.assertEquals(output['message'], expected_message)

    def _test_run_with_long_error_msg(self, task_class):
        """
        Run with an error message that is so long it will require
        truncation (as well as the jettisoning of the traceback).
        """
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        expected_message = "x" * 1500
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        self.assertGreater(1023, len(entry.task_output))
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'TestTaskFailure')
        self.assertEquals(output['message'], expected_message[:len(output['message']) - 3] + "...")
        self.assertTrue('traceback' not in output)

    def _test_run_with_short_error_msg(self, task_class):
        """
        Run with an error message that is short enough to fit
        in the output, but long enough that the traceback won't.
        Confirm that the traceback is truncated.
        """
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        expected_message = "x" * 900
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        self.assertGreater(1023, len(entry.task_output))
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'TestTaskFailure')
        self.assertEquals(output['message'], expected_message)
        self.assertEquals(output['traceback'][-3:], "...")


class TestRescoreInstructorTask(TestInstructorTasks):
    """Tests problem-rescoring instructor task."""

    def test_rescore_missing_current_task(self):
        self._test_missing_current_task(rescore_problem)

    def test_rescore_undefined_course(self):
        self._test_undefined_course(rescore_problem)

    def test_rescore_undefined_problem(self):
        self._test_undefined_problem(rescore_problem)

    def test_rescore_with_no_state(self):
        self._test_run_with_no_state(rescore_problem, 'rescored')

    def test_rescore_with_failure(self):
        self._test_run_with_failure(rescore_problem, 'We expected this to fail')

    def test_rescore_with_long_error_msg(self):
        self._test_run_with_long_error_msg(rescore_problem)

    def test_rescore_with_short_error_msg(self):
        self._test_run_with_short_error_msg(rescore_problem)

    def test_rescoring_unrescorable(self):
        input_state = json.dumps({'done': True})
        num_students = 1
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        mock_instance = MagicMock()
        del mock_instance.rescore_problem
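        # Deleting the attribute from the MagicMock makes
        # hasattr(instance, 'rescore_problem') evaluate to False, so the
        # helper should conclude the module cannot be rescored.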
        with patch('instructor_task.tasks_helper.get_module_for_descriptor_internal') as mock_get_module:
            mock_get_module.return_value = mock_instance
            with self.assertRaises(UpdateProblemModuleStateError):
                self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
        # check values stored in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], "UpdateProblemModuleStateError")
        self.assertEquals(output['message'], "Specified problem does not support rescoring.")
        self.assertGreater(len(output['traceback']), 0)

    def test_rescoring_success(self):
        input_state = json.dumps({'done': True})
        num_students = 10
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        mock_instance = Mock()
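        # A 'success' value of 'correct' (or 'incorrect') is what a rescore
        # is expected to return when it completes successfully.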
        mock_instance.rescore_problem = Mock(return_value={'success': 'correct'})
        with patch('instructor_task.tasks_helper.get_module_for_descriptor_internal') as mock_get_module:
            mock_get_module.return_value = mock_instance
            self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
        # check return value
        entry = InstructorTask.objects.get(id=task_entry.id)
        output = json.loads(entry.task_output)
        self.assertEquals(output.get('attempted'), num_students)
        self.assertEquals(output.get('succeeded'), num_students)
        self.assertEquals(output.get('total'), num_students)
        self.assertEquals(output.get('action_name'), 'rescored')
        self.assertGreater(output.get('duration_ms'), 0)

    def test_rescoring_bad_result(self):
        # Confirm that rescoring does not succeed if "success" key is not an expected value.
        input_state = json.dumps({'done': True})
        num_students = 10
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        mock_instance = Mock()
        mock_instance.rescore_problem = Mock(return_value={'success': 'bogus'})
        with patch('instructor_task.tasks_helper.get_module_for_descriptor_internal') as mock_get_module:
            mock_get_module.return_value = mock_instance
            self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
        # check return value
        entry = InstructorTask.objects.get(id=task_entry.id)
        output = json.loads(entry.task_output)
        self.assertEquals(output.get('attempted'), num_students)
        self.assertEquals(output.get('succeeded'), 0)
        self.assertEquals(output.get('total'), num_students)
        self.assertEquals(output.get('action_name'), 'rescored')
        self.assertGreater(output.get('duration_ms'), 0)

    def test_rescoring_missing_result(self):
        # Confirm that rescoring does not succeed if "success" key is not returned.
        input_state = json.dumps({'done': True})
        num_students = 10
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        mock_instance = Mock()
        mock_instance.rescore_problem = Mock(return_value={'bogus': 'value'})
        with patch('instructor_task.tasks_helper.get_module_for_descriptor_internal') as mock_get_module:
            mock_get_module.return_value = mock_instance
            self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
        # check return value
        entry = InstructorTask.objects.get(id=task_entry.id)
        output = json.loads(entry.task_output)
        self.assertEquals(output.get('attempted'), num_students)
        self.assertEquals(output.get('succeeded'), 0)
        self.assertEquals(output.get('total'), num_students)
        self.assertEquals(output.get('action_name'), 'rescored')
        self.assertGreater(output.get('duration_ms'), 0)


class TestResetAttemptsInstructorTask(TestInstructorTasks):
    """Tests instructor task that resets problem attempts."""

    def test_reset_missing_current_task(self):
        self._test_missing_current_task(reset_problem_attempts)

    def test_reset_undefined_course(self):
        self._test_undefined_course(reset_problem_attempts)

    def test_reset_undefined_problem(self):
        self._test_undefined_problem(reset_problem_attempts)

    def test_reset_with_no_state(self):
        self._test_run_with_no_state(reset_problem_attempts, 'reset')

    def test_reset_with_failure(self):
        self._test_run_with_failure(reset_problem_attempts, 'We expected this to fail')

    def test_reset_with_long_error_msg(self):
        self._test_run_with_long_error_msg(reset_problem_attempts)

    def test_reset_with_short_error_msg(self):
        self._test_run_with_short_error_msg(reset_problem_attempts)

    def test_reset_with_some_state(self):
        initial_attempts = 3
        input_state = json.dumps({'attempts': initial_attempts})
        num_students = 10
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        self._assert_num_attempts(students, initial_attempts)
        # run the task
        self._test_run_with_task(reset_problem_attempts, 'reset', num_students)
        # check that entries were reset
        self._assert_num_attempts(students, 0)

    def test_reset_with_zero_attempts(self):
        initial_attempts = 0
        input_state = json.dumps({'attempts': initial_attempts})
        num_students = 10
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        self._assert_num_attempts(students, initial_attempts)
        # run the task
        self._test_run_with_task(reset_problem_attempts, 'reset', 0, expected_num_skipped=num_students)
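        # modules already at zero attempts are counted as skipped, not succeeded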
        # check that entries were reset
        self._assert_num_attempts(students, 0)

    def _test_reset_with_student(self, use_email):
        """Run a reset task for one student, with several StudentModules for the problem defined."""
        num_students = 10
        initial_attempts = 3
        input_state = json.dumps({'attempts': initial_attempts})
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        for student in students:
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.location)
            state = json.loads(module.state)
            self.assertEquals(state['attempts'], initial_attempts)

        if use_email:
            student_ident = students[3].email
        else:
            student_ident = students[3].username
        task_entry = self._create_input_entry(student_ident)

        status = self._run_task_with_mock_celery(reset_problem_attempts, task_entry.id, task_entry.task_id)
        # check return value
        self.assertEquals(status.get('attempted'), 1)
        self.assertEquals(status.get('succeeded'), 1)
        self.assertEquals(status.get('total'), 1)
        self.assertEquals(status.get('action_name'), 'reset')
        self.assertGreater(status.get('duration_ms'), 0)

        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(json.loads(entry.task_output), status)
        self.assertEquals(entry.task_state, SUCCESS)
        # check that the correct entry was reset
        for index, student in enumerate(students):
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.location)
            state = json.loads(module.state)
            if index == 3:
                self.assertEquals(state['attempts'], 0)
            else:
                self.assertEquals(state['attempts'], initial_attempts)

    def test_reset_with_student_username(self):
        self._test_reset_with_student(False)

    def test_reset_with_student_email(self):
        self._test_reset_with_student(True)


class TestDeleteStateInstructorTask(TestInstructorTasks):
    """Tests instructor task that deletes problem state."""

    def test_delete_missing_current_task(self):
        self._test_missing_current_task(delete_problem_state)

    def test_delete_undefined_course(self):
        self._test_undefined_course(delete_problem_state)

    def test_delete_undefined_problem(self):
        self._test_undefined_problem(delete_problem_state)

    def test_delete_with_no_state(self):
        self._test_run_with_no_state(delete_problem_state, 'deleted')

    def test_delete_with_failure(self):
        self._test_run_with_failure(delete_problem_state, 'We expected this to fail')

    def test_delete_with_long_error_msg(self):
        self._test_run_with_long_error_msg(delete_problem_state)

    def test_delete_with_short_error_msg(self):
        self._test_run_with_short_error_msg(delete_problem_state)

    def test_delete_with_some_state(self):
        # This will create StudentModule entries -- we don't have to worry about
        # the state inside them.
        num_students = 10
        students = self._create_students_with_state(num_students)
        # check that entries were created correctly
        for student in students:
            StudentModule.objects.get(course_id=self.course.id,
                                      student=student,
                                      module_state_key=self.location)
        self._test_run_with_task(delete_problem_state, 'deleted', num_students)
        # confirm that no state can be found anymore:
        for student in students:
            with self.assertRaises(StudentModule.DoesNotExist):
                StudentModule.objects.get(course_id=self.course.id,
                                          student=student,
                                          module_state_key=self.location)


class TestCertificateGenerationInstructorTask(TestInstructorTasks):
    """Tests instructor task that generates student certificates."""

    def test_generate_certificates_missing_current_task(self):
        """
        Test that an error is raised when the certificate generation task is run without a current task.
        """
        self._test_missing_current_task(generate_certificates)

    def test_generate_certificates_task_run(self):
        """
        Test that the certificate generation task runs without any errors.
        """
        self._test_run_with_task(
            generate_certificates,
            'certificates generated',
            0,
            0,
            expected_attempted=1,
            expected_total=1
        )