"""
Unit tests for LMS instructor-initiated background tasks.

Runs tasks on answers to course problems to validate that code
paths actually work.
"""

import json
from functools import partial
from uuid import uuid4

import ddt
from celery.states import FAILURE, SUCCESS
from django.utils.translation import ugettext_noop
from mock import MagicMock, Mock, patch
from nose.plugins.attrib import attr
from opaque_keys.edx.locations import i4xEncoder

from courseware.models import StudentModule
from courseware.tests.factories import StudentModuleFactory
from lms.djangoapps.instructor_task.exceptions import UpdateProblemModuleStateError
from lms.djangoapps.instructor_task.models import InstructorTask
from lms.djangoapps.instructor_task.tasks import (
    delete_problem_state,
    export_ora2_data,
    generate_certificates,
    rescore_problem,
    reset_problem_attempts
)
from lms.djangoapps.instructor_task.tasks_helper.misc import upload_ora2_data
from lms.djangoapps.instructor_task.tests.factories import InstructorTaskFactory
from lms.djangoapps.instructor_task.tests.test_base import InstructorTaskModuleTestCase
from student.tests.factories import CourseEnrollmentFactory, UserFactory
from xmodule.modulestore.exceptions import ItemNotFoundError

# url_name of the problem created (and targeted) by every test in this module.
PROBLEM_URL_NAME = "test_urlname"


class TestTaskFailure(Exception):
    """
    An example exception to indicate failure of a mocked task.
    """
    # NOTE: the redundant `pass` after the docstring was removed; the
    # docstring alone is a sufficient class body.
class TestInstructorTasks(InstructorTaskModuleTestCase):
    """
    Base class of helpers for instructor-task tests; ensures tasks behave as expected.

    Subclasses call the `_test_*` helpers with a specific task function
    (rescore, reset attempts, delete state, ...).
    """

    def setUp(self):
        super(TestInstructorTasks, self).setUp()
        self.initialize_course()
        self.instructor = self.create_instructor('instructor')
        self.location = self.problem_location(PROBLEM_URL_NAME)

    def _create_input_entry(self, student_ident=None, use_problem_url=True, course_id=None, only_if_higher=False):
        """
        Create an InstructorTask entry for testing.

        Arguments:
            student_ident: username or email identifying a single student, or
                None to target all students.
            use_problem_url: if True, include the test problem's location in
                the task input.
            course_id: course to attach the entry to; defaults to the test course.
            only_if_higher: passed through in the task input (used by rescoring).
        """
        task_id = str(uuid4())
        task_input = {'only_if_higher': only_if_higher}
        if use_problem_url:
            task_input['problem_url'] = self.location
        if student_ident is not None:
            task_input['student'] = student_ident

        course_id = course_id or self.course.id
        instructor_task = InstructorTaskFactory.create(course_id=course_id,
                                                       requester=self.instructor,
                                                       task_input=json.dumps(task_input, cls=i4xEncoder),
                                                       task_key='dummy value',
                                                       task_id=task_id)
        return instructor_task

    def _get_xmodule_instance_args(self):
        """
        Calculate dummy values for parameters needed for instantiating xmodule instances.
        """
        return {
            'xqueue_callback_url_prefix': 'dummy_value',
            'request_info': {
                'username': 'dummy_username',
                'user_id': 'dummy_id',
            },
        }

    def _run_task_with_mock_celery(self, task_class, entry_id, task_id, expected_failure_message=None):
        """Submit a task and mock how celery provides a current_task."""
        self.current_task = Mock()
        self.current_task.request = Mock()
        self.current_task.request.id = task_id
        self.current_task.update_state = Mock()
        if expected_failure_message is not None:
            # Make the task's first status update raise, simulating a failing task.
            self.current_task.update_state.side_effect = TestTaskFailure(expected_failure_message)
        task_args = [entry_id, self._get_xmodule_instance_args()]

        with patch('lms.djangoapps.instructor_task.tasks_helper.runner._get_current_task') as mock_get_task:
            mock_get_task.return_value = self.current_task
            return task_class.apply(task_args, task_id=task_id).get()

    def _test_missing_current_task(self, task_class):
        """Check that a task_class fails when celery doesn't provide a current_task."""
        task_entry = self._create_input_entry()
        with self.assertRaises(ValueError):
            task_class(task_entry.id, self._get_xmodule_instance_args())

    def _test_undefined_course(self, task_class):
        """Run with celery, but with no course defined."""
        task_entry = self._create_input_entry(course_id="bogus/course/id")
        with self.assertRaises(ItemNotFoundError):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)

    def _test_undefined_problem(self, task_class):
        """Run with celery, but no problem defined."""
        task_entry = self._create_input_entry()
        with self.assertRaises(ItemNotFoundError):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)

    def _test_run_with_task(self, task_class, action_name, expected_num_succeeded,
                            expected_num_skipped=0, expected_attempted=0, expected_total=0):
        """Run a task and check the number of StudentModules processed."""
        task_entry = self._create_input_entry()
        status = self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id)
        # Unless explicitly overridden, attempted/total default to succeeded + skipped.
        expected_attempted = expected_attempted \
            if expected_attempted else expected_num_succeeded + expected_num_skipped
        expected_total = expected_total \
            if expected_total else expected_num_succeeded + expected_num_skipped
        # check return value
        self.assertEqual(status.get('attempted'), expected_attempted)
        self.assertEqual(status.get('succeeded'), expected_num_succeeded)
        self.assertEqual(status.get('skipped'), expected_num_skipped)
        self.assertEqual(status.get('total'), expected_total)
        self.assertEqual(status.get('action_name'), action_name)
        self.assertGreater(status.get('duration_ms'), 0)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEqual(json.loads(entry.task_output), status)
        self.assertEqual(entry.task_state, SUCCESS)

    def _test_run_with_no_state(self, task_class, action_name):
        """Run with no StudentModules defined for the current problem."""
        self.define_option_problem(PROBLEM_URL_NAME)
        self._test_run_with_task(task_class, action_name, 0)

    def _create_students_with_state(self, num_students, state=None, grade=0, max_grade=1):
        """Create students, a problem, and StudentModule objects for testing"""
        self.define_option_problem(PROBLEM_URL_NAME)
        students = [
            UserFactory.create(username='robot%d' % i, email='robot+test+%d@edx.org' % i)
            for i in xrange(num_students)
        ]
        for student in students:
            CourseEnrollmentFactory.create(course_id=self.course.id, user=student)
            StudentModuleFactory.create(course_id=self.course.id,
                                        module_state_key=self.location,
                                        student=student,
                                        grade=grade,
                                        max_grade=max_grade,
                                        state=state)
        return students

    def _assert_num_attempts(self, students, num_attempts):
        """Check that the number of attempts is the same for all students."""
        for student in students:
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.location)
            state = json.loads(module.state)
            self.assertEqual(state['attempts'], num_attempts)

    def _test_run_with_failure(self, task_class, expected_message):
        """Run a task and trigger an artificial failure with the given message."""
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEqual(entry.task_state, FAILURE)
        output = json.loads(entry.task_output)
        self.assertEqual(output['exception'], 'TestTaskFailure')
        self.assertEqual(output['message'], expected_message)

    def _test_run_with_long_error_msg(self, task_class):
        """
        Run with an error message that is so long it will require
        truncation (as well as the jettisoning of the traceback).
        """
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        expected_message = "x" * 1500
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEqual(entry.task_state, FAILURE)
        # The stored output must have been truncated to fit the column.
        # (Same check as the original `assertGreater(1023, len(...))`,
        # written with the measured value first for a readable failure.)
        self.assertLess(len(entry.task_output), 1023)
        output = json.loads(entry.task_output)
        self.assertEqual(output['exception'], 'TestTaskFailure')
        # The message is truncated and suffixed with an ellipsis.
        self.assertEqual(output['message'], expected_message[:len(output['message']) - 3] + "...")
        # The traceback is dropped entirely to make room for the message.
        self.assertNotIn('traceback', output)

    def _test_run_with_short_error_msg(self, task_class):
        """
        Run with an error message that is short enough to fit
        in the output, but long enough that the traceback won't.
        Confirm that the traceback is truncated.
        """
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        expected_message = "x" * 900
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEqual(entry.task_state, FAILURE)
        self.assertLess(len(entry.task_output), 1023)
        output = json.loads(entry.task_output)
        self.assertEqual(output['exception'], 'TestTaskFailure')
        # The message fits unmodified; only the traceback is truncated.
        self.assertEqual(output['message'], expected_message)
        self.assertEqual(output['traceback'][-3:], "...")

@attr(shard=3)
@ddt.ddt
class TestRescoreInstructorTask(TestInstructorTasks):
    """Tests problem-rescoring instructor task."""

    def assert_task_output(self, output, **expected_output):
        """
        Check & compare the task's recorded output against expected values.

        Expected keyword arguments: total, attempted, succeeded, skipped,
        failed, action_name, and optionally duration_ms (lower bound).
        """
        self.assertEqual(output.get('total'), expected_output.get('total'))
        self.assertEqual(output.get('attempted'), expected_output.get('attempted'))
        self.assertEqual(output.get('succeeded'), expected_output.get('succeeded'))
        self.assertEqual(output.get('skipped'), expected_output.get('skipped'))
        self.assertEqual(output.get('failed'), expected_output.get('failed'))
        self.assertEqual(output.get('action_name'), expected_output.get('action_name'))
        self.assertGreater(output.get('duration_ms'), expected_output.get('duration_ms', 0))

    def get_task_output(self, task_id):
        """Get and load instructor task output."""
        entry = InstructorTask.objects.get(id=task_id)
        return json.loads(entry.task_output)

    def test_rescore_missing_current_task(self):
        self._test_missing_current_task(rescore_problem)

    def test_rescore_undefined_course(self):
        self._test_undefined_course(rescore_problem)

    def test_rescore_undefined_problem(self):
        self._test_undefined_problem(rescore_problem)

    def test_rescore_with_no_state(self):
        self._test_run_with_no_state(rescore_problem, 'rescored')

    def test_rescore_with_failure(self):
        self._test_run_with_failure(rescore_problem, 'We expected this to fail')

    def test_rescore_with_long_error_msg(self):
        self._test_run_with_long_error_msg(rescore_problem)

    def test_rescore_with_short_error_msg(self):
        self._test_run_with_short_error_msg(rescore_problem)

    def test_rescoring_unrescorable(self):
        """
        A module exposing neither `rescore_problem` nor `rescore` cannot be
        rescored, and the task records the error.
        """
        input_state = json.dumps({'done': True})
        num_students = 1
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        mock_instance = MagicMock()
        # Deleting the attributes makes hasattr() checks on the mock fail,
        # simulating a module without rescore support.
        del mock_instance.rescore_problem
        del mock_instance.rescore
        with patch('lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal') as mock_get_module:
            mock_get_module.return_value = mock_instance
            with self.assertRaises(UpdateProblemModuleStateError):
                self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
        # check values stored in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        output = json.loads(entry.task_output)
        self.assertEqual(output['exception'], "UpdateProblemModuleStateError")
        self.assertEqual(output['message'], "Specified problem does not support rescoring.")
        self.assertGreater(len(output['traceback']), 0)

    def test_rescoring_unaccessable(self):
        """
        Rescoring a problem for all students fails when a student has answered
        a problem to which the student no longer has access.
        """
        input_state = json.dumps({'done': True})
        num_students = 1
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        # A module of None simulates the student having no access to the problem.
        with patch('lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal', return_value=None):
            self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)

        self.assert_task_output(
            output=self.get_task_output(task_entry.id),
            total=num_students,
            attempted=num_students,
            succeeded=0,
            skipped=0,
            failed=num_students,
            action_name='rescored'
        )

    def test_rescoring_success(self):
        """
        Rescoring a problem in a course succeeds for all students.
        """
        mock_instance = MagicMock()
        # Direct attribute access replaces the original's needless
        # getattr(mock_instance, 'rescore') with a constant name.
        mock_instance.rescore.return_value = None
        mock_instance.has_submitted_answer.return_value = True
        del mock_instance.done  # old CAPA code used to use this value so we delete it here to be sure

        num_students = 10
        self._create_students_with_state(num_students)
        task_entry = self._create_input_entry()
        with patch(
                'lms.djangoapps.instructor_task.tasks_helper.module_state.get_module_for_descriptor_internal'
        ) as mock_get_module:
            mock_get_module.return_value = mock_instance
            self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)

        self.assert_task_output(
            output=self.get_task_output(task_entry.id),
            total=num_students,
            attempted=num_students,
            succeeded=num_students,
            skipped=0,
            failed=0,
            action_name='rescored'
        )
@attr(shard=3)
class TestResetAttemptsInstructorTask(TestInstructorTasks):
    """Tests instructor task that resets problem attempts."""

    def test_reset_missing_current_task(self):
        self._test_missing_current_task(reset_problem_attempts)

    def test_reset_undefined_course(self):
        self._test_undefined_course(reset_problem_attempts)

    def test_reset_undefined_problem(self):
        self._test_undefined_problem(reset_problem_attempts)

    def test_reset_with_no_state(self):
        self._test_run_with_no_state(reset_problem_attempts, 'reset')

    def test_reset_with_failure(self):
        self._test_run_with_failure(reset_problem_attempts, 'We expected this to fail')

    def test_reset_with_long_error_msg(self):
        self._test_run_with_long_error_msg(reset_problem_attempts)

    def test_reset_with_short_error_msg(self):
        self._test_run_with_short_error_msg(reset_problem_attempts)

    def test_reset_with_some_state(self):
        initial_attempts = 3
        input_state = json.dumps({'attempts': initial_attempts})
        num_students = 10
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        self._assert_num_attempts(students, initial_attempts)
        # run the task
        self._test_run_with_task(reset_problem_attempts, 'reset', num_students)
        # check that entries were reset
        self._assert_num_attempts(students, 0)

    def test_reset_with_zero_attempts(self):
        initial_attempts = 0
        input_state = json.dumps({'attempts': initial_attempts})
        num_students = 10
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        self._assert_num_attempts(students, initial_attempts)
        # run the task: everyone is skipped since attempts are already zero
        self._test_run_with_task(reset_problem_attempts, 'reset', 0, expected_num_skipped=num_students)
        # check that entries were reset
        self._assert_num_attempts(students, 0)

    def _test_reset_with_student(self, use_email):
        """Run a reset task for one student, with several StudentModules for the problem defined."""
        num_students = 10
        initial_attempts = 3
        input_state = json.dumps({'attempts': initial_attempts})
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        for student in students:
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.location)
            state = json.loads(module.state)
            self.assertEqual(state['attempts'], initial_attempts)

        # target the fourth student, identified by email or username
        if use_email:
            student_ident = students[3].email
        else:
            student_ident = students[3].username
        task_entry = self._create_input_entry(student_ident)

        status = self._run_task_with_mock_celery(reset_problem_attempts, task_entry.id, task_entry.task_id)
        # check return value: exactly one student processed
        self.assertEqual(status.get('attempted'), 1)
        self.assertEqual(status.get('succeeded'), 1)
        self.assertEqual(status.get('total'), 1)
        self.assertEqual(status.get('action_name'), 'reset')
        self.assertGreater(status.get('duration_ms'), 0)

        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEqual(json.loads(entry.task_output), status)
        self.assertEqual(entry.task_state, SUCCESS)
        # check that only the targeted student's entry was reset
        for index, student in enumerate(students):
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.location)
            state = json.loads(module.state)
            if index == 3:
                self.assertEqual(state['attempts'], 0)
            else:
                self.assertEqual(state['attempts'], initial_attempts)

    def test_reset_with_student_username(self):
        self._test_reset_with_student(False)

    def test_reset_with_student_email(self):
        self._test_reset_with_student(True)

@attr(shard=3)
class TestDeleteStateInstructorTask(TestInstructorTasks):
    """Tests for the instructor task that deletes students' problem state."""

    def _get_state_entry(self, student):
        """Fetch the StudentModule row for `student` on the test problem."""
        return StudentModule.objects.get(course_id=self.course.id,
                                         student=student,
                                         module_state_key=self.location)

    def test_delete_missing_current_task(self):
        self._test_missing_current_task(delete_problem_state)

    def test_delete_undefined_course(self):
        self._test_undefined_course(delete_problem_state)

    def test_delete_undefined_problem(self):
        self._test_undefined_problem(delete_problem_state)

    def test_delete_with_no_state(self):
        self._test_run_with_no_state(delete_problem_state, 'deleted')

    def test_delete_with_failure(self):
        self._test_run_with_failure(delete_problem_state, 'We expected this to fail')

    def test_delete_with_long_error_msg(self):
        self._test_run_with_long_error_msg(delete_problem_state)

    def test_delete_with_short_error_msg(self):
        self._test_run_with_short_error_msg(delete_problem_state)

    def test_delete_with_some_state(self):
        # The factory creates StudentModule entries; their internal state
        # doesn't matter for deletion.
        enrolled = self._create_students_with_state(10)
        # every student has a state row before the task runs...
        for student in enrolled:
            self._get_state_entry(student)
        self._test_run_with_task(delete_problem_state, 'deleted', len(enrolled))
        # ...and none afterwards
        for student in enrolled:
            with self.assertRaises(StudentModule.DoesNotExist):
                self._get_state_entry(student)


# NOTE(review): class name is missing the "I" of "InstructorTask"; kept
# as-is so tooling that selects tests by name keeps working.
class TestCertificateGenerationnstructorTask(TestInstructorTasks):
    """Tests instructor task that generates student certificates."""

    def test_generate_certificates_missing_current_task(self):
        """
        Test error is raised when certificate generation task run without current task
        """
        self._test_missing_current_task(generate_certificates)

    def test_generate_certificates_task_run(self):
        """
        Test certificate generation task run without any errors
        """
        self._test_run_with_task(
            generate_certificates,
            'certificates generated',
            expected_num_succeeded=0,
            expected_num_skipped=0,
            expected_attempted=1,
            expected_total=1,
        )


class TestOra2ResponsesInstructorTask(TestInstructorTasks):
    """Tests instructor task that fetches ora2 response data."""

    def test_ora2_missing_current_task(self):
        self._test_missing_current_task(export_ora2_data)

    def test_ora2_with_failure(self):
        self._test_run_with_failure(export_ora2_data, 'We expected this to fail')

    def test_ora2_with_long_error_msg(self):
        self._test_run_with_long_error_msg(export_ora2_data)

    def test_ora2_with_short_error_msg(self):
        self._test_run_with_short_error_msg(export_ora2_data)

    def test_ora2_runs_task(self):
        """
        The ora2 export task delegates to run_main_task with the right arguments.
        """
        task_entry = self._create_input_entry()
        task_xmodule_args = self._get_xmodule_instance_args()

        with patch('lms.djangoapps.instructor_task.tasks.run_main_task') as mock_main_task:
            export_ora2_data(task_entry.id, task_xmodule_args)

            action_name = ugettext_noop('generated')

            # BUG FIX: the original used `assert_called_once_with_args`, which
            # is not a real Mock method -- Mock auto-created it, so the test
            # asserted nothing.  Also, functools.partial objects never compare
            # equal, so the partial's components are checked individually.
            self.assertEqual(mock_main_task.call_count, 1)
            entry_id_arg, task_fn_arg, action_name_arg = mock_main_task.call_args[0]
            self.assertEqual(entry_id_arg, task_entry.id)
            self.assertEqual(task_fn_arg.func, upload_ora2_data)
            self.assertEqual(task_fn_arg.args, (task_xmodule_args,))
            self.assertEqual(action_name_arg, action_name)