"""
Unit tests for LMS instructor-initiated background tasks.

Runs tasks on answers to course problems to validate that code
paths actually work.

"""
import json
from uuid import uuid4
from unittest import skip

from mock import Mock, patch

from celery.states import SUCCESS, FAILURE

from xmodule.modulestore.exceptions import ItemNotFoundError

from courseware.model_data import StudentModule
from courseware.tests.factories import StudentModuleFactory
from student.tests.factories import UserFactory

from instructor_task.models import InstructorTask
from instructor_task.tests.test_base import InstructorTaskModuleTestCase
from instructor_task.tests.factories import InstructorTaskFactory
from instructor_task.tasks import rescore_problem, reset_problem_attempts, delete_problem_state
from instructor_task.tasks_helper import UpdateProblemModuleStateError, update_problem_module_state


PROBLEM_URL_NAME = "test_urlname"


class TestTaskFailure(Exception):
    pass


class TestInstructorTasks(InstructorTaskModuleTestCase):
    def setUp(self):
        super(TestInstructorTasks, self).setUp()
        self.initialize_course()
        self.instructor = self.create_instructor('instructor')
        self.problem_url = InstructorTaskModuleTestCase.problem_location(PROBLEM_URL_NAME)

    def _create_input_entry(self, student_ident=None):
        """Creates a InstructorTask entry for testing."""
        task_id = str(uuid4())
        task_input = {'problem_url': self.problem_url}
        if student_ident is not None:
            task_input['student'] = student_ident

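        # The InstructorTask model stores task_input as a serialized JSON string.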
        instructor_task = InstructorTaskFactory.create(course_id=self.course.id,
                                                       requester=self.instructor,
                                                       task_input=json.dumps(task_input),
                                                       task_key='dummy value',
                                                       task_id=task_id)
        return instructor_task

    def _get_xmodule_instance_args(self):
        """
        Calculate dummy values for parameters needed for instantiating xmodule instances.
        """
        return {'xqueue_callback_url_prefix': 'dummy_value',
                'request_info': {},
                }

    def _run_task_with_mock_celery(self, task_function, entry_id, task_id, expected_failure_message=None):
        """Submit a task and mock how celery provides a current_task."""
        self.current_task = Mock()
        self.current_task.request = Mock()
        self.current_task.request.id = task_id
        self.current_task.update_state = Mock()
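        # Raising a side effect from update_state makes the task appear to
        # fail at its first progress update.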
        if expected_failure_message is not None:
            self.current_task.update_state.side_effect = TestTaskFailure(expected_failure_message)
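        # Patch the _get_current_task accessor in tasks_helper, rather than
        # celery's current_task directly, so the task picks up the mock above.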
        with patch('instructor_task.tasks_helper._get_current_task') as mock_get_task:
            mock_get_task.return_value = self.current_task
            return task_function(entry_id, self._get_xmodule_instance_args())

    def _test_missing_current_task(self, task_function):
        """Check that a task_function fails when celery doesn't provide a current_task."""
        task_entry = self._create_input_entry()
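        # Calling the task function directly, without mocking in a
        # current_task, should cause the expected failure.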
        with self.assertRaises(UpdateProblemModuleStateError):
            task_function(task_entry.id, self._get_xmodule_instance_args())

    def test_rescore_missing_current_task(self):
        self._test_missing_current_task(rescore_problem)

    def test_reset_missing_current_task(self):
        self._test_missing_current_task(reset_problem_attempts)

    def test_delete_missing_current_task(self):
        self._test_missing_current_task(delete_problem_state)

    def _test_undefined_problem(self, task_function):
        """Run with celery, but no problem defined."""
        task_entry = self._create_input_entry()
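        # No problem has been defined in the modulestore, so the lookup of
        # self.problem_url should fail with ItemNotFoundError.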
        with self.assertRaises(ItemNotFoundError):
            self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id)

    def test_rescore_undefined_problem(self):
        self._test_undefined_problem(rescore_problem)

    def test_reset_undefined_problem(self):
        self._test_undefined_problem(reset_problem_attempts)

    def test_delete_undefined_problem(self):
        self._test_undefined_problem(delete_problem_state)

    def _test_run_with_task(self, task_function, action_name, expected_num_updated):
        """Run a task and check the number of StudentModules processed."""
        task_entry = self._create_input_entry()
        status = self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id)
        # check return value
        self.assertEquals(status.get('attempted'), expected_num_updated)
        self.assertEquals(status.get('updated'), expected_num_updated)
        self.assertEquals(status.get('total'), expected_num_updated)
        self.assertEquals(status.get('action_name'), action_name)
        self.assertGreater(status.get('duration_ms'), 0)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(json.loads(entry.task_output), status)
        self.assertEquals(entry.task_state, SUCCESS)

    def _test_run_with_no_state(self, task_function, action_name):
        """Run with no StudentModules defined for the current problem."""
        self.define_option_problem(PROBLEM_URL_NAME)
        self._test_run_with_task(task_function, action_name, 0)

    def test_rescore_with_no_state(self):
        self._test_run_with_no_state(rescore_problem, 'rescored')

    def test_reset_with_no_state(self):
        self._test_run_with_no_state(reset_problem_attempts, 'reset')

    def test_delete_with_no_state(self):
        self._test_run_with_no_state(delete_problem_state, 'deleted')

    def _create_students_with_state(self, num_students, state=None):
        """Create students, a problem, and StudentModule objects for testing"""
        self.define_option_problem(PROBLEM_URL_NAME)
        students = [
            UserFactory.create(username='robot%d' % i, email='robot+test+%d@edx.org' % i)
            for i in xrange(num_students)
        ]
        for student in students:
            StudentModuleFactory.create(course_id=self.course.id,
                                        module_state_key=self.problem_url,
                                        student=student,
                                        state=state)
        return students

    def _assert_num_attempts(self, students, num_attempts):
        """Check the number attempts for all students is the same"""
        for student in students:
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.problem_url)
            state = json.loads(module.state)
            self.assertEquals(state['attempts'], num_attempts)

    def test_reset_with_some_state(self):
        initial_attempts = 3
        input_state = json.dumps({'attempts': initial_attempts})
        num_students = 10
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        self._assert_num_attempts(students, initial_attempts)
        # run the task
        self._test_run_with_task(reset_problem_attempts, 'reset', num_students)
        # check that entries were reset
        self._assert_num_attempts(students, 0)

    def test_delete_with_some_state(self):
        # This will create StudentModule entries -- we don't have to worry about
        # the state inside them.
        num_students = 10
        students = self._create_students_with_state(num_students)
        # check that entries were created correctly
        for student in students:
            StudentModule.objects.get(course_id=self.course.id,
                                      student=student,
                                      module_state_key=self.problem_url)
        self._test_run_with_task(delete_problem_state, 'deleted', num_students)
        # confirm that no state can be found anymore:
        for student in students:
            with self.assertRaises(StudentModule.DoesNotExist):
                StudentModule.objects.get(course_id=self.course.id,
                                          student=student,
                                          module_state_key=self.problem_url)

    def _test_reset_with_student(self, use_email):
        """Run a reset task for one student, with several StudentModules for the problem defined."""
        num_students = 10
        initial_attempts = 3
        input_state = json.dumps({'attempts': initial_attempts})
        students = self._create_students_with_state(num_students, input_state)
        # check that entries were set correctly
        for student in students:
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.problem_url)
            state = json.loads(module.state)
            self.assertEquals(state['attempts'], initial_attempts)

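        # The task input may identify the student by username or by email;
        # use_email selects which form this run exercises.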
        if use_email:
            student_ident = students[3].email
        else:
            student_ident = students[3].username
        task_entry = self._create_input_entry(student_ident)

        status = self._run_task_with_mock_celery(reset_problem_attempts, task_entry.id, task_entry.task_id)
        # check return value
        self.assertEquals(status.get('attempted'), 1)
        self.assertEquals(status.get('updated'), 1)
        self.assertEquals(status.get('total'), 1)
        self.assertEquals(status.get('action_name'), 'reset')
        self.assertGreater(status.get('duration_ms'), 0)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(json.loads(entry.task_output), status)
        self.assertEquals(entry.task_state, SUCCESS)
        # check that the correct entry was reset
        for index, student in enumerate(students):
            module = StudentModule.objects.get(course_id=self.course.id,
                                               student=student,
                                               module_state_key=self.problem_url)
            state = json.loads(module.state)
            if index == 3:
                self.assertEquals(state['attempts'], 0)
            else:
                self.assertEquals(state['attempts'], initial_attempts)

    def test_reset_with_student_username(self):
        self._test_reset_with_student(False)

    def test_reset_with_student_email(self):
        self._test_reset_with_student(True)

    def _test_run_with_failure(self, task_function, expected_message):
        """Run a task and trigger an artificial failure with give message."""
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'TestTaskFailure')
        self.assertEquals(output['message'], expected_message)

    def test_rescore_with_failure(self):
        self._test_run_with_failure(rescore_problem, 'We expected this to fail')

    def test_reset_with_failure(self):
        self._test_run_with_failure(reset_problem_attempts, 'We expected this to fail')

    def test_delete_with_failure(self):
        self._test_run_with_failure(delete_problem_state, 'We expected this to fail')

    def _test_run_with_long_error_msg(self, task_function):
        """
        Run with an error message that is so long it will require
        truncation (as well as the jettisoning of the traceback).
        """
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        expected_message = "x" * 1500
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        self.assertGreater(1023, len(entry.task_output))
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'TestTaskFailure')
        self.assertEquals(output['message'], expected_message[:len(output['message']) - 3] + "...")
        self.assertTrue('traceback' not in output)

    def test_rescore_with_long_error_msg(self):
        self._test_run_with_long_error_msg(rescore_problem)

    def test_reset_with_long_error_msg(self):
        self._test_run_with_long_error_msg(reset_problem_attempts)

    def test_delete_with_long_error_msg(self):
        self._test_run_with_long_error_msg(delete_problem_state)

    def _test_run_with_short_error_msg(self, task_function):
        """
        Run with an error message that is short enough to fit
        in the output, but long enough that the traceback won't.
        Confirm that the traceback is truncated.
        """
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        expected_message = "x" * 900
        with self.assertRaises(TestTaskFailure):
            self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id, expected_message)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        self.assertGreater(1023, len(entry.task_output))
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'TestTaskFailure')
        self.assertEquals(output['message'], expected_message)
        self.assertEquals(output['traceback'][-3:], "...")

    def test_rescore_with_short_error_msg(self):
        self._test_run_with_short_error_msg(rescore_problem)

    def test_reset_with_short_error_msg(self):
        self._test_run_with_short_error_msg(reset_problem_attempts)

    def test_delete_with_short_error_msg(self):
        self._test_run_with_short_error_msg(delete_problem_state)

    def test_successful_result_too_long(self):
        # While we don't expect the existing tasks to generate output that is
        # too long, we can test that the framework handles such an occurrence.
        task_entry = self._create_input_entry()
        self.define_option_problem(PROBLEM_URL_NAME)
        action_name = 'x' * 1000
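        # An action_name this long should push the serialized task output past
        # the framework's size limit, producing the ValueError checked below.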
        update_fcn = lambda _module_descriptor, _student_module, _xmodule_instance_args: True
        task_function = (lambda entry_id, xmodule_instance_args:
                         update_problem_module_state(entry_id,
                                                     update_fcn, action_name, filter_fcn=None,
                                                     xmodule_instance_args=None))

        with self.assertRaises(ValueError):
            self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id)
        # compare with entry in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        self.assertEquals(entry.task_state, FAILURE)
        self.assertGreater(1023, len(entry.task_output))
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], 'ValueError')
        self.assertTrue("Length of task output is too long" in output['message'])
        self.assertTrue('traceback' not in output)

    @skip("Requires Mako templates to be initialized so that XModule creation works.")
    def test_rescoring_unrescorable(self):
        # TODO: this test needs to have Mako templates initialized
        # to make sure that the creation of an XModule works.
        input_state = json.dumps({'done': True})
        num_students = 1
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        with self.assertRaises(UpdateProblemModuleStateError):
            self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
        # check values stored in table:
        entry = InstructorTask.objects.get(id=task_entry.id)
        output = json.loads(entry.task_output)
        self.assertEquals(output['exception'], "UpdateProblemModuleStateError")
        self.assertEquals(output['message'], "Specified problem does not support rescoring.")
        self.assertGreater(len(output['traceback']), 0)

    @skip("Requires Mako templates to be initialized so that XModule creation works.")
    def test_rescoring_success(self):
        # TODO: this test needs to have Mako templates initialized
        # to make sure that the creation of an XModule works.
        input_state = json.dumps({'done': True})
        num_students = 10
        self._create_students_with_state(num_students, input_state)
        task_entry = self._create_input_entry()
        mock_instance = Mock()
        mock_instance.rescore_problem = Mock(return_value={'success': 'correct'})
        # TODO: figure out why this mock is not working....
        with patch('courseware.module_render.get_module_for_descriptor_internal') as mock_get_module:
            mock_get_module.return_value = mock_instance
            self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
        # check return value
        entry = InstructorTask.objects.get(id=task_entry.id)
        output = json.loads(entry.task_output)
        self.assertEquals(output.get('attempted'), num_students)
        self.assertEquals(output.get('updated'), num_students)
        self.assertEquals(output.get('total'), num_students)
        self.assertEquals(output.get('action_name'), 'rescored')
        self.assertGreater(output.get('duration_ms'), 0)