Commit 332a4405 by Brian Wilson

Enable per-student background tasks.

parent 4d2183d7
@@ -2,8 +2,6 @@ import hashlib
import json
import logging
from django.db import transaction
-from celery.result import AsyncResult
-from celery.states import READY_STATES, SUCCESS, FAILURE, REVOKED
@@ -30,7 +28,6 @@ def _task_is_running(course_id, task_type, task_key):
return len(runningTasks) > 0
-@transaction.autocommit
def _reserve_task(course_id, task_type, task_key, task_input, requester):
"""
Creates a database entry to indicate that a task is in progress.
@@ -39,9 +36,9 @@ def _reserve_task(course_id, task_type, task_key, task_input, requester):
Includes the creation of an arbitrary value for task_id, to be
submitted with the task call to celery.
-Autocommit annotation makes sure the database entry is committed.
+The InstructorTask.create method makes sure the InstructorTask entry is committed.
When called from any view that is wrapped by TransactionMiddleware,
-and thus in a "commit-on-success" transaction, this autocommit here
+and thus in a "commit-on-success" transaction, an autocommit buried within here
will cause any pending transaction to be committed by a successful
save here. Any future database operations will take place in a
separate transaction.
......
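For orientation, the guard whose last line appears above (return len(runningTasks) > 0) checks whether an equivalent task is already in flight before a new one is reserved, using the READY_STATES import that this commit removes from this module. A minimal sketch of that check; the filter field names are assumptions for illustration, not taken from this diff:

from celery.states import READY_STATES

def _task_is_running(course_id, task_type, task_key):
    """Return True if a matching task has been submitted and is not yet done."""
    # Any matching row whose state is not in READY_STATES
    # (SUCCESS, FAILURE, REVOKED) still counts as running.
    runningTasks = InstructorTask.objects.filter(
        course_id=course_id, task_type=task_type, task_key=task_key
    ).exclude(task_state__in=READY_STATES)
    return len(runningTasks) > 0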
@@ -72,6 +72,16 @@ class InstructorTask(models.Model):
@classmethod
def create(cls, course_id, task_type, task_key, task_input, requester):
"""
Create an instance of InstructorTask.
+The InstructorTask.save_now method makes sure the InstructorTask entry is committed.
+When called from any view that is wrapped by TransactionMiddleware,
+and thus in a "commit-on-success" transaction, an autocommit buried within here
+will cause any pending transaction to be committed by a successful
+save here. Any future database operations will take place in a
+separate transaction.
"""
+# create the task_id here, and pass it into celery:
+task_id = str(uuid4())
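The point of generating the id here is that celery accepts a caller-supplied task id, so the database row can record it before the task is ever submitted. A hedged sketch of the submission side; the argument wiring is an assumption, and the free variables stand for the values named in the signatures above:

from uuid import uuid4

# InstructorTask.create() stores a pre-generated UUID on the new row:
entry = InstructorTask.create(course_id, 'rescore_problem', task_key,
                              task_input, requester)
# The caller then hands that same id to celery, so the database row and
# the celery task can be matched up later when status is polled.
xmodule_instance_args = {}  # placeholder; the real args are built elsewhere
result = rescore_problem.apply_async((entry.id, xmodule_instance_args),
                                     task_id=entry.task_id)
assert result.id == entry.task_id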
@@ -99,7 +109,16 @@
@transaction.autocommit
def save_now(self):
"""Writes InstructorTask immediately, ensuring the transaction is committed."""
"""
Writes InstructorTask immediately, ensuring the transaction is committed.
Autocommit annotation makes sure the database entry is committed.
When called from any view that is wrapped by TransactionMiddleware,
and thus in a "commit-on-success" transaction, this autocommit here
will cause any pending transaction to be committed by a successful
save here. Any future database operations will take place in a
separate transaction.
"""
self.save()
@staticmethod
......
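The docstring above deserves a concrete illustration. Under Django's old transaction management, a view wrapped by TransactionMiddleware runs commit-on-success, so nothing is committed until the view returns; entering a method decorated with @transaction.autocommit forces any pending work to commit immediately. A minimal sketch of the behavior being relied on, with illustrative names and arguments:

from django.db import transaction

@transaction.commit_on_success      # effectively what TransactionMiddleware does
def submit_view(request, course_id):
    # illustrative call; the real arguments are built by the api layer
    entry = InstructorTask.create(course_id, 'rescore_problem', 'key',
                                  '{}', request.user)
    # By the time create() returns, the new row is already committed:
    # save_now()'s @transaction.autocommit decorator committed the
    # pending work, and later statements run in a separate transaction.
    return entry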
@@ -22,7 +22,7 @@ from instructor_task.tests.test_base import (InstructorTaskTestCase,
class InstructorTaskReportTest(InstructorTaskTestCase):
"""
-Tests API and view methods that involve the reporting of status for background tasks.
+Tests API methods that involve the reporting of status for background tasks.
"""
def test_get_running_instructor_tasks(self):
......
"""
-Integration Tests for LMS instructor-initiated background tasks
+Integration Tests for LMS instructor-initiated background tasks.
Runs tasks on answers to course problems to validate that code
paths actually work.
......
"""
-Unit tests for LMS instructor-initiated background tasks,
+Unit tests for LMS instructor-initiated background tasks.
Runs tasks on answers to course problems to validate that code
paths actually work.
@@ -7,6 +7,7 @@ paths actually work.
"""
import json
from uuid import uuid4
+from unittest import skip
from mock import Mock, patch
@@ -62,6 +63,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
}
def _run_task_with_mock_celery(self, task_function, entry_id, task_id, expected_failure_message=None):
"""Submit a task and mock how celery provides a current_task."""
self.current_task = Mock()
self.current_task.request = Mock()
self.current_task.request.id = task_id
@@ -73,7 +75,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
return task_function(entry_id, self._get_xmodule_instance_args())
def _test_missing_current_task(self, task_function):
-# run without (mock) Celery running
+"""Check that a task_function fails when celery doesn't provide a current_task."""
task_entry = self._create_input_entry()
with self.assertRaises(UpdateProblemModuleStateError):
task_function(task_entry.id, self._get_xmodule_instance_args())
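The lines elided from _run_task_with_mock_celery above presumably patch whatever accessor the task code uses to reach celery's current_task; the real patch target is not shown in this diff, so the following is only a hypothetical reconstruction of the pattern:

from mock import Mock, patch

def _run_task_with_mock_celery(self, task_function, entry_id, task_id):
    """Hypothetical reconstruction of the elided body, for orientation only."""
    self.current_task = Mock()
    self.current_task.request = Mock()
    self.current_task.request.id = task_id
    # '_get_current_task' is a hypothetical name; while patched, task
    # code asking celery for the current task sees our Mock instead of
    # a real worker request.
    with patch('instructor_task.tasks_helper._get_current_task') as mock_get:
        mock_get.return_value = self.current_task
        return task_function(entry_id, self._get_xmodule_instance_args())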
@@ -88,7 +90,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
self._test_missing_current_task(delete_problem_state)
def _test_undefined_problem(self, task_function):
-# run with celery, but no problem defined
+"""Run with celery, but no problem defined."""
task_entry = self._create_input_entry()
with self.assertRaises(ItemNotFoundError):
self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id)
@@ -103,7 +105,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
self._test_undefined_problem(delete_problem_state)
def _test_run_with_task(self, task_function, action_name, expected_num_updated):
-# run with some StudentModules for the problem
+"""Run a task and check the number of StudentModules processed."""
task_entry = self._create_input_entry()
status = self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id)
# check return value
@@ -118,7 +120,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
self.assertEquals(entry.task_state, SUCCESS)
def _test_run_with_no_state(self, task_function, action_name):
-# run with no StudentModules for the problem
+"""Run with no StudentModules defined for the current problem."""
self.define_option_problem(PROBLEM_URL_NAME)
self._test_run_with_task(task_function, action_name, 0)
@@ -185,7 +187,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
module_state_key=self.problem_url)
def _test_reset_with_student(self, use_email):
-# run with some StudentModules for the problem
+"""Run a reset task for one student, with several StudentModules for the problem defined."""
num_students = 10
initial_attempts = 3
input_state = json.dumps({'attempts': initial_attempts})
@@ -233,8 +235,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
self._test_reset_with_student(True)
def _test_run_with_failure(self, task_function, expected_message):
-# run with no StudentModules for the problem,
-# because we will fail before entering the loop.
+"""Run a task and trigger an artificial failure with the given message."""
task_entry = self._create_input_entry()
self.define_option_problem(PROBLEM_URL_NAME)
with self.assertRaises(TestTaskFailure):
@@ -256,8 +257,10 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
self._test_run_with_failure(delete_problem_state, 'We expected this to fail')
def _test_run_with_long_error_msg(self, task_function):
-# run with an error message that is so long it will require
-# truncation (as well as the jettisoning of the traceback).
+"""
+Run with an error message that is so long it will require
+truncation (as well as the jettisoning of the traceback).
+"""
task_entry = self._create_input_entry()
self.define_option_problem(PROBLEM_URL_NAME)
expected_message = "x" * 1500
@@ -282,9 +285,11 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
self._test_run_with_long_error_msg(delete_problem_state)
def _test_run_with_short_error_msg(self, task_function):
-# run with an error message that is short enough to fit
-# in the output, but long enough that the traceback won't.
-# Confirm that the traceback is truncated.
+"""
+Run with an error message that is short enough to fit
+in the output, but long enough that the traceback won't.
+Confirm that the traceback is truncated.
+"""
task_entry = self._create_input_entry()
self.define_option_problem(PROBLEM_URL_NAME)
expected_message = "x" * 900
@@ -330,3 +335,43 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
self.assertEquals(output['exception'], 'ValueError')
self.assertTrue("Length of task output is too long" in output['message'])
self.assertTrue('traceback' not in output)
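The two truncation tests above pin down a size budget for task_output: a 1500-character message must itself be cut down, while a 900-character message fits but crowds out the traceback. A sketch of the kind of policy that produces this behavior; the 1024-character limit and the exact field layout are assumptions, not taken from this diff:

import json

TASK_OUTPUT_MAX_LEN = 1024  # assumed column size for task_output

def make_failure_output(exception_name, message, traceback_text):
    """Drop, then truncate, until the serialized output fits the column."""
    output = {'exception': exception_name, 'message': message,
              'traceback': traceback_text}
    if len(json.dumps(output)) > TASK_OUTPUT_MAX_LEN:
        # jettison the traceback first...
        del output['traceback']
    if len(json.dumps(output)) > TASK_OUTPUT_MAX_LEN:
        # ...then truncate the message itself as a last resort
        overflow = len(json.dumps(output)) - TASK_OUTPUT_MAX_LEN
        output['message'] = message[:len(message) - overflow - 3] + '...'
    return json.dumps(output)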
+@skip
+def test_rescoring_unrescorable(self):
+# TODO: this test needs to have Mako templates initialized
+# to make sure that the creation of an XModule works.
+input_state = json.dumps({'done': True})
+num_students = 1
+self._create_students_with_state(num_students, input_state)
+task_entry = self._create_input_entry()
+with self.assertRaises(UpdateProblemModuleStateError):
+self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
+# check values stored in table:
+entry = InstructorTask.objects.get(id=task_entry.id)
+output = json.loads(entry.task_output)
+self.assertEquals(output['exception'], "UpdateProblemModuleStateError")
+self.assertEquals(output['message'], "Specified problem does not support rescoring.")
+self.assertGreater(len(output['traceback']), 0)
+@skip
+def test_rescoring_success(self):
+# TODO: this test needs to have Mako templates initialized
+# to make sure that the creation of an XModule works.
+input_state = json.dumps({'done': True})
+num_students = 10
+self._create_students_with_state(num_students, input_state)
+task_entry = self._create_input_entry()
+mock_instance = Mock()
+mock_instance.rescore_problem = Mock(return_value={'success': 'correct'})
+# TODO: figure out why this mock is not working....
+with patch('courseware.module_render.get_module_for_descriptor_internal') as mock_get_module:
+mock_get_module.return_value = mock_instance
+self._run_task_with_mock_celery(rescore_problem, task_entry.id, task_entry.task_id)
+# check return value
+entry = InstructorTask.objects.get(id=task_entry.id)
+output = json.loads(entry.task_output)
+self.assertEquals(output.get('attempted'), num_students)
+self.assertEquals(output.get('updated'), num_students)
+self.assertEquals(output.get('total'), num_students)
+self.assertEquals(output.get('action_name'), 'rescored')
+self.assertGreater(output.get('duration_ms'), 0)
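One likely culprit behind the TODO above: mock.Mock's first positional argument is a spec object, not a return value, so Mock({'success': 'correct'}) builds a mock specced on a dict rather than one that returns it. A small demonstration of the difference:

from mock import Mock

wrong = Mock({'success': 'correct'})   # the dict is taken as the *spec*
right = Mock(return_value={'success': 'correct'})
right()      # -> {'success': 'correct'}
wrong.keys   # spec'd attribute lookup works, since dict has keys()...
wrong()      # -> a new child Mock, not the dict we wanted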
"""
-Test for LMS instructor background task queue management
+Test for LMS instructor background task views.
"""
import json
from celery.states import SUCCESS, FAILURE, REVOKED, PENDING
@@ -18,7 +18,7 @@ from instructor_task.views import instructor_task_status, get_task_completion_info
class InstructorTaskReportTest(InstructorTaskTestCase):
"""
-Tests API and view methods that involve the reporting of status for background tasks.
+Tests view methods that involve the reporting of status for background tasks.
"""
def _get_instructor_task_status(self, task_id):
@@ -263,4 +263,3 @@ class InstructorTaskReportTest(InstructorTaskTestCase):
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "Problem rescored for 2 of 3 students (out of 5)")
@@ -249,7 +249,7 @@ function goto( mode)
<p>
Then select an action:
<input type="submit" name="action" value="Reset student's attempts">
-%if settings.MITX_FEATURES.get('ENABLE_COURSE_BACKGROUND_TASKS'):
+%if settings.MITX_FEATURES.get('ENABLE_INSTRUCTOR_BACKGROUND_TASKS'):
<input type="submit" name="action" value="Rescore student's problem submission">
%endif
</p>
@@ -260,9 +260,9 @@
<input type="submit" name="action" value="Delete student state for module">
</p>
%endif
-%if settings.MITX_FEATURES.get('ENABLE_COURSE_BACKGROUND_TASKS'):
+%if settings.MITX_FEATURES.get('ENABLE_INSTRUCTOR_BACKGROUND_TASKS'):
<p>Rescoring runs in the background, and status for active tasks will appear in a table below.
-To see status for all tasks submitted for this course and student, click on this button:
+To see status for all tasks submitted for this problem and student, click on this button:
</p>
<p>
<input type="submit" name="action" value="Show Background Task History for Student">
......
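The template hunks above also rename the feature flag from ENABLE_COURSE_BACKGROUND_TASKS to ENABLE_INSTRUCTOR_BACKGROUND_TASKS; both rescore controls are gated on it. A sketch of how such a flag is switched on in settings; the exact settings file is an assumption:

# e.g. in an lms settings override; the MITX_FEATURES dict itself is
# defined by the platform's settings, only this assignment is illustrative
MITX_FEATURES['ENABLE_INSTRUCTOR_BACKGROUND_TASKS'] = True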