Commit 77032067 by Brian Wilson

Refactor test_views from test_api. Pull out pending_tasks.js.

parent c2aadbfb
@@ -29,7 +29,7 @@ def get_running_instructor_tasks(course_id):
     # exclude states that are "ready" (i.e. not "running", e.g. failure, success, revoked):
     for state in READY_STATES:
         instructor_tasks = instructor_tasks.exclude(task_state=state)
-    return instructor_tasks
+    return instructor_tasks.order_by('-id')
 
 
 def get_instructor_task_history(course_id, problem_url, student=None):
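Aside: the loop above excludes each of Celery's READY_STATES one at a time, and the new order_by surfaces the most recently created tasks first. A minimal equivalent sketch, assuming the InstructorTask model and Celery's READY_STATES as imported in this module (this is an illustration, not the commit's code):

    from celery.states import READY_STATES
    from instructor_task.models import InstructorTask

    def get_running_instructor_tasks_sketch(course_id):
        """Tasks for a course that are not yet 'ready', newest first."""
        tasks = InstructorTask.objects.filter(course_id=course_id)
        # A single exclude over the whole state set behaves like the per-state loop.
        return tasks.exclude(task_state__in=READY_STATES).order_by('-id')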
@@ -142,7 +142,7 @@ def submit_delete_problem_state_for_all_students(request, course_id, problem_url
     using i4x-type notation.
 
     ItemNotFoundException is raised if the problem doesn't exist, or AlreadyRunningError
-    if the particular problem is already being deleted.
+    if the particular problem's state is already being deleted.
 
     This method makes sure the InstructorTask entry is committed.
     When called from any view that is wrapped by TransactionMiddleware,
...
@@ -186,11 +186,7 @@ def get_status_from_instructor_task(instructor_task):
       'message': returned for failed and revoked tasks.
       'traceback': optional, returned if task failed and produced a traceback.
 
-    If task doesn't exist, returns None.
-    If task has been REVOKED, the InstructorTask entry will be updated in
-    persistent storage as a side effect.
     """
     status = {}
     if instructor_task.task_output is not None:
@@ -231,7 +227,7 @@ def encode_problem_and_student_input(problem_url, student=None):
         task_key_stub = "{student}_{problem}".format(student=student.id, problem=problem_url)
     else:
         task_input = {'problem_url': problem_url}
-        task_key_stub = "{student}_{problem}".format(student="", problem=problem_url)
+        task_key_stub = "_{problem}".format(problem=problem_url)
 
     # create the key value by using MD5 hash:
     task_key = hashlib.md5(task_key_stub).hexdigest()
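Aside: both branches now yield a stub of the same shape, so the MD5-based task_key is stable for a given (student, problem) pair. A quick illustration of the derivation (the location string here is made up for the example; the code above runs under Python 2, so the .encode call is only needed on Python 3):

    import hashlib

    problem_url = 'i4x://edx/1.23x/problem/H1P1'  # illustrative location
    # No student: the stub is just "_<problem>"; with one it is "<student_id>_<problem>".
    task_key_stub = "_{problem}".format(problem=problem_url)
    task_key = hashlib.md5(task_key_stub.encode('utf-8')).hexdigest()
    print(task_key)  # 32 hex chars, stable for identical submissions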
...
@@ -102,17 +102,25 @@ class InstructorTask(models.Model):
     @staticmethod
     def create_output_for_success(returned_result):
-        """Converts successful result to output format"""
+        """
+        Converts successful result to output format.
+
+        Raises a ValueError exception if the output is too long.
+        """
+        # In future, there should be a check here that the resulting JSON
+        # will fit in the column.  In the meantime, just return an exception.
         json_output = json.dumps(returned_result)
+        if len(json_output) > 1023:
+            raise ValueError("Length of task output is too long: {0}".format(json_output))
         return json_output
 
     @staticmethod
     def create_output_for_failure(exception, traceback_string):
         """
-        Converts failed result inofrmation to output format.
+        Converts failed result information to output format.
 
         Traceback information is truncated or not included if it would result in an output string
         that would not fit in the database.  If the output is still too long, then the
         exception message is also truncated.
 
         Truncation is indicated by adding "..." to the end of the value.
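Aside: the success path now fails loudly on oversized output, while the failure path (per the docstring above) degrades gracefully by truncating. A hypothetical sketch of that truncation policy, assuming the same roughly 1024-character column the success check guards against; this is not the model's actual code:

    import json

    MAX_LEN = 1024  # assumed column size; the success path rejects anything over 1023

    def failure_output_sketch(exception, traceback_string):
        """Hypothetical: drop the traceback first, then trim the message, to fit."""
        message = str(exception)
        output = {'exception': type(exception).__name__,
                  'message': message,
                  'traceback': traceback_string}
        if len(json.dumps(output)) > MAX_LEN:
            del output['traceback']  # the traceback is the first thing sacrificed
        if len(json.dumps(output)) > MAX_LEN:
            overage = len(json.dumps(output)) - MAX_LEN
            output['message'] = message[:-(overage + 3)] + "..."  # "..." marks truncation
        return json.dumps(output)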
@@ -143,5 +151,5 @@ class InstructorTask(models.Model):
 
     @staticmethod
     def create_output_for_revoked():
         """Creates standard message to store in output format for revoked tasks."""
         return json.dumps({'message': 'Task revoked before running'})
@@ -218,10 +218,10 @@ def update_problem_module_state(entry_id, update_fcn, action_name, filter_fcn,
     if xmodule_instance_args is not None:
         xmodule_instance_args['task_id'] = task_id
 
-    # now that we have an entry we can try to catch failures:
+    # Now that we have an entry we can try to catch failures:
     task_progress = None
     try:
-        # check that the task_id submitted in the InstructorTask matches the current task
+        # Check that the task_id submitted in the InstructorTask matches the current task
         # that is running.
         request_task_id = _get_current_task().request.id
         if task_id != request_task_id:
@@ -230,10 +230,17 @@ def update_problem_module_state(entry_id, update_fcn, action_name, filter_fcn,
             TASK_LOG.error(message)
             raise UpdateProblemModuleStateError(message)
 
-        # now do the work:
+        # Now do the work:
         with dog_stats_api.timer('instructor_tasks.module.time.overall', tags=['action:{name}'.format(name=action_name)]):
             task_progress = _perform_module_state_update(course_id, module_state_key, student_ident, update_fcn,
                                                          action_name, filter_fcn, xmodule_instance_args)
+
+        # If we get here, we assume we've succeeded, so update the InstructorTask entry in anticipation.
+        # But we do this within the try, in case creating the task_output causes an exception to be
+        # raised.
+        entry.task_output = InstructorTask.create_output_for_success(task_progress)
+        entry.task_state = SUCCESS
+        entry.save_now()
     except Exception:
         # try to write out the failure to the entry before failing
         _, exception, traceback = exc_info()
@@ -244,11 +251,6 @@ def update_problem_module_state(entry_id, update_fcn, action_name, filter_fcn,
         entry.save_now()
         raise
 
-    # if we get here, we assume we've succeeded, so update the InstructorTask entry in anticipation:
-    entry.task_output = json.dumps(task_progress)
-    entry.task_state = SUCCESS
-    entry.save_now()
-
     # log and exit, returning task_progress info as task result:
     fmt = 'Finishing task "{task_id}": course "{course_id}" problem "{state_key}": final: {progress}'
     TASK_LOG.info(fmt.format(task_id=task_id, course_id=course_id, state_key=module_state_key, progress=task_progress))
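Aside: the two hunks above move the success bookkeeping inside the try block, so an exception raised while building task_output (for example the new ValueError on oversized JSON) is recorded on the entry instead of escaping unrecorded. The resulting control flow, reduced to a runnable toy; every name below is a stand-in, not the task's real code:

    import json

    class FakeEntry(object):
        """Stand-in for an InstructorTask row."""
        def save_now(self):
            print("saved: state=%s output=%s" % (self.task_state, self.task_output))

    def run_update(entry, do_work, make_output):
        try:
            progress = do_work()
            # Success is only committed if serializing the output also succeeds;
            # otherwise we fall through to the failure path below.
            entry.task_output = make_output(progress)
            entry.task_state = 'SUCCESS'
            entry.save_now()
            return progress
        except Exception as exc:
            entry.task_output = json.dumps({'exception': type(exc).__name__,
                                            'message': str(exc)})
            entry.task_state = 'FAILURE'
            entry.save_now()
            raise

    run_update(FakeEntry(), lambda: {'attempted': 3, 'total': 3}, json.dumps)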
...
""" """
Test for LMS instructor background task queue management Test for LMS instructor background task queue management
""" """
import logging
import json
from celery.states import SUCCESS, FAILURE, REVOKED, PENDING
from mock import Mock, patch
from uuid import uuid4
from django.utils.datastructures import MultiValueDict
from django.test.testcases import TestCase
from xmodule.modulestore.exceptions import ItemNotFoundError from xmodule.modulestore.exceptions import ItemNotFoundError
...@@ -22,75 +13,17 @@ from instructor_task.api import (get_running_instructor_tasks, ...@@ -22,75 +13,17 @@ from instructor_task.api import (get_running_instructor_tasks,
submit_reset_problem_attempts_for_all_students, submit_reset_problem_attempts_for_all_students,
submit_delete_problem_state_for_all_students) submit_delete_problem_state_for_all_students)
from instructor_task.api_helper import (AlreadyRunningError, from instructor_task.api_helper import AlreadyRunningError
encode_problem_and_student_input) from instructor_task.models import InstructorTask, PROGRESS
from instructor_task.models import InstructorTask, PROGRESS, QUEUING from instructor_task.tests.test_base import (InstructorTaskTestCase,
from instructor_task.tests.test_base import InstructorTaskTestCase InstructorTaskModuleTestCase,
from instructor_task.tests.factories import InstructorTaskFactory TEST_COURSE_ID)
from instructor_task.views import instructor_task_status, get_task_completion_info
log = logging.getLogger(__name__)
TEST_COURSE_ID = 'edx/1.23x/test_course'
TEST_FAILURE_MESSAGE = 'task failed horribly'
TEST_FAILURE_EXCEPTION = 'RandomCauseError'
class InstructorTaskReportTest(TestCase): class InstructorTaskReportTest(InstructorTaskTestCase):
""" """
Tests API and view methods that involve the reporting of status for background tasks. Tests API and view methods that involve the reporting of status for background tasks.
""" """
def setUp(self):
self.student = UserFactory.create(username="student", email="student@edx.org")
self.instructor = UserFactory.create(username="instructor", email="instructor@edx.org")
self.problem_url = InstructorTaskReportTest.problem_location("test_urlname")
@staticmethod
def problem_location(problem_url_name):
"""
Create an internal location for a test problem.
"""
return "i4x://{org}/{number}/problem/{problem_url_name}".format(org='edx',
number='1.23x',
problem_url_name=problem_url_name)
def _create_entry(self, task_state=QUEUING, task_output=None, student=None):
"""Creates a InstructorTask entry for testing."""
task_id = str(uuid4())
progress_json = json.dumps(task_output) if task_output is not None else None
task_input, task_key = encode_problem_and_student_input(self.problem_url, student)
instructor_task = InstructorTaskFactory.create(course_id=TEST_COURSE_ID,
requester=self.instructor,
task_input=json.dumps(task_input),
task_key=task_key,
task_id=task_id,
task_state=task_state,
task_output=progress_json)
return instructor_task
def _create_failure_entry(self):
"""Creates a InstructorTask entry representing a failed task."""
# view task entry for task failure
progress = {'message': TEST_FAILURE_MESSAGE,
'exception': TEST_FAILURE_EXCEPTION,
}
return self._create_entry(task_state=FAILURE, task_output=progress)
def _create_success_entry(self, student=None):
"""Creates a InstructorTask entry representing a successful task."""
return self._create_progress_entry(student, task_state=SUCCESS)
def _create_progress_entry(self, student=None, task_state=PROGRESS):
"""Creates a InstructorTask entry representing a task in progress."""
progress = {'attempted': 3,
'updated': 2,
'total': 5,
'action_name': 'rescored',
}
return self._create_entry(task_state=task_state, task_output=progress, student=student)
def test_get_running_instructor_tasks(self): def test_get_running_instructor_tasks(self):
# when fetching running tasks, we get all running tasks, and only running tasks # when fetching running tasks, we get all running tasks, and only running tasks
@@ -112,243 +45,8 @@ class InstructorTaskReportTest(TestCase):
                     in get_instructor_task_history(TEST_COURSE_ID, self.problem_url)]
         self.assertEquals(set(task_ids), set(expected_ids))
 
-    def _get_instructor_task_status(self, task_id):
-        """Returns status corresponding to task_id via api method."""
-        request = Mock()
-        request.REQUEST = {'task_id': task_id}
-        return instructor_task_status(request)
-
-    def test_instructor_task_status(self):
-        instructor_task = self._create_failure_entry()
-        task_id = instructor_task.task_id
-        request = Mock()
-        request.REQUEST = {'task_id': task_id}
-        response = instructor_task_status(request)
-        output = json.loads(response.content)
-        self.assertEquals(output['task_id'], task_id)
-
-    def test_instructor_task_status_list(self):
-        # Fetch status for existing tasks by arg list, as if called from ajax.
-        # Note that ajax does something funny with the marshalling of
-        # list data, so the key value has "[]" appended to it.
-        task_ids = [(self._create_failure_entry()).task_id for _ in range(1, 5)]
-        request = Mock()
-        request.REQUEST = MultiValueDict({'task_ids[]': task_ids})
-        response = instructor_task_status(request)
-        output = json.loads(response.content)
-        self.assertEquals(len(output), len(task_ids))
-        for task_id in task_ids:
-            self.assertEquals(output[task_id]['task_id'], task_id)
-
-    def test_get_status_from_failure(self):
-        # get status for a task that has already failed
-        instructor_task = self._create_failure_entry()
-        task_id = instructor_task.task_id
-        response = self._get_instructor_task_status(task_id)
-        output = json.loads(response.content)
-        self.assertEquals(output['message'], TEST_FAILURE_MESSAGE)
-        self.assertEquals(output['succeeded'], False)
-        self.assertEquals(output['task_id'], task_id)
-        self.assertEquals(output['task_state'], FAILURE)
-        self.assertFalse(output['in_progress'])
-        expected_progress = {'exception': TEST_FAILURE_EXCEPTION,
-                             'message': TEST_FAILURE_MESSAGE}
-        self.assertEquals(output['task_progress'], expected_progress)
-
-    def test_get_status_from_success(self):
-        # get status for a task that has already succeeded
-        instructor_task = self._create_success_entry()
-        task_id = instructor_task.task_id
-        response = self._get_instructor_task_status(task_id)
-        output = json.loads(response.content)
-        self.assertEquals(output['message'], "Problem rescored for 2 of 3 students (out of 5)")
-        self.assertEquals(output['succeeded'], False)
-        self.assertEquals(output['task_id'], task_id)
-        self.assertEquals(output['task_state'], SUCCESS)
-        self.assertFalse(output['in_progress'])
-        expected_progress = {'attempted': 3,
-                             'updated': 2,
-                             'total': 5,
-                             'action_name': 'rescored'}
-        self.assertEquals(output['task_progress'], expected_progress)
-
-    def _test_get_status_from_result(self, task_id, mock_result):
-        """
-        Provides mock result to caller of instructor_task_status, and returns resulting output.
-        """
-        with patch('celery.result.AsyncResult.__new__') as mock_result_ctor:
-            mock_result_ctor.return_value = mock_result
-            response = self._get_instructor_task_status(task_id)
-        output = json.loads(response.content)
-        self.assertEquals(output['task_id'], task_id)
-        return output
-
-    def test_get_status_to_pending(self):
-        # get status for a task that hasn't begun to run yet
-        instructor_task = self._create_entry()
-        task_id = instructor_task.task_id
-        mock_result = Mock()
-        mock_result.task_id = task_id
-        mock_result.state = PENDING
-        output = self._test_get_status_from_result(task_id, mock_result)
-        for key in ['message', 'succeeded', 'task_progress']:
-            self.assertTrue(key not in output)
-        self.assertEquals(output['task_state'], 'PENDING')
-        self.assertTrue(output['in_progress'])
-
-    def test_update_progress_to_progress(self):
-        # view task entry for task in progress
-        instructor_task = self._create_progress_entry()
-        task_id = instructor_task.task_id
-        mock_result = Mock()
-        mock_result.task_id = task_id
-        mock_result.state = PROGRESS
-        mock_result.result = {'attempted': 5,
-                              'updated': 4,
-                              'total': 10,
-                              'action_name': 'rescored'}
-        output = self._test_get_status_from_result(task_id, mock_result)
-        self.assertEquals(output['message'], "Progress: rescored 4 of 5 so far (out of 10)")
-        self.assertEquals(output['succeeded'], False)
-        self.assertEquals(output['task_state'], PROGRESS)
-        self.assertTrue(output['in_progress'])
-        self.assertEquals(output['task_progress'], mock_result.result)
-
-    def test_update_progress_to_failure(self):
-        # view task entry for task in progress that later fails
-        instructor_task = self._create_progress_entry()
-        task_id = instructor_task.task_id
-        mock_result = Mock()
-        mock_result.task_id = task_id
-        mock_result.state = FAILURE
-        mock_result.result = NotImplementedError("This task later failed.")
-        mock_result.traceback = "random traceback"
-        output = self._test_get_status_from_result(task_id, mock_result)
-        self.assertEquals(output['message'], "This task later failed.")
-        self.assertEquals(output['succeeded'], False)
-        self.assertEquals(output['task_state'], FAILURE)
-        self.assertFalse(output['in_progress'])
-        expected_progress = {'exception': 'NotImplementedError',
-                             'message': "This task later failed.",
-                             'traceback': "random traceback"}
-        self.assertEquals(output['task_progress'], expected_progress)
-
-    def test_update_progress_to_revoked(self):
-        # view task entry for task in progress that later fails
-        instructor_task = self._create_progress_entry()
-        task_id = instructor_task.task_id
-        mock_result = Mock()
-        mock_result.task_id = task_id
-        mock_result.state = REVOKED
-        output = self._test_get_status_from_result(task_id, mock_result)
-        self.assertEquals(output['message'], "Task revoked before running")
-        self.assertEquals(output['succeeded'], False)
-        self.assertEquals(output['task_state'], REVOKED)
-        self.assertFalse(output['in_progress'])
-        expected_progress = {'message': "Task revoked before running"}
-        self.assertEquals(output['task_progress'], expected_progress)
-
-    def _get_output_for_task_success(self, attempted, updated, total, student=None):
-        """returns the task_id and the result returned by instructor_task_status()."""
-        # view task entry for task in progress
-        instructor_task = self._create_progress_entry(student)
-        task_id = instructor_task.task_id
-        mock_result = Mock()
-        mock_result.task_id = task_id
-        mock_result.state = SUCCESS
-        mock_result.result = {'attempted': attempted,
-                              'updated': updated,
-                              'total': total,
-                              'action_name': 'rescored'}
-        output = self._test_get_status_from_result(task_id, mock_result)
-        return output
-
-    def test_update_progress_to_success(self):
-        output = self._get_output_for_task_success(10, 8, 10)
-        self.assertEquals(output['message'], "Problem rescored for 8 of 10 students")
-        self.assertEquals(output['succeeded'], False)
-        self.assertEquals(output['task_state'], SUCCESS)
-        self.assertFalse(output['in_progress'])
-        expected_progress = {'attempted': 10,
-                             'updated': 8,
-                             'total': 10,
-                             'action_name': 'rescored'}
-        self.assertEquals(output['task_progress'], expected_progress)
-
-    def test_success_messages(self):
-        output = self._get_output_for_task_success(0, 0, 10)
-        self.assertEqual(output['message'], "Unable to find any students with submissions to be rescored (out of 10)")
-        self.assertFalse(output['succeeded'])
-
-        output = self._get_output_for_task_success(10, 0, 10)
-        self.assertEqual(output['message'], "Problem failed to be rescored for any of 10 students")
-        self.assertFalse(output['succeeded'])
-
-        output = self._get_output_for_task_success(10, 8, 10)
-        self.assertEqual(output['message'], "Problem rescored for 8 of 10 students")
-        self.assertFalse(output['succeeded'])
-
-        output = self._get_output_for_task_success(9, 8, 10)
-        self.assertEqual(output['message'], "Problem rescored for 8 of 9 students (out of 10)")
-        self.assertFalse(output['succeeded'])
-
-        output = self._get_output_for_task_success(10, 10, 10)
-        self.assertEqual(output['message'], "Problem successfully rescored for 10 students")
-        self.assertTrue(output['succeeded'])
-
-        output = self._get_output_for_task_success(0, 0, 1, student=self.student)
-        self.assertTrue("Unable to find submission to be rescored for student" in output['message'])
-        self.assertFalse(output['succeeded'])
-
-        output = self._get_output_for_task_success(1, 0, 1, student=self.student)
-        self.assertTrue("Problem failed to be rescored for student" in output['message'])
-        self.assertFalse(output['succeeded'])
-
-        output = self._get_output_for_task_success(1, 1, 1, student=self.student)
-        self.assertTrue("Problem successfully rescored for student" in output['message'])
-        self.assertTrue(output['succeeded'])
-
-    def test_get_info_for_queuing_task(self):
-        # get status for a task that is still running:
-        instructor_task = self._create_entry()
-        succeeded, message = get_task_completion_info(instructor_task)
-        self.assertFalse(succeeded)
-        self.assertEquals(message, "No status information available")
-
-    def test_get_info_for_missing_output(self):
-        # check for missing task_output
-        instructor_task = self._create_success_entry()
-        instructor_task.task_output = None
-        succeeded, message = get_task_completion_info(instructor_task)
-        self.assertFalse(succeeded)
-        self.assertEquals(message, "No status information available")
-
-    def test_get_info_for_broken_output(self):
-        # check for non-JSON task_output
-        instructor_task = self._create_success_entry()
-        instructor_task.task_output = "{ bad"
-        succeeded, message = get_task_completion_info(instructor_task)
-        self.assertFalse(succeeded)
-        self.assertEquals(message, "No parsable status information available")
-
-    def test_get_info_for_empty_output(self):
-        # check for JSON task_output with missing keys
-        instructor_task = self._create_success_entry()
-        instructor_task.task_output = "{}"
-        succeeded, message = get_task_completion_info(instructor_task)
-        self.assertFalse(succeeded)
-        self.assertEquals(message, "No progress status information available")
-
-    def test_get_info_for_broken_input(self):
-        # check for non-JSON task_input, but then just ignore it
-        instructor_task = self._create_success_entry()
-        instructor_task.task_input = "{ bad"
-        succeeded, message = get_task_completion_info(instructor_task)
-        self.assertFalse(succeeded)
-        self.assertEquals(message, "Problem rescored for 2 of 3 students (out of 5)")
-
-
-class InstructorTaskSubmitTest(InstructorTaskTestCase):
+
+class InstructorTaskSubmitTest(InstructorTaskModuleTestCase):
     """Tests API methods that involve the submission of background tasks."""
 
     def setUp(self):
@@ -358,9 +56,7 @@ class InstructorTaskSubmitTest(InstructorTaskTestCase):
 
     def test_submit_nonexistent_modules(self):
         # confirm that a rescore of a non-existent module returns an exception
-        # (Note that it is easier to test a non-rescorable module in test_tasks,
-        # where we are creating real modules.
-        problem_url = InstructorTaskTestCase.problem_location("NonexistentProblem")
+        problem_url = InstructorTaskModuleTestCase.problem_location("NonexistentProblem")
         course_id = self.course.id
         request = None
         with self.assertRaises(ItemNotFoundError):
@@ -374,7 +70,7 @@ class InstructorTaskSubmitTest(InstructorTaskTestCase):
 
     def test_submit_nonrescorable_modules(self):
         # confirm that a rescore of an existent but unscorable module returns an exception
-        # (Note that it is easier to test a non-rescorable module in test_tasks,
+        # (Note that it is easier to test a scoreable but non-rescorable module in test_tasks,
         # where we are creating real modules.)
         problem_url = self.problem_section.location.url()
         course_id = self.course.id
@@ -384,15 +80,15 @@ class InstructorTaskSubmitTest(InstructorTaskTestCase):
         with self.assertRaises(NotImplementedError):
             submit_rescore_problem_for_all_students(request, course_id, problem_url)
 
-    def _test_submit_with_long_url(self, task_class, student=None):
+    def _test_submit_with_long_url(self, task_function, student=None):
         problem_url_name = 'x' * 255
         self.define_option_problem(problem_url_name)
-        location = InstructorTaskTestCase.problem_location(problem_url_name)
+        location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
         with self.assertRaises(ValueError):
             if student is not None:
-                task_class(self.create_task_request(self.instructor), self.course.id, location, student)
+                task_function(self.create_task_request(self.instructor), self.course.id, location, student)
             else:
-                task_class(self.create_task_request(self.instructor), self.course.id, location)
+                task_function(self.create_task_request(self.instructor), self.course.id, location)
 
     def test_submit_rescore_all_with_long_url(self):
         self._test_submit_with_long_url(submit_rescore_problem_for_all_students)
@@ -406,15 +102,16 @@ class InstructorTaskSubmitTest(InstructorTaskTestCase):
     def test_submit_delete_all_with_long_url(self):
         self._test_submit_with_long_url(submit_delete_problem_state_for_all_students)
 
-    def _test_submit_task(self, task_class, student=None):
+    def _test_submit_task(self, task_function, student=None):
+        # tests submit, and then tests a second identical submission.
         problem_url_name = 'H1P1'
         self.define_option_problem(problem_url_name)
-        location = InstructorTaskTestCase.problem_location(problem_url_name)
+        location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
         if student is not None:
-            instructor_task = task_class(self.create_task_request(self.instructor),
-                                         self.course.id, location, student)
+            instructor_task = task_function(self.create_task_request(self.instructor),
+                                            self.course.id, location, student)
         else:
-            instructor_task = task_class(self.create_task_request(self.instructor),
-                                         self.course.id, location)
+            instructor_task = task_function(self.create_task_request(self.instructor),
+                                            self.course.id, location)
 
         # test resubmitting, by updating the existing record:
@@ -424,9 +121,9 @@ class InstructorTaskSubmitTest(InstructorTaskTestCase):
 
         with self.assertRaises(AlreadyRunningError):
             if student is not None:
-                task_class(self.create_task_request(self.instructor), self.course.id, location, student)
+                task_function(self.create_task_request(self.instructor), self.course.id, location, student)
             else:
-                task_class(self.create_task_request(self.instructor), self.course.id, location)
+                task_function(self.create_task_request(self.instructor), self.course.id, location)
 
     def test_submit_rescore_all(self):
         self._test_submit_task(submit_rescore_problem_for_all_students)
@@ -439,4 +136,3 @@ class InstructorTaskSubmitTest(InstructorTaskTestCase):
 
     def test_submit_delete_all(self):
         self._test_submit_task(submit_delete_problem_state_for_all_students)
-
""" """
Integration Test for LMS instructor-initiated background tasks Base test classes for LMS instructor-initiated background tasks
Runs tasks on answers to course problems to validate that code
paths actually work.
""" """
import logging
import json import json
from uuid import uuid4
from mock import Mock from mock import Mock
from celery.states import SUCCESS, FAILURE
from django.test.testcases import TestCase
from django.contrib.auth.models import User from django.contrib.auth.models import User
from django.test.utils import override_settings from django.test.utils import override_settings
@@ -21,23 +21,85 @@ from student.tests.factories import CourseEnrollmentFactory, UserFactory
 from courseware.model_data import StudentModule
 from courseware.tests.tests import LoginEnrollmentTestCase, TEST_DATA_MONGO_MODULESTORE
 
+from instructor_task.api_helper import encode_problem_and_student_input
+from instructor_task.models import PROGRESS, QUEUING
+from instructor_task.tests.factories import InstructorTaskFactory
 from instructor_task.views import instructor_task_status
 
+log = logging.getLogger(__name__)
+
 TEST_COURSE_ORG = 'edx'
 TEST_COURSE_NAME = 'Test Course'
 TEST_COURSE_NUMBER = '1.23x'
 TEST_SECTION_NAME = "Problem"
+TEST_COURSE_ID = 'edx/1.23x/test_course'
+
+TEST_FAILURE_MESSAGE = 'task failed horribly'
+TEST_FAILURE_EXCEPTION = 'RandomCauseError'
+
+OPTION_1 = 'Option 1'
+OPTION_2 = 'Option 2'
+
+
+class InstructorTaskTestCase(TestCase):
+    """
+    Tests API and view methods that involve the reporting of status for background tasks.
+    """
+    def setUp(self):
+        self.student = UserFactory.create(username="student", email="student@edx.org")
+        self.instructor = UserFactory.create(username="instructor", email="instructor@edx.org")
+        self.problem_url = InstructorTaskTestCase.problem_location("test_urlname")
+
+    @staticmethod
+    def problem_location(problem_url_name):
+        """
+        Create an internal location for a test problem.
+        """
+        return "i4x://{org}/{number}/problem/{problem_url_name}".format(org='edx',
+                                                                        number='1.23x',
+                                                                        problem_url_name=problem_url_name)
+
+    def _create_entry(self, task_state=QUEUING, task_output=None, student=None):
+        """Creates a InstructorTask entry for testing."""
+        task_id = str(uuid4())
+        progress_json = json.dumps(task_output) if task_output is not None else None
+        task_input, task_key = encode_problem_and_student_input(self.problem_url, student)
+        instructor_task = InstructorTaskFactory.create(course_id=TEST_COURSE_ID,
+                                                       requester=self.instructor,
+                                                       task_input=json.dumps(task_input),
+                                                       task_key=task_key,
+                                                       task_id=task_id,
+                                                       task_state=task_state,
+                                                       task_output=progress_json)
+        return instructor_task
+
+    def _create_failure_entry(self):
+        """Creates a InstructorTask entry representing a failed task."""
+        # view task entry for task failure
+        progress = {'message': TEST_FAILURE_MESSAGE,
+                    'exception': TEST_FAILURE_EXCEPTION,
+                    }
+        return self._create_entry(task_state=FAILURE, task_output=progress)
+
+    def _create_success_entry(self, student=None):
+        """Creates a InstructorTask entry representing a successful task."""
+        return self._create_progress_entry(student, task_state=SUCCESS)
+
+    def _create_progress_entry(self, student=None, task_state=PROGRESS):
+        """Creates a InstructorTask entry representing a task in progress."""
+        progress = {'attempted': 3,
+                    'updated': 2,
+                    'total': 5,
+                    'action_name': 'rescored',
+                    }
+        return self._create_entry(task_state=task_state, task_output=progress, student=student)
+
 
 @override_settings(MODULESTORE=TEST_DATA_MONGO_MODULESTORE)
-class InstructorTaskTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
+class InstructorTaskModuleTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
     """
     Base test class for InstructorTask-related tests that require
-    the setup of a course and problem.
+    the setup of a course and problem in order to access StudentModule state.
     """
     course = None
     current_user = None
@@ -68,14 +130,13 @@ class InstructorTaskTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
 
     def login_username(self, username):
         """Login the user, given the `username`."""
         if self.current_user != username:
-            self.login(InstructorTaskTestCase.get_user_email(username), "test")
+            self.login(InstructorTaskModuleTestCase.get_user_email(username), "test")
             self.current_user = username
 
     def _create_user(self, username, is_staff=False):
         """Creates a user and enrolls them in the test course."""
-        email = InstructorTaskTestCase.get_user_email(username)
-        UserFactory.create(username=username, email=email, is_staff=is_staff)
-        thisuser = User.objects.get(username=username)
+        email = InstructorTaskModuleTestCase.get_user_email(username)
+        thisuser = UserFactory.create(username=username, email=email, is_staff=is_staff)
         CourseEnrollmentFactory.create(user=thisuser, course_id=self.course.id)
         return thisuser
@@ -102,9 +163,9 @@ class InstructorTaskTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
     def define_option_problem(self, problem_url_name):
         """Create the problem definition so the answer is Option 1"""
         factory = OptionResponseXMLFactory()
-        factory_args = {'question_text': 'The correct answer is Option 1',
-                        'options': ['Option 1', 'Option 2'],
-                        'correct_option': 'Option 1',
+        factory_args = {'question_text': 'The correct answer is {0}'.format(OPTION_1),
+                        'options': [OPTION_1, OPTION_2],
+                        'correct_option': OPTION_1,
                         'num_responses': 2}
         problem_xml = factory.build_xml(**factory_args)
         ItemFactory.create(parent_location=self.problem_section.location,
@@ -115,9 +176,9 @@ class InstructorTaskTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
     def redefine_option_problem(self, problem_url_name):
         """Change the problem definition so the answer is Option 2"""
         factory = OptionResponseXMLFactory()
-        factory_args = {'question_text': 'The correct answer is Option 2',
-                        'options': ['Option 1', 'Option 2'],
-                        'correct_option': 'Option 2',
+        factory_args = {'question_text': 'The correct answer is {0}'.format(OPTION_2),
+                        'options': [OPTION_1, OPTION_2],
+                        'correct_option': OPTION_2,
                         'num_responses': 2}
         problem_xml = factory.build_xml(**factory_args)
         location = InstructorTaskTestCase.problem_location(problem_url_name)
@@ -131,7 +192,8 @@ class InstructorTaskTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
                                          module_state_key=descriptor.location.url(),
                                          )
 
-    def get_task_status(self, task_id):
+    @staticmethod
+    def get_task_status(task_id):
         """Use api method to fetch task status, using mock request."""
         mock_request = Mock()
         mock_request.REQUEST = {'task_id': task_id}
...
@@ -26,14 +26,15 @@ from instructor_task.api import (submit_rescore_problem_for_all_students,
                                  submit_reset_problem_attempts_for_all_students,
                                  submit_delete_problem_state_for_all_students)
 from instructor_task.models import InstructorTask
-from instructor_task.tests.test_base import InstructorTaskTestCase, TEST_COURSE_ORG, TEST_COURSE_NUMBER
+from instructor_task.tests.test_base import (InstructorTaskModuleTestCase, TEST_COURSE_ORG, TEST_COURSE_NUMBER,
+                                             OPTION_1, OPTION_2)
 from capa.responsetypes import StudentInputError
 
 log = logging.getLogger(__name__)
 
 
-class TestIntegrationTask(InstructorTaskTestCase):
+class TestIntegrationTask(InstructorTaskModuleTestCase):
     """
     Base class to provide general methods used for "integration" testing of particular tasks.
     """
@@ -46,6 +47,9 @@ class TestIntegrationTask(InstructorTaskTestCase):
         """
         def get_input_id(response_id):
             """Creates input id using information about the test course and the current problem."""
+            # Note that this is a capa-specific convention.  The form is a version of the problem's
+            # URL, modified so that it can be easily stored in html, prepended with "input-" and
+            # appended with a sequence identifier for the particular response the input goes to.
             return 'input_i4x-{0}-{1}-problem-{2}_{3}'.format(TEST_COURSE_ORG.lower(),
                                                               TEST_COURSE_NUMBER.replace('.', '_'),
                                                               problem_url_name, response_id)
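Aside: to make the convention concrete, here is what get_input_id yields for this test course, rerun standalone with the same format string and constants (problem_url_name 'H1P1' is the one used throughout these tests):

    TEST_COURSE_ORG = 'edx'
    TEST_COURSE_NUMBER = '1.23x'
    problem_url_name = 'H1P1'

    def get_input_id(response_id):
        # Dots in the course number become underscores so the id is HTML-safe.
        return 'input_i4x-{0}-{1}-problem-{2}_{3}'.format(TEST_COURSE_ORG.lower(),
                                                          TEST_COURSE_NUMBER.replace('.', '_'),
                                                          problem_url_name, response_id)

    print(get_input_id('2_1'))  # -> input_i4x-edx-1_23x-problem-H1P1_2_1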
@@ -56,15 +60,32 @@ class TestIntegrationTask(InstructorTaskTestCase):
         # make ajax call:
         modx_url = reverse('modx_dispatch',
                            kwargs={'course_id': self.course.id,
-                                   'location': InstructorTaskTestCase.problem_location(problem_url_name),
+                                   'location': InstructorTaskModuleTestCase.problem_location(problem_url_name),
                                    'dispatch': 'problem_check', })
+        # we assume we have two responses, so assign them the correct identifiers.
         resp = self.client.post(modx_url, {
             get_input_id('2_1'): responses[0],
             get_input_id('3_1'): responses[1],
         })
         return resp
 
+    def _assert_task_failure(self, entry_id, task_type, problem_url_name, expected_message):
+        """Confirm that expected values are stored in InstructorTask on task failure."""
+        instructor_task = InstructorTask.objects.get(id=entry_id)
+        self.assertEqual(instructor_task.task_state, FAILURE)
+        self.assertEqual(instructor_task.requester.username, 'instructor')
+        self.assertEqual(instructor_task.task_type, task_type)
+        task_input = json.loads(instructor_task.task_input)
+        self.assertFalse('student' in task_input)
+        self.assertEqual(task_input['problem_url'], InstructorTaskModuleTestCase.problem_location(problem_url_name))
+        status = json.loads(instructor_task.task_output)
+        self.assertEqual(status['exception'], 'ZeroDivisionError')
+        self.assertEqual(status['message'], expected_message)
+
+        # check status returned:
+        status = InstructorTaskModuleTestCase.get_task_status(instructor_task.task_id)
+        self.assertEqual(status['message'], expected_message)
+
 
 class TestRescoringTask(TestIntegrationTask):
     """
@@ -92,7 +113,7 @@ class TestRescoringTask(TestIntegrationTask):
         # make ajax call:
         modx_url = reverse('modx_dispatch',
                            kwargs={'course_id': self.course.id,
-                                   'location': InstructorTaskTestCase.problem_location(problem_url_name),
+                                   'location': InstructorTaskModuleTestCase.problem_location(problem_url_name),
                                    'dispatch': 'problem_get', })
         resp = self.client.post(modx_url, {})
         return resp
@@ -120,32 +141,27 @@ class TestRescoringTask(TestIntegrationTask):
     def submit_rescore_all_student_answers(self, instructor, problem_url_name):
         """Submits the particular problem for rescoring"""
         return submit_rescore_problem_for_all_students(self.create_task_request(instructor), self.course.id,
-                                                       InstructorTaskTestCase.problem_location(problem_url_name))
+                                                       InstructorTaskModuleTestCase.problem_location(problem_url_name))
 
     def submit_rescore_one_student_answer(self, instructor, problem_url_name, student):
         """Submits the particular problem for rescoring for a particular student"""
         return submit_rescore_problem_for_student(self.create_task_request(instructor), self.course.id,
-                                                  InstructorTaskTestCase.problem_location(problem_url_name),
+                                                  InstructorTaskModuleTestCase.problem_location(problem_url_name),
                                                   student)
 
-    def rescore_all_student_answers(self, instructor, problem_url_name):
-        """Runs the task to rescore the current problem"""
-        return submit_rescore_problem_for_all_students(self.create_task_request(instructor), self.course.id,
-                                                       InstructorTaskTestCase.problem_location(problem_url_name))
-
     def test_rescoring_option_problem(self):
-        '''Run rescore scenario on option problem'''
+        """Run rescore scenario on option problem"""
         # get descriptor:
         problem_url_name = 'H1P1'
         self.define_option_problem(problem_url_name)
-        location = InstructorTaskTestCase.problem_location(problem_url_name)
+        location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
         descriptor = self.module_store.get_instance(self.course.id, location)
 
         # first store answers for each of the separate users:
-        self.submit_student_answer('u1', problem_url_name, ['Option 1', 'Option 1'])
-        self.submit_student_answer('u2', problem_url_name, ['Option 1', 'Option 2'])
-        self.submit_student_answer('u3', problem_url_name, ['Option 2', 'Option 1'])
-        self.submit_student_answer('u4', problem_url_name, ['Option 2', 'Option 2'])
+        self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
+        self.submit_student_answer('u2', problem_url_name, [OPTION_1, OPTION_2])
+        self.submit_student_answer('u3', problem_url_name, [OPTION_2, OPTION_1])
+        self.submit_student_answer('u4', problem_url_name, [OPTION_2, OPTION_2])
 
         self.check_state('u1', descriptor, 2, 2, 1)
         self.check_state('u2', descriptor, 1, 2, 1)
@@ -177,28 +193,13 @@ class TestRescoringTask(TestIntegrationTask):
         """Simulate a failure in rescoring a problem"""
         problem_url_name = 'H1P1'
         self.define_option_problem(problem_url_name)
-        self.submit_student_answer('u1', problem_url_name, ['Option 1', 'Option 1'])
+        self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
 
         expected_message = "bad things happened"
         with patch('capa.capa_problem.LoncapaProblem.rescore_existing_answers') as mock_rescore:
             mock_rescore.side_effect = ZeroDivisionError(expected_message)
             instructor_task = self.submit_rescore_all_student_answers('instructor', problem_url_name)
-
-        # check instructor_task returned
-        instructor_task = InstructorTask.objects.get(id=instructor_task.id)
-        self.assertEqual(instructor_task.task_state, 'FAILURE')
-        self.assertEqual(instructor_task.requester.username, 'instructor')
-        self.assertEqual(instructor_task.task_type, 'rescore_problem')
-        task_input = json.loads(instructor_task.task_input)
-        self.assertFalse('student' in task_input)
-        self.assertEqual(task_input['problem_url'], InstructorTaskTestCase.problem_location(problem_url_name))
-        status = json.loads(instructor_task.task_output)
-        self.assertEqual(status['exception'], 'ZeroDivisionError')
-        self.assertEqual(status['message'], expected_message)
-
-        # check status returned:
-        status = self.get_task_status(instructor_task.task_id)
-        self.assertEqual(status['message'], expected_message)
+        self._assert_task_failure(instructor_task.id, 'rescore_problem', problem_url_name, expected_message)
 
     def test_rescoring_bad_unicode_input(self):
         """Generate a real failure in rescoring a problem, with an answer including unicode"""
@@ -209,7 +210,7 @@ class TestRescoringTask(TestIntegrationTask):
         # set up an option problem -- doesn't matter really what problem it is, but we need
         # it to have an answer.
         self.define_option_problem(problem_url_name)
-        self.submit_student_answer('u1', problem_url_name, ['Option 1', 'Option 1'])
+        self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
 
         # return an input error as if it were a numerical response, with an embedded unicode character:
         expected_message = u"Could not interpret '2/3\u03a9' as a number"
@@ -224,24 +225,12 @@ class TestRescoringTask(TestIntegrationTask):
         self.assertEqual(instructor_task.task_type, 'rescore_problem')
         task_input = json.loads(instructor_task.task_input)
         self.assertFalse('student' in task_input)
-        self.assertEqual(task_input['problem_url'], InstructorTaskTestCase.problem_location(problem_url_name))
+        self.assertEqual(task_input['problem_url'], InstructorTaskModuleTestCase.problem_location(problem_url_name))
         status = json.loads(instructor_task.task_output)
         self.assertEqual(status['attempted'], 1)
         self.assertEqual(status['updated'], 0)
         self.assertEqual(status['total'], 1)
 
-    def test_rescoring_non_problem(self):
-        """confirm that a non-problem will not submit"""
-        problem_url_name = self.problem_section.location.url()
-        with self.assertRaises(NotImplementedError):
-            self.submit_rescore_all_student_answers('instructor', problem_url_name)
-
-    def test_rescoring_nonexistent_problem(self):
-        """confirm that a non-existent problem will not submit"""
-        problem_url_name = 'NonexistentProblem'
-        with self.assertRaises(ItemNotFoundError):
-            self.submit_rescore_all_student_answers('instructor', problem_url_name)
-
     def define_code_response_problem(self, problem_url_name):
         """
         Define an arbitrary code-response problem.
@@ -276,7 +265,7 @@ class TestRescoringTask(TestIntegrationTask):
         self.assertEqual(status['exception'], 'NotImplementedError')
         self.assertEqual(status['message'], "Problem's definition does not support rescoring")
 
-        status = self.get_task_status(instructor_task.task_id)
+        status = InstructorTaskModuleTestCase.get_task_status(instructor_task.task_id)
         self.assertEqual(status['message'], "Problem's definition does not support rescoring")
 
     def define_randomized_custom_response_problem(self, problem_url_name, redefine=False):
@@ -290,21 +279,14 @@ class TestRescoringTask(TestIntegrationTask):
         to not-equals).
         """
         factory = CustomResponseXMLFactory()
-        if redefine:
-            script = textwrap.dedent("""
-                def check_func(expect, answer_given):
-                    expected = str(random.randint(0, 100))
-                    return {'ok': answer_given != expected, 'msg': expected}
-            """)
-        else:
-            script = textwrap.dedent("""
-                def check_func(expect, answer_given):
-                    expected = str(random.randint(0, 100))
-                    return {'ok': answer_given == expected, 'msg': expected}
-            """)
+        script = textwrap.dedent("""
+            def check_func(expect, answer_given):
+                expected = str(random.randint(0, 100))
+                return {'ok': answer_given %s expected, 'msg': expected}
+        """ % ('!=' if redefine else '=='))
         problem_xml = factory.build_xml(script=script, cfn="check_func", expect="42", num_responses=1)
         if redefine:
-            self.module_store.update_item(InstructorTaskTestCase.problem_location(problem_url_name), problem_xml)
+            self.module_store.update_item(InstructorTaskModuleTestCase.problem_location(problem_url_name), problem_xml)
         else:
             # Use "per-student" rerandomization so that check-problem can be called more than once.
             # Using "always" means we cannot check a problem twice, but we want to call once to get the
@@ -322,7 +304,7 @@ class TestRescoringTask(TestIntegrationTask):
         # First define the custom response problem:
         problem_url_name = 'H1P1'
         self.define_randomized_custom_response_problem(problem_url_name)
-        location = InstructorTaskTestCase.problem_location(problem_url_name)
+        location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
         descriptor = self.module_store.get_instance(self.course.id, location)
 
         # run with more than one user
         userlist = ['u1', 'u2', 'u3', 'u4']
@@ -340,7 +322,7 @@ class TestRescoringTask(TestIntegrationTask):
             correct_map = state['correct_map']
             log.info("Correct Map: %s", correct_map)
             # only one response, so pull it out:
-            answer = correct_map[correct_map.keys()[0]]['msg']
+            answer = correct_map.values()[0]['msg']
             self.submit_student_answer(username, problem_url_name, [answer, answer])
 
             # we should now get the problem right, with a second attempt:
             self.check_state(username, descriptor, 1, 1, 2)
@@ -355,10 +337,8 @@ class TestRescoringTask(TestIntegrationTask):
         # rescore the problem for only one student -- only that student's grade should change
         # (and none of the attempts):
         self.submit_rescore_one_student_answer('instructor', problem_url_name, User.objects.get(username='u1'))
-        self.check_state('u1', descriptor, 0, 1, 2)
-        self.check_state('u2', descriptor, 1, 1, 2)
-        self.check_state('u3', descriptor, 1, 1, 2)
-        self.check_state('u4', descriptor, 1, 1, 2)
+        for username in userlist:
+            self.check_state(username, descriptor, 0 if username == 'u1' else 1, 1, 2)
 
         # rescore the problem for all students
         self.submit_rescore_all_student_answers('instructor', problem_url_name)
@@ -392,20 +372,20 @@ class TestResetAttemptsTask(TestIntegrationTask):
     def reset_problem_attempts(self, instructor, problem_url_name):
         """Submits the current problem for resetting"""
         return submit_reset_problem_attempts_for_all_students(self.create_task_request(instructor), self.course.id,
-                                                              InstructorTaskTestCase.problem_location(problem_url_name))
+                                                              InstructorTaskModuleTestCase.problem_location(problem_url_name))
 
     def test_reset_attempts_on_problem(self):
-        '''Run reset-attempts scenario on option problem'''
+        """Run reset-attempts scenario on option problem"""
         # get descriptor:
         problem_url_name = 'H1P1'
         self.define_option_problem(problem_url_name)
-        location = InstructorTaskTestCase.problem_location(problem_url_name)
+        location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
         descriptor = self.module_store.get_instance(self.course.id, location)
         num_attempts = 3
 
         # first store answers for each of the separate users:
         for _ in range(num_attempts):
             for username in self.userlist:
-                self.submit_student_answer(username, problem_url_name, ['Option 1', 'Option 1'])
+                self.submit_student_answer(username, problem_url_name, [OPTION_1, OPTION_1])
 
         for username in self.userlist:
             self.assertEquals(self.get_num_attempts(username, descriptor), num_attempts)
...@@ -419,28 +399,13 @@ class TestResetAttemptsTask(TestIntegrationTask): ...@@ -419,28 +399,13 @@ class TestResetAttemptsTask(TestIntegrationTask):
"""Simulate a failure in resetting attempts on a problem""" """Simulate a failure in resetting attempts on a problem"""
problem_url_name = 'H1P1' problem_url_name = 'H1P1'
self.define_option_problem(problem_url_name) self.define_option_problem(problem_url_name)
self.submit_student_answer('u1', problem_url_name, ['Option 1', 'Option 1']) self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
expected_message = "bad things happened" expected_message = "bad things happened"
with patch('courseware.models.StudentModule.save') as mock_save: with patch('courseware.models.StudentModule.save') as mock_save:
mock_save.side_effect = ZeroDivisionError(expected_message) mock_save.side_effect = ZeroDivisionError(expected_message)
instructor_task = self.reset_problem_attempts('instructor', problem_url_name) instructor_task = self.reset_problem_attempts('instructor', problem_url_name)
self._assert_task_failure(instructor_task.id, 'reset_problem_attempts', problem_url_name, expected_message)
# check instructor_task
instructor_task = InstructorTask.objects.get(id=instructor_task.id)
self.assertEqual(instructor_task.task_state, FAILURE)
self.assertEqual(instructor_task.requester.username, 'instructor')
self.assertEqual(instructor_task.task_type, 'reset_problem_attempts')
task_input = json.loads(instructor_task.task_input)
self.assertFalse('student' in task_input)
self.assertEqual(task_input['problem_url'], InstructorTaskTestCase.problem_location(problem_url_name))
status = json.loads(instructor_task.task_output)
self.assertEqual(status['exception'], 'ZeroDivisionError')
self.assertEqual(status['message'], expected_message)
# check status returned:
status = self.get_task_status(instructor_task.task_id)
self.assertEqual(status['message'], expected_message)
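Both failure tests in this file now funnel their assertions through _assert_task_failure, which this diff does not show. A sketch reconstructed from the assertion block it replaces, assuming it is defined on TestIntegrationTask, reuses the file's existing imports (json, FAILURE, InstructorTask), and hard-codes the requester and ZeroDivisionError expectations shared by both call sites:

    def _assert_task_failure(self, entry_id, task_type, problem_url_name, expected_message):
        """Confirm that the InstructorTask entry recorded the expected failure."""
        instructor_task = InstructorTask.objects.get(id=entry_id)
        self.assertEqual(instructor_task.task_state, FAILURE)
        self.assertEqual(instructor_task.requester.username, 'instructor')
        self.assertEqual(instructor_task.task_type, task_type)
        task_input = json.loads(instructor_task.task_input)
        self.assertFalse('student' in task_input)
        self.assertEqual(task_input['problem_url'],
                         InstructorTaskModuleTestCase.problem_location(problem_url_name))
        status = json.loads(instructor_task.task_output)
        self.assertEqual(status['exception'], 'ZeroDivisionError')
        self.assertEqual(status['message'], expected_message)
        # also check the status as reported through the status API:
        self.assertEqual(self.get_task_status(instructor_task.task_id)['message'], expected_message)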
def test_reset_non_problem(self): def test_reset_non_problem(self):
"""confirm that a non-problem can still be successfully reset""" """confirm that a non-problem can still be successfully reset"""
...@@ -449,12 +414,6 @@ class TestResetAttemptsTask(TestIntegrationTask): ...@@ -449,12 +414,6 @@ class TestResetAttemptsTask(TestIntegrationTask):
instructor_task = InstructorTask.objects.get(id=instructor_task.id) instructor_task = InstructorTask.objects.get(id=instructor_task.id)
self.assertEqual(instructor_task.task_state, SUCCESS) self.assertEqual(instructor_task.task_state, SUCCESS)
def test_reset_nonexistent_problem(self):
"""confirm that a non-existent problem will not submit"""
problem_url_name = 'NonexistentProblem'
with self.assertRaises(ItemNotFoundError):
self.reset_problem_attempts('instructor', problem_url_name)
class TestDeleteProblemTask(TestIntegrationTask): class TestDeleteProblemTask(TestIntegrationTask):
""" """
...@@ -474,18 +433,18 @@ class TestDeleteProblemTask(TestIntegrationTask): ...@@ -474,18 +433,18 @@ class TestDeleteProblemTask(TestIntegrationTask):
def delete_problem_state(self, instructor, problem_url_name): def delete_problem_state(self, instructor, problem_url_name):
"""Submits the current problem for deletion""" """Submits the current problem for deletion"""
return submit_delete_problem_state_for_all_students(self.create_task_request(instructor), self.course.id, return submit_delete_problem_state_for_all_students(self.create_task_request(instructor), self.course.id,
InstructorTaskTestCase.problem_location(problem_url_name)) InstructorTaskModuleTestCase.problem_location(problem_url_name))
def test_delete_problem_state(self): def test_delete_problem_state(self):
'''Run delete-state scenario on option problem''' """Run delete-state scenario on option problem"""
# get descriptor: # get descriptor:
problem_url_name = 'H1P1' problem_url_name = 'H1P1'
self.define_option_problem(problem_url_name) self.define_option_problem(problem_url_name)
location = InstructorTaskTestCase.problem_location(problem_url_name) location = InstructorTaskModuleTestCase.problem_location(problem_url_name)
descriptor = self.module_store.get_instance(self.course.id, location) descriptor = self.module_store.get_instance(self.course.id, location)
# first store answers for each of the separate users: # first store answers for each of the separate users:
for username in self.userlist: for username in self.userlist:
self.submit_student_answer(username, problem_url_name, ['Option 1', 'Option 1']) self.submit_student_answer(username, problem_url_name, [OPTION_1, OPTION_1])
# confirm that state exists: # confirm that state exists:
for username in self.userlist: for username in self.userlist:
self.assertTrue(self.get_student_module(username, descriptor) is not None) self.assertTrue(self.get_student_module(username, descriptor) is not None)
...@@ -500,28 +459,13 @@ class TestDeleteProblemTask(TestIntegrationTask): ...@@ -500,28 +459,13 @@ class TestDeleteProblemTask(TestIntegrationTask):
"""Simulate a failure in deleting state of a problem""" """Simulate a failure in deleting state of a problem"""
problem_url_name = 'H1P1' problem_url_name = 'H1P1'
self.define_option_problem(problem_url_name) self.define_option_problem(problem_url_name)
self.submit_student_answer('u1', problem_url_name, ['Option 1', 'Option 1']) self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])
expected_message = "bad things happened" expected_message = "bad things happened"
with patch('courseware.models.StudentModule.delete') as mock_delete: with patch('courseware.models.StudentModule.delete') as mock_delete:
mock_delete.side_effect = ZeroDivisionError(expected_message) mock_delete.side_effect = ZeroDivisionError(expected_message)
instructor_task = self.delete_problem_state('instructor', problem_url_name) instructor_task = self.delete_problem_state('instructor', problem_url_name)
self._assert_task_failure(instructor_task.id, 'delete_problem_state', problem_url_name, expected_message)
# check instructor_task returned
instructor_task = InstructorTask.objects.get(id=instructor_task.id)
self.assertEqual(instructor_task.task_state, FAILURE)
self.assertEqual(instructor_task.requester.username, 'instructor')
self.assertEqual(instructor_task.task_type, 'delete_problem_state')
task_input = json.loads(instructor_task.task_input)
self.assertFalse('student' in task_input)
self.assertEqual(task_input['problem_url'], InstructorTaskTestCase.problem_location(problem_url_name))
status = json.loads(instructor_task.task_output)
self.assertEqual(status['exception'], 'ZeroDivisionError')
self.assertEqual(status['message'], expected_message)
# check status returned:
status = self.get_task_status(instructor_task.task_id)
self.assertEqual(status['message'], expected_message)
def test_delete_non_problem(self): def test_delete_non_problem(self):
"""confirm that a non-problem can still be successfully deleted""" """confirm that a non-problem can still be successfully deleted"""
...@@ -529,9 +473,3 @@ class TestDeleteProblemTask(TestIntegrationTask): ...@@ -529,9 +473,3 @@ class TestDeleteProblemTask(TestIntegrationTask):
instructor_task = self.delete_problem_state('instructor', problem_url_name) instructor_task = self.delete_problem_state('instructor', problem_url_name)
instructor_task = InstructorTask.objects.get(id=instructor_task.id) instructor_task = InstructorTask.objects.get(id=instructor_task.id)
self.assertEqual(instructor_task.task_state, SUCCESS) self.assertEqual(instructor_task.task_state, SUCCESS)
def test_delete_nonexistent_module(self):
"""confirm that a non-existent module will not submit"""
problem_url_name = 'NonexistentProblem'
with self.assertRaises(ItemNotFoundError):
self.delete_problem_state('instructor', problem_url_name)
...@@ -5,7 +5,6 @@ Runs tasks on answers to course problems to validate that code ...@@ -5,7 +5,6 @@ Runs tasks on answers to course problems to validate that code
paths actually work. paths actually work.
""" """
import logging
import json import json
from uuid import uuid4 from uuid import uuid4
...@@ -20,13 +19,12 @@ from courseware.tests.factories import StudentModuleFactory ...@@ -20,13 +19,12 @@ from courseware.tests.factories import StudentModuleFactory
from student.tests.factories import UserFactory from student.tests.factories import UserFactory
from instructor_task.models import InstructorTask from instructor_task.models import InstructorTask
from instructor_task.tests.test_base import InstructorTaskTestCase, TEST_COURSE_ORG, TEST_COURSE_NUMBER from instructor_task.tests.test_base import InstructorTaskModuleTestCase, TEST_COURSE_ORG, TEST_COURSE_NUMBER
from instructor_task.tests.factories import InstructorTaskFactory from instructor_task.tests.factories import InstructorTaskFactory
from instructor_task.tasks import rescore_problem, reset_problem_attempts, delete_problem_state from instructor_task.tasks import rescore_problem, reset_problem_attempts, delete_problem_state
from instructor_task.tasks_helper import UpdateProblemModuleStateError from instructor_task.tasks_helper import UpdateProblemModuleStateError, update_problem_module_state
log = logging.getLogger(__name__)
PROBLEM_URL_NAME = "test_urlname" PROBLEM_URL_NAME = "test_urlname"
...@@ -34,12 +32,12 @@ class TestTaskFailure(Exception): ...@@ -34,12 +32,12 @@ class TestTaskFailure(Exception):
pass pass
class TestInstructorTasks(InstructorTaskTestCase): class TestInstructorTasks(InstructorTaskModuleTestCase):
def setUp(self): def setUp(self):
super(InstructorTaskTestCase, self).setUp() super(InstructorTaskModuleTestCase, self).setUp()
self.initialize_course() self.initialize_course()
self.instructor = self.create_instructor('instructor') self.instructor = self.create_instructor('instructor')
self.problem_url = InstructorTaskTestCase.problem_location(PROBLEM_URL_NAME) self.problem_url = InstructorTaskModuleTestCase.problem_location(PROBLEM_URL_NAME)
def _create_input_entry(self, student_ident=None): def _create_input_entry(self, student_ident=None):
"""Creates a InstructorTask entry for testing.""" """Creates a InstructorTask entry for testing."""
...@@ -63,7 +61,7 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -63,7 +61,7 @@ class TestInstructorTasks(InstructorTaskTestCase):
'request_info': {}, 'request_info': {},
} }
def _run_task_with_mock_celery(self, task_class, entry_id, task_id, expected_failure_message=None): def _run_task_with_mock_celery(self, task_function, entry_id, task_id, expected_failure_message=None):
self.current_task = Mock() self.current_task = Mock()
self.current_task.request = Mock() self.current_task.request = Mock()
self.current_task.request.id = task_id self.current_task.request.id = task_id
...@@ -72,43 +70,57 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -72,43 +70,57 @@ class TestInstructorTasks(InstructorTaskTestCase):
self.current_task.update_state.side_effect = TestTaskFailure(expected_failure_message) self.current_task.update_state.side_effect = TestTaskFailure(expected_failure_message)
with patch('instructor_task.tasks_helper._get_current_task') as mock_get_task: with patch('instructor_task.tasks_helper._get_current_task') as mock_get_task:
mock_get_task.return_value = self.current_task mock_get_task.return_value = self.current_task
return task_class(entry_id, self._get_xmodule_instance_args()) return task_function(entry_id, self._get_xmodule_instance_args())
def test_missing_current_task(self): def _test_missing_current_task(self, task_function):
# run without (mock) Celery running # run without (mock) Celery running
task_entry = self._create_input_entry() task_entry = self._create_input_entry()
with self.assertRaises(UpdateProblemModuleStateError): with self.assertRaises(UpdateProblemModuleStateError):
reset_problem_attempts(task_entry.id, self._get_xmodule_instance_args()) task_function(task_entry.id, self._get_xmodule_instance_args())
def test_undefined_problem(self): def test_rescore_missing_current_task(self):
self._test_missing_current_task(rescore_problem)
def test_reset_missing_current_task(self):
self._test_missing_current_task(reset_problem_attempts)
def test_delete_missing_current_task(self):
self._test_missing_current_task(delete_problem_state)
def _test_undefined_problem(self, task_function):
# run with celery, but no problem defined # run with celery, but no problem defined
task_entry = self._create_input_entry() task_entry = self._create_input_entry()
with self.assertRaises(ItemNotFoundError): with self.assertRaises(ItemNotFoundError):
self._run_task_with_mock_celery(reset_problem_attempts, task_entry.id, task_entry.task_id) self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id)
def test_rescore_undefined_problem(self):
self._test_undefined_problem(rescore_problem)
def test_reset_undefined_problem(self):
self._test_undefined_problem(reset_problem_attempts)
def _assert_return_matches_entry(self, returned, entry_id): def test_delete_undefined_problem(self):
entry = InstructorTask.objects.get(id=entry_id) self._test_undefined_problem(delete_problem_state)
self.assertEquals(returned, json.loads(entry.task_output))
def _test_run_with_task(self, task_class, action_name, expected_num_updated): def _test_run_with_task(self, task_function, action_name, expected_num_updated):
# run with some StudentModules for the problem # run with some StudentModules for the problem
task_entry = self._create_input_entry() task_entry = self._create_input_entry()
status = self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id) status = self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id)
# check return value # check return value
self.assertEquals(status.get('attempted'), expected_num_updated) self.assertEquals(status.get('attempted'), expected_num_updated)
self.assertEquals(status.get('updated'), expected_num_updated) self.assertEquals(status.get('updated'), expected_num_updated)
self.assertEquals(status.get('total'), expected_num_updated) self.assertEquals(status.get('total'), expected_num_updated)
self.assertEquals(status.get('action_name'), action_name) self.assertEquals(status.get('action_name'), action_name)
self.assertTrue('duration_ms' in status) self.assertGreater(status.get('duration_ms'), 0)
# compare with entry in table: # compare with entry in table:
entry = InstructorTask.objects.get(id=task_entry.id) entry = InstructorTask.objects.get(id=task_entry.id)
self.assertEquals(json.loads(entry.task_output), status) self.assertEquals(json.loads(entry.task_output), status)
self.assertEquals(entry.task_state, SUCCESS) self.assertEquals(entry.task_state, SUCCESS)
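A subtlety in the duration check above: the assertion must compare the looked-up value, not the key name itself, because under Python 2 a bare string compares greater than any integer, so the test would pass no matter what the task recorded. The same applies to the reset-with-student variant further down. In miniature:

    # Python 2 orders mixed types instead of raising, so this expression is
    # always True there -- an assertion built on it would pass vacuously:
    'duration_ms' > 0
    # asserting on the looked-up value exercises the recorded duration instead:
    status = {'duration_ms': 42}
    assert status.get('duration_ms') > 0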
def _test_run_with_no_state(self, task_class, action_name): def _test_run_with_no_state(self, task_function, action_name):
# run with no StudentModules for the problem # run with no StudentModules for the problem
self.define_option_problem(PROBLEM_URL_NAME) self.define_option_problem(PROBLEM_URL_NAME)
self._test_run_with_task(task_class, action_name, 0) self._test_run_with_task(task_function, action_name, 0)
def test_rescore_with_no_state(self): def test_rescore_with_no_state(self):
self._test_run_with_no_state(rescore_problem, 'rescored') self._test_run_with_no_state(rescore_problem, 'rescored')
...@@ -119,7 +131,8 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -119,7 +131,8 @@ class TestInstructorTasks(InstructorTaskTestCase):
def test_delete_with_no_state(self): def test_delete_with_no_state(self):
self._test_run_with_no_state(delete_problem_state, 'deleted') self._test_run_with_no_state(delete_problem_state, 'deleted')
def _create_some_students(self, num_students, state=None): def _create_students_with_state(self, num_students, state=None):
"""Create students, a problem, and StudentModule objects for testing"""
self.define_option_problem(PROBLEM_URL_NAME) self.define_option_problem(PROBLEM_URL_NAME)
students = [ students = [
UserFactory.create(username='robot%d' % i, email='robot+test+%d@edx.org' % i) UserFactory.create(username='robot%d' % i, email='robot+test+%d@edx.org' % i)
...@@ -132,38 +145,37 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -132,38 +145,37 @@ class TestInstructorTasks(InstructorTaskTestCase):
state=state) state=state)
return students return students
def test_reset_with_some_state(self): def _assert_num_attempts(self, students, num_attempts):
initial_attempts = 3 """Check that the number of attempts is the same for all students"""
input_state = json.dumps({'attempts': initial_attempts})
num_students = 10
students = self._create_some_students(num_students, input_state)
# check that entries were set correctly
for student in students: for student in students:
module = StudentModule.objects.get(course_id=self.course.id, module = StudentModule.objects.get(course_id=self.course.id,
student=student, student=student,
module_state_key=self.problem_url) module_state_key=self.problem_url)
state = json.loads(module.state) state = json.loads(module.state)
self.assertEquals(state['attempts'], initial_attempts) self.assertEquals(state['attempts'], num_attempts)
def test_reset_with_some_state(self):
initial_attempts = 3
input_state = json.dumps({'attempts': initial_attempts})
num_students = 10
students = self._create_students_with_state(num_students, input_state)
# check that entries were set correctly
self._assert_num_attempts(students, initial_attempts)
# run the task # run the task
self._test_run_with_task(reset_problem_attempts, 'reset', num_students) self._test_run_with_task(reset_problem_attempts, 'reset', num_students)
# check that entries were reset # check that entries were reset
for student in students: self._assert_num_attempts(students, 0)
module = StudentModule.objects.get(course_id=self.course.id,
student=student,
module_state_key=self.problem_url)
state = json.loads(module.state)
self.assertEquals(state['attempts'], 0)
def test_delete_with_some_state(self): def test_delete_with_some_state(self):
# This will create StudentModule entries -- we don't have to worry about # This will create StudentModule entries -- we don't have to worry about
# the state inside them. # the state inside them.
num_students = 10 num_students = 10
students = self._create_some_students(num_students) students = self._create_students_with_state(num_students)
# check that entries were created correctly # check that entries were created correctly
for student in students: for student in students:
StudentModule.objects.get(course_id=self.course.id, StudentModule.objects.get(course_id=self.course.id,
student=student, student=student,
module_state_key=self.problem_url) module_state_key=self.problem_url)
self._test_run_with_task(delete_problem_state, 'deleted', num_students) self._test_run_with_task(delete_problem_state, 'deleted', num_students)
# confirm that no state can be found anymore: # confirm that no state can be found anymore:
for student in students: for student in students:
...@@ -177,7 +189,7 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -177,7 +189,7 @@ class TestInstructorTasks(InstructorTaskTestCase):
num_students = 10 num_students = 10
initial_attempts = 3 initial_attempts = 3
input_state = json.dumps({'attempts': initial_attempts}) input_state = json.dumps({'attempts': initial_attempts})
students = self._create_some_students(num_students, input_state) students = self._create_students_with_state(num_students, input_state)
# check that entries were set correctly # check that entries were set correctly
for student in students: for student in students:
module = StudentModule.objects.get(course_id=self.course.id, module = StudentModule.objects.get(course_id=self.course.id,
...@@ -198,7 +210,7 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -198,7 +210,7 @@ class TestInstructorTasks(InstructorTaskTestCase):
self.assertEquals(status.get('updated'), 1) self.assertEquals(status.get('updated'), 1)
self.assertEquals(status.get('total'), 1) self.assertEquals(status.get('total'), 1)
self.assertEquals(status.get('action_name'), 'reset') self.assertEquals(status.get('action_name'), 'reset')
self.assertTrue('duration_ms' in status) self.assertGreater(status.get('duration_ms'), 0)
# compare with entry in table: # compare with entry in table:
entry = InstructorTask.objects.get(id=task_entry.id) entry = InstructorTask.objects.get(id=task_entry.id)
self.assertEquals(json.loads(entry.task_output), status) self.assertEquals(json.loads(entry.task_output), status)
...@@ -220,15 +232,13 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -220,15 +232,13 @@ class TestInstructorTasks(InstructorTaskTestCase):
def test_reset_with_student_email(self): def test_reset_with_student_email(self):
self._test_reset_with_student(True) self._test_reset_with_student(True)
def _test_run_with_failure(self, task_class, expected_message): def _test_run_with_failure(self, task_function, expected_message):
# run with no StudentModules for the problem, # run with no StudentModules for the problem,
# because we will fail before entering the loop. # because we will fail before entering the loop.
task_entry = self._create_input_entry() task_entry = self._create_input_entry()
self.define_option_problem(PROBLEM_URL_NAME) self.define_option_problem(PROBLEM_URL_NAME)
try: with self.assertRaises(TestTaskFailure):
self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message) self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id, expected_message)
except TestTaskFailure:
pass
# compare with entry in table: # compare with entry in table:
entry = InstructorTask.objects.get(id=task_entry.id) entry = InstructorTask.objects.get(id=task_entry.id)
self.assertEquals(entry.task_state, FAILURE) self.assertEquals(entry.task_state, FAILURE)
...@@ -245,16 +255,14 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -245,16 +255,14 @@ class TestInstructorTasks(InstructorTaskTestCase):
def test_delete_with_failure(self): def test_delete_with_failure(self):
self._test_run_with_failure(delete_problem_state, 'We expected this to fail') self._test_run_with_failure(delete_problem_state, 'We expected this to fail')
def _test_run_with_long_error_msg(self, task_class): def _test_run_with_long_error_msg(self, task_function):
# run with an error message that is so long it will require # run with an error message that is so long it will require
# truncation (as well as the jettisoning of the traceback). # truncation (as well as the jettisoning of the traceback).
task_entry = self._create_input_entry() task_entry = self._create_input_entry()
self.define_option_problem(PROBLEM_URL_NAME) self.define_option_problem(PROBLEM_URL_NAME)
expected_message = "x" * 1500 expected_message = "x" * 1500
try: with self.assertRaises(TestTaskFailure):
self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message) self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id, expected_message)
except TestTaskFailure:
pass
# compare with entry in table: # compare with entry in table:
entry = InstructorTask.objects.get(id=task_entry.id) entry = InstructorTask.objects.get(id=task_entry.id)
self.assertEquals(entry.task_state, FAILURE) self.assertEquals(entry.task_state, FAILURE)
...@@ -273,17 +281,15 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -273,17 +281,15 @@ class TestInstructorTasks(InstructorTaskTestCase):
def test_delete_with_long_error_msg(self): def test_delete_with_long_error_msg(self):
self._test_run_with_long_error_msg(delete_problem_state) self._test_run_with_long_error_msg(delete_problem_state)
def _test_run_with_short_error_msg(self, task_class): def _test_run_with_short_error_msg(self, task_function):
# run with an error message that is short enough to fit # run with an error message that is short enough to fit
# in the output, but long enough that the traceback won't. # in the output, but long enough that the traceback won't.
# Confirm that the traceback is truncated. # Confirm that the traceback is truncated.
task_entry = self._create_input_entry() task_entry = self._create_input_entry()
self.define_option_problem(PROBLEM_URL_NAME) self.define_option_problem(PROBLEM_URL_NAME)
expected_message = "x" * 900 expected_message = "x" * 900
try: with self.assertRaises(TestTaskFailure):
self._run_task_with_mock_celery(task_class, task_entry.id, task_entry.task_id, expected_message) self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id, expected_message)
except TestTaskFailure:
pass
# compare with entry in table: # compare with entry in table:
entry = InstructorTask.objects.get(id=task_entry.id) entry = InstructorTask.objects.get(id=task_entry.id)
self.assertEquals(entry.task_state, FAILURE) self.assertEquals(entry.task_state, FAILURE)
...@@ -301,3 +307,26 @@ class TestInstructorTasks(InstructorTaskTestCase): ...@@ -301,3 +307,26 @@ class TestInstructorTasks(InstructorTaskTestCase):
def test_delete_with_short_error_msg(self): def test_delete_with_short_error_msg(self):
self._test_run_with_short_error_msg(delete_problem_state) self._test_run_with_short_error_msg(delete_problem_state)
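The two truncation tests above exercise output-shortening logic that lives in the task helpers rather than in this file. A plausible sketch of that behavior, assuming a 1024-character task_output column and illustrative names (neither the constant names nor the exact cutoffs are taken from this diff):

    MAX_LEN = 1024   # assumed task_output column size; the tests only pin it near 1023
    HEADROOM = 100   # assumed allowance for JSON keys, quoting, and the exception name

    def create_output_for_failure(exception, traceback_string):
        """Sketch of the truncation behavior exercised above; the real helper may differ."""
        message = str(exception)
        if len(message) > MAX_LEN - HEADROOM:
            # a very long message (the 1500-char case): truncate it and
            # jettison the traceback entirely
            return {'exception': type(exception).__name__,
                    'message': message[:MAX_LEN - HEADROOM]}
        output = {'exception': type(exception).__name__, 'message': message}
        if traceback_string is not None:
            # a shorter message (the 900-char case): keep it intact and store
            # only as much of the traceback as still fits
            remaining = MAX_LEN - HEADROOM - len(message)
            output['traceback'] = traceback_string[:remaining]
        return output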
def test_successful_result_too_long(self):
# while we don't expect the existing tasks to generate output that is too
# long, we can test that the framework handles such an occurrence.
task_entry = self._create_input_entry()
self.define_option_problem(PROBLEM_URL_NAME)
action_name = 'x' * 1000
update_fcn = lambda _module_descriptor, _student_module, _xmodule_instance_args: True
task_function = (lambda entry_id, xmodule_instance_args:
update_problem_module_state(entry_id,
update_fcn, action_name, filter_fcn=None,
xmodule_instance_args=None))
with self.assertRaises(ValueError):
self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id)
# compare with entry in table:
entry = InstructorTask.objects.get(id=task_entry.id)
self.assertEquals(entry.task_state, FAILURE)
self.assertGreater(1023, len(entry.task_output))
output = json.loads(entry.task_output)
self.assertEquals(output['exception'], 'ValueError')
self.assertTrue("Length of task output is too long" in output['message'])
self.assertTrue('traceback' not in output)
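test_successful_result_too_long relies on a length check performed when a successful result is converted for storage. A minimal sketch of that guard, assuming a 1024-character column limit (the test only pins it near 1023):

    import json

    MAX_LEN = 1024  # assumed size of the task_output column

    def create_output_for_success(returned_result):
        # Sketch: serialize the result and refuse anything that cannot
        # fit in the task_output column.
        json_output = json.dumps(returned_result)
        if len(json_output) > MAX_LEN:
            raise ValueError("Length of task output is too long: {0}".format(len(json_output)))
        return json_output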
"""
Tests for LMS instructor views that report the status of background tasks.
"""
import json
from celery.states import SUCCESS, FAILURE, REVOKED, PENDING
from mock import Mock, patch
from django.utils.datastructures import MultiValueDict
from instructor_task.models import PROGRESS
from instructor_task.tests.test_base import (InstructorTaskTestCase,
TEST_FAILURE_MESSAGE,
TEST_FAILURE_EXCEPTION)
from instructor_task.views import instructor_task_status, get_task_completion_info
class InstructorTaskReportTest(InstructorTaskTestCase):
"""
Tests API and view methods that involve the reporting of status for background tasks.
"""
def _get_instructor_task_status(self, task_id):
"""Returns status corresponding to task_id via api method."""
request = Mock()
request.REQUEST = {'task_id': task_id}
return instructor_task_status(request)
def test_instructor_task_status(self):
instructor_task = self._create_failure_entry()
task_id = instructor_task.task_id
request = Mock()
request.REQUEST = {'task_id': task_id}
response = instructor_task_status(request)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
def test_instructor_task_status_list(self):
# Fetch status for existing tasks by arg list, as if called from ajax.
# Note that jQuery's ajax marshalling of list data appends
# "[]" to the key name, so that is the key the view receives.
task_ids = [(self._create_failure_entry()).task_id for _ in range(1, 5)]
request = Mock()
request.REQUEST = MultiValueDict({'task_ids[]': task_ids})
response = instructor_task_status(request)
output = json.loads(response.content)
self.assertEquals(len(output), len(task_ids))
for task_id in task_ids:
self.assertEquals(output[task_id]['task_id'], task_id)
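The "[]" suffix is jQuery's default serialization for array-valued POST parameters. On the Django side, instructor_task_status presumably recovers the list from the MultiValueDict via getlist; a sketch of that lookup, with the helper name assumed:

    def _extract_task_ids(request):
        # Single-task requests arrive under 'task_id'; ajax list requests
        # arrive under the jQuery-style 'task_ids[]' key.
        if 'task_id' in request.REQUEST:
            return [request.REQUEST['task_id']]
        # MultiValueDict.getlist returns every value posted under the key
        return request.REQUEST.getlist('task_ids[]')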
def test_get_status_from_failure(self):
# get status for a task that has already failed
instructor_task = self._create_failure_entry()
task_id = instructor_task.task_id
response = self._get_instructor_task_status(task_id)
output = json.loads(response.content)
self.assertEquals(output['message'], TEST_FAILURE_MESSAGE)
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], FAILURE)
self.assertFalse(output['in_progress'])
expected_progress = {'exception': TEST_FAILURE_EXCEPTION,
'message': TEST_FAILURE_MESSAGE}
self.assertEquals(output['task_progress'], expected_progress)
def test_get_status_from_success(self):
# get status for a task that has already succeeded
instructor_task = self._create_success_entry()
task_id = instructor_task.task_id
response = self._get_instructor_task_status(task_id)
output = json.loads(response.content)
self.assertEquals(output['message'], "Problem rescored for 2 of 3 students (out of 5)")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], SUCCESS)
self.assertFalse(output['in_progress'])
expected_progress = {'attempted': 3,
'updated': 2,
'total': 5,
'action_name': 'rescored'}
self.assertEquals(output['task_progress'], expected_progress)
def _test_get_status_from_result(self, task_id, mock_result):
"""
Provides mock result to caller of instructor_task_status, and returns resulting output.
"""
with patch('celery.result.AsyncResult.__new__') as mock_result_ctor:
mock_result_ctor.return_value = mock_result
response = self._get_instructor_task_status(task_id)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
return output
def test_get_status_to_pending(self):
# get status for a task that hasn't begun to run yet
instructor_task = self._create_entry()
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = PENDING
output = self._test_get_status_from_result(task_id, mock_result)
for key in ['message', 'succeeded', 'task_progress']:
self.assertTrue(key not in output)
self.assertEquals(output['task_state'], 'PENDING')
self.assertTrue(output['in_progress'])
def test_update_progress_to_progress(self):
# view task entry for task in progress
instructor_task = self._create_progress_entry()
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = PROGRESS
mock_result.result = {'attempted': 5,
'updated': 4,
'total': 10,
'action_name': 'rescored'}
output = self._test_get_status_from_result(task_id, mock_result)
self.assertEquals(output['message'], "Progress: rescored 4 of 5 so far (out of 10)")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_state'], PROGRESS)
self.assertTrue(output['in_progress'])
self.assertEquals(output['task_progress'], mock_result.result)
def test_update_progress_to_failure(self):
# view task entry for task in progress that later fails
instructor_task = self._create_progress_entry()
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = FAILURE
mock_result.result = NotImplementedError("This task later failed.")
mock_result.traceback = "random traceback"
output = self._test_get_status_from_result(task_id, mock_result)
self.assertEquals(output['message'], "This task later failed.")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_state'], FAILURE)
self.assertFalse(output['in_progress'])
expected_progress = {'exception': 'NotImplementedError',
'message': "This task later failed.",
'traceback': "random traceback"}
self.assertEquals(output['task_progress'], expected_progress)
def test_update_progress_to_revoked(self):
# view task entry for task in progress that is later revoked
instructor_task = self._create_progress_entry()
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = REVOKED
output = self._test_get_status_from_result(task_id, mock_result)
self.assertEquals(output['message'], "Task revoked before running")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_state'], REVOKED)
self.assertFalse(output['in_progress'])
expected_progress = {'message': "Task revoked before running"}
self.assertEquals(output['task_progress'], expected_progress)
def _get_output_for_task_success(self, attempted, updated, total, student=None):
"""returns the task_id and the result returned by instructor_task_status()."""
# view task entry for task in progress
instructor_task = self._create_progress_entry(student)
task_id = instructor_task.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = SUCCESS
mock_result.result = {'attempted': attempted,
'updated': updated,
'total': total,
'action_name': 'rescored'}
output = self._test_get_status_from_result(task_id, mock_result)
return output
def test_update_progress_to_success(self):
output = self._get_output_for_task_success(10, 8, 10)
self.assertEquals(output['message'], "Problem rescored for 8 of 10 students")
self.assertEquals(output['succeeded'], False)
self.assertEquals(output['task_state'], SUCCESS)
self.assertFalse(output['in_progress'])
expected_progress = {'attempted': 10,
'updated': 8,
'total': 10,
'action_name': 'rescored'}
self.assertEquals(output['task_progress'], expected_progress)
def test_success_messages(self):
output = self._get_output_for_task_success(0, 0, 10)
self.assertEqual(output['message'], "Unable to find any students with submissions to be rescored (out of 10)")
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(10, 0, 10)
self.assertEqual(output['message'], "Problem failed to be rescored for any of 10 students")
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(10, 8, 10)
self.assertEqual(output['message'], "Problem rescored for 8 of 10 students")
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(9, 8, 10)
self.assertEqual(output['message'], "Problem rescored for 8 of 9 students (out of 10)")
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(10, 10, 10)
self.assertEqual(output['message'], "Problem successfully rescored for 10 students")
self.assertTrue(output['succeeded'])
output = self._get_output_for_task_success(0, 0, 1, student=self.student)
self.assertTrue("Unable to find submission to be rescored for student" in output['message'])
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(1, 0, 1, student=self.student)
self.assertTrue("Problem failed to be rescored for student" in output['message'])
self.assertFalse(output['succeeded'])
output = self._get_output_for_task_success(1, 1, 1, student=self.student)
self.assertTrue("Problem successfully rescored for student" in output['message'])
self.assertTrue(output['succeeded'])
def test_get_info_for_queuing_task(self):
# get status for a task that is still running:
instructor_task = self._create_entry()
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "No status information available")
def test_get_info_for_missing_output(self):
# check for missing task_output
instructor_task = self._create_success_entry()
instructor_task.task_output = None
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "No status information available")
def test_get_info_for_broken_output(self):
# check for non-JSON task_output
instructor_task = self._create_success_entry()
instructor_task.task_output = "{ bad"
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "No parsable status information available")
def test_get_info_for_empty_output(self):
# check for JSON task_output with missing keys
instructor_task = self._create_success_entry()
instructor_task.task_output = "{}"
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "No progress status information available")
def test_get_info_for_broken_input(self):
# check for non-JSON task_input, but then just ignore it
instructor_task = self._create_success_entry()
instructor_task.task_input = "{ bad"
succeeded, message = get_task_completion_info(instructor_task)
self.assertFalse(succeeded)
self.assertEquals(message, "Problem rescored for 2 of 3 students (out of 5)")
...@@ -39,7 +39,7 @@ def instructor_task_status(request): ...@@ -39,7 +39,7 @@ def instructor_task_status(request):
'message': on complete tasks, status message reporting on final progress, 'message': on complete tasks, status message reporting on final progress,
or providing exception message if failed. For tasks in progress, or providing exception message if failed. For tasks in progress,
indicates the current progress. indicates the current progress.
'succeeded': on complete tasks or tasks in progress, indicates if the 'succeeded': on complete tasks or tasks in progress, a boolean indicating whether the
task outcome was successful: did it achieve what it set out to do. task outcome was successful: did it achieve what it set out to do.
This is in contrast with a successful task_state, which indicates that the This is in contrast with a successful task_state, which indicates that the
task merely completed. task merely completed.
...@@ -125,10 +125,10 @@ def get_task_completion_info(instructor_task): ...@@ -125,10 +125,10 @@ def get_task_completion_info(instructor_task):
log.warning(fmt.format(instructor_task.task_id, instructor_task.task_output)) log.warning(fmt.format(instructor_task.task_id, instructor_task.task_output))
return (succeeded, "No progress status information available") return (succeeded, "No progress status information available")
action_name = task_output.get('action_name') action_name = task_output['action_name']
num_attempted = task_output.get('attempted') num_attempted = task_output['attempted']
num_updated = task_output.get('updated') num_updated = task_output['updated']
num_total = task_output.get('total') num_total = task_output['total']
student = None student = None
try: try:
......
// Define an InstructorTaskProgress object for updating a table on the instructor
// dashboard that shows the background tasks currently running
// for the instructor's course. Any tasks that were running when the page is
// first displayed are passed in as instructor_tasks, and populate the "Pending Instructor
// Task" table. The InstructorTaskProgress is bound to this table, and periodically
// polls the LMS to see if any of the tasks has completed. Once a task is complete,
// it is not included in any further polling.
(function() {
var __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };
this.InstructorTaskProgress = (function() {
function InstructorTaskProgress(element) {
this.update_progress = __bind(this.update_progress, this);
this.get_status = __bind(this.get_status, this);
this.element = element;
this.entries = $(element).find('.task-progress-entry');
if (window.queuePollerID) {
window.clearTimeout(window.queuePollerID);
}
// Hardcode the initial delay before the first refresh to one second:
window.queuePollerID = window.setTimeout(this.get_status, 1000);
}
InstructorTaskProgress.prototype.$ = function(selector) {
return $(selector, this.element);
};
InstructorTaskProgress.prototype.update_progress = function(response) {
var _this = this;
// Response should be a dict with an entry for each requested task_id,
// with a "task-state" and "in_progress" key and optionally a "message"
// and a "task_progress.duration" key.
var something_in_progress = false;
for (var task_id in response) {
var task_dict = response[task_id];
// find the corresponding entry, and update it:
var entry = $(_this.element).find('[data-task-id="' + task_id + '"]');
entry.find('.task-state').text(task_dict.task_state);
var duration_value = (task_dict.task_progress && task_dict.task_progress.duration_ms
&& Math.round(task_dict.task_progress.duration_ms/1000)) || 'unknown';
entry.find('.task-duration').text(duration_value);
var progress_value = task_dict.message || '';
entry.find('.task-progress').text(progress_value);
// if the task is complete, then change the entry so it won't
// be queried again. Otherwise set a flag.
if (task_dict.in_progress === true) {
something_in_progress = true;
} else {
entry.data('inProgress', "False");
}
}
// if some entries are still incomplete, then repoll:
// Hardcode the refresh interval to be every five seconds.
// TODO: allow the refresh interval to be set. (And if it is disabled,
// then don't set the timeout at all.)
if (something_in_progress) {
window.queuePollerID = window.setTimeout(_this.get_status, 5000);
} else {
delete window.queuePollerID;
}
};
InstructorTaskProgress.prototype.get_status = function() {
var _this = this;
var task_ids = [];
// Construct the array of ids to get status for, by
// including the subset of entries that are still in progress.
this.entries.each(function(idx, element) {
var task_id = $(element).data('taskId');
var in_progress = $(element).data('inProgress');
if (in_progress="True") {
task_ids.push(task_id);
}
});
// Make call to get status for these ids.
// Note that the keyname here ends up with "[]" being appended
// in the POST parameter that shows up on the Django server.
// TODO: add error handler.
var ajax_url = '/instructor_task_status/';
var data = {'task_ids': task_ids };
$.post(ajax_url, data).done(this.update_progress);
};
return InstructorTaskProgress;
})();
}).call(this);
// once the page is rendered, create the progress object
var instructorTaskProgress;
$(document).ready(function() {
instructorTaskProgress = new InstructorTaskProgress($('#task-progress-wrapper'));
});
...@@ -9,112 +9,9 @@ ...@@ -9,112 +9,9 @@
<script type="text/javascript" src="${static.url('js/vendor/jquery-jvectormap-1.1.1/jquery-jvectormap-1.1.1.min.js')}"></script> <script type="text/javascript" src="${static.url('js/vendor/jquery-jvectormap-1.1.1/jquery-jvectormap-1.1.1.min.js')}"></script>
<script type="text/javascript" src="${static.url('js/vendor/jquery-jvectormap-1.1.1/jquery-jvectormap-world-mill-en.js')}"></script> <script type="text/javascript" src="${static.url('js/vendor/jquery-jvectormap-1.1.1/jquery-jvectormap-world-mill-en.js')}"></script>
<script type="text/javascript" src="${static.url('js/course_groups/cohorts.js')}"></script> <script type="text/javascript" src="${static.url('js/course_groups/cohorts.js')}"></script>
%if instructor_tasks is not None: %if instructor_tasks is not None:
<script type="text/javascript"> <script type="text/javascript" src="${static.url('js/pending_tasks.js')}"></script>>
// Define an InstructorTaskProgress object for updating a table on the instructor
// dashboard that shows the current background tasks that are currently running
// for the instructor's course. Any tasks that were running when the page is
// first displayed are passed in as instructor_tasks, and populate the "Pending Instructor
// Task" table. The InstructorTaskProgress is bound to this table, and periodically
// polls the LMS to see if any of the tasks has completed. Once a task is complete,
// it is not included in any further polling.
(function() {
var __bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; };
this.InstructorTaskProgress = (function() {
function InstructorTaskProgress(element) {
this.update_progress = __bind(this.update_progress, this);
this.get_status = __bind(this.get_status, this);
this.element = element;
this.entries = $(element).find('.task-progress-entry')
if (window.queuePollerID) {
window.clearTimeout(window.queuePollerID);
}
// Hardcode the initial delay before the first refresh to one second:
window.queuePollerID = window.setTimeout(this.get_status, 1000);
}
InstructorTaskProgress.prototype.$ = function(selector) {
return $(selector, this.element);
};
InstructorTaskProgress.prototype.update_progress = function(response) {
var _this = this;
// Response should be a dict with an entry for each requested task_id,
// with a "task-state" and "in_progress" key and optionally a "message"
// and a "task_progress.duration" key.
var something_in_progress = false;
for (task_id in response) {
var task_dict = response[task_id];
// find the corresponding entry, and update it:
entry = $(_this.element).find('[data-task-id="' + task_id + '"]');
entry.find('.task-state').text(task_dict.task_state)
var duration_value = (task_dict.task_progress && task_dict.task_progress.duration_ms
&& Math.round(task_dict.task_progress.duration_ms/1000)) || 'unknown';
entry.find('.task-duration').text(duration_value);
var progress_value = task_dict.message || '';
entry.find('.task-progress').text(progress_value);
// if the task is complete, then change the entry so it won't
// be queried again. Otherwise set a flag.
if (task_dict.in_progress === true) {
something_in_progress = true;
} else {
entry.data('inProgress', "False")
}
}
// if some entries are still incomplete, then repoll:
// Hardcode the refresh interval to be every five seconds.
// TODO: allow the refresh interval to be set. (And if it is disabled,
// then don't set the timeout at all.)
if (something_in_progress) {
window.queuePollerID = window.setTimeout(_this.get_status, 5000);
} else {
delete window.queuePollerID;
}
}
InstructorTaskProgress.prototype.get_status = function() {
var _this = this;
var task_ids = [];
// Construct the array of ids to get status for, by
// including the subset of entries that are still in progress.
this.entries.each(function(idx, element) {
var task_id = $(element).data('taskId');
var in_progress = $(element).data('inProgress');
if (in_progress="True") {
task_ids.push(task_id);
}
});
// Make call to get status for these ids.
// Note that the keyname here ends up with "[]" being appended
// in the POST parameter that shows up on the Django server.
// TODO: add error handler.
var ajax_url = '/instructor_task_status/';
var data = {'task_ids': task_ids };
$.post(ajax_url, data).done(this.update_progress);
};
return InstructorTaskProgress;
})();
}).call(this);
// once the page is rendered, create the progress object
var instructorTaskProgress;
$(document).ready(function() {
instructorTaskProgress = new InstructorTaskProgress($('#task-progress-wrapper'));
});
</script>
%endif %endif
</%block> </%block>
<%include file="/courseware/course_navigation.html" args="active_page='instructor'" /> <%include file="/courseware/course_navigation.html" args="active_page='instructor'" />
...@@ -304,7 +201,7 @@ function goto( mode) ...@@ -304,7 +201,7 @@ function goto( mode)
<p> <p>
Specify a particular problem in the course here by its url: Specify a particular problem in the course here by its url:
<input type="text" name="problem_for_all_students" size="60"> <input type="text" name="problem_for_all_students" size="60">
</p> </p>
<p> <p>
You may use just the "urlname" for a problem, or "modulename/urlname" otherwise. You may use just the "urlname" for a problem, or "modulename/urlname" otherwise.
...@@ -360,7 +257,7 @@ function goto( mode) ...@@ -360,7 +257,7 @@ function goto( mode)
%if instructor_access: %if instructor_access:
<p> <p>
You may also delete the entire state of a student for the specified module: You may also delete the entire state of a student for the specified module:
<input type="submit" name="action" value="Delete student state for module"> <input type="submit" name="action" value="Delete student state for module">
</p> </p>
%endif %endif
%if settings.MITX_FEATURES.get('ENABLE_COURSE_BACKGROUND_TASKS'): %if settings.MITX_FEATURES.get('ENABLE_COURSE_BACKGROUND_TASKS'):
......