Commit 2c5e038f by Brian Wilson

initial test_tasks

parent 3bd2b082
@@ -14,7 +14,6 @@ from xmodule.modulestore.django import modulestore
 from instructor_task.models import InstructorTask
 from instructor_task.tasks_helper import PROGRESS
 log = logging.getLogger(__name__)
 # define a "state" used in InstructorTask
@@ -49,6 +48,13 @@ def _reserve_task(course_id, task_type, task_key, task_input, requester):
     will cause any pending transaction to be committed by a successful
     save here. Any future database operations will take place in a
     separate transaction.
+
+    Note that there is a chance of a race condition here, when two users
+    try to run the same task at almost exactly the same time. One user
+    could be after the check and before the create when the second user
+    gets to the check. At that point, both users are able to run their
+    tasks simultaneously. This is deemed a small enough risk to not
+    put in further safeguards.
     """
     if _task_is_running(course_id, task_type, task_key):
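To make the race concrete, the check-then-create window described in the new docstring looks roughly like the sketch below. This is an illustration only: `_task_is_running` and `InstructorTask` appear in this diff, but the function body shown here is assumed, not the actual `_reserve_task` implementation.

```python
# Illustrative sketch of the check-then-create window (the body is assumed,
# only _task_is_running and InstructorTask are named in this commit).
def _reserve_task_sketch(course_id, task_type, task_key, task_input, requester):
    if _task_is_running(course_id, task_type, task_key):   # the "check"
        raise Exception("requested task is already running")  # actual error type not shown in this diff
    # A second request arriving here -- after the check but before the create --
    # will also pass its own check, so both tasks end up running.
    return InstructorTask.create(course_id, task_type, task_key, task_input, requester)  # the "create"
```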
@@ -49,6 +49,11 @@ class UpdateProblemModuleStateError(Exception):
     pass
 
 
+def _get_current_task():
+    """Stub to make it easier to test without actually running Celery"""
+    return current_task
+
+
 def _perform_module_state_update(course_id, module_state_key, student_identifier, update_fcn, action_name, filter_fcn,
                                  xmodule_instance_args):
     """
@@ -137,12 +142,12 @@ def _perform_module_state_update(course_id, module_state_key, student_identifier
         return progress
 
     task_progress = get_task_progress()
-    current_task.update_state(state=PROGRESS, meta=task_progress)
+    _get_current_task().update_state(state=PROGRESS, meta=task_progress)
     for module_to_update in modules_to_update:
         num_attempted += 1
         # There is no try here: if there's an error, we let it throw, and the task will
         # be marked as FAILED, with a stack trace.
-        with dog_stats_api.timer('courseware.tasks.module.{0}.time'.format(action_name)):
+        with dog_stats_api.timer('instructor_tasks.module.{0}.time'.format(action_name)):
            if update_fcn(module_descriptor, module_to_update, xmodule_instance_args):
                # If the update_fcn returns true, then it performed some kind of work.
                # Logging of failures is left to the update_fcn itself.
@@ -150,7 +155,7 @@ def _perform_module_state_update(course_id, module_state_key, student_identifier
 
     # update task status:
     task_progress = get_task_progress()
-    current_task.update_state(state=PROGRESS, meta=task_progress)
+    _get_current_task().update_state(state=PROGRESS, meta=task_progress)
 
     return task_progress
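For reference, the `meta` dict attached to these PROGRESS updates appears to carry the same keys the status view reads later in this commit ('action_name', 'attempted', 'updated', 'total'); the values below are illustrative assumptions:

```python
# Assumed shape of task_progress, inferred from the keys the status view expects.
task_progress = {
    'action_name': 'reset',  # what update_fcn is doing
    'attempted': 12,         # modules visited so far
    'updated': 10,           # modules actually changed
    'total': 30,             # total modules matched
}
_get_current_task().update_state(state=PROGRESS, meta=task_progress)
```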
@@ -162,7 +167,7 @@ def _save_course_task(course_task):
 
 def update_problem_module_state(entry_id, update_fcn, action_name, filter_fcn,
                                 xmodule_instance_args):
     """
     Performs generic update by visiting StudentModule instances with the update_fcn provided.
@@ -219,7 +224,7 @@ def update_problem_module_state(entry_id, update_fcn, action_name, filter_fcn,
     try:
         # check that the task_id submitted in the InstructorTask matches the current task
         # that is running.
-        request_task_id = current_task.request.id
+        request_task_id = _get_current_task().request.id
         if task_id != request_task_id:
             fmt = 'Requested task "{task_id}" did not match actual task "{actual_id}"'
             message = fmt.format(task_id=task_id, course_id=course_id, state_key=module_state_key, actual_id=request_task_id)
@@ -227,7 +232,7 @@ def update_problem_module_state(entry_id, update_fcn, action_name, filter_fcn,
             raise UpdateProblemModuleStateError(message)
         # now do the work:
-        with dog_stats_api.timer('courseware.tasks.module.{0}.overall_time'.format(action_name)):
+        with dog_stats_api.timer('instructor_tasks.module.{0}.overall_time'.format(action_name)):
            task_progress = _perform_module_state_update(course_id, module_state_key, student_ident, update_fcn,
                                                         action_name, filter_fcn, xmodule_instance_args)
     except Exception:
@@ -351,7 +356,7 @@ def reset_attempts_module_state(_module_descriptor, student_module, xmodule_inst
     Always returns true, indicating success, if it doesn't raise an exception due to database error.
     """
-    problem_state = json.loads(student_module.state)
+    problem_state = json.loads(student_module.state) if student_module.state else {}
     if 'attempts' in problem_state:
         old_number_of_attempts = problem_state["attempts"]
         if old_number_of_attempts > 0:
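The guard added here matters because `StudentModule.state` can be empty or `None` when a student has no saved state; a small standalone illustration of the same pattern:

```python
import json

def load_problem_state(raw_state):
    """Parse a StudentModule.state blob, treating None or '' as empty state."""
    # json.loads(None) raises TypeError and json.loads('') raises ValueError,
    # so fall back to an empty dict exactly as the diff above does.
    return json.loads(raw_state) if raw_state else {}

print(load_problem_state(None))                # {}
print(load_problem_state('{"attempts": 3}'))   # 'attempts' key is present
```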
@@ -17,7 +17,7 @@ from xmodule.modulestore.django import modulestore
 from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
 from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
-from student.tests.factories import CourseEnrollmentFactory, UserFactory, AdminFactory
+from student.tests.factories import CourseEnrollmentFactory, UserFactory
 from courseware.model_data import StudentModule
 from courseware.tests.tests import LoginEnrollmentTestCase, TEST_DATA_MONGO_MODULESTORE
@@ -36,8 +36,8 @@ TEST_SECTION_NAME = "Problem"
 @override_settings(MODULESTORE=TEST_DATA_MONGO_MODULESTORE)
 class InstructorTaskTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
     """
-    Test that all students' answers to a problem can be rescored after the
-    definition of the problem has been redefined.
+    Base test class for InstructorTask-related tests that require
+    the setup of a course and problem.
     """
     course = None
     current_user = None
@@ -67,16 +67,14 @@ class InstructorTaskTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
     def login_username(self, username):
         """Login the user, given the `username`."""
-        self.login(InstructorTaskTestCase.get_user_email(username), "test")
-        self.current_user = username
+        if self.current_user != username:
+            self.login(InstructorTaskTestCase.get_user_email(username), "test")
+            self.current_user = username
 
     def _create_user(self, username, is_staff=False):
         """Creates a user and enrolls them in the test course."""
         email = InstructorTaskTestCase.get_user_email(username)
-        if (is_staff):
-            AdminFactory.create(username=username, email=email)
-        else:
-            UserFactory.create(username=username, email=email)
+        UserFactory.create(username=username, email=email, is_staff=is_staff)
         thisuser = User.objects.get(username=username)
         CourseEnrollmentFactory.create(user=thisuser, course_id=self.course.id)
         return thisuser
@@ -140,3 +138,12 @@ class InstructorTaskTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase):
         response = instructor_task_status(mock_request)
         status = json.loads(response.content)
         return status
+
+    def create_task_request(self, requester_username):
+        """Generate request that can be used for submitting tasks"""
+        request = Mock()
+        request.user = User.objects.get(username=requester_username)
+        request.get_host = Mock(return_value="testhost")
+        request.META = {'REMOTE_ADDR': '0:0:0:0', 'SERVER_NAME': 'testhost'}
+        request.is_secure = Mock(return_value=False)
+        return request
+
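A hedged usage sketch: the mocked request stands in for a real `HttpRequest` when a test submits an instructor task. Only `create_task_request` itself comes from this commit; the commented submission call below is a hypothetical placeholder, not an API shown in this diff.

```python
# Inside a test method on InstructorTaskTestCase (illustrative only):
request = self.create_task_request('instructor')
self.assertEqual(request.user.username, 'instructor')
self.assertEqual(request.get_host(), 'testhost')
self.assertFalse(request.is_secure())
# hypothetical submission call -- the real function name is not shown in this diff:
# instructor_task = submit_task_for_problem(request, self.course.id, problem_url)
```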
@@ -7,7 +7,7 @@ paths actually work.
 """
 import logging
 import json
-from mock import Mock, patch
+from mock import patch
 import textwrap
 
 from celery.states import SUCCESS, FAILURE
@@ -33,6 +33,9 @@ log = logging.getLogger(__name__)
 
 class TestIntegrationTask(InstructorTaskTestCase):
+    """
+    Base class to provide general methods used for "integration" testing of particular tasks.
+    """
     def submit_student_answer(self, username, problem_url_name, responses):
         """
@@ -48,8 +51,7 @@ class TestIntegrationTask(InstructorTaskTestCase):
         # make sure that the requested user is logged in, so that the ajax call works
         # on the right problem:
-        if self.current_user != username:
-            self.login_username(username)
+        self.login_username(username)
 
         # make ajax call:
         modx_url = reverse('modx_dispatch',
                            kwargs={'course_id': self.course.id,
@@ -62,18 +64,13 @@ class TestIntegrationTask(InstructorTaskTestCase):
                             })
         return resp
 
-    def create_task_request(self, requester_username):
-        """Generate request that can be used for submitting tasks"""
-        request = Mock()
-        request.user = User.objects.get(username=requester_username)
-        request.get_host = Mock(return_value="testhost")
-        request.META = {'REMOTE_ADDR': '0:0:0:0', 'SERVER_NAME': 'testhost'}
-        request.is_secure = Mock(return_value=False)
-        return request
-
 
 class TestRescoringTask(TestIntegrationTask):
-    """Test rescoring problems in a background task."""
+    """
+    Integration-style tests for rescoring problems in a background task.
+    Exercises real problems with a minimum of patching.
+    """
 
     def setUp(self):
         self.initialize_course()
@@ -90,8 +87,7 @@ class TestRescoringTask(TestIntegrationTask):
         """
         # make sure that the requested user is logged in, so that the ajax call works
         # on the right problem:
-        if self.current_user != username:
-            self.login_username(username)
+        self.login_username(username)
 
         # make ajax call:
         modx_url = reverse('modx_dispatch',
                            kwargs={'course_id': self.course.id,
@@ -109,11 +105,11 @@ class TestRescoringTask(TestIntegrationTask):
         Values checked include the number of attempts, the score, and the max score for a problem.
         """
         module = self.get_student_module(username, descriptor)
-        self.assertEqual(module.grade, expected_score, "Scores were not equal")
-        self.assertEqual(module.max_grade, expected_max_score, "Max scores were not equal")
+        self.assertEqual(module.grade, expected_score)
+        self.assertEqual(module.max_grade, expected_max_score)
         state = json.loads(module.state)
         attempts = state['attempts']
-        self.assertEqual(attempts, expected_attempts, "Attempts were not equal")
+        self.assertEqual(attempts, expected_attempts)
         if attempts > 0:
             self.assertTrue('correct_map' in state)
             self.assertTrue('student_answers' in state)
@@ -342,7 +338,11 @@ class TestRescoringTask(TestIntegrationTask):
 
 class TestResetAttemptsTask(TestIntegrationTask):
-    """Test resetting problem attempts in a background task."""
+    """
+    Integration-style tests for resetting problem attempts in a background task.
+    Exercises real problems with a minimum of patching.
+    """
     userlist = ['u1', 'u2', 'u3', 'u4']
 
     def setUp(self):
@@ -402,7 +402,7 @@ class TestResetAttemptsTask(TestIntegrationTask):
         self.assertEqual(instructor_task.task_type, 'reset_problem_attempts')
         task_input = json.loads(instructor_task.task_input)
         self.assertFalse('student' in task_input)
-        self.assertEqual(task_input['problem_url'], TestRescoringTask.problem_location(problem_url_name))
+        self.assertEqual(task_input['problem_url'], InstructorTaskTestCase.problem_location(problem_url_name))
         status = json.loads(instructor_task.task_output)
         self.assertEqual(status['exception'], 'ZeroDivisionError')
         self.assertEqual(status['message'], expected_message)
@@ -426,7 +426,11 @@ class TestResetAttemptsTask(TestIntegrationTask):
 
 class TestDeleteProblemTask(TestIntegrationTask):
-    """Test deleting problem state in a background task."""
+    """
+    Integration-style tests for deleting problem state in a background task.
+    Exercises real problems with a minimum of patching.
+    """
     userlist = ['u1', 'u2', 'u3', 'u4']
 
     def setUp(self):
@@ -479,7 +483,7 @@ class TestDeleteProblemTask(TestIntegrationTask):
         self.assertEqual(instructor_task.task_type, 'delete_problem_state')
         task_input = json.loads(instructor_task.task_input)
         self.assertFalse('student' in task_input)
-        self.assertEqual(task_input['problem_url'], TestRescoringTask.problem_location(problem_url_name))
+        self.assertEqual(task_input['problem_url'], InstructorTaskTestCase.problem_location(problem_url_name))
         status = json.loads(instructor_task.task_output)
         self.assertEqual(status['exception'], 'ZeroDivisionError')
         self.assertEqual(status['message'], expected_message)
@@ -32,7 +32,8 @@ def instructor_task_status(request):
     Task_id values that are unrecognized are skipped.
 
     The dict with status information for a task contains the following keys:
-      'message': status message reporting on progress, or providing exception message if failed.
+      'message': on complete tasks, status message reporting on final progress,
+          or providing exception message if failed.
       'succeeded': on complete tasks, indicates if the task outcome was successful:
           did it achieve what it set out to do.
          This is in contrast with a successful task_state, which indicates that the
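A hedged illustration of what one entry in the returned status information might look like: only 'message' and 'succeeded' are documented in this hunk, so the other keys and all values below are assumptions for illustration.

```python
# Illustrative only; key set beyond 'message'/'succeeded' is assumed.
example_status = {
    'task_state': 'SUCCESS',
    'succeeded': True,
    'message': "Problem reset for 8 of 10 students",
}
```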
@@ -96,25 +97,44 @@ def get_task_completion_info(instructor_task):
     """
     succeeded = False
 
+    # if still in progress, then we assume there is no completion info to provide:
+    if instructor_task.task_state not in READY_STATES:
+        return (succeeded, "No status information available")
+
+    # we're more surprised if there is no output for a completed task, but just warn:
     if instructor_task.task_output is None:
         log.warning("No task_output information found for instructor_task {0}".format(instructor_task.task_id))
         return (succeeded, "No status information available")
 
-    task_output = json.loads(instructor_task.task_output)
-    if instructor_task.task_state in [FAILURE, REVOKED]:
-        return(succeeded, task_output['message'])
-
-    action_name = task_output['action_name']
-    num_attempted = task_output['attempted']
-    num_updated = task_output['updated']
-    num_total = task_output['total']
-
-    if instructor_task.task_input is None:
-        log.warning("No task_input information found for instructor_task {0}".format(instructor_task.task_id))
-        return (succeeded, "No status information available")
-    task_input = json.loads(instructor_task.task_input)
-    problem_url = task_input.get('problem_url')
-    student = task_input.get('student')
+    try:
+        task_output = json.loads(instructor_task.task_output)
+    except ValueError:
+        fmt = "No parsable task_output information found for instructor_task {0}: {1}"
+        log.warning(fmt.format(instructor_task.task_id, instructor_task.task_output))
+        return (succeeded, "No parsable status information available")
+
+    if instructor_task.task_state in [FAILURE, REVOKED]:
+        return (succeeded, task_output.get('message', 'No message provided'))
+
+    if any([key not in task_output for key in ['action_name', 'attempted', 'updated', 'total']]):
+        fmt = "Invalid task_output information found for instructor_task {0}: {1}"
+        log.warning(fmt.format(instructor_task.task_id, instructor_task.task_output))
+        return (succeeded, "No progress status information available")
+
+    action_name = task_output.get('action_name')
+    num_attempted = task_output.get('attempted')
+    num_updated = task_output.get('updated')
+    num_total = task_output.get('total')
+
+    student = None
+    try:
+        task_input = json.loads(instructor_task.task_input)
+    except ValueError:
+        fmt = "No parsable task_input information found for instructor_task {0}: {1}"
+        log.warning(fmt.format(instructor_task.task_id, instructor_task.task_input))
+    else:
+        student = task_input.get('student')
+
     if student is not None:
         if num_attempted == 0:
             msg_format = "Unable to find submission to be {action} for student '{student}'"
@@ -133,10 +153,11 @@ def get_task_completion_info(instructor_task):
         else:  # num_updated < num_attempted
             msg_format = "Problem {action} for {updated} of {attempted} students"
 
-    if student is not None and num_attempted != num_total:
+    if student is None and num_attempted != num_total:
         msg_format += " (out of {total})"
 
     # Update status in task result object itself:
-    message = msg_format.format(action=action_name, updated=num_updated, attempted=num_attempted, total=num_total,
-                                student=student, problem=problem_url)
+    message = msg_format.format(action=action_name, updated=num_updated,
+                                attempted=num_attempted, total=num_total,
+                                student=student)
     return (succeeded, message)
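To make the formatting branches concrete, plugging sample values into the format strings above yields messages like these (the numbers are illustrative only):

```python
# Example of the "(out of {total})" suffix added when no single student was
# targeted and not every enrolled student was attempted.
msg_format = "Problem {action} for {updated} of {attempted} students"
print(msg_format.format(action="reset", updated=8, attempted=10))
# -> Problem reset for 8 of 10 students

msg_format += " (out of {total})"
print(msg_format.format(action="reset", updated=8, attempted=10, total=25))
# -> Problem reset for 8 of 10 students (out of 25)
```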
@@ -188,4 +188,3 @@ PASSWORD_HASHERS = (
     'django.contrib.auth.hashers.MD5PasswordHasher',
     # 'django.contrib.auth.hashers.CryptPasswordHasher',
 )