Commit 8872fbcc by Brian Wilson

Add factory for CourseTaskLog.

Add unit tests for regrading at courseware level (task and task_queue).
parent 8660c9a7
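
For context, a minimal sketch of how the new CourseTaskLogFactory can be exercised in a test (mirroring its use in the task_queue tests below; the username and location string are illustrative placeholders):

from uuid import uuid4
from courseware.tests.factories import CourseTaskLogFactory, UserFactory

# build a CourseTaskLog entry in the default QUEUED state for a regrade task
entry = CourseTaskLogFactory.create(requester=UserFactory.create(username='staff'),
                                    task_id=str(uuid4()),
                                    task_args='i4x://edx/1.23x/problem/test_urlname')
assert entry.task_state == 'QUEUED'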
@@ -16,6 +16,10 @@ from xmodule.modulestore.django import modulestore
log = logging.getLogger(__name__)
class AlreadyRunningError(Exception):
pass
def get_running_course_tasks(course_id):
"""
Returns a query of CourseTaskLog objects of running tasks for a given course.
@@ -85,7 +89,7 @@ def _reserve_task(course_id, task_name, task_args, requester, student=None):
"""
if _task_is_running(course_id, task_name, task_args, student):
raise Exception("requested task is already running")
raise AlreadyRunningError("requested task is already running")
# Create log entry now, so that future requests won't
tasklog_args = {'course_id': course_id,
@@ -157,7 +161,7 @@ def _update_course_task_log(course_task_log_entry, task_result):
total=returned_result['total'],
action_name=returned_result['action_name'])
output['message'] = message
log.info("task progress: {0}".format(message))
log.info("task progress: %s", message)
else:
log.info("still making progress... ")
output['task_progress'] = returned_result
@@ -165,7 +169,7 @@ def _update_course_task_log(course_task_log_entry, task_result):
elif result_state == 'SUCCESS':
output['task_progress'] = returned_result
course_task_log_entry.task_progress = json.dumps(returned_result)
log.info("task succeeded: {0}".format(returned_result))
log.info("task succeeded: %s", returned_result)
entry_needs_saving = True
elif result_state == 'FAILURE':
@@ -175,13 +179,23 @@ def _update_course_task_log(course_task_log_entry, task_result):
entry_needs_saving = True
task_progress = {'exception': type(exception).__name__, 'message': str(exception.message)}
output['message'] = exception.message
log.warning("background task (%s) failed: %s %s".format(task_id, returned_result, traceback))
log.warning("background task (%s) failed: %s %s", task_id, returned_result, traceback)
if result_traceback is not None:
output['task_traceback'] = result_traceback
task_progress['traceback'] = result_traceback
course_task_log_entry.task_progress = json.dumps(task_progress)
output['task_progress'] = task_progress
elif result_state == 'REVOKED':
# on revocation, the result object's 'result' attribute doesn't contain anything
entry_needs_saving = True
message = 'Task revoked before running'
output['message'] = message
log.warning("background task (%s) revoked.", task_id)
task_progress = {'message': message}
course_task_log_entry.task_progress = json.dumps(task_progress)
output['task_progress'] = task_progress
# always update the entry if the state has changed:
if result_state != course_task_log_entry.task_state:
course_task_log_entry.task_state = result_state
@@ -308,7 +322,7 @@ def _check_arguments_for_regrading(course_id, problem_url):
"""
descriptor = modulestore().get_instance(course_id, problem_url)
supports_regrade = False
if hasattr(descriptor,'module_class'):
if hasattr(descriptor, 'module_class'):
module_class = descriptor.module_class
if hasattr(module_class, 'regrade_problem'):
supports_regrade = True
@@ -10,6 +10,8 @@ from student.tests.factories import CourseEnrollmentAllowedFactory as StudentCou
from student.tests.factories import RegistrationFactory as StudentRegistrationFactory
from courseware.models import StudentModule, XModuleContentField, XModuleSettingsField
from courseware.models import XModuleStudentInfoField, XModuleStudentPrefsField
from courseware.models import CourseTaskLog
from xmodule.modulestore import Location
from pytz import UTC
@@ -84,3 +86,16 @@ class StudentInfoFactory(DjangoModelFactory):
field_name = 'existing_field'
value = json.dumps('old_value')
student = SubFactory(UserFactory)
class CourseTaskLogFactory(DjangoModelFactory):
FACTORY_FOR = CourseTaskLog
task_name = 'regrade_problem'
course_id = "MITx/999/Robot_Super_Course"
student = SubFactory(UserFactory)
task_args = None
task_id = None
task_state = "QUEUED"
task_progress = None
requester = SubFactory(UserFactory)
"""
Test for LMS courseware background task queue management
"""
import logging
import json
from mock import Mock, patch
from uuid import uuid4
from django.utils.datastructures import MultiValueDict
from django.test.testcases import TestCase
from xmodule.modulestore.exceptions import ItemNotFoundError
from courseware.tests.factories import UserFactory, CourseTaskLogFactory
from courseware.task_queue import (get_running_course_tasks,
course_task_log_status,
AlreadyRunningError,
submit_regrade_problem_for_all_students,
submit_regrade_problem_for_student,
submit_reset_problem_attempts_for_all_students,
submit_delete_problem_state_for_all_students)
log = logging.getLogger("mitx." + __name__)
TEST_FAILURE_MESSAGE = 'task failed horribly'
class TaskQueueTestCase(TestCase):
"""
Check that background tasks are properly queued and report status.
"""
student = None
instructor = None
problem_url = None
def setUp(self):
self.student = UserFactory.create(username="student", email="student@edx.org")
self.instructor = UserFactory.create(username="instructor", email="student@edx.org")
self.problem_url = TaskQueueTestCase.problem_location("test_urlname")
@staticmethod
def problem_location(problem_url_name):
"""
Create an internal location for a test problem.
"""
if "i4x:" in problem_url_name:
return problem_url_name
else:
return "i4x://{org}/{number}/problem/{problem_url_name}".format(org='edx',
number='1.23x',
problem_url_name=problem_url_name)
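# e.g. problem_location('test_urlname') returns 'i4x://edx/1.23x/problem/test_urlname'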
def _create_entry(self, task_state="QUEUED", task_progress=None, student=None):
task_id = str(uuid4())
progress_json = json.dumps(task_progress)
course_task_log = CourseTaskLogFactory.create(student=student,
requester=self.instructor,
task_args=self.problem_url,
task_id=task_id,
task_state=task_state,
task_progress=progress_json)
return course_task_log
def _create_failure_entry(self):
# view task entry for task failure
progress = {'message': TEST_FAILURE_MESSAGE,
'exception': 'RandomCauseError',
}
return self._create_entry(task_state="FAILURE", task_progress=progress)
def _create_success_entry(self, student=None):
return self._create_progress_entry(student=student, task_state="SUCCESS")
def _create_progress_entry(self, student=None, task_state="PROGRESS"):
# view task entry for task in progress
progress = {'attempted': 3,
'updated': 2,
'total': 10,
'action_name': 'regraded',
'message': 'some random string that should summarize the other info',
}
return self._create_entry(task_state=task_state, task_progress=progress, student=student)
def test_fetch_running_tasks(self):
# when fetching running tasks, we get all running tasks, and only running tasks
failure_task_ids = [(self._create_failure_entry()).task_id for _ in range(1, 4)]
entry = self._create_failure_entry()
failure_task_ids.append(entry.task_id)
course_id = entry.course_id # get course_id used by the factory
success_task_ids = [(self._create_success_entry()).task_id for _ in range(1, 5)]
progress_task_ids = [(self._create_progress_entry()).task_id for _ in range(1, 5)]
task_ids = [course_task_log.task_id for course_task_log in get_running_course_tasks(course_id)]
self.assertEquals(len(task_ids), len(progress_task_ids))
for task_id in task_ids:
self.assertTrue(task_id in progress_task_ids)
self.assertFalse(task_id in success_task_ids)
self.assertFalse(task_id in failure_task_ids)
def test_course_task_log_status_by_post(self):
# fetch status for existing tasks: fetching by the task_id argument is
# tested elsewhere, so test fetching via POST arg here
course_task_log = self._create_failure_entry()
task_id = course_task_log.task_id
request = Mock()
request.POST = {}
request.POST['task_id'] = task_id
response = course_task_log_status(request)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
def test_course_task_log_status_list_by_post(self):
# Fetch status for existing tasks: fetching by the task_id argument is
# tested elsewhere, so test here by POST arg list, as if called from ajax.
# Note that the ajax client marshals list data by appending "[]" to the
# key name.
task_ids = [(self._create_failure_entry()).task_id for _ in range(1, 5)]
request = Mock()
request.POST = MultiValueDict({'task_ids[]': task_ids})
response = course_task_log_status(request)
output = json.loads(response.content)
for task_id in task_ids:
self.assertEquals(output[task_id]['task_id'], task_id)
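# On the view side the full list can be read back with
# request.POST.getlist('task_ids[]'); plain indexing on a MultiValueDict
# would return only the last task_id in the list.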
def test_initial_failure(self):
course_task_log = self._create_failure_entry()
task_id = course_task_log.task_id
response = course_task_log_status(Mock(), task_id=task_id)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], "FAILURE")
self.assertFalse(output['in_progress'])
self.assertEquals(output['message'], TEST_FAILURE_MESSAGE)
def test_initial_success(self):
course_task_log = self._create_success_entry()
task_id = course_task_log.task_id
response = course_task_log_status(Mock(), task_id=task_id)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], "SUCCESS")
self.assertFalse(output['in_progress'])
def test_update_progress_to_progress(self):
# view task entry for task in progress
course_task_log = self._create_progress_entry()
task_id = course_task_log.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = "PROGRESS"
mock_result.result = {'attempted': 5,
'updated': 4,
'total': 10,
'action_name': 'regraded'}
with patch('celery.result.AsyncResult.__new__') as mock_result_ctor:
mock_result_ctor.return_value = mock_result
response = course_task_log_status(Mock(), task_id=task_id)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], "PROGRESS")
self.assertTrue(output['in_progress'])
# self.assertEquals(output['message'], )
def test_update_progress_to_failure(self):
# view task entry for task in progress that later fails
course_task_log = self._create_progress_entry()
task_id = course_task_log.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = "FAILURE"
mock_result.result = NotImplementedError("This task later failed.")
mock_result.traceback = "random traceback"
with patch('celery.result.AsyncResult.__new__') as mock_result_ctor:
mock_result_ctor.return_value = mock_result
response = course_task_log_status(Mock(), task_id=task_id)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], "FAILURE")
self.assertFalse(output['in_progress'])
self.assertEquals(output['message'], "This task later failed.")
def test_update_progress_to_revoked(self):
# view task entry for task in progress that is later revoked
course_task_log = self._create_progress_entry()
task_id = course_task_log.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = "REVOKED"
with patch('celery.result.AsyncResult.__new__') as mock_result_ctor:
mock_result_ctor.return_value = mock_result
response = course_task_log_status(Mock(), task_id=task_id)
output = json.loads(response.content)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], "REVOKED")
self.assertFalse(output['in_progress'])
self.assertEquals(output['message'], "Task revoked before running")
def _get_output_for_task_success(self, attempted, updated, total, student=None):
# view task entry for task in progress
course_task_log = self._create_progress_entry(student)
task_id = course_task_log.task_id
mock_result = Mock()
mock_result.task_id = task_id
mock_result.state = "SUCCESS"
mock_result.result = {'attempted': attempted,
'updated': updated,
'total': total,
'action_name': 'regraded'}
with patch('celery.result.AsyncResult.__new__') as mock_result_ctor:
mock_result_ctor.return_value = mock_result
response = course_task_log_status(Mock(), task_id=task_id)
output = json.loads(response.content)
return task_id, output
def test_update_progress_to_success(self):
task_id, output = self._get_output_for_task_success(10, 8, 10)
self.assertEquals(output['task_id'], task_id)
self.assertEquals(output['task_state'], "SUCCESS")
self.assertFalse(output['in_progress'])
def test_success_messages(self):
_, output = self._get_output_for_task_success(0, 0, 10)
self.assertTrue("Unable to find any students with submissions to be regraded" in output['message'])
self.assertFalse(output['succeeded'])
_, output = self._get_output_for_task_success(10, 0, 10)
self.assertTrue("Problem failed to be regraded for any of 10 students " in output['message'])
self.assertFalse(output['succeeded'])
_, output = self._get_output_for_task_success(10, 8, 10)
self.assertTrue("Problem regraded for 8 of 10 students" in output['message'])
self.assertFalse(output['succeeded'])
_, output = self._get_output_for_task_success(10, 10, 10)
self.assertTrue("Problem successfully regraded for 10 students" in output['message'])
self.assertTrue(output['succeeded'])
_, output = self._get_output_for_task_success(0, 0, 1, student=self.student)
self.assertTrue("Unable to find submission to be regraded for student" in output['message'])
self.assertFalse(output['succeeded'])
_, output = self._get_output_for_task_success(1, 0, 1, student=self.student)
self.assertTrue("Problem failed to be regraded for student" in output['message'])
self.assertFalse(output['succeeded'])
_, output = self._get_output_for_task_success(1, 1, 1, student=self.student)
self.assertTrue("Problem successfully regraded for student" in output['message'])
self.assertTrue(output['succeeded'])
def test_submit_nonexistent_modules(self):
# confirm that a regrade of a non-existent module returns an exception
# (Note that it is easier to test a non-regradable module in test_tasks,
# where we are creating real modules.)
problem_url = self.problem_url
course_id = "something else"
request = None
with self.assertRaises(ItemNotFoundError):
submit_regrade_problem_for_student(request, course_id, problem_url, self.student)
with self.assertRaises(ItemNotFoundError):
submit_regrade_problem_for_all_students(request, course_id, problem_url)
with self.assertRaises(ItemNotFoundError):
submit_reset_problem_attempts_for_all_students(request, course_id, problem_url)
with self.assertRaises(ItemNotFoundError):
submit_delete_problem_state_for_all_students(request, course_id, problem_url)
def test_submit_when_running(self):
# get exception when trying to submit a task that is already running
course_task_log = self._create_progress_entry()
problem_url = course_task_log.task_args
course_id = course_task_log.course_id
# requester doesn't have to be the same when determining if a task is already running
request = Mock()
request.user = self.student
with self.assertRaises(AlreadyRunningError):
# just skip making the argument check, so we don't have to fake it deeper down
with patch('courseware.task_queue._check_arguments_for_regrading'):
submit_regrade_problem_for_all_students(request, course_id, problem_url)
'''
Test for LMS courseware background tasks
'''
import logging
import json
from mock import Mock, patch
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from capa.tests.response_xml_factory import OptionResponseXMLFactory, CodeResponseXMLFactory
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.exceptions import ItemNotFoundError
from student.tests.factories import CourseEnrollmentFactory, UserFactory, AdminFactory
from courseware.model_data import StudentModule
from courseware.task_queue import (submit_regrade_problem_for_all_students,
submit_regrade_problem_for_student,
course_task_log_status)
from courseware.tests.tests import LoginEnrollmentTestCase, TEST_DATA_MONGO_MODULESTORE
log = logging.getLogger("mitx." + __name__)
TEST_COURSE_ORG = 'edx'
TEST_COURSE_NAME = 'Test Course'
TEST_COURSE_NUMBER = '1.23x'
TEST_SECTION_NAME = "Problem"
@override_settings(MODULESTORE=TEST_DATA_MONGO_MODULESTORE)
class TestRegradingBase(LoginEnrollmentTestCase, ModuleStoreTestCase):
"""
Test that all students' answers to a problem can be regraded after the
definition of the problem has been redefined.
"""
course = None
current_user = None
def initialize_course(self):
"""Create a course in the store, with a chapter and section."""
self.module_store = modulestore()
# Create the course
self.course = CourseFactory.create(org=TEST_COURSE_ORG,
number=TEST_COURSE_NUMBER,
display_name=TEST_COURSE_NAME)
# Add a chapter to the course
chapter = ItemFactory.create(parent_location=self.course.location,
display_name=TEST_SECTION_NAME)
# add a sequence to the course to which the problems can be added
self.problem_section = ItemFactory.create(parent_location=chapter.location,
template='i4x://edx/templates/sequential/Empty',
display_name=TEST_SECTION_NAME)
@staticmethod
def get_user_email(username):
return '{0}@test.com'.format(username)
@staticmethod
def get_user_password(username):
return 'test'
def login_username(self, username):
self.login(TestRegradingBase.get_user_email(username), TestRegradingBase.get_user_password(username))
self.current_user = username
def _create_user(self, username, is_staff=False):
email = TestRegradingBase.get_user_email(username)
if is_staff:
AdminFactory.create(username=username, email=email)
else:
UserFactory.create(username=username, email=email)
thisuser = User.objects.get(username=username)
CourseEnrollmentFactory.create(user=thisuser, course_id=self.course.id)
return thisuser
def create_instructor(self, username):
return self._create_user(username, is_staff=True)
def create_student(self, username):
return self._create_user(username, is_staff=False)
@staticmethod
def problem_location(problem_url_name):
"""
Create an internal location for a test problem.
"""
if "i4x:" in problem_url_name:
return problem_url_name
else:
return "i4x://{org}/{number}/problem/{problem_url_name}".format(org=TEST_COURSE_ORG,
number=TEST_COURSE_NUMBER,
problem_url_name=problem_url_name)
def define_option_problem(self, problem_url_name):
"""Create the problem definition so the answer is Option 1"""
factory = OptionResponseXMLFactory()
factory_args = {'question_text': 'The correct answer is Option 1',
'options': ['Option 1', 'Option 2'],
'correct_option': 'Option 1',
'num_responses': 2}
problem_xml = factory.build_xml(**factory_args)
ItemFactory.create(parent_location=self.problem_section.location,
template="i4x://edx/templates/problem/Blank_Common_Problem",
display_name=str(problem_url_name),
data=problem_xml)
def redefine_option_problem(self, problem_url_name):
"""Change the problem definition so the answer is Option 2"""
factory = OptionResponseXMLFactory()
factory_args = {'question_text': 'The correct answer is Option 2',
'options': ['Option 1', 'Option 2'],
'correct_option': 'Option 2',
'num_responses': 2}
problem_xml = factory.build_xml(**factory_args)
location = TestRegradingBase.problem_location(problem_url_name)
self.module_store.update_item(location, problem_xml)
def render_problem(self, username, problem_url_name):
"""
Use ajax interface to request html for a problem.
"""
# make sure that the requested user is logged in, so that the ajax call works
# on the right problem:
if self.current_user != username:
self.login_username(username)
# make ajax call:
modx_url = reverse('modx_dispatch',
kwargs={
'course_id': self.course.id,
'location': TestRegradingBase.problem_location(problem_url_name),
'dispatch': 'problem_get', })
resp = self.client.post(modx_url, {})
return resp
def submit_student_answer(self, username, problem_url_name, responses):
"""
Use ajax interface to submit a student answer.
Assumes the input list of responses has two values.
"""
def get_input_id(response_id):
return 'input_i4x-{0}-{1}-problem-{2}_{3}'.format(TEST_COURSE_ORG.lower(),
TEST_COURSE_NUMBER.replace('.', '_'),
problem_url_name, response_id)
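# e.g. for problem_url_name 'H1P1', get_input_id('2_1') yields
# 'input_i4x-edx-1_23x-problem-H1P1_2_1'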
# make sure that the requested user is logged in, so that the ajax call works
# on the right problem:
if self.current_user != username:
self.login_username(username)
# make ajax call:
modx_url = reverse('modx_dispatch',
kwargs={
'course_id': self.course.id,
'location': TestRegradingBase.problem_location(problem_url_name),
'dispatch': 'problem_check', })
resp = self.client.post(modx_url, {
get_input_id('2_1'): responses[0],
get_input_id('3_1'): responses[1],
})
return resp
def _create_task_request(self, requester_username):
"""Generate request that can be used for submitting tasks"""
request = Mock()
request.user = User.objects.get(username=requester_username)
request.get_host = Mock(return_value="testhost")
request.META = {'REMOTE_ADDR': '0:0:0:0', 'SERVER_NAME': 'testhost'}
request.is_secure = Mock(return_value=False)
return request
def regrade_all_student_answers(self, instructor, problem_url_name):
"""Submits the current problem for regrading"""
return submit_regrade_problem_for_all_students(self._create_task_request(instructor), self.course.id,
TestRegradingBase.problem_location(problem_url_name))
def regrade_one_student_answer(self, instructor, problem_url_name, student):
"""Submits the current problem for regrading for a particular student"""
return submit_regrade_problem_for_student(self._create_task_request(instructor), self.course.id,
TestRegradingBase.problem_location(problem_url_name),
student)
def show_correct_answer(self, problem_url_name):
modx_url = reverse('modx_dispatch',
kwargs={
'course_id': self.course.id,
'location': TestRegradingBase.problem_location(problem_url_name),
'dispatch': 'problem_show', })
return self.client.post(modx_url, {})
def get_student_module(self, username, descriptor):
return StudentModule.objects.get(course_id=self.course.id,
student=User.objects.get(username=username),
module_type=descriptor.location.category,
module_state_key=descriptor.location.url(),
)
def check_state(self, username, descriptor, expected_score, expected_max_score, expected_attempts):
module = self.get_student_module(username, descriptor)
self.assertEqual(module.grade, expected_score, "Scores were not equal")
self.assertEqual(module.max_grade, expected_max_score, "Max scores were not equal")
state = json.loads(module.state)
attempts = state['attempts']
self.assertEqual(attempts, expected_attempts, "Attempts were not equal")
if attempts > 0:
self.assertTrue('correct_map' in state)
self.assertTrue('student_answers' in state)
self.assertGreater(len(state['correct_map']), 0)
self.assertGreater(len(state['student_answers']), 0)
class TestRegrading(TestRegradingBase):
def setUp(self):
self.initialize_course()
self.create_instructor('instructor')
self.create_student('u1')
self.create_student('u2')
self.create_student('u3')
self.create_student('u4')
self.logout()
def testRegradingOptionProblem(self):
'''Run regrade scenario on option problem'''
# get descriptor:
problem_url_name = 'H1P1'
self.define_option_problem(problem_url_name)
location = TestRegrading.problem_location(problem_url_name)
descriptor = self.module_store.get_instance(self.course.id, location)
# first store answers for each of the separate users:
self.submit_student_answer('u1', problem_url_name, ['Option 1', 'Option 1'])
self.submit_student_answer('u2', problem_url_name, ['Option 1', 'Option 2'])
self.submit_student_answer('u3', problem_url_name, ['Option 2', 'Option 1'])
self.submit_student_answer('u4', problem_url_name, ['Option 2', 'Option 2'])
self.check_state('u1', descriptor, 2, 2, 1)
self.check_state('u2', descriptor, 1, 2, 1)
self.check_state('u3', descriptor, 1, 2, 1)
self.check_state('u4', descriptor, 0, 2, 1)
# update the data in the problem definition
self.redefine_option_problem(problem_url_name)
# confirm that simply rendering the problem again does not result in a change
# in the grade:
self.render_problem('u1', problem_url_name)
self.check_state('u1', descriptor, 2, 2, 1)
# regrade the problem for only one student -- only that student's grade should change:
self.regrade_one_student_answer('instructor', problem_url_name, User.objects.get(username='u1'))
self.check_state('u1', descriptor, 0, 2, 1)
self.check_state('u2', descriptor, 1, 2, 1)
self.check_state('u3', descriptor, 1, 2, 1)
self.check_state('u4', descriptor, 0, 2, 1)
# regrade the problem for all students
self.regrade_all_student_answers('instructor', problem_url_name)
self.check_state('u1', descriptor, 0, 2, 1)
self.check_state('u2', descriptor, 1, 2, 1)
self.check_state('u3', descriptor, 1, 2, 1)
self.check_state('u4', descriptor, 2, 2, 1)
def define_code_response_problem(self, problem_url_name):
factory = CodeResponseXMLFactory()
grader_payload = json.dumps({"grader": "ps04/grade_square.py"})
problem_xml = factory.build_xml(initial_display="def square(x):",
answer_display="answer",
grader_payload=grader_payload,
num_responses=2)
ItemFactory.create(parent_location=self.problem_section.location,
template="i4x://edx/templates/problem/Blank_Common_Problem",
display_name=str(problem_url_name),
data=problem_xml)
def testRegradingFailure(self):
"""Simulate a failure in regrading a problem"""
problem_url_name = 'H1P1'
self.define_option_problem(problem_url_name)
self.submit_student_answer('u1', problem_url_name, ['Option 1', 'Option 1'])
expected_message = "bad things happened"
with patch('capa.capa_problem.LoncapaProblem.regrade_existing_answers') as mock_regrade:
mock_regrade.side_effect = ZeroDivisionError(expected_message)
course_task_log = self.regrade_all_student_answers('instructor', problem_url_name)
# check task_log returned
self.assertEqual(course_task_log.task_state, 'FAILURE')
self.assertEqual(course_task_log.student, None)
self.assertEqual(course_task_log.requester.username, 'instructor')
self.assertEqual(course_task_log.task_name, 'regrade_problem')
self.assertEqual(course_task_log.task_args, TestRegrading.problem_location(problem_url_name))
status = json.loads(course_task_log.task_progress)
self.assertEqual(status['exception'], 'ZeroDivisionError')
self.assertEqual(status['message'], expected_message)
# check status returned:
mock_request = Mock()
response = course_task_log_status(mock_request, task_id=course_task_log.task_id)
status = json.loads(response.content)
self.assertEqual(status['message'], expected_message)
def testRegradingNonProblem(self):
"""confirm that a non-problem will not submit"""
problem_url_name = self.problem_section.location.url()
with self.assertRaises(NotImplementedError):
self.regrade_all_student_answers('instructor', problem_url_name)
def testRegradingNonexistentProblem(self):
"""confirm that a non-existent problem will not submit"""
problem_url_name = 'NonexistentProblem'
with self.assertRaises(ItemNotFoundError):
self.regrade_all_student_answers('instructor', problem_url_name)
def testRegradingCodeProblem(self):
'''Run regrade scenario on problem with code submission'''
problem_url_name = 'H1P2'
self.define_code_response_problem(problem_url_name)
# we fully create the CodeResponse problem, but just pretend that we're queuing it:
with patch('capa.xqueue_interface.XQueueInterface.send_to_queue') as mock_send_to_queue:
mock_send_to_queue.return_value = (0, "Successfully queued")
self.submit_student_answer('u1', problem_url_name, ["answer1", "answer2"])
course_task_log = self.regrade_all_student_answers('instructor', problem_url_name)
self.assertEqual(course_task_log.task_state, 'FAILURE')
status = json.loads(course_task_log.task_progress)
self.assertEqual(status['exception'], 'NotImplementedError')
self.assertEqual(status['message'], "Problem's definition does not support regrading")
mock_request = Mock()
response = course_task_log_status(mock_request, task_id=course_task_log.task_id)
status = json.loads(response.content)
self.assertEqual(status['message'], "Problem's definition does not support regrading")