Commit 8f31acbe by Brian Wilson

Add support for counting and reporting skips in background tasks.

parent d48e90ee
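In short, this commit renames the 'updated' count in a task's progress dictionary to 'succeeded' and adds an optional 'skipped' count that status messages now report. A minimal sketch of the resulting dictionary shape, with invented counts and an assumed action name:

task_progress = {
    'action_name': 'emailed',  # past-tense verb for status messages (assumed value)
    'attempted': 10,           # number of attempts made
    'succeeded': 7,            # previously reported under the key 'updated'
    'skipped': 2,              # new: items skipped, surfaced as "(skipping 2)"
    'total': 12,               # number of possible subtasks to attempt
}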
......@@ -12,8 +12,9 @@ file and check it in at the same time as your model changes. To do that,
"""
import logging
from django.db import models
from django.db import models, transaction
from django.contrib.auth.models import User
from html_to_text import html_to_text
log = logging.getLogger(__name__)
......@@ -33,9 +34,11 @@ class Email(models.Model):
class Meta: # pylint: disable=C0111
abstract = True
SEND_TO_MYSELF = 'myself'
SEND_TO_STAFF = 'staff'
SEND_TO_ALL = 'all'
TO_OPTIONS = [SEND_TO_MYSELF, SEND_TO_STAFF, SEND_TO_ALL]
class CourseEmail(Email, models.Model):
......@@ -51,17 +54,66 @@ class CourseEmail(Email, models.Model):
# * All: This sends an email to anyone enrolled in the course, with any role
# (student, staff, or instructor)
#
TO_OPTIONS = (
TO_OPTION_CHOICES = (
(SEND_TO_MYSELF, 'Myself'),
(SEND_TO_STAFF, 'Staff and instructors'),
(SEND_TO_ALL, 'All')
)
course_id = models.CharField(max_length=255, db_index=True)
to_option = models.CharField(max_length=64, choices=TO_OPTIONS, default=SEND_TO_MYSELF)
to_option = models.CharField(max_length=64, choices=TO_OPTION_CHOICES, default=SEND_TO_MYSELF)
def __unicode__(self):
return self.subject
@classmethod
def create(cls, course_id, sender, to_option, subject, html_message, text_message=None):
"""
Create an instance of CourseEmail.
The CourseEmail.save_now method makes sure the CourseEmail entry is committed.
When called from any view that is wrapped by TransactionMiddleware,
and thus running in a "commit-on-success" transaction, the autocommit
buried within save_now will cause any pending transaction to be
committed by a successful save. Any future database operations will
then take place in a separate transaction.
"""
# automatically generate the stripped version of the text from the HTML markup:
if text_message is None:
text_message = html_to_text(html_message)
# perform some validation here:
if to_option not in TO_OPTIONS:
fmt = 'Course email being sent to unrecognized to_option: "{to_option}" for "{course}", subject "{subject}"'
msg = fmt.format(to_option=to_option, course=course_id, subject=subject)
raise ValueError(msg)
# create the task, then save it immediately:
course_email = cls(
course_id=course_id,
sender=sender,
to_option=to_option,
subject=subject,
html_message=html_message,
text_message=text_message,
)
course_email.save_now()
return course_email
@transaction.autocommit
def save_now(self):
"""
Writes the CourseEmail entry immediately, ensuring the transaction is committed.
Autocommit annotation makes sure the database entry is committed.
When called from any view that is wrapped by TransactionMiddleware,
and thus running in a "commit-on-success" transaction, this autocommit
will cause any pending transaction to be committed by a successful
save. Any future database operations will then take place in a
separate transaction.
"""
self.save()
class Optout(models.Model):
"""
......
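The classmethod and save_now pairing above replaces direct CourseEmail construction in views; a hedged sketch of a caller, with placeholder values that are not taken from the commit:

# Illustrative only: course id, subject, and message are placeholders,
# and sender is assumed to be the requesting user in a view.
email = CourseEmail.create(
    course_id='org/course/run',
    sender=request.user,
    to_option='staff',  # i.e. SEND_TO_STAFF, one of TO_OPTIONS
    subject='Reminder',
    html_message='<p>Class starts <b>Monday</b>.</p>',
    # text_message omitted, so html_to_text() generates the plain-text body
)
# create() has already called save_now(), so the row is committed and can be
# read by a worker on a separate database connection using email.id.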
......@@ -12,6 +12,8 @@ from courseware.tests.tests import TEST_DATA_MONGO_MODULESTORE
from student.tests.factories import UserFactory, GroupFactory, CourseEnrollmentFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from instructor_task.models import InstructorTask
from instructor_task.tests.factories import InstructorTaskFactory
from bulk_email.tasks import send_course_email
from bulk_email.models import CourseEmail, Optout
......@@ -288,10 +290,18 @@ class TestEmailSendExceptions(ModuleStoreTestCase):
"""
Test that exceptions are handled correctly.
"""
def test_no_course_email_obj(self):
# Make sure send_course_email handles CourseEmail.DoesNotExist exception.
def test_no_instructor_task(self):
with self.assertRaises(InstructorTask.DoesNotExist):
send_course_email(100, 101, [], {}, False)
def test_no_course_title(self):
entry = InstructorTaskFactory.create(task_key='', task_id='dummy')
with self.assertRaises(KeyError):
send_course_email(101, [], {}, False)
send_course_email(entry.id, 101, [], {}, False)
def test_no_course_email_obj(self):
# Make sure send_course_email handles CourseEmail.DoesNotExist exception.
entry = InstructorTaskFactory.create(task_key='', task_id='dummy')
with self.assertRaises(CourseEmail.DoesNotExist):
send_course_email(101, [], {'course_title': 'Test'}, False)
send_course_email(entry.id, 101, [], {'course_title': 'Test'}, False)
......@@ -30,6 +30,7 @@ from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.html_module import HtmlDescriptor
from bulk_email.models import CourseEmail
from courseware import grades
from courseware.access import (has_access, get_access_group_name,
course_beta_test_group_name)
......@@ -721,7 +722,6 @@ def instructor_dashboard(request, course_id):
email_to_option = request.POST.get("to_option")
email_subject = request.POST.get("subject")
html_message = request.POST.get("message")
text_message = html_to_text(html_message)
# TODO: make sure this is committed before submitting it to the task.
# However, it should probably be enough to do the submit below, which
......@@ -730,15 +730,7 @@ def instructor_dashboard(request, course_id):
# Actually, this should probably be moved out, so that all the validation logic
# we might want to add to it can be added. There might also be something
# that would permit validation of the email beforehand.
email = CourseEmail(
course_id=course_id,
sender=request.user,
to_option=email_to_option,
subject=email_subject,
html_message=html_message,
text_message=text_message
)
email.save()
email = CourseEmail.create(course_id, request.user, email_to_option, email_subject, html_message)
# TODO: make this into a task submission, so that the correct
# InstructorTask object gets created (for monitoring purposes)
......@@ -749,6 +741,10 @@ def instructor_dashboard(request, course_id):
else:
email_msg = '<div class="msg msg-confirm"><p class="copy">Your email was successfully queued for sending.</p></div>'
elif "Show Background Email Task History" in action:
message, datatable = get_background_task_table(course_id, task_type='bulk_course_email')
msg += message
#----------------------------------------
# psychometrics
......@@ -883,6 +879,7 @@ def instructor_dashboard(request, course_id):
return render_to_response('courseware/instructor_dashboard.html', context)
def _do_remote_gradebook(user, course, action, args=None, files=None):
'''
Perform remote gradebook action. Returns msg, datatable.
......@@ -1533,7 +1530,7 @@ def dump_grading_context(course):
return msg
def get_background_task_table(course_id, problem_url, student=None):
def get_background_task_table(course_id, problem_url=None, student=None, task_type=None):
"""
Construct the "datatable" structure to represent background task history.
......@@ -1544,14 +1541,17 @@ def get_background_task_table(course_id, problem_url, student=None):
Returns a tuple of (msg, datatable), where the msg is a possible error message,
and the datatable is the datatable to be used for display.
"""
history_entries = get_instructor_task_history(course_id, problem_url, student)
history_entries = get_instructor_task_history(course_id, problem_url, student, task_type)
datatable = {}
msg = ""
# first check to see if there is any history at all
# (note that we don't have to check that the arguments are valid; it
# just won't find any entries.)
if (history_entries.count()) == 0:
if student is not None:
# TODO: figure out how to deal with task_type better here...
if problem_url is None:
msg += '<font color="red">Failed to find any background tasks for course "{course}".</font>'.format(course=course_id)
elif student is not None:
template = '<font color="red">Failed to find any background tasks for course "{course}", module "{problem}" and student "{student}".</font>'
msg += template.format(course=course_id, problem=problem_url, student=student.username)
else:
......@@ -1588,7 +1588,9 @@ def get_background_task_table(course_id, problem_url, student=None):
task_message]
datatable['data'].append(row)
if student is not None:
if problem_url is None:
datatable['title'] = "{course_id}".format(course_id=course_id)
elif student is not None:
datatable['title'] = "{course_id} > {location} > {student}".format(course_id=course_id,
location=problem_url,
student=student.username)
......
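With the new keyword arguments, the history table can now be filtered by task type alone; a sketch of the call the dashboard makes for the email history button, mirroring the view code above:

# task_type filter only; no module or student filter for bulk email history.
message, datatable = get_background_task_table(
    course_id,
    problem_url=None,
    student=None,
    task_type='bulk_course_email',
)
msg += message  # possible error message; empty when entries were found
# With problem_url=None, datatable['title'] is simply the course_id.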
......@@ -113,8 +113,16 @@ def _update_instructor_task(instructor_task, task_result):
# Assume we don't always update the InstructorTask entry if we don't have to:
entry_needs_saving = False
task_output = None
if result_state in [PROGRESS, SUCCESS]:
entry_needs_updating = True
if result_state == SUCCESS and instructor_task.task_state == PROGRESS and len(instructor_task.subtasks) > 0:
# This happens when running subtasks: the result object is marked with SUCCESS,
# meaning that the subtasks have successfully been defined. However, the InstructorTask
# will be marked as in PROGRESS, until the last subtask completes and marks it as SUCCESS.
# We want to ignore the parent SUCCESS if subtasks are still running, and just trust the
# contents of the InstructorTask.
entry_needs_updating = False
elif result_state in [PROGRESS, SUCCESS]:
# construct a status message directly from the task result's result:
# it needs to go back with the entry passed in.
log.info("background task (%s), state %s: result: %s", task_id, result_state, returned_result)
......@@ -136,12 +144,13 @@ def _update_instructor_task(instructor_task, task_result):
# save progress and state into the entry, even if it's not being saved:
# when celery is run in "ALWAYS_EAGER" mode, progress needs to go back
# with the entry passed in.
instructor_task.task_state = result_state
if task_output is not None:
instructor_task.task_output = task_output
if entry_needs_updating:
instructor_task.task_state = result_state
if task_output is not None:
instructor_task.task_output = task_output
if entry_needs_saving:
instructor_task.save()
if entry_needs_saving:
instructor_task.save()
def get_updated_instructor_task(task_id):
......@@ -177,7 +186,7 @@ def get_status_from_instructor_task(instructor_task):
'in_progress': boolean indicating if task is still running.
'task_progress': dict containing progress information. This includes:
'attempted': number of attempts made
'updated': number of attempts that "succeeded"
'succeeded': number of attempts that "succeeded"
'total': number of possible subtasks to attempt
'action_name': user-visible verb to use in status messages. Should be past-tense.
'duration_ms': how long the task has (or had) been running.
......
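The comment block above carries the key reasoning: a parent task reports SUCCESS as soon as its subtasks are queued, so its result must not overwrite the InstructorTask row while subtasks are still reporting PROGRESS. A condensed restatement of that decision, with the surrounding setup elided and the default assumed:

# Condensed sketch; entry_needs_saving and task_output setup is elided.
entry_needs_updating = True  # assumed default before the guard below
if (result_state == SUCCESS
        and instructor_task.task_state == PROGRESS
        and len(instructor_task.subtasks) > 0):
    # Subtasks were merely defined; trust the InstructorTask row until the
    # last subtask marks it SUCCESS itself.
    entry_needs_updating = False

if entry_needs_updating:
    instructor_task.task_state = result_state
    if task_output is not None:
        instructor_task.task_output = task_output
    if entry_needs_saving:
        instructor_task.save()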
......@@ -88,7 +88,7 @@ class InstructorTaskTestCase(TestCase):
def _create_progress_entry(self, student=None, task_state=PROGRESS):
"""Creates a InstructorTask entry representing a task in progress."""
progress = {'attempted': 3,
'updated': 2,
'succeeded': 2,
'total': 5,
'action_name': 'rescored',
}
......@@ -120,6 +120,7 @@ class InstructorTaskModuleTestCase(LoginEnrollmentTestCase, ModuleStoreTestCase)
# add a sequence to the course to which the problems can be added
self.problem_section = ItemFactory.create(parent_location=chapter.location,
category='sequential',
metadata={'graded': True, 'format': 'Homework'},
display_name=TEST_SECTION_NAME)
@staticmethod
......
......@@ -227,7 +227,7 @@ class TestRescoringTask(TestIntegrationTask):
self.assertEqual(task_input['problem_url'], InstructorTaskModuleTestCase.problem_location(problem_url_name))
status = json.loads(instructor_task.task_output)
self.assertEqual(status['attempted'], 1)
self.assertEqual(status['updated'], 0)
self.assertEqual(status['succeeded'], 0)
self.assertEqual(status['total'], 1)
def define_code_response_problem(self, problem_url_name):
......
......@@ -104,14 +104,14 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
def test_delete_undefined_problem(self):
self._test_undefined_problem(delete_problem_state)
def _test_run_with_task(self, task_function, action_name, expected_num_updated):
def _test_run_with_task(self, task_function, action_name, expected_num_succeeded):
"""Run a task and check the number of StudentModules processed."""
task_entry = self._create_input_entry()
status = self._run_task_with_mock_celery(task_function, task_entry.id, task_entry.task_id)
# check return value
self.assertEquals(status.get('attempted'), expected_num_updated)
self.assertEquals(status.get('updated'), expected_num_updated)
self.assertEquals(status.get('total'), expected_num_updated)
self.assertEquals(status.get('attempted'), expected_num_succeeded)
self.assertEquals(status.get('succeeded'), expected_num_succeeded)
self.assertEquals(status.get('total'), expected_num_succeeded)
self.assertEquals(status.get('action_name'), action_name)
self.assertGreater('duration_ms', 0)
# compare with entry in table:
......@@ -209,7 +209,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
status = self._run_task_with_mock_celery(reset_problem_attempts, task_entry.id, task_entry.task_id)
# check return value
self.assertEquals(status.get('attempted'), 1)
self.assertEquals(status.get('updated'), 1)
self.assertEquals(status.get('succeeded'), 1)
self.assertEquals(status.get('total'), 1)
self.assertEquals(status.get('action_name'), 'reset')
self.assertGreater('duration_ms', 0)
......@@ -371,7 +371,7 @@ class TestInstructorTasks(InstructorTaskModuleTestCase):
entry = InstructorTask.objects.get(id=task_entry.id)
output = json.loads(entry.task_output)
self.assertEquals(output.get('attempted'), num_students)
self.assertEquals(output.get('updated'), num_students)
self.assertEquals(output.get('succeeded'), num_students)
self.assertEquals(output.get('total'), num_students)
self.assertEquals(output.get('action_name'), 'rescored')
self.assertGreater('duration_ms', 0)
......@@ -84,7 +84,7 @@ class InstructorTaskReportTest(InstructorTaskTestCase):
self.assertEquals(output['task_state'], SUCCESS)
self.assertFalse(output['in_progress'])
expected_progress = {'attempted': 3,
'updated': 2,
'succeeded': 2,
'total': 5,
'action_name': 'rescored'}
self.assertEquals(output['task_progress'], expected_progress)
......@@ -121,7 +121,7 @@ class InstructorTaskReportTest(InstructorTaskTestCase):
mock_result.task_id = task_id
mock_result.state = PROGRESS
mock_result.result = {'attempted': 5,
'updated': 4,
'succeeded': 4,
'total': 10,
'action_name': 'rescored'}
output = self._test_get_status_from_result(task_id, mock_result)
......@@ -165,7 +165,7 @@ class InstructorTaskReportTest(InstructorTaskTestCase):
expected_progress = {'message': "Task revoked before running"}
self.assertEquals(output['task_progress'], expected_progress)
def _get_output_for_task_success(self, attempted, updated, total, student=None):
def _get_output_for_task_success(self, attempted, succeeded, total, student=None):
"""returns the task_id and the result returned by instructor_task_status()."""
# view task entry for task in progress
instructor_task = self._create_progress_entry(student)
......@@ -174,7 +174,7 @@ class InstructorTaskReportTest(InstructorTaskTestCase):
mock_result.task_id = task_id
mock_result.state = SUCCESS
mock_result.result = {'attempted': attempted,
'updated': updated,
'succeeded': succeeded,
'total': total,
'action_name': 'rescored'}
output = self._test_get_status_from_result(task_id, mock_result)
......@@ -187,7 +187,7 @@ class InstructorTaskReportTest(InstructorTaskTestCase):
self.assertEquals(output['task_state'], SUCCESS)
self.assertFalse(output['in_progress'])
expected_progress = {'attempted': 10,
'updated': 8,
'succeeded': 8,
'total': 10,
'action_name': 'rescored'}
self.assertEquals(output['task_progress'], expected_progress)
......
......@@ -65,7 +65,7 @@ def instructor_task_status(request):
'in_progress': boolean indicating if task is still running.
'task_progress': dict containing progress information. This includes:
'attempted': number of attempts made
'updated': number of attempts that "succeeded"
'succeeded': number of attempts that "succeeded"
'total': number of possible subtasks to attempt
'action_name': user-visible verb to use in status messages. Should be past-tense.
'duration_ms': how long the task has (or had) been running.
......@@ -122,16 +122,20 @@ def get_task_completion_info(instructor_task):
if instructor_task.task_state in [FAILURE, REVOKED]:
return (succeeded, task_output.get('message', 'No message provided'))
if any([key not in task_output for key in ['action_name', 'attempted', 'updated', 'total']]):
if any([key not in task_output for key in ['action_name', 'attempted', 'total']]):
fmt = "Invalid task_output information found for instructor_task {0}: {1}"
log.warning(fmt.format(instructor_task.task_id, instructor_task.task_output))
return (succeeded, "No progress status information available")
action_name = task_output['action_name']
num_attempted = task_output['attempted']
num_updated = task_output['updated']
num_total = task_output['total']
# old tasks may still have 'updated' instead of the preferred 'succeeded':
num_succeeded = task_output.get('updated', 0) + task_output.get('succeeded', 0)
num_skipped = task_output.get('skipped', 0)
# num_failed = task_output.get('failed', 0)
student = None
problem_url = None
email_id = None
......@@ -147,12 +151,12 @@ def get_task_completion_info(instructor_task):
if instructor_task.task_state == PROGRESS:
# special message for providing progress updates:
msg_format = "Progress: {action} {updated} of {attempted} so far"
msg_format = "Progress: {action} {succeeded} of {attempted} so far"
elif student is not None and problem_url is not None:
# this reports on actions on problems for a particular student:
if num_attempted == 0:
msg_format = "Unable to find submission to be {action} for student '{student}'"
elif num_updated == 0:
elif num_succeeded == 0:
msg_format = "Problem failed to be {action} for student '{student}'"
else:
succeeded = True
......@@ -161,33 +165,40 @@ def get_task_completion_info(instructor_task):
# this reports on actions on problems for all students:
if num_attempted == 0:
msg_format = "Unable to find any students with submissions to be {action}"
elif num_updated == 0:
elif num_succeeded == 0:
msg_format = "Problem failed to be {action} for any of {attempted} students"
elif num_updated == num_attempted:
elif num_succeeded == num_attempted:
succeeded = True
msg_format = "Problem successfully {action} for {attempted} students"
else: # num_updated < num_attempted
msg_format = "Problem {action} for {updated} of {attempted} students"
else: # num_succeeded < num_attempted
msg_format = "Problem {action} for {succeeded} of {attempted} students"
elif email_id is not None:
# this reports on actions on bulk emails
if num_attempted == 0:
msg_format = "Unable to find any recipients to be {action}"
elif num_updated == 0:
elif num_succeeded == 0:
msg_format = "Message failed to be {action} for any of {attempted} recipients "
elif num_updated == num_attempted:
elif num_succeeded == num_attempted:
succeeded = True
msg_format = "Message successfully {action} for {attempted} recipients"
else: # num_updated < num_attempted
msg_format = "Message {action} for {updated} of {attempted} recipients"
else: # num_succeeded < num_attempted
msg_format = "Message {action} for {succeeded} of {attempted} recipients"
else:
# provide a default:
msg_format = "Status: {action} {updated} of {attempted}"
msg_format = "Status: {action} {succeeded} of {attempted}"
if num_skipped > 0:
msg_format += " (skipping {skipped})"
if student is None and num_attempted != num_total:
msg_format += " (out of {total})"
# Update status in task result object itself:
message = msg_format.format(action=action_name, updated=num_updated,
attempted=num_attempted, total=num_total,
student=student)
message = msg_format.format(
action=action_name,
succeeded=num_succeeded,
attempted=num_attempted,
total=num_total,
skipped=num_skipped,
student=student)
return (succeeded, message)
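Taken together, the new keys change the user-visible wording. A worked example with invented counts, following the default branch of the formatting above (no student, problem, or email context):

# Invented counts; 'emailed' as the action name is an assumption.
task_output = {'action_name': 'emailed', 'attempted': 10,
               'succeeded': 7, 'skipped': 2, 'total': 12}
# num_succeeded = 0 + 7, num_skipped = 2, and attempted != total, so the
# resulting message is:
#   "Status: emailed 7 of 10 (skipping 2) (out of 12)"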
......@@ -550,6 +550,13 @@ function goto( mode)
return true;
}
</script>
<p>These email actions run in the background; the status of active email tasks will appear in a table below.
To see the status of all bulk email tasks submitted for this course, click this button:
</p>
<p>
<input type="submit" name="action" value="Show Background Email Task History">
</p>
%endif
</form>
......