Commit e0aa8cf7 by David Ormsbee, committed by Sarina Canelake

Grade report celery task and direct file push to S3 from the new instructor dashboard.

Hook up display of grade files ready for download to new instructor dashboard.

LMS-58
parent 8d01a36e
# Compute grades using real division, with no integer truncation
from __future__ import division
from collections import defaultdict
import random
import logging
from contextlib import contextmanager
from django.conf import settings
from django.contrib.auth.models import User
from django.db import transaction
from django.core.handlers.base import BaseHandler
from django.test.client import RequestFactory
from dogapi import dog_stats_api
from courseware import courses
from courseware.model_data import FieldDataCache, DjangoKeyValueStore
from xblock.fields import Scope
from .module_render import get_module, get_module_for_descriptor
from xmodule import graders
from xmodule.capa_module import CapaModule
from xmodule.graders import Score
from .models import StudentModule
log = logging.getLogger("mitx.courseware")
......@@ -411,3 +417,60 @@ def get_score(course_id, user, problem_descriptor, module_creator, field_data_ca
total = weight
return (correct, total)
@contextmanager
def manual_transaction():
"""A context manager for managing manual transactions"""
try:
yield
except Exception:
transaction.rollback()
log.exception('Due to an error, this transaction has been rolled back')
raise
else:
transaction.commit()
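For illustration, a minimal sketch of how this context manager might wrap a group of related writes; the helper and its arguments are hypothetical, while StudentModule and its fields come from courseware.models, imported above:
def save_problem_state(student, course_id, module_state_key, new_state_json):
    # Hypothetical helper: commit only if every statement succeeds; otherwise
    # manual_transaction rolls everything back and re-raises.
    with manual_transaction():
        student_module = StudentModule.objects.get(
            student=student,
            course_id=course_id,
            module_state_key=module_state_key,
        )
        student_module.state = new_state_json
        student_module.save()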
def iterate_grades_for(course_id, students):
"""Given a course_id and an iterable of students (User), yield a tuple of:
(student, gradeset, err_msg) for every student enrolled in the course.
If an error occurred, gradeset will be an empty dict and err_msg will be an
exception message. If there was no error, err_msg is an empty string.
The gradeset is a dictionary with the following fields:
- grade : A final letter grade.
- percent : The final percent for the class (rounded up).
- section_breakdown : A breakdown of each section that makes
up the grade. (For display)
- grade_breakdown : A breakdown of the major components that
make up the final grade. (For display)
- raw_scores : Scores for every graded module.
"""
course = courses.get_course_by_id(course_id)
# We make a fake request because grading code expects to be able to look at
# the request. We have to attach the correct user to the request before
# grading that student.
request = RequestFactory().get('/')
for student in students:
with dog_stats_api.timer('lms.grades.iterate_grades_for', tags=['action:{}'.format(course_id)]):
try:
request.user = student
gradeset = grade(student, request, course)
yield student, gradeset, ""
except Exception as exc:
# Keep marching on even if this student couldn't be graded for
# some reason.
log.exception(
'Cannot grade student %s (%s) in course %s because of exception: %s',
student.username,
student.id,
course_id,
exc.message
)
yield student, {}, exc.message
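A sketch of how a caller might drive this generator, assuming a course_id string is in scope; the enrollment query mirrors the one used by the grading task added later in this commit, and the logging is illustrative:
enrolled_students = User.objects.filter(
    courseenrollment__course_id=course_id,
    courseenrollment__is_active=True
)
failed_students = {}
for student, gradeset, err_msg in iterate_grades_for(course_id, enrolled_students):
    if gradeset:
        log.info("%s scored %s in %s", student.username, gradeset['percent'], course_id)
    else:
        # An empty gradeset means grading raised; err_msg carries the exception message.
        failed_students[student.username] = err_msg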
......@@ -32,6 +32,7 @@ from student.models import unique_id_for_user
import instructor_task.api
from instructor_task.api_helper import AlreadyRunningError
from instructor_task.views import get_task_completion_info
from instructor_task.models import GradesStore
import instructor.enrollment as enrollment
from instructor.enrollment import enroll_email, unenroll_email, get_email_params
from instructor.views.tools import strip_if_string, get_student_from_identifier
......@@ -753,6 +754,40 @@ def list_instructor_tasks(request, course_id):
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def list_grade_downloads(_request, course_id):
"""
List grade CSV files that are available for download for this course.
"""
grades_store = GradesStore.from_config()
response_payload = {
'downloads' : [
dict(name=name, url=url, link='<a href="{}">{}</a>'.format(url, name))
for name, url in grades_store.links_for(course_id)
]
}
return JsonResponse(response_payload)
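For reference, the JSON this endpoint returns has roughly the shape below; the file name and URLs are invented examples:
# {
#     "downloads": [
#         {
#             "name": "MITx_6.002x_2013_Spring_grade_report_2013-07-01-1200.csv",
#             "url": "https://edx-grades.s3.amazonaws.com/...signed-url...",
#             "link": "<a href=\"https://edx-grades.s3.amazonaws.com/...signed-url...\">MITx_6.002x_2013_Spring_grade_report_2013-07-01-1200.csv</a>"
#         }
#     ]
# }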
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
def calculate_grades_csv(request, course_id):
"""
Kick off a background task to generate a grades CSV for this course.
AlreadyRunningError is raised if the course's grades are already being calculated.
"""
try:
instructor_task.api.submit_calculate_grades_csv(request, course_id)
return JsonResponse({"status" : "Grade calculation started"})
except AlreadyRunningError:
return JsonResponse({
"status" : "Grade calculation already running"
})
@ensure_csrf_cookie
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@require_level('staff')
@require_query_params('rolename')
def list_forum_members(request, course_id):
"""
......
......@@ -37,4 +37,10 @@ urlpatterns = patterns('', # nopep8
'instructor.views.api.proxy_legacy_analytics', name="proxy_legacy_analytics"),
url(r'^send_email$',
'instructor.views.api.send_email', name="send_email"),
# Grade downloads...
url(r'^list_grade_downloads$',
'instructor.views.api.list_grade_downloads', name="list_grade_downloads"),
url(r'^calculate_grades_csv$',
'instructor.views.api.calculate_grades_csv', name="calculate_grades_csv"),
)
......@@ -171,6 +171,8 @@ def _section_data_download(course_id, access):
'get_students_features_url': reverse('get_students_features', kwargs={'course_id': course_id}),
'get_anon_ids_url': reverse('get_anon_ids', kwargs={'course_id': course_id}),
'list_instructor_tasks_url': reverse('list_instructor_tasks', kwargs={'course_id': course_id}),
'list_grade_downloads_url' : reverse('list_grade_downloads', kwargs={'course_id' : course_id}),
'calculate_grades_csv_url' : reverse('calculate_grades_csv', kwargs={'course_id' : course_id}),
}
return section_data
......
......@@ -16,7 +16,8 @@ from instructor_task.models import InstructorTask
from instructor_task.tasks import (rescore_problem,
reset_problem_attempts,
delete_problem_state,
send_bulk_course_email)
send_bulk_course_email,
calculate_grades_csv)
from instructor_task.api_helper import (check_arguments_for_rescoring,
encode_problem_and_student_input,
......@@ -206,3 +207,14 @@ def submit_bulk_course_email(request, course_id, email_id):
# create the key value by using MD5 hash:
task_key = hashlib.md5(task_key_stub).hexdigest()
return submit_task(request, task_type, task_class, course_id, task_input, task_key)
def submit_calculate_grades_csv(request, course_id):
"""
Submit a background task to calculate grades for the course and produce a CSV report.
AlreadyRunningError is raised if grades are already being calculated for this course.
"""
task_type = 'grade_course'
task_class = calculate_grades_csv
task_input = {}
task_key = ""
return submit_task(request, task_type, task_class, course_id, task_input, task_key)
......@@ -12,9 +12,20 @@ file and check it in at the same time as your model changes. To do that,
ASSUMPTIONS: modules have unique IDs, even across different module_types
"""
from cStringIO import StringIO
from gzip import GzipFile
from uuid import uuid4
import csv
import json
import hashlib
import os
import os.path
import urllib
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models, transaction
......@@ -176,3 +187,172 @@ class InstructorTask(models.Model):
def create_output_for_revoked():
"""Creates standard message to store in output format for revoked tasks."""
return json.dumps({'message': 'Task revoked before running'})
class GradesStore(object):
"""
Simple abstraction layer that can fetch and store CSV files for grades
download. Should probably be refactored later to create a GradesFile object that
can simply be appended to, for the sake of memory efficiency, rather than
passing in the whole dataset at once. We pass the whole dataset for now just
because it's simpler.
"""
@classmethod
def from_config(cls):
"""
Return one of the GradesStore subclasses depending on django
configuration. Look at subclasses for expected configuration.
"""
storage_type = settings.GRADES_DOWNLOAD.get("STORAGE_TYPE")
if storage_type.lower() == "s3":
return S3GradesStore.from_config()
elif storage_type.lower() == "localfs":
return LocalFSGradesStore.from_config()
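A sketch of how a caller picks a store from settings; note that an unrecognized STORAGE_TYPE falls through both branches and returns None:
# GRADES_DOWNLOAD is a dict in Django settings; the values here mirror the localfs
# defaults added later in this commit:
#   GRADES_DOWNLOAD = {'STORAGE_TYPE': 'localfs', 'BUCKET': 'edx-grades', 'ROOT_PATH': '/tmp/edx-s3/grades'}
grades_store = GradesStore.from_config()  # -> S3GradesStore or LocalFSGradesStore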
class S3GradesStore(GradesStore):
"""
"""
def __init__(self, bucket_name, root_path):
self.root_path = root_path
conn = S3Connection(
settings.AWS_ACCESS_KEY_ID,
settings.AWS_SECRET_ACCESS_KEY
)
self.bucket = conn.get_bucket(bucket_name)
@classmethod
def from_config(cls):
"""Create an S3GradesStore using the GRADES_DOWNLOAD dict in Django settings."""
return cls(
settings.GRADES_DOWNLOAD['BUCKET'],
settings.GRADES_DOWNLOAD['ROOT_PATH']
)
def key_for(self, course_id, filename):
"""Return the key we would use to store and retrive the data for the
given filename."""
hashed_course_id = hashlib.sha1(course_id)
key = Key(self.bucket)
key.key = "{}/{}/{}".format(
self.root_path,
hashed_course_id.hexdigest(),
filename
)
return key
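To make the layout concrete: with a ROOT_PATH of "grades" and a made-up course id and file name, the resulting S3 key would look roughly like:
# grades/<40-character sha1 hex digest of the course id>/MITx_6.002x_2013_Spring_grade_report_2013-07-01-1200.csv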
def store(self, course_id, filename, buff):
"""Store `buff.getvalue()` (gzipped CSV bytes) in S3 under the key for (course_id, filename)."""
key = self.key_for(course_id, filename)
data = buff.getvalue()
key.size = len(data)
key.content_encoding = "gzip"
key.content_type = "text/csv"
key.set_contents_from_string(
data,
headers={
"Content-Encoding" : "gzip",
"Content-Length" : len(data),
"Content-Type" : "text/csv",
}
)
def store_rows(self, course_id, filename, rows):
"""
Given a course_id, filename, and rows (each row is an iterable of strings),
write this data out.
"""
output_buffer = StringIO()
gzip_file = GzipFile(fileobj=output_buffer, mode="wb")
csv.writer(gzip_file).writerows(rows)
gzip_file.close()
self.store(course_id, filename, output_buffer)
def links_for(self, course_id):
"""
For a given `course_id`, return a list of `(filename, url)` tuples. `url`
can be plugged straight into an href attribute.
"""
course_dir = self.key_for(course_id, '')
return sorted(
[
(key.key.split("/")[-1], key.generate_url(expires_in=300))
for key in self.bucket.list(prefix=course_dir.key)
],
reverse=True
)
class LocalFSGradesStore(GradesStore):
"""
LocalFS implementation of a GradesStore. This is meant for debugging
purposes and is *absolutely not for production use*. Use S3GradesStore for
that.
"""
def __init__(self, root_path):
"""
Initialize with root_path where we're going to store our files. We
will build a directory structure under this for each course.
"""
self.root_path = root_path
if not os.path.exists(root_path):
os.makedirs(root_path)
@classmethod
def from_config(cls):
"""
Generate an instance of this object from Django settings. It assumes
that there is a dict in settings named GRADES_DOWNLOAD and that it has
a ROOT_PATH that maps to an absolute file path that the web app has
write permissions to. `LocalFSGradesStore` will create any intermediate
directories as needed.
"""
return cls(settings.GRADES_DOWNLOAD['ROOT_PATH'])
def path_to(self, course_id, filename):
"""Return the full path to a given file for a given course."""
return os.path.join(self.root_path, urllib.quote(course_id, safe=''), filename)
def store(self, course_id, filename, buff):
"""
Given the `course_id` and `filename`, store the contents of `buff` in
that file. Overwrite anything that was there previously. `buff` is
assumed to be a StringIO object (or anything that can flush its contents
to string using `.getvalue()`).
"""
full_path = self.path_to(course_id, filename)
directory = os.path.dirname(full_path)
if not os.path.exists(directory):
os.mkdir(directory)
with open(full_path, "wb") as f:
f.write(buff.getvalue())
def store_rows(self, course_id, filename, rows):
"""
Given a course_id, filename, and rows (each row is an iterable of strings),
write this data out.
"""
output_buffer = StringIO()
csv.writer(output_buffer).writerows(rows)
self.store(course_id, filename, output_buffer)
def links_for(self, course_id):
"""
For a given `course_id`, return a list of `(filename, url)` tuples. `url`
can be plugged straight into an href attribute.
"""
course_dir = self.path_to(course_id, '')
if not os.path.exists(course_dir):
return []
return sorted(
[
(filename, ("file://" + urllib.quote(os.path.join(course_dir, filename))))
for filename in os.listdir(course_dir)
],
reverse=True
)
\ No newline at end of file
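A quick end-to-end sketch of the LocalFS store (debugging only), assuming a writable path and made-up course data:
store = LocalFSGradesStore('/tmp/edx-s3/grades')
store.store_rows(
    'MITx/6.002x/2013_Spring',
    'example_grade_report.csv',
    [['id', 'username', 'grade'], ['1', 'alice', '0.93']],
)
# links_for() returns (filename, "file://..." url) tuples in reverse filename order.
print store.links_for('MITx/6.002x/2013_Spring')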
......@@ -19,6 +19,7 @@ a problem URL and optionally a student. These are used to set up the initial va
of the query for traversing StudentModule objects.
"""
from django.conf import settings
from django.utils.translation import ugettext_noop
from celery import task
from functools import partial
......@@ -29,6 +30,7 @@ from instructor_task.tasks_helper import (
rescore_problem_module_state,
reset_attempts_module_state,
delete_problem_module_state,
push_grades_to_s3,
)
from bulk_email.tasks import perform_delegate_email_batches
......@@ -127,3 +129,13 @@ def send_bulk_course_email(entry_id, _xmodule_instance_args):
action_name = ugettext_noop('emailed')
visit_fcn = perform_delegate_email_batches
return run_main_task(entry_id, visit_fcn, action_name)
@task(base=BaseInstructorTask, routing_key=settings.GRADES_DOWNLOAD_ROUTING_KEY) # pylint: disable=E1102
def calculate_grades_csv(entry_id, xmodule_instance_args):
"""
Grade a course and push the results to an S3 bucket for download.
"""
action_name = ugettext_noop('graded')
task_fn = partial(push_grades_to_s3, xmodule_instance_args)
return run_main_task(entry_id, task_fn, action_name)
\ No newline at end of file
......@@ -4,24 +4,26 @@ running state of a course.
"""
import json
import urllib
from datetime import datetime
from time import time
from celery import Task, current_task
from celery.utils.log import get_task_logger
from celery.states import SUCCESS, FAILURE
from django.contrib.auth.models import User
from django.db import transaction, reset_queries
from dogapi import dog_stats_api
from pytz import UTC
from xmodule.modulestore.django import modulestore
from track.views import task_track
from courseware.grades import iterate_grades_for
from courseware.models import StudentModule
from courseware.model_data import FieldDataCache
from courseware.module_render import get_module_for_descriptor_internal
from instructor_task.models import InstructorTask, PROGRESS
from instructor_task.models import GradesStore, InstructorTask, PROGRESS
# define different loggers for use within tasks and on client side
TASK_LOG = get_task_logger(__name__)
......@@ -465,3 +467,104 @@ def delete_problem_module_state(xmodule_instance_args, _module_descriptor, stude
track_function = _get_track_function_for_task(student_module.student, xmodule_instance_args)
track_function('problem_delete_state', {})
return UPDATE_STATUS_SUCCEEDED
def push_grades_to_s3(_xmodule_instance_args, _entry_id, course_id, _task_input, action_name):
"""
For a given `course_id`, generate a grades CSV file for all students that
are enrolled, and store using a `GradesStore`. Once created, the files can
be accessed by instantiating another `GradesStore` (via
`GradesStore.from_config()`) and calling `links_for()` on it. Writes are
buffered, so we'll never write part of a CSV file to S3 -- i.e. any files
that are visible in GradesStore will be complete ones.
"""
# Get start time for task:
start_time = datetime.now(UTC)
status_interval = 100
# Pre-fetching groups normally spares auth checks an additional DB lookup per
# student (skipping it noticeably hurts pages like Progress).
# But when doing grading at this scale, the memory required to store the resulting
# enrolled_students is too large to fit comfortably in memory, and subsequent
# course grading requests lead to memory fragmentation. So we will err here on the
# side of smaller memory allocations at the cost of additional lookups.
enrolled_students = User.objects.filter(
courseenrollment__course_id=course_id,
courseenrollment__is_active=True
)
# perform the main loop
num_attempted = 0
num_succeeded = 0
num_failed = 0
num_total = enrolled_students.count()
curr_step = "Calculating Grades"
def update_task_progress():
"""Return a dict containing info about current task"""
current_time = datetime.now(UTC)
progress = {
'action_name': action_name,
'attempted': num_attempted,
'succeeded': num_succeeded,
'failed' : num_failed,
'total' : num_total,
'duration_ms': int((current_time - start_time).total_seconds() * 1000),
'step' : curr_step,
}
_get_current_task().update_state(state=PROGRESS, meta=progress)
return progress
# Loop over all our students and build up the rows of the grade report CSV.
header = None
rows = []
err_rows = [["id", "username", "error_msg"]]
for student, gradeset, err_msg in iterate_grades_for(course_id, enrolled_students):
# Periodically update task status (this is a db write)
if num_attempted % status_interval == 0:
update_task_progress()
num_attempted += 1
if gradeset:
num_succeeded += 1
if not header:
header = [section['label'] for section in gradeset[u'section_breakdown']]
rows.append(["id", "email", "username", "grade"] + header)
percents = {
section['label']: section.get('percent', 0.0)
for section in gradeset[u'section_breakdown']
if 'label' in section
}
row_percents = [percents[label] for label in header]
rows.append([student.id, student.email, student.username, gradeset['percent']] + row_percents)
else:
# An empty gradeset means we failed to grade a student.
num_failed += 1
err_rows.append([student.id, student.username, err_msg])
curr_step = "Uploading CSVs"
update_task_progress()
grades_store = GradesStore.from_config()
timestamp_str = start_time.strftime("%Y-%m-%d-%H%M")
TASK_LOG.debug("Uploading CSV files for course {}".format(course_id))
course_id_prefix = urllib.quote(course_id.replace("/", "_"))
grades_store.store_rows(
course_id,
"{}_grade_report_{}.csv".format(course_id_prefix, timestamp_str),
rows
)
# If there are any error rows (don't count the header), write that out as well
if len(err_rows) > 1:
grades_store.store_rows(
course_id,
"{}_grade_report_{}_err.csv".format(course_id_prefix, timestamp_str),
err_rows
)
# One last update before we close out...
return update_task_progress()
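To make the output concrete, the main grade report built above looks roughly like the CSV below; the section labels come from each gradeset's section_breakdown, and every value here is invented:
id,email,username,grade,HW 01,HW 02,Midterm,Final
1,alice@example.com,alice,0.93,1.0,0.8,0.95,0.9
2,bob@example.com,bob,0.41,0.5,0.0,0.45,0.35
A companion report named like {course_id_prefix}_grade_report_{timestamp}_err.csv, with columns id, username, and error_msg, is written only when at least one student could not be graded.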
......@@ -75,7 +75,6 @@ def instructor_task_status(request):
'traceback': optional, returned if task failed and produced a traceback.
"""
output = {}
if 'task_id' in request.REQUEST:
task_id = request.REQUEST['task_id']
......
......@@ -86,6 +86,7 @@ CELERY_DEFAULT_EXCHANGE = 'edx.{0}core'.format(QUEUE_VARIANT)
HIGH_PRIORITY_QUEUE = 'edx.{0}core.high'.format(QUEUE_VARIANT)
DEFAULT_PRIORITY_QUEUE = 'edx.{0}core.default'.format(QUEUE_VARIANT)
LOW_PRIORITY_QUEUE = 'edx.{0}core.low'.format(QUEUE_VARIANT)
HIGH_MEM_QUEUE = 'edx.{0}core.high_mem'.format(QUEUE_VARIANT)
CELERY_DEFAULT_QUEUE = DEFAULT_PRIORITY_QUEUE
CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE
......@@ -93,9 +94,19 @@ CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE
CELERY_QUEUES = {
HIGH_PRIORITY_QUEUE: {},
LOW_PRIORITY_QUEUE: {},
DEFAULT_PRIORITY_QUEUE: {}
DEFAULT_PRIORITY_QUEUE: {},
HIGH_MEM_QUEUE: {},
}
# If we're a worker on the high_mem queue, set ourselves to die after processing
# one request to avoid having memory leaks take down the worker server. This env
# var is set in /etc/init/edx-workers.conf -- this should probably be replaced
# with some celery API call to see what queue we started listening to, but I
# don't know what that call is or if it's active at this point in the code.
if os.environ.get('QUEUE') == 'high_mem':
CELERYD_MAX_TASKS_PER_CHILD = 1
########################## NON-SECURE ENV CONFIG ##############################
# Things like server locations, ports, etc.
......@@ -312,3 +323,8 @@ TRACKING_BACKENDS.update(AUTH_TOKENS.get("TRACKING_BACKENDS", {}))
# Student identity verification settings
VERIFY_STUDENT = AUTH_TOKENS.get("VERIFY_STUDENT", VERIFY_STUDENT)
# Grades download
GRADES_DOWNLOAD_ROUTING_KEY = HIGH_MEM_QUEUE
GRADES_DOWNLOAD = ENV_TOKENS.get("GRADES_DOWNLOAD", GRADES_DOWNLOAD)
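For an S3-backed deployment, the GRADES_DOWNLOAD entry supplied through ENV_TOKENS might look like the following, shown as the equivalent Python dict; the bucket name is an example, and for S3 the ROOT_PATH acts as a key prefix inside the bucket rather than a filesystem path:
GRADES_DOWNLOAD = {
    "STORAGE_TYPE": "s3",
    "BUCKET": "my-edx-grades",  # example bucket; must be accessible with the configured AWS credentials
    "ROOT_PATH": "grades",      # key prefix under which per-course report folders are created
}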
......@@ -192,6 +192,10 @@ MITX_FEATURES = {
# Disable instructor dash buttons for downloading course data
# when enrollment exceeds this number
'MAX_ENROLLMENT_INSTR_BUTTONS': 200,
# Grade calculation started from the new instructor dashboard will write
# grades CSV files to S3 and give links for downloads.
'ENABLE_S3_GRADE_DOWNLOADS' : True,
}
# Used for A/B testing
......@@ -846,6 +850,7 @@ CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
HIGH_PRIORITY_QUEUE = 'edx.core.high'
DEFAULT_PRIORITY_QUEUE = 'edx.core.default'
LOW_PRIORITY_QUEUE = 'edx.core.low'
HIGH_MEM_QUEUE = 'edx.core.high_mem'
CELERY_QUEUE_HA_POLICY = 'all'
......@@ -857,7 +862,8 @@ CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE
CELERY_QUEUES = {
HIGH_PRIORITY_QUEUE: {},
LOW_PRIORITY_QUEUE: {},
DEFAULT_PRIORITY_QUEUE: {}
DEFAULT_PRIORITY_QUEUE: {},
HIGH_MEM_QUEUE: {},
}
# let logging work as configured:
......@@ -1061,3 +1067,12 @@ REGISTRATION_OPTIONAL_FIELDS = set([
'mailing_address',
'goals',
])
# Grades download
GRADES_DOWNLOAD_ROUTING_KEY = HIGH_MEM_QUEUE
GRADES_DOWNLOAD = {
'STORAGE_TYPE' : 'localfs',
'BUCKET' : 'edx-grades',
'ROOT_PATH' : '/tmp/edx-s3/grades',
}
......@@ -283,6 +283,13 @@ EDX_API_KEY = None
####################### Shoppingcart ###########################
MITX_FEATURES['ENABLE_SHOPPING_CART'] = True
###################### Grade Downloads ######################
GRADES_DOWNLOAD = {
'STORAGE_TYPE' : 'localfs',
'BUCKET' : 'edx-grades',
'ROOT_PATH' : '/tmp/edx-s3/grades',
}
#####################################################################
# Lastly, see if the developer has any local overrides.
try:
......
......@@ -247,6 +247,13 @@ PASSWORD_HASHERS = (
# 'django.contrib.auth.hashers.CryptPasswordHasher',
)
###################### Grade Downloads ######################
GRADES_DOWNLOAD = {
'STORAGE_TYPE' : 'localfs',
'BUCKET' : 'edx-grades',
'ROOT_PATH' : '/tmp/edx-s3/grades',
}
################### Make tests quieter
# OpenID spews messages like this to stderr, we don't need to see them:
......
......@@ -21,9 +21,12 @@ class DataDownload
@$display_text = @$display.find '.data-display-text'
@$display_table = @$display.find '.data-display-table'
@$request_response_error = @$display.find '.request-response-error'
@$list_studs_btn = @$section.find("input[name='list-profiles']")
@$list_anon_btn = @$section.find("input[name='list-anon-ids']")
@$grade_config_btn = @$section.find("input[name='dump-gradeconf']")
@grade_downloads = new GradeDownloads(@$section)
@instructor_tasks = new (PendingInstructorTasks()) @$section
# attach click handlers
......@@ -84,10 +87,14 @@ class DataDownload
@$display_text.html data['grading_config_summary']
# handler for when the section title is clicked.
onClickTitle: -> @instructor_tasks.task_poller.start()
onClickTitle: ->
@instructor_tasks.task_poller.start()
@grade_downloads.downloads_poller.start()
# handler for when the section is closed
onExit: -> @instructor_tasks.task_poller.stop()
onExit: ->
@instructor_tasks.task_poller.stop()
@grade_downloads.downloads_poller.stop()
clear_display: ->
@$display_text.empty()
......@@ -95,6 +102,69 @@ class DataDownload
@$request_response_error.empty()
class GradeDownloads
### Grade Downloads -- links expire quickly, so we refresh every 5 mins ####
constructor: (@$section) ->
@$grade_downloads_table = @$section.find ".grade-downloads-table"
@$calculate_grades_csv_btn = @$section.find("input[name='calculate-grades-csv']")
@$display = @$section.find '.data-display'
@$display_text = @$display.find '.data-display-text'
@$request_response_error = @$display.find '.request-response-error'
POLL_INTERVAL = 1000 * 60 * 5 # 5 minutes in ms
@downloads_poller = new window.InstructorDashboard.util.IntervalManager(
POLL_INTERVAL, => @reload_grade_downloads()
)
@$calculate_grades_csv_btn.click (e) =>
url = @$calculate_grades_csv_btn.data 'endpoint'
$.ajax
dataType: 'json'
url: url
error: std_ajax_err =>
@$request_response_error.text "Error generating grades."
success: (data) =>
@$display_text.html data['status']
reload_grade_downloads: ->
endpoint = @$grade_downloads_table.data 'endpoint'
$.ajax
dataType: 'json'
url: endpoint
success: (data) =>
if data.downloads.length
@create_grade_downloads_table data.downloads
else
console.log "No grade CSVs ready for download"
error: std_ajax_err => console.error "Error finding grade download CSVs"
create_grade_downloads_table: (grade_downloads_data) ->
@$grade_downloads_table.empty()
options =
enableCellNavigation: true
enableColumnReorder: false
autoHeight: true
forceFitColumns: true
columns = [
id: 'link'
field: 'link'
name: 'File'
sortable: false,
minWidth: 200,
formatter: (row, cell, value, columnDef, dataContext) ->
'<a href="' + dataContext['url'] + '">' + dataContext['name'] + '</a>'
]
$table_placeholder = $ '<div/>', class: 'slickgrid'
@$grade_downloads_table.append $table_placeholder
grid = new Slick.Grid($table_placeholder, grade_downloads_data, columns, options)
# export for use
# create parent namespaces if they do not already exist.
_.defaults window, InstructorDashboard: {}
......
......@@ -7,11 +7,6 @@
<input type="button" name="list-profiles" value="${_("List enrolled students with profile information")}" data-endpoint="${ section_data['get_students_features_url'] }">
<input type="button" name="list-profiles" value="CSV" data-csv="true">
<br>
## <input type="button" name="list-grades" value="Student grades">
## <input type="button" name="list-profiles" value="CSV" data-csv="true" class="csv">
## <br>
## <input type="button" name="list-answer-distributions" value="Answer distributions (x students got y points)">
## <br>
<input type="button" name="dump-gradeconf" value="${_("Grading Configuration")}" data-endpoint="${ section_data['get_grading_config_url'] }">
<input type="button" name="list-anon-ids" value="${_("Get Student Anonymized IDs CSV")}" data-csv="true" class="csv" data-endpoint="${ section_data['get_anon_ids_url'] }" class="${'is-disabled' if disable_buttons else ''}">
......@@ -20,15 +15,23 @@
<div class="data-display-table"></div>
<div class="request-response-error"></div>
%if settings.MITX_FEATURES.get('ENABLE_S3_GRADE_DOWNLOADS'):
<div>
<h2> ${_("Grades")}</h2>
<input type="button" name="calculate-grades-csv" value="${_('Calculate Grades')}" data-endpoint="${ section_data['calculate_grades_csv_url'] }"/>
<br/>
<p>${_("Available grades downloads:")}</p>
<div class="grade-downloads-table" data-endpoint="${ section_data['list_grade_downloads_url'] }" ></div>
</div>
%endif
%if settings.MITX_FEATURES.get('ENABLE_INSTRUCTOR_BACKGROUND_TASKS'):
<div class="running-tasks-container action-type-container">
<hr>
<h2> ${_("Pending Instructor Tasks")} </h2>
<p>${_("The status for any active tasks appears in a table below.")} </p>
<br />
<div class="running-tasks-table" data-endpoint="${ section_data['list_instructor_tasks_url'] }"></div>
</div>
%endif
</div>