Commit 13a7e905 by Brian Talbot

Merge branch 'master' into fix/btalbot/studio-sasscleanup

parents 0c928066 bc854ef0
......@@ -12,7 +12,7 @@ profile=no
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
ignore=CVS, migrations
# Pickle collected data for later comparisons.
persistent=yes
......@@ -43,7 +43,7 @@ disable=E1102,W0142
output-format=text
# Include message's id in output
include-ids=no
include-ids=yes
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
......@@ -97,7 +97,7 @@ bad-functions=map,filter,apply,input
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__)|log)$
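# NOTE: the added "|log" alternation exempts the conventional lowercase
# module-level logger (e.g. log = logging.getLogger(__name__)) from the
# uppercase constant-name rule.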
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
......
......@@ -9,10 +9,8 @@ from tempdir import mkdtemp_clean
import json
from fs.osfs import OSFS
import copy
from mock import Mock
from json import dumps, loads
from json import loads
from student.models import Registration
from django.contrib.auth.models import User
from cms.djangoapps.contentstore.utils import get_modulestore
......@@ -22,12 +20,11 @@ from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore import Location
from xmodule.modulestore.store_utilities import clone_course
from xmodule.modulestore.store_utilities import delete_course
from xmodule.modulestore.django import modulestore, _MODULESTORES
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.django import contentstore
from xmodule.templates import update_templates
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.xml_importer import import_from_xml
from xmodule.templates import update_templates
from xmodule.capa_module import CapaDescriptor
from xmodule.course_module import CourseDescriptor
......@@ -63,7 +60,6 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
self.client = Client()
self.client.login(username=uname, password=password)
def check_edit_unit(self, test_course_name):
import_from_xml(modulestore(), 'common/test/data/', [test_course_name])
......@@ -82,8 +78,8 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
def test_static_tab_reordering(self):
import_from_xml(modulestore(), 'common/test/data/', ['full'])
ms = modulestore('direct')
course = ms.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))
module_store = modulestore('direct')
course = module_store.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))
# reverse the ordering
reverse_tabs = []
......@@ -91,9 +87,9 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
if tab['type'] == 'static_tab':
reverse_tabs.insert(0, 'i4x://edX/full/static_tab/{0}'.format(tab['url_slug']))
resp = self.client.post(reverse('reorder_static_tabs'), json.dumps({'tabs': reverse_tabs}), "application/json")
self.client.post(reverse('reorder_static_tabs'), json.dumps({'tabs': reverse_tabs}), "application/json")
course = ms.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))
course = module_store.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))
# compare to make sure that the tabs information is in the expected order after the server call
course_tabs = []
......@@ -106,28 +102,29 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
def test_delete(self):
import_from_xml(modulestore(), 'common/test/data/', ['full'])
ms = modulestore('direct')
course = ms.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))
module_store = modulestore('direct')
sequential = ms.get_item(Location(['i4x', 'edX', 'full', 'sequential','Administrivia_and_Circuit_Elements', None]))
sequential = module_store.get_item(Location(['i4x', 'edX', 'full', 'sequential', 'Administrivia_and_Circuit_Elements', None]))
chapter = ms.get_item(Location(['i4x', 'edX', 'full', 'chapter','Week_1', None]))
chapter = module_store.get_item(Location(['i4x', 'edX', 'full', 'chapter','Week_1', None]))
# make sure the parent points to the child object which will be deleted
self.assertTrue(sequential.location.url() in chapter.definition['children'])
resp = self.client.post(reverse('delete_item'), json.dumps({'id': sequential.location.url(), 'delete_children':'true'}), "application/json")
self.client.post(reverse('delete_item'),
json.dumps({'id': sequential.location.url(), 'delete_children':'true'}),
"application/json")
bFound = False
found = False
try:
sequential = ms.get_item(Location(['i4x', 'edX', 'full', 'sequential','Administrivia_and_Circuit_Elements', None]))
bFound = True
module_store.get_item(Location(['i4x', 'edX', 'full', 'sequential', 'Administrivia_and_Circuit_Elements', None]))
found = True
except ItemNotFoundError:
pass
self.assertFalse(bFound)
self.assertFalse(found)
chapter = ms.get_item(Location(['i4x', 'edX', 'full', 'chapter','Week_1', None]))
chapter = module_store.get_item(Location(['i4x', 'edX', 'full', 'chapter','Week_1', None]))
# make sure the parent no longer points to the child object which was deleted
self.assertFalse(sequential.location.url() in chapter.definition['children'])
......@@ -140,22 +137,22 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
while there is a base definition in /about/effort.html
'''
import_from_xml(modulestore(), 'common/test/data/', ['full'])
ms = modulestore('direct')
effort = ms.get_item(Location(['i4x', 'edX', 'full', 'about', 'effort', None]))
module_store = modulestore('direct')
effort = module_store.get_item(Location(['i4x', 'edX', 'full', 'about', 'effort', None]))
self.assertEqual(effort.definition['data'], '6 hours')
# this one should be in a non-override folder
effort = ms.get_item(Location(['i4x', 'edX', 'full', 'about', 'end_date', None]))
effort = module_store.get_item(Location(['i4x', 'edX', 'full', 'about', 'end_date', None]))
self.assertEqual(effort.definition['data'], 'TBD')
def test_remove_hide_progress_tab(self):
import_from_xml(modulestore(), 'common/test/data/', ['full'])
ms = modulestore('direct')
cs = contentstore()
module_store = modulestore('direct')
content_store = contentstore()
source_location = CourseDescriptor.id_to_location('edX/full/6.002_Spring_2012')
course = ms.get_item(source_location)
course = module_store.get_item(source_location)
self.assertNotIn('hide_progress_tab', course.metadata)
def test_clone_course(self):
......@@ -174,19 +171,19 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
data = parse_json(resp)
self.assertEqual(data['id'], 'i4x://MITx/999/course/Robot_Super_Course')
ms = modulestore('direct')
cs = contentstore()
module_store = modulestore('direct')
content_store = contentstore()
source_location = CourseDescriptor.id_to_location('edX/full/6.002_Spring_2012')
dest_location = CourseDescriptor.id_to_location('MITx/999/Robot_Super_Course')
clone_course(ms, cs, source_location, dest_location)
clone_course(module_store, content_store, source_location, dest_location)
# now loop through all the units in the course and verify that the clone can render them, which
# means the objects are at least present
items = ms.get_items(Location(['i4x', 'edX', 'full', 'vertical', None]))
items = module_store.get_items(Location(['i4x', 'edX', 'full', 'vertical', None]))
self.assertGreater(len(items), 0)
clone_items = ms.get_items(Location(['i4x', 'MITx', '999', 'vertical', None]))
clone_items = module_store.get_items(Location(['i4x', 'MITx', '999', 'vertical', None]))
self.assertGreater(len(clone_items), 0)
for descriptor in items:
new_loc = descriptor.location._replace(org='MITx', course='999')
......@@ -197,14 +194,14 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
def test_delete_course(self):
import_from_xml(modulestore(), 'common/test/data/', ['full'])
ms = modulestore('direct')
cs = contentstore()
module_store = modulestore('direct')
content_store = contentstore()
location = CourseDescriptor.id_to_location('edX/full/6.002_Spring_2012')
delete_course(ms, cs, location, commit=True)
delete_course(module_store, content_store, location, commit=True)
items = ms.get_items(Location(['i4x', 'edX', 'full', 'vertical', None]))
items = module_store.get_items(Location(['i4x', 'edX', 'full', 'vertical', None]))
self.assertEqual(len(items), 0)
def verify_content_existence(self, modulestore, root_dir, location, dirname, category_name, filename_suffix=''):
......@@ -219,10 +216,10 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
self.assertTrue(fs.exists(item.location.name + filename_suffix))
def test_export_course(self):
ms = modulestore('direct')
cs = contentstore()
module_store = modulestore('direct')
content_store = contentstore()
import_from_xml(ms, 'common/test/data/', ['full'])
import_from_xml(module_store, 'common/test/data/', ['full'])
location = CourseDescriptor.id_to_location('edX/full/6.002_Spring_2012')
root_dir = path(mkdtemp_clean())
......@@ -230,24 +227,24 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
print 'Exporting to tempdir = {0}'.format(root_dir)
# export out to a tempdir
export_to_xml(ms, cs, location, root_dir, 'test_export')
export_to_xml(module_store, content_store, location, root_dir, 'test_export')
# check for static tabs
self.verify_content_existence(ms, root_dir, location, 'tabs', 'static_tab', '.html')
self.verify_content_existence(module_store, root_dir, location, 'tabs', 'static_tab', '.html')
# check for course info
self.verify_content_existence(ms, root_dir, location, 'info', 'course_info', '.html')
self.verify_content_existence(module_store, root_dir, location, 'info', 'course_info', '.html')
# check for custom_tags
self.verify_content_existence(ms, root_dir, location, 'custom_tags', 'custom_tag_template')
self.verify_content_existence(module_store, root_dir, location, 'custom_tags', 'custom_tag_template')
# check for grading_policy.json
fs = OSFS(root_dir / 'test_export/policies/6.002_Spring_2012')
self.assertTrue(fs.exists('grading_policy.json'))
course = ms.get_item(location)
course = module_store.get_item(location)
# compare what's on disk to what we have in our course
with fs.open('grading_policy.json','r') as grading_policy:
with fs.open('grading_policy.json', 'r') as grading_policy:
on_disk = loads(grading_policy.read())
self.assertEqual(on_disk, course.definition['data']['grading_policy'])
......@@ -255,18 +252,18 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
self.assertTrue(fs.exists('policy.json'))
# compare what's on disk to what we have in the course module
with fs.open('policy.json','r') as course_policy:
with fs.open('policy.json', 'r') as course_policy:
on_disk = loads(course_policy.read())
self.assertIn('course/6.002_Spring_2012', on_disk)
self.assertEqual(on_disk['course/6.002_Spring_2012'], course.metadata)
# remove old course
delete_course(ms, cs, location)
delete_course(module_store, content_store, location)
# reimport
import_from_xml(ms, root_dir, ['test_export'])
import_from_xml(module_store, root_dir, ['test_export'])
items = ms.get_items(Location(['i4x', 'edX', 'full', 'vertical', None]))
items = module_store.get_items(Location(['i4x', 'edX', 'full', 'vertical', None]))
self.assertGreater(len(items), 0)
for descriptor in items:
print "Checking {0}....".format(descriptor.location.url())
......@@ -276,11 +273,11 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
shutil.rmtree(root_dir)
def test_course_handouts_rewrites(self):
ms = modulestore('direct')
cs = contentstore()
module_store = modulestore('direct')
content_store = contentstore()
# import a test course
import_from_xml(ms, 'common/test/data/', ['full'])
import_from_xml(module_store, 'common/test/data/', ['full'])
handout_location = Location(['i4x', 'edX', 'full', 'course_info', 'handouts'])
......@@ -295,32 +292,32 @@ class ContentStoreToyCourseTest(ModuleStoreTestCase):
self.assertContains(resp, '/c4x/edX/full/asset/handouts_schematic_tutorial.pdf')
def test_export_course_with_unknown_metadata(self):
ms = modulestore('direct')
cs = contentstore()
module_store = modulestore('direct')
content_store = contentstore()
import_from_xml(ms, 'common/test/data/', ['full'])
import_from_xml(module_store, 'common/test/data/', ['full'])
location = CourseDescriptor.id_to_location('edX/full/6.002_Spring_2012')
root_dir = path(mkdtemp_clean())
course = ms.get_item(location)
course = module_store.get_item(location)
# add a boolean piece of unknown metadata so we can verify we don't throw an exception
course.metadata['new_metadata'] = True
ms.update_metadata(location, course.metadata)
module_store.update_metadata(location, course.metadata)
print 'Exporting to tempdir = {0}'.format(root_dir)
# export out to a tempdir
bExported = False
exported = False
try:
export_to_xml(ms, cs, location, root_dir, 'test_export')
bExported = True
export_to_xml(module_store, content_store, location, root_dir, 'test_export')
exported = True
except Exception:
pass
self.assertTrue(bExported)
self.assertTrue(exported)
class ContentStoreTest(ModuleStoreTestCase):
"""
......@@ -459,7 +456,7 @@ class ContentStoreTest(ModuleStoreTestCase):
def test_capa_module(self):
"""Test that a problem treats markdown specially."""
course = CourseFactory.create(org='MITx', course='999', display_name='Robot Super Course')
CourseFactory.create(org='MITx', course='999', display_name='Robot Super Course')
problem_data = {
'parent_location': 'i4x://MITx/999/course/Robot_Super_Course',
......@@ -481,10 +478,10 @@ class ContentStoreTest(ModuleStoreTestCase):
def test_import_metadata_with_attempts_empty_string(self):
import_from_xml(modulestore(), 'common/test/data/', ['simple'])
ms = modulestore('direct')
module_store = modulestore('direct')
did_load_item = False
try:
ms.get_item(Location(['i4x', 'edX', 'simple', 'problem', 'ps01-simple', None]))
module_store.get_item(Location(['i4x', 'edX', 'simple', 'problem', 'ps01-simple', None]))
did_load_item = True
except ItemNotFoundError:
pass
......@@ -495,10 +492,10 @@ class ContentStoreTest(ModuleStoreTestCase):
def test_metadata_inheritance(self):
import_from_xml(modulestore(), 'common/test/data/', ['full'])
ms = modulestore('direct')
course = ms.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))
module_store = modulestore('direct')
course = module_store.get_item(Location(['i4x', 'edX', 'full', 'course', '6.002_Spring_2012', None]))
verticals = ms.get_items(['i4x', 'edX', 'full', 'vertical', None, None])
verticals = module_store.get_items(['i4x', 'edX', 'full', 'vertical', None, None])
# let's assert metadata_inheritance on an existing vertical
for vertical in verticals:
......@@ -511,13 +508,13 @@ class ContentStoreTest(ModuleStoreTestCase):
source_template_location = Location('i4x', 'edx', 'templates', 'html', 'Blank_HTML_Page')
# create a new module and add it as a child to a vertical
ms.clone_item(source_template_location, new_component_location)
module_store.clone_item(source_template_location, new_component_location)
parent = verticals[0]
ms.update_children(parent.location, parent.definition.get('children', []) + [new_component_location.url()])
module_store.update_children(parent.location, parent.definition.get('children', []) + [new_component_location.url()])
# flush the cache
ms.get_cached_metadata_inheritance_tree(new_component_location, -1)
new_module = ms.get_item(new_component_location)
module_store.get_cached_metadata_inheritance_tree(new_component_location, -1)
new_module = module_store.get_item(new_component_location)
# check for grace period definition which should be defined at the course level
self.assertIn('graceperiod', new_module.metadata)
......@@ -530,11 +527,11 @@ class ContentStoreTest(ModuleStoreTestCase):
# now let's define an override at the leaf node level
#
new_module.metadata['graceperiod'] = '1 day'
ms.update_metadata(new_module.location, new_module.metadata)
module_store.update_metadata(new_module.location, new_module.metadata)
# flush the cache and refetch
ms.get_cached_metadata_inheritance_tree(new_component_location, -1)
new_module = ms.get_item(new_component_location)
module_store.get_cached_metadata_inheritance_tree(new_component_location, -1)
new_module = module_store.get_item(new_component_location)
self.assertIn('graceperiod', new_module.metadata)
self.assertEqual('1 day', new_module.metadata['graceperiod'])
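# A minimal sketch of the inheritance rule exercised above, assuming a simple
# parent-chain walk (the real store precomputes a cached inheritance tree;
# `parent` is a hypothetical pointer used only for illustration):
def resolve_inherited_metadata(node, key):
    """Walk from a leaf toward the course root until `key` is found."""
    while node is not None:
        if key in node.metadata:
            return node.metadata[key]  # a leaf override wins over the course default
        node = node.parent
    return None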
......@@ -543,15 +540,15 @@ class ContentStoreTest(ModuleStoreTestCase):
class TemplateTestCase(ModuleStoreTestCase):
def test_template_cleanup(self):
ms = modulestore('direct')
module_store = modulestore('direct')
# insert a bogus template in the store
bogus_template_location = Location('i4x', 'edx', 'templates', 'html', 'bogus')
source_template_location = Location('i4x', 'edx', 'templates', 'html', 'Blank_HTML_Page')
ms.clone_item(source_template_location, bogus_template_location)
module_store.clone_item(source_template_location, bogus_template_location)
verify_create = ms.get_item(bogus_template_location)
verify_create = module_store.get_item(bogus_template_location)
self.assertIsNotNone(verify_create)
# now run cleanup
......@@ -560,10 +557,9 @@ class TemplateTestCase(ModuleStoreTestCase):
# now try to find the dangling template; it should no longer be in the DB
asserted = False
try:
verify_create = ms.get_item(bogus_template_location)
verify_create = module_store.get_item(bogus_template_location)
except ItemNotFoundError:
asserted = True
self.assertTrue(asserted)
......@@ -86,12 +86,14 @@ def signup(request):
csrf_token = csrf(request)['csrf_token']
return render_to_response('signup.html', {'csrf': csrf_token})
def old_login_redirect(request):
'''
Redirect to the active login url.
'''
return redirect('login', permanent=True)
@ssl_login_shortcut
@ensure_csrf_cookie
def login_page(request):
......@@ -104,6 +106,7 @@ def login_page(request):
'forgot_password_link': "//{base}/#forgot-password-modal".format(base=settings.LMS_BASE),
})
def howitworks(request):
if request.user.is_authenticated():
return index(request)
......@@ -112,6 +115,7 @@ def howitworks(request):
# ==== Views for any logged-in user ==================================
@login_required
@ensure_csrf_cookie
def index(request):
......@@ -145,6 +149,7 @@ def index(request):
# ==== Views with per-item permissions================================
def has_access(user, location, role=STAFF_ROLE_NAME):
'''
Return True if the user is allowed to access this piece of data
......@@ -393,6 +398,7 @@ def preview_component(request, location):
'editor': wrap_xmodule(component.get_html, component, 'xmodule_edit.html')(),
})
@expect_json
@login_required
@ensure_csrf_cookie
......@@ -720,6 +726,7 @@ def create_draft(request):
return HttpResponse()
@login_required
@expect_json
def publish_draft(request):
......@@ -749,6 +756,7 @@ def unpublish_unit(request):
return HttpResponse()
@login_required
@expect_json
def clone_item(request):
......@@ -779,8 +787,7 @@ def clone_item(request):
return HttpResponse(json.dumps({'id': dest_location.url()}))
#@login_required
#@ensure_csrf_cookie
def upload_asset(request, org, course, coursename):
'''
cdodge: this method allows for POST uploading of files into the course asset library, which will
......@@ -842,6 +849,7 @@ def upload_asset(request, org, course, coursename):
response['asset_url'] = StaticContent.get_url_path_from_location(content.location)
return response
'''
This view will return all CMS users who are editors for the specified course
'''
......@@ -874,6 +882,7 @@ def create_json_response(errmsg = None):
return resp
'''
This POST-back view will add a user - specified by email - to the list of editors for
the specified course
......@@ -906,6 +915,7 @@ def add_user(request, location):
return create_json_response()
'''
This POST-back view will remove a user - specified by email - from the list of editors for
the specified course
......@@ -937,6 +947,7 @@ def remove_user(request, location):
def landing(request, org, course, coursename):
return render_to_response('temp-course-landing.html', {})
@login_required
@ensure_csrf_cookie
def static_pages(request, org, course, coursename):
......@@ -1040,6 +1051,7 @@ def edit_tabs(request, org, course, coursename):
'components': components
})
def not_found(request):
return render_to_response('error.html', {'error': '404'})
......@@ -1075,6 +1087,7 @@ def course_info(request, org, course, name, provided_id=None):
'handouts_location': Location(['i4x', org, course, 'course_info', 'handouts']).url()
})
@expect_json
@login_required
@ensure_csrf_cookie
......@@ -1172,6 +1185,7 @@ def get_course_settings(request, org, course, name):
"section": "details"})
})
@login_required
@ensure_csrf_cookie
def course_config_graders_page(request, org, course, name):
......@@ -1195,6 +1209,7 @@ def course_config_graders_page(request, org, course, name):
'course_details': json.dumps(course_details, cls=CourseSettingsEncoder)
})
@login_required
@ensure_csrf_cookie
def course_config_advanced_page(request, org, course, name):
......@@ -1218,6 +1233,7 @@ def course_config_advanced_page(request, org, course, name):
'advanced_dict' : json.dumps(CourseMetadata.fetch(location)),
})
@expect_json
@login_required
@ensure_csrf_cookie
......@@ -1249,6 +1265,7 @@ def course_settings_updates(request, org, course, name, section):
return HttpResponse(json.dumps(manager.update_from_json(request.POST), cls=CourseSettingsEncoder),
mimetype="application/json")
@expect_json
@login_required
@ensure_csrf_cookie
......@@ -1374,6 +1391,7 @@ def asset_index(request, org, course, name):
def edge(request):
return render_to_response('university_profiles/edge.html', {})
@login_required
@expect_json
def create_new_course(request):
......@@ -1429,6 +1447,7 @@ def create_new_course(request):
return HttpResponse(json.dumps({'id': new_course.location.url()}))
def initialize_course_tabs(course):
# set up the default tabs
# I've added this because when we add static tabs, the LMS either expects a None for the tabs list or
......@@ -1446,6 +1465,7 @@ def initialize_course_tabs(course):
modulestore('direct').update_metadata(course.location.url(), course.own_metadata)
@ensure_csrf_cookie
@login_required
def import_course(request, org, course, name):
......@@ -1523,6 +1543,7 @@ def import_course(request, org, course, name):
course_module.location.name])
})
@ensure_csrf_cookie
@login_required
def generate_export_course(request, org, course, name):
......@@ -1574,6 +1595,7 @@ def export_course(request, org, course, name):
'successful_import_redirect_url': ''
})
def event(request):
'''
A noop to swallow the analytics call so that cms methods don't spook and poor developers looking at
......
......@@ -120,7 +120,8 @@ def get_cohort(user, course_id):
return None
choices = course.auto_cohort_groups
if len(choices) == 0:
n = len(choices)
if n == 0:
# Nowhere to put user
log.warning("Course %s is auto-cohorted, but there are no"
" auto_cohort_groups specified",
......@@ -128,7 +129,14 @@ def get_cohort(user, course_id):
return None
# Put user in a random group, creating it if needed
group_name = random.choice(choices)
choice = random.randrange(0, n)
group_name = choices[choice]
# Victor: we are seeing very strange behavior on prod, where almost all users
# end up in the same group. Log at INFO to try to figure out what's going on.
log.info("DEBUG: adding user {0} to cohort {1}. choice={2}".format(
user, group_name, choice))
group, created = CourseUserGroup.objects.get_or_create(
course_id=course_id,
group_type=CourseUserGroup.COHORT,
......
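# A minimal sketch of the selection logic in the hunk above:
# random.randrange(0, n) plus an index lookup draws from the same uniform
# distribution as random.choice(choices); the explicit index exists only so
# it can be logged.
import random

choices = ["group_0", "group_1", "group_2"]  # hypothetical auto_cohort_groups
choice = random.randrange(0, len(choices))
group_name = choices[choice]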
......@@ -6,7 +6,7 @@ from django.test.utils import override_settings
from course_groups.models import CourseUserGroup
from course_groups.cohorts import (get_cohort, get_course_cohorts,
is_commentable_cohorted)
is_commentable_cohorted, get_cohort_by_name)
from xmodule.modulestore.django import modulestore, _MODULESTORES
......@@ -180,6 +180,37 @@ class TestCohorts(django.test.TestCase):
"user2 should still be in originally placed cohort")
def test_auto_cohorting_randomization(self):
"""
Make sure get_cohort() randomizes properly.
"""
course = modulestore().get_course("edX/toy/2012_Fall")
self.assertEqual(course.id, "edX/toy/2012_Fall")
self.assertFalse(course.is_cohorted)
groups = ["group_{0}".format(n) for n in range(5)]
self.config_course_cohorts(course, [], cohorted=True,
auto_cohort=True,
auto_cohort_groups=groups)
# Assign 100 users to cohorts
for i in range(100):
user = User.objects.create(username="test_{0}".format(i),
email="a@b{0}.com".format(i))
get_cohort(user, course.id)
# Now make sure that the assignment was at least vaguely random:
# each cohort should have more than 1, and fewer than 50 students.
# (with 5 groups, probability of 0 users in any group is about
# .8**100= 2.0e-10)
for cohort_name in groups:
cohort = get_cohort_by_name(course.id, cohort_name)
num_users = cohort.users.count()
self.assertGreater(num_users, 1)
self.assertLess(num_users, 50)
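# A quick check of the probability bound quoted above: the chance that one
# fixed group receives none of the 100 users is (4/5) ** 100:
#     0.8 ** 100  ~=  2.0e-10
# so a failure of the assertions above due to chance is effectively impossible.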
def test_get_course_cohorts(self):
course1_id = 'a/b/c'
course2_id = 'e/f/g'
......
......@@ -10,7 +10,6 @@ from xmodule.open_ended_grading_classes.combined_open_ended_modulev1 import Comb
log = logging.getLogger("mitx.courseware")
VERSION_TUPLES = (
('1', CombinedOpenEndedV1Descriptor, CombinedOpenEndedV1Module),
)
......@@ -18,6 +17,7 @@ VERSION_TUPLES = (
DEFAULT_VERSION = 1
DEFAULT_VERSION = str(DEFAULT_VERSION)
class CombinedOpenEndedModule(XModule):
"""
This is a module that encapsulates all open ended grading (self assessment, peer assessment, etc).
......@@ -129,13 +129,15 @@ class CombinedOpenEndedModule(XModule):
version_index = versions.index(self.version)
static_data = {
'rewrite_content_links' : self.rewrite_content_links,
'rewrite_content_links': self.rewrite_content_links,
}
self.child_descriptor = descriptors[version_index](self.system)
self.child_definition = descriptors[version_index].definition_from_xml(etree.fromstring(definition['data']), self.system)
self.child_definition = descriptors[version_index].definition_from_xml(etree.fromstring(definition['data']),
self.system)
self.child_module = modules[version_index](self.system, location, self.child_definition, self.child_descriptor,
instance_state = json.dumps(instance_state), metadata = self.metadata, static_data= static_data)
instance_state=json.dumps(instance_state), metadata=self.metadata,
static_data=static_data)
def get_html(self):
return self.child_module.get_html()
......
......@@ -356,7 +356,14 @@ class CourseDescriptor(SequenceDescriptor):
"""
Return the pdf_textbooks config, as a python object, or an empty list if not specified.
"""
return self.metadata.get('pdf_textbooks')
return self.metadata.get('pdf_textbooks', [])
@property
def html_textbooks(self):
"""
Return the html_textbooks config, as a python object, or an empty list if not specified.
"""
return self.metadata.get('html_textbooks', [])
@tabs.setter
def tabs(self, value):
......
......@@ -86,7 +86,10 @@ class FolditModule(XModule):
"""
from foldit.models import Score
return [(e['username'], e['score']) for e in Score.get_tops_n(10)]
leaders = [(e['username'], e['score']) for e in Score.get_tops_n(10)]
leaders.sort(key=lambda x: x[1])
return leaders
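# Note (an assumption, not stated in this diff): the ascending sort above
# presumably reflects Foldit scoring, where lower energy scores are better, so
# the best players come first; a higher-is-better board would need
# sort(key=lambda x: x[1], reverse=True).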
def get_html(self):
"""
......
......@@ -40,14 +40,15 @@ ACCEPT_FILE_UPLOAD = False
TRUE_DICT = ["True", True, "TRUE", "true"]
HUMAN_TASK_TYPE = {
'selfassessment' : "Self Assessment",
'openended' : "edX Assessment",
}
'selfassessment': "Self Assessment",
'openended': "edX Assessment",
}
#Default value that controls whether or not to skip basic spelling checks in the controller
#Metadata overrides this
SKIP_BASIC_CHECKS = False
class CombinedOpenEndedV1Module():
"""
This is a module that encapsulates all open ended grading (self assessment, peer assessment, etc).
......@@ -83,7 +84,7 @@ class CombinedOpenEndedV1Module():
TEMPLATE_DIR = "combinedopenended"
def __init__(self, system, location, definition, descriptor,
instance_state=None, shared_state=None, metadata = None, static_data = None, **kwargs):
instance_state=None, shared_state=None, metadata=None, static_data=None, **kwargs):
"""
Definition file should have one or many task blocks, a rubric block, and a prompt block:
......@@ -122,7 +123,7 @@ class CombinedOpenEndedV1Module():
self.metadata = metadata
self.display_name = metadata.get('display_name', "Open Ended")
self.rewrite_content_links = static_data.get('rewrite_content_links',"")
self.rewrite_content_links = static_data.get('rewrite_content_links', "")
# Load instance state
......@@ -177,9 +178,9 @@ class CombinedOpenEndedV1Module():
'rubric': definition['rubric'],
'display_name': self.display_name,
'accept_file_upload': self.accept_file_upload,
'close_date' : self.timeinfo.close_date,
's3_interface' : self.system.s3_interface,
'skip_basic_checks' : self.skip_basic_checks,
'close_date': self.timeinfo.close_date,
's3_interface': self.system.s3_interface,
'skip_basic_checks': self.skip_basic_checks,
}
self.task_xml = definition['task_xml']
......@@ -267,7 +268,8 @@ class CombinedOpenEndedV1Module():
self.current_task_parsed_xml = self.current_task_descriptor.definition_from_xml(etree_xml, self.system)
if current_task_state is None and self.current_task_number == 0:
self.current_task = child_task_module(self.system, self.location,
self.current_task_parsed_xml, self.current_task_descriptor, self.static_data)
self.current_task_parsed_xml, self.current_task_descriptor,
self.static_data)
self.task_states.append(self.current_task.get_instance_state())
self.state = self.ASSESSING
elif current_task_state is None and self.current_task_number > 0:
......@@ -282,7 +284,8 @@ class CombinedOpenEndedV1Module():
'history': [{'answer': last_response}],
})
self.current_task = child_task_module(self.system, self.location,
self.current_task_parsed_xml, self.current_task_descriptor, self.static_data,
self.current_task_parsed_xml, self.current_task_descriptor,
self.static_data,
instance_state=current_task_state)
self.task_states.append(self.current_task.get_instance_state())
self.state = self.ASSESSING
......@@ -290,7 +293,8 @@ class CombinedOpenEndedV1Module():
if self.current_task_number > 0 and not reset:
current_task_state = self.overwrite_state(current_task_state)
self.current_task = child_task_module(self.system, self.location,
self.current_task_parsed_xml, self.current_task_descriptor, self.static_data,
self.current_task_parsed_xml, self.current_task_descriptor,
self.static_data,
instance_state=current_task_state)
return True
......@@ -307,7 +311,7 @@ class CombinedOpenEndedV1Module():
last_response_data = self.get_last_response(self.current_task_number - 1)
current_response_data = self.get_current_attributes(self.current_task_number)
if(current_response_data['min_score_to_attempt'] > last_response_data['score']
if (current_response_data['min_score_to_attempt'] > last_response_data['score']
or current_response_data['max_score_to_attempt'] < last_response_data['score']):
self.state = self.DONE
self.allow_reset = True
......@@ -334,7 +338,7 @@ class CombinedOpenEndedV1Module():
'display_name': self.display_name,
'accept_file_upload': self.accept_file_upload,
'location': self.location,
'legend_list' : LEGEND_LIST,
'legend_list': LEGEND_LIST,
}
return context
......@@ -429,7 +433,7 @@ class CombinedOpenEndedV1Module():
feedback_dicts = rubric_data['feedback_dicts']
grader_ids = rubric_data['grader_ids']
submission_ids = rubric_data['submission_ids']
elif task_type== "selfassessment":
elif task_type == "selfassessment":
rubric_scores = last_post_assessment
grader_types = ['SA']
feedback_items = ['']
......@@ -446,7 +450,7 @@ class CombinedOpenEndedV1Module():
human_state = task.HUMAN_NAMES[state]
else:
human_state = state
if len(grader_types)>0:
if len(grader_types) > 0:
grader_type = grader_types[0]
else:
grader_type = "IN"
......@@ -468,14 +472,14 @@ class CombinedOpenEndedV1Module():
'correct': last_correctness,
'min_score_to_attempt': min_score_to_attempt,
'max_score_to_attempt': max_score_to_attempt,
'rubric_scores' : rubric_scores,
'grader_types' : grader_types,
'feedback_items' : feedback_items,
'grader_type' : grader_type,
'human_grader_type' : human_grader_name,
'feedback_dicts' : feedback_dicts,
'grader_ids' : grader_ids,
'submission_ids' : submission_ids,
'rubric_scores': rubric_scores,
'grader_types': grader_types,
'feedback_items': feedback_items,
'grader_type': grader_type,
'human_grader_type': human_grader_name,
'feedback_dicts': feedback_dicts,
'grader_ids': grader_ids,
'submission_ids': submission_ids,
}
return last_response_dict
......@@ -519,20 +523,27 @@ class CombinedOpenEndedV1Module():
Output: Dictionary to be rendered via ajax that contains the result html.
"""
all_responses = []
loop_up_to_task = self.current_task_number+1
for i in xrange(0,loop_up_to_task):
loop_up_to_task = self.current_task_number + 1
for i in xrange(0, loop_up_to_task):
all_responses.append(self.get_last_response(i))
rubric_scores = [all_responses[i]['rubric_scores'] for i in xrange(0,len(all_responses)) if len(all_responses[i]['rubric_scores'])>0 and all_responses[i]['grader_types'][0] in HUMAN_GRADER_TYPE.keys()]
grader_types = [all_responses[i]['grader_types'] for i in xrange(0,len(all_responses)) if len(all_responses[i]['grader_types'])>0 and all_responses[i]['grader_types'][0] in HUMAN_GRADER_TYPE.keys()]
feedback_items = [all_responses[i]['feedback_items'] for i in xrange(0,len(all_responses)) if len(all_responses[i]['feedback_items'])>0 and all_responses[i]['grader_types'][0] in HUMAN_GRADER_TYPE.keys()]
rubric_html = self.rubric_renderer.render_combined_rubric(stringify_children(self.static_data['rubric']), rubric_scores,
rubric_scores = [all_responses[i]['rubric_scores'] for i in xrange(0, len(all_responses)) if
len(all_responses[i]['rubric_scores']) > 0 and all_responses[i]['grader_types'][
0] in HUMAN_GRADER_TYPE.keys()]
grader_types = [all_responses[i]['grader_types'] for i in xrange(0, len(all_responses)) if
len(all_responses[i]['grader_types']) > 0 and all_responses[i]['grader_types'][
0] in HUMAN_GRADER_TYPE.keys()]
feedback_items = [all_responses[i]['feedback_items'] for i in xrange(0, len(all_responses)) if
len(all_responses[i]['feedback_items']) > 0 and all_responses[i]['grader_types'][
0] in HUMAN_GRADER_TYPE.keys()]
rubric_html = self.rubric_renderer.render_combined_rubric(stringify_children(self.static_data['rubric']),
rubric_scores,
grader_types, feedback_items)
response_dict = all_responses[-1]
context = {
'results': rubric_html,
'task_name' : 'Scored Rubric',
'class_name' : 'combined-rubric-container'
'task_name': 'Scored Rubric',
'class_name': 'combined-rubric-container'
}
html = self.system.render_template('{0}/combined_open_ended_results.html'.format(self.TEMPLATE_DIR), context)
return {'html': html, 'success': True}
......@@ -544,7 +555,7 @@ class CombinedOpenEndedV1Module():
Output: Dictionary to be rendered via ajax that contains the result html.
"""
context = {
'legend_list' : LEGEND_LIST,
'legend_list': LEGEND_LIST,
}
html = self.system.render_template('{0}/combined_open_ended_legend.html'.format(self.TEMPLATE_DIR), context)
return {'html': html, 'success': True}
......@@ -556,15 +567,16 @@ class CombinedOpenEndedV1Module():
Output: Dictionary to be rendered via ajax that contains the result html.
"""
self.update_task_states()
loop_up_to_task = self.current_task_number+1
all_responses =[]
for i in xrange(0,loop_up_to_task):
loop_up_to_task = self.current_task_number + 1
all_responses = []
for i in xrange(0, loop_up_to_task):
all_responses.append(self.get_last_response(i))
context_list = []
for ri in all_responses:
for i in xrange(0,len(ri['rubric_scores'])):
feedback = ri['feedback_dicts'][i].get('feedback','')
rubric_data = self.rubric_renderer.render_rubric(stringify_children(self.static_data['rubric']), ri['rubric_scores'][i])
for i in xrange(0, len(ri['rubric_scores'])):
feedback = ri['feedback_dicts'][i].get('feedback', '')
rubric_data = self.rubric_renderer.render_rubric(stringify_children(self.static_data['rubric']),
ri['rubric_scores'][i])
if rubric_data['success']:
rubric_html = rubric_data['html']
else:
......@@ -572,22 +584,22 @@ class CombinedOpenEndedV1Module():
context = {
'rubric_html': rubric_html,
'grader_type': ri['grader_type'],
'feedback' : feedback,
'grader_id' : ri['grader_ids'][i],
'submission_id' : ri['submission_ids'][i],
'feedback': feedback,
'grader_id': ri['grader_ids'][i],
'submission_id': ri['submission_ids'][i],
}
context_list.append(context)
feedback_table = self.system.render_template('{0}/open_ended_result_table.html'.format(self.TEMPLATE_DIR), {
'context_list' : context_list,
'grader_type_image_dict' : GRADER_TYPE_IMAGE_DICT,
'human_grader_types' : HUMAN_GRADER_TYPE,
'context_list': context_list,
'grader_type_image_dict': GRADER_TYPE_IMAGE_DICT,
'human_grader_types': HUMAN_GRADER_TYPE,
'rows': 50,
'cols': 50,
})
context = {
'results': feedback_table,
'task_name' : "Feedback",
'class_name' : "result-container",
'task_name': "Feedback",
'class_name': "result-container",
}
html = self.system.render_template('{0}/combined_open_ended_results.html'.format(self.TEMPLATE_DIR), context)
return {'html': html, 'success': True}
......@@ -617,8 +629,8 @@ class CombinedOpenEndedV1Module():
'reset': self.reset,
'get_results': self.get_results,
'get_combined_rubric': self.get_rubric,
'get_status' : self.get_status_ajax,
'get_legend' : self.get_legend,
'get_status': self.get_status_ajax,
'get_legend': self.get_legend,
}
if dispatch not in handlers:
......@@ -699,11 +711,12 @@ class CombinedOpenEndedV1Module():
context = {
'status_list': status,
'grader_type_image_dict' : GRADER_TYPE_IMAGE_DICT,
'legend_list' : LEGEND_LIST,
'render_via_ajax' : render_via_ajax,
'grader_type_image_dict': GRADER_TYPE_IMAGE_DICT,
'legend_list': LEGEND_LIST,
'render_via_ajax': render_via_ajax,
}
status_html = self.system.render_template("{0}/combined_open_ended_status.html".format(self.TEMPLATE_DIR), context)
status_html = self.system.render_template("{0}/combined_open_ended_status.html".format(self.TEMPLATE_DIR),
context)
return status_html
......@@ -793,7 +806,9 @@ class CombinedOpenEndedV1Descriptor(XmlDescriptor, EditingDescriptor):
for child in expected_children:
if len(xml_object.xpath(child)) == 0:
#This is a staff_facing_error
raise ValueError("Combined Open Ended definition must include at least one '{0}' tag. Contact the learning sciences group for assistance.".format(child))
raise ValueError(
"Combined Open Ended definition must include at least one '{0}' tag. Contact the learning sciences group for assistance.".format(
child))
def parse_task(k):
"""Assumes that xml_object has child k"""
......
......@@ -4,24 +4,26 @@ from lxml import etree
log = logging.getLogger(__name__)
GRADER_TYPE_IMAGE_DICT = {
'SA' : '/static/images/self_assessment_icon.png',
'PE' : '/static/images/peer_grading_icon.png',
'ML' : '/static/images/ml_grading_icon.png',
'IN' : '/static/images/peer_grading_icon.png',
'BC' : '/static/images/ml_grading_icon.png',
}
'SA': '/static/images/self_assessment_icon.png',
'PE': '/static/images/peer_grading_icon.png',
'ML': '/static/images/ml_grading_icon.png',
'IN': '/static/images/peer_grading_icon.png',
'BC': '/static/images/ml_grading_icon.png',
}
HUMAN_GRADER_TYPE = {
'SA' : 'Self-Assessment',
'PE' : 'Peer-Assessment',
'IN' : 'Instructor-Assessment',
'ML' : 'AI-Assessment',
'BC' : 'AI-Assessment',
}
'SA': 'Self-Assessment',
'PE': 'Peer-Assessment',
'IN': 'Instructor-Assessment',
'ML': 'AI-Assessment',
'BC': 'AI-Assessment',
}
DO_NOT_DISPLAY = ['BC', 'IN']
LEGEND_LIST = [{'name' : HUMAN_GRADER_TYPE[k], 'image' : GRADER_TYPE_IMAGE_DICT[k]} for k in GRADER_TYPE_IMAGE_DICT.keys() if k not in DO_NOT_DISPLAY ]
LEGEND_LIST = [{'name': HUMAN_GRADER_TYPE[k], 'image': GRADER_TYPE_IMAGE_DICT[k]} for k in GRADER_TYPE_IMAGE_DICT.keys()
if k not in DO_NOT_DISPLAY]
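# With DO_NOT_DISPLAY = ['BC', 'IN'], LEGEND_LIST resolves to three entries
# (in arbitrary dict order), pairing each displayable grader with its icon:
#     {'name': 'Self-Assessment', 'image': '/static/images/self_assessment_icon.png'}  # 'SA'
#     {'name': 'Peer-Assessment', 'image': '/static/images/peer_grading_icon.png'}     # 'PE'
#     {'name': 'AI-Assessment',   'image': '/static/images/ml_grading_icon.png'}       # 'ML'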
class RubricParsingError(Exception):
def __init__(self, msg):
......@@ -29,15 +31,14 @@ class RubricParsingError(Exception):
class CombinedOpenEndedRubric(object):
TEMPLATE_DIR = "combinedopenended/openended"
def __init__ (self, system, view_only = False):
def __init__(self, system, view_only=False):
self.has_score = False
self.view_only = view_only
self.system = system
def render_rubric(self, rubric_xml, score_list = None):
def render_rubric(self, rubric_xml, score_list=None):
'''
render_rubric: takes in an xml string and outputs the corresponding
html for that xml, given the type of rubric we're generating
......@@ -50,11 +51,11 @@ class CombinedOpenEndedRubric(object):
success = False
try:
rubric_categories = self.extract_categories(rubric_xml)
if score_list and len(score_list)==len(rubric_categories):
for i in xrange(0,len(rubric_categories)):
if score_list and len(score_list) == len(rubric_categories):
for i in xrange(0, len(rubric_categories)):
category = rubric_categories[i]
for j in xrange(0,len(category['options'])):
if score_list[i]==j:
for j in xrange(0, len(category['options'])):
if score_list[i] == j:
rubric_categories[i]['options'][j]['selected'] = True
rubric_scores = [cat['score'] for cat in rubric_categories]
max_scores = map((lambda cat: cat['options'][-1]['points']), rubric_categories)
......@@ -67,15 +68,16 @@ class CombinedOpenEndedRubric(object):
'has_score': self.has_score,
'view_only': self.view_only,
'max_score': max_score,
'combined_rubric' : False
'combined_rubric': False
})
success = True
except:
#This is a staff_facing_error
error_message = "[render_rubric] Could not parse the rubric with xml: {0}. Contact the learning sciences group for assistance.".format(rubric_xml)
error_message = "[render_rubric] Could not parse the rubric with xml: {0}. Contact the learning sciences group for assistance.".format(
rubric_xml)
log.exception(error_message)
raise RubricParsingError(error_message)
return {'success' : success, 'html' : html, 'rubric_scores' : rubric_scores}
return {'success': success, 'html': html, 'rubric_scores': rubric_scores}
def check_if_rubric_is_parseable(self, rubric_string, location, max_score_allowed, max_score):
rubric_dict = self.render_rubric(rubric_string)
......@@ -83,7 +85,8 @@ class CombinedOpenEndedRubric(object):
rubric_feedback = rubric_dict['html']
if not success:
#This is a staff_facing_error
error_message = "Could not parse rubric : {0} for location {1}. Contact the learning sciences group for assistance.".format(rubric_string, location.url())
error_message = "Could not parse rubric : {0} for location {1}. Contact the learning sciences group for assistance.".format(
rubric_string, location.url())
log.error(error_message)
raise RubricParsingError(error_message)
......@@ -123,12 +126,13 @@ class CombinedOpenEndedRubric(object):
for category in element:
if category.tag != 'category':
#This is a staff_facing_error
raise RubricParsingError("[extract_categories] Expected a <category> tag: got {0} instead. Contact the learning sciences group for assistance.".format(category.tag))
raise RubricParsingError(
"[extract_categories] Expected a <category> tag: got {0} instead. Contact the learning sciences group for assistance.".format(
category.tag))
else:
categories.append(self.extract_category(category))
return categories
def extract_category(self, category):
'''
construct an individual category
......@@ -150,13 +154,17 @@ class CombinedOpenEndedRubric(object):
# if we are missing the score tag and we are expecting one
elif self.has_score:
#This is a staff_facing_error
raise RubricParsingError("[extract_category] Category {0} is missing a score. Contact the learning sciences group for assistance.".format(descriptionxml.text))
raise RubricParsingError(
"[extract_category] Category {0} is missing a score. Contact the learning sciences group for assistance.".format(
descriptionxml.text))
# parse description
if descriptionxml.tag != 'description':
#This is a staff_facing_error
raise RubricParsingError("[extract_category]: expected description tag, got {0} instead. Contact the learning sciences group for assistance.".format(descriptionxml.tag))
raise RubricParsingError(
"[extract_category]: expected description tag, got {0} instead. Contact the learning sciences group for assistance.".format(
descriptionxml.tag))
description = descriptionxml.text
......@@ -167,7 +175,9 @@ class CombinedOpenEndedRubric(object):
for option in optionsxml:
if option.tag != 'option':
#This is a staff_facing_error
raise RubricParsingError("[extract_category]: expected option tag, got {0} instead. Contact the learning sciences group for assistance.".format(option.tag))
raise RubricParsingError(
"[extract_category]: expected option tag, got {0} instead. Contact the learning sciences group for assistance.".format(
option.tag))
else:
pointstr = option.get("points")
if pointstr:
......@@ -177,13 +187,16 @@ class CombinedOpenEndedRubric(object):
points = int(pointstr)
except ValueError:
#This is a staff_facing_error
raise RubricParsingError("[extract_category]: expected points to have int, got {0} instead. Contact the learning sciences group for assistance.".format(pointstr))
raise RubricParsingError(
"[extract_category]: expected points to have int, got {0} instead. Contact the learning sciences group for assistance.".format(
pointstr))
elif autonumbering:
# use the generated one if we're in the right mode
points = cur_points
cur_points = cur_points + 1
else:
raise Exception("[extract_category]: missing points attribute. Cannot continue to auto-create points values after a points value is explicitly defined.")
raise Exception(
"[extract_category]: missing points attribute. Cannot continue to auto-create points values after a points value is explicitly defined.")
selected = score == points
optiontext = option.text
......@@ -193,19 +206,20 @@ class CombinedOpenEndedRubric(object):
options = sorted(options, key=lambda option: option['points'])
CombinedOpenEndedRubric.validate_options(options)
return {'description': description, 'options': options, 'score' : score}
return {'description': description, 'options': options, 'score': score}
def render_combined_rubric(self,rubric_xml,scores,score_types,feedback_types):
success, score_tuples = CombinedOpenEndedRubric.reformat_scores_for_rendering(scores,score_types,feedback_types)
def render_combined_rubric(self, rubric_xml, scores, score_types, feedback_types):
success, score_tuples = CombinedOpenEndedRubric.reformat_scores_for_rendering(scores, score_types,
feedback_types)
rubric_categories = self.extract_categories(rubric_xml)
max_scores = map((lambda cat: cat['options'][-1]['points']), rubric_categories)
max_score = max(max_scores)
for i in xrange(0,len(rubric_categories)):
for i in xrange(0, len(rubric_categories)):
category = rubric_categories[i]
for j in xrange(0,len(category['options'])):
for j in xrange(0, len(category['options'])):
rubric_categories[i]['options'][j]['grader_types'] = []
for tuple in score_tuples:
if tuple[1] == i and tuple[2] ==j:
if tuple[1] == i and tuple[2] == j:
for grader_type in tuple[3]:
rubric_categories[i]['options'][j]['grader_types'].append(grader_type)
......@@ -214,13 +228,12 @@ class CombinedOpenEndedRubric(object):
'has_score': True,
'view_only': True,
'max_score': max_score,
'combined_rubric' : True,
'grader_type_image_dict' : GRADER_TYPE_IMAGE_DICT,
'human_grader_types' : HUMAN_GRADER_TYPE,
'combined_rubric': True,
'grader_type_image_dict': GRADER_TYPE_IMAGE_DICT,
'human_grader_types': HUMAN_GRADER_TYPE,
})
return html
@staticmethod
def validate_options(options):
'''
......@@ -228,14 +241,16 @@ class CombinedOpenEndedRubric(object):
'''
if len(options) == 0:
#This is a staff_facing_error
raise RubricParsingError("[extract_category]: no options associated with this category. Contact the learning sciences group for assistance.")
raise RubricParsingError(
"[extract_category]: no options associated with this category. Contact the learning sciences group for assistance.")
if len(options) == 1:
return
prev = options[0]['points']
for option in options[1:]:
if prev == option['points']:
#This is a staff_facing_error
raise RubricParsingError("[extract_category]: found duplicate point values between two different options. Contact the learning sciences group for assistance.")
raise RubricParsingError(
"[extract_category]: found duplicate point values between two different options. Contact the learning sciences group for assistance.")
else:
prev = option['points']
......@@ -250,7 +265,7 @@ class CombinedOpenEndedRubric(object):
@return:
"""
success = False
if len(scores)==0:
if len(scores) == 0:
#This is a dev_facing_error
log.error("Score length is 0 when trying to reformat rubric scores for rendering.")
return success, ""
......@@ -264,25 +279,25 @@ class CombinedOpenEndedRubric(object):
score_lists = []
score_type_list = []
feedback_type_list = []
for i in xrange(0,len(scores)):
for i in xrange(0, len(scores)):
score_cont_list = scores[i]
for j in xrange(0,len(score_cont_list)):
for j in xrange(0, len(score_cont_list)):
score_list = score_cont_list[j]
score_lists.append(score_list)
score_type_list.append(score_types[i][j])
feedback_type_list.append(feedback_types[i][j])
score_list_len = len(score_lists[0])
for i in xrange(0,len(score_lists)):
for i in xrange(0, len(score_lists)):
score_list = score_lists[i]
if len(score_list)!=score_list_len:
if len(score_list) != score_list_len:
return success, ""
score_tuples = []
for i in xrange(0,len(score_lists)):
for j in xrange(0,len(score_lists[i])):
tuple = [1,j,score_lists[i][j],[],[]]
score_tuples, tup_ind = CombinedOpenEndedRubric.check_for_tuple_matches(score_tuples,tuple)
for i in xrange(0, len(score_lists)):
for j in xrange(0, len(score_lists[i])):
tuple = [1, j, score_lists[i][j], [], []]
score_tuples, tup_ind = CombinedOpenEndedRubric.check_for_tuple_matches(score_tuples, tuple)
score_tuples[tup_ind][0] += 1
score_tuples[tup_ind][3].append(score_type_list[i])
score_tuples[tup_ind][4].append(feedback_type_list[i])
......@@ -302,18 +317,12 @@ class CombinedOpenEndedRubric(object):
category = tuple[1]
score = tuple[2]
tup_ind = -1
for t in xrange(0,len(tuples)):
for t in xrange(0, len(tuples)):
if tuples[t][1] == category and tuples[t][2] == score:
tup_ind = t
break
if tup_ind == -1:
tuples.append([0,category,score,[],[]])
tup_ind = len(tuples)-1
tuples.append([0, category, score, [], []])
tup_ind = len(tuples) - 1
return tuples, tup_ind
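# Each entry in `tuples` has the shape
#     [match_count, category_index, score, grader_types, feedback_items]
# so repeated (category, score) pairs collapse into one entry whose last two
# lists accumulate one grader type and one feedback item per match (see the
# caller above).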
......@@ -8,6 +8,7 @@ class ControllerQueryService(GradingService):
"""
Interface to staff grading backend.
"""
def __init__(self, config, system):
config['system'] = system
super(ControllerQueryService, self).__init__(config)
......@@ -75,15 +76,16 @@ class ControllerQueryService(GradingService):
response = self.post(self.take_action_on_flags_url, params)
return response
def convert_seconds_to_human_readable(seconds):
if seconds < 60:
human_string = "{0} seconds".format(seconds)
elif seconds < 60 * 60:
human_string = "{0} minutes".format(round(seconds/60,1))
elif seconds < (24*60*60):
human_string = "{0} hours".format(round(seconds/(60*60),1))
human_string = "{0} minutes".format(round(seconds / 60, 1))
elif seconds < (24 * 60 * 60):
human_string = "{0} hours".format(round(seconds / (60 * 60), 1))
else:
human_string = "{0} days".format(round(seconds/(60*60*24),1))
human_string = "{0} days".format(round(seconds / (60 * 60 * 24), 1))
eta_string = "{0}".format(human_string)
return eta_string
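# Sample outputs, following directly from the thresholds above:
#     convert_seconds_to_human_readable(30)      -> "30 seconds"
#     convert_seconds_to_human_readable(90)      -> "1.5 minutes"
#     convert_seconds_to_human_readable(5400)    -> "1.5 hours"
#     convert_seconds_to_human_readable(129600)  -> "1.5 days"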
......@@ -19,6 +19,7 @@ class GradingService(object):
"""
Interface to staff grading backend.
"""
def __init__(self, config):
self.username = config['username']
self.password = config['password']
......
......@@ -5,6 +5,7 @@ to send them to S3.
try:
from PIL import Image
ENABLE_PIL = True
except:
ENABLE_PIL = False
......@@ -51,6 +52,7 @@ class ImageProperties(object):
"""
Class to check properties of an image and to validate if they are allowed.
"""
def __init__(self, image_data):
"""
Initializes class variables
......@@ -141,6 +143,7 @@ class URLProperties(object):
Checks to see if a URL points to acceptable content. Added to check if students are submitting reasonable
links to the peer grading image functionality of the external grading service.
"""
def __init__(self, url_string):
self.url_string = url_string
......@@ -252,7 +255,8 @@ def upload_to_s3(file_to_upload, keyname, s3_interface):
return True, public_url
except:
#This is a dev_facing_error
error_message = "Could not connect to S3 to upload peer grading image. Trying to utilize bucket: {0}".format(bucketname.lower())
error_message = "Could not connect to S3 to upload peer grading image. Trying to utilize bucket: {0}".format(
bucketname.lower())
log.error(error_message)
return False, error_message
......
......@@ -77,7 +77,6 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
self.send_to_grader(self.latest_answer(), system)
self.created = False
def _parse(self, oeparam, prompt, rubric, system):
'''
Parse OpenEndedResponse XML:
......@@ -104,7 +103,9 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
# response types)
except (TypeError, ValueError):
#This is a dev_facing_error
log.exception("Grader payload from external open ended grading server is not a json object! Object: {0}".format(grader_payload))
log.exception(
"Grader payload from external open ended grading server is not a json object! Object: {0}".format(
grader_payload))
self.initial_display = find_with_default(oeparam, 'initial_display', '')
self.answer = find_with_default(oeparam, 'answer_display', 'No answer given.')
......@@ -148,7 +149,9 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
for tag in ['feedback', 'submission_id', 'grader_id', 'score']:
if tag not in survey_responses:
#This is a student_facing_error
return {'success': False, 'msg': "Could not find needed tag {0} in the survey responses. Please try submitting again.".format(tag)}
return {'success': False,
'msg': "Could not find needed tag {0} in the survey responses. Please try submitting again.".format(
tag)}
try:
submission_id = int(survey_responses['submission_id'])
grader_id = int(survey_responses['grader_id'])
......@@ -266,7 +269,6 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
return True
def get_answers(self):
"""
Gets and shows the answer for this problem.
......@@ -300,7 +302,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
# We want to display available feedback in a particular order.
# This dictionary specifies which goes first--lower first.
priorities = { # These go at the start of the feedback
priorities = {  # These go at the start of the feedback
'spelling': 0,
'grammar': 1,
# needs to be after all the other feedback
......@@ -411,7 +413,6 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
return feedback_template, rubric_scores
def _parse_score_msg(self, score_msg, system, join_feedback=True):
"""
Grader reply is a JSON-dump of the following dict
......@@ -437,12 +438,12 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'valid': False,
'score': 0,
'feedback': '',
'rubric_scores' : [[0]],
'grader_types' : [''],
'feedback_items' : [''],
'feedback_dicts' : [{}],
'grader_ids' : [0],
'submission_ids' : [0],
'rubric_scores': [[0]],
'grader_types': [''],
'feedback_items': [''],
'feedback_dicts': [{}],
'grader_ids': [0],
'submission_ids': [0],
}
try:
score_result = json.loads(score_msg)
......@@ -527,12 +528,12 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'valid': True,
'score': score,
'feedback': feedback,
'rubric_scores' : rubric_scores,
'grader_types' : grader_types,
'feedback_items' : feedback_items,
'feedback_dicts' : feedback_dicts,
'grader_ids' : grader_ids,
'submission_ids' : submission_ids,
'rubric_scores': rubric_scores,
'grader_types': grader_types,
'feedback_items': feedback_items,
'feedback_dicts': feedback_dicts,
'grader_ids': grader_ids,
'submission_ids': submission_ids,
}
def latest_post_assessment(self, system, short_feedback=False, join_feedback=True):
......@@ -585,7 +586,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
#This is a dev_facing_error
log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch))
#This is a dev_facing_error
return json.dumps({'error': 'Error handling action. Please try again.', 'success' : False})
return json.dumps({'error': 'Error handling action. Please try again.', 'success': False})
before = self.get_progress()
d = handlers[dispatch](get, system)
......@@ -679,7 +680,6 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
correct = ""
previous_answer = self.initial_display
context = {
'prompt': self.prompt,
'previous_answer': previous_answer,
......@@ -692,7 +692,7 @@ class OpenEndedModule(openendedchild.OpenEndedChild):
'child_type': 'openended',
'correct': correct,
'accept_file_upload': self.accept_file_upload,
'eta_message' : eta_string,
'eta_message': eta_string,
}
html = system.render_template('{0}/open_ended.html'.format(self.TEMPLATE_DIR), context)
return html
......@@ -723,7 +723,9 @@ class OpenEndedDescriptor(XmlDescriptor, EditingDescriptor):
for child in ['openendedparam']:
if len(xml_object.xpath(child)) != 1:
#This is a staff_facing_error
raise ValueError("Open Ended definition must include exactly one '{0}' tag. Contact the learning sciences group for assistance.".format(child))
raise ValueError(
"Open Ended definition must include exactly one '{0}' tag. Contact the learning sciences group for assistance.".format(
child))
def parse(k):
"""Assumes that xml_object has child k"""
......
......@@ -108,13 +108,12 @@ class OpenEndedChild(object):
self._max_score = static_data['max_score']
if system.open_ended_grading_interface:
self.peer_gs = PeerGradingService(system.open_ended_grading_interface, system)
self.controller_qs = controller_query_service.ControllerQueryService(system.open_ended_grading_interface,system)
self.controller_qs = controller_query_service.ControllerQueryService(system.open_ended_grading_interface,
system)
else:
self.peer_gs = MockPeerGradingService()
self.controller_qs = None
self.system = system
self.location_string = location
......@@ -152,7 +151,8 @@ class OpenEndedChild(object):
return True, {
'success': False,
#This is a student_facing_error
'error': 'You have attempted this problem {0} times. You are allowed {1} attempts.'.format(self.attempts, self.max_attempts)
'error': 'You have attempted this problem {0} times. You are allowed {1} attempts.'.format(
self.attempts, self.max_attempts)
}
else:
return False, {}
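Worked example of the attempt gate above, assuming the condition elided from this hunk trips once attempts reach max_attempts (numbers hypothetical):
# With attempts = 3 and max_attempts = 3, the student sees:
attempts, max_attempts = 3, 3
error = 'You have attempted this problem {0} times. You are allowed {1} attempts.'.format(
    attempts, max_attempts)
# -> 'You have attempted this problem 3 times. You are allowed 3 attempts.'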
......@@ -308,7 +308,7 @@ class OpenEndedChild(object):
@return: Boolean correct.
"""
correct = False
if(isinstance(score, (int, long, float, complex))):
if isinstance(score, (int, long, float, complex)):
score_ratio = int(score) / float(self.max_score())
correct = (score_ratio >= 0.66)
return correct
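Worked example of the two-thirds threshold above (numbers hypothetical):
# A score of 3 out of a max_score of 4 counts as correct:
score, max_score = 3, 4
correct = (int(score) / float(max_score)) >= 0.66  # 0.75 -> True
# A score of 2 out of 4 does not: 0.50 -> False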
......@@ -342,7 +342,8 @@ class OpenEndedChild(object):
try:
image_data.seek(0)
success, s3_public_url = open_ended_image_submission.upload_to_s3(image_data, image_key, self.s3_interface)
success, s3_public_url = open_ended_image_submission.upload_to_s3(image_data, image_key,
self.s3_interface)
except:
log.exception("Could not upload image to S3.")
......@@ -454,16 +455,18 @@ class OpenEndedChild(object):
success = True
except:
#This is a dev_facing_error
log.error("Could not contact external open ended graders for location {0} and student {1}".format(self.location_string,student_id))
log.error("Could not contact external open ended graders for location {0} and student {1}".format(
self.location_string, student_id))
#This is a student_facing_error
error_message = "Could not contact the graders. Please notify course staff."
return success, allowed_to_submit, error_message
if count_graded>=count_required:
if count_graded >= count_required:
return success, allowed_to_submit, ""
else:
allowed_to_submit = False
#This is a student_facing_error
error_message = error_string.format(count_required-count_graded, count_graded, count_required, student_sub_count)
error_message = error_string.format(count_required - count_graded, count_graded, count_required,
student_sub_count)
return success, allowed_to_submit, error_message
def get_eta(self):
......@@ -478,7 +481,7 @@ class OpenEndedChild(object):
success = response['success']
if isinstance(success, basestring):
success = (success.lower()=="true")
success = (success.lower() == "true")
if success:
eta = controller_query_service.convert_seconds_to_human_readable(response['eta'])
......@@ -487,6 +490,3 @@ class OpenEndedChild(object):
eta_string = ""
return eta_string
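A minimal sketch of the flow above, assuming the controller reports the ETA as a number of seconds (the exact human-readable wording produced by the helper is also an assumption):
# Hypothetical controller response; field names match the code above.
response = {'success': 'True', 'eta': 3600}
success = response['success']
if isinstance(success, basestring):
    success = (success.lower() == "true")
if success:
    # e.g. something like "1 hour"; formatting is up to the helper
    eta_string = controller_query_service.convert_seconds_to_human_readable(response['eta'])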
......@@ -14,6 +14,7 @@ class PeerGradingService(GradingService):
"""
Interface with the grading controller for peer grading
"""
def __init__(self, config, system):
config['system'] = system
super(PeerGradingService, self).__init__(config)
......@@ -39,7 +40,8 @@ class PeerGradingService(GradingService):
{'location': problem_location, 'grader_id': grader_id})
return self.try_to_decode(self._render_rubric(response))
def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key, rubric_scores, submission_flagged):
def save_grade(self, location, grader_id, submission_id, score, feedback, submission_key, rubric_scores,
submission_flagged):
data = {'grader_id': grader_id,
'submission_id': submission_id,
'score': score,
......@@ -89,6 +91,7 @@ class PeerGradingService(GradingService):
pass
return text
"""
This is a mock peer grading service that can be used for unit tests
without making actual service calls to the grading controller
......
......@@ -73,7 +73,6 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
html = system.render_template('{0}/self_assessment_prompt.html'.format(self.TEMPLATE_DIR), context)
return html
def handle_ajax(self, dispatch, get, system):
"""
This is called by courseware.module_render, to handle an AJAX call.
......@@ -95,7 +94,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
#This is a dev_facing_error
log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch))
#This is a dev_facing_error
return json.dumps({'error': 'Error handling action. Please try again.', 'success' : False})
return json.dumps({'error': 'Error handling action. Please try again.', 'success': False})
before = self.get_progress()
d = handlers[dispatch](get, system)
......@@ -159,7 +158,6 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
return system.render_template('{0}/self_assessment_hint.html'.format(self.TEMPLATE_DIR), context)
def save_answer(self, get, system):
"""
After the answer is submitted, show the rubric.
......@@ -224,7 +222,7 @@ class SelfAssessmentModule(openendedchild.OpenEndedChild):
try:
score = int(get['assessment'])
score_list = get.getlist('score_list[]')
for i in xrange(0,len(score_list)):
for i in xrange(0, len(score_list)):
score_list[i] = int(score_list[i])
except ValueError:
#This is a dev_facing_error
......@@ -305,7 +303,9 @@ class SelfAssessmentDescriptor(XmlDescriptor, EditingDescriptor):
for child in expected_children:
if len(xml_object.xpath(child)) != 1:
#This is a staff_facing_error
raise ValueError("Self assessment definition must include exactly one '{0}' tag. Contact the learning sciences group for assistance.".format(child))
raise ValueError(
"Self assessment definition must include exactly one '{0}' tag. Contact the learning sciences group for assistance.".format(
child))
def parse(k):
"""Assumes that xml_object has child k"""
......
......@@ -53,12 +53,11 @@ class PeerGradingModule(XModule):
#We need to set the location here so the child modules can use it
system.set('location', location)
self.system = system
if(self.system.open_ended_grading_interface):
if self.system.open_ended_grading_interface:
self.peer_gs = PeerGradingService(self.system.open_ended_grading_interface, self.system)
else:
self.peer_gs = MockPeerGradingService()
self.use_for_single_location = self.metadata.get('use_for_single_location', USE_FOR_SINGLE_LOCATION)
if isinstance(self.use_for_single_location, basestring):
self.use_for_single_location = (self.use_for_single_location in TRUE_DICT)
......@@ -90,7 +89,6 @@ class PeerGradingModule(XModule):
self.display_due_date = self.timeinfo.display_due_date
self.ajax_url = self.system.ajax_url
if not self.ajax_url.endswith("/"):
self.ajax_url = self.ajax_url + "/"
......@@ -154,7 +152,7 @@ class PeerGradingModule(XModule):
#This is a dev_facing_error
log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch))
#This is a dev_facing_error
return json.dumps({'error': 'Error handling action. Please try again.', 'success' : False})
return json.dumps({'error': 'Error handling action. Please try again.', 'success': False})
d = handlers[dispatch](get)
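For context, the dispatch-table pattern these handle_ajax methods share looks roughly like the sketch below; the real table lives in lines elided from this hunk, so the handler names here are illustrative:
# Sketch of the dispatch pattern, not the module's literal handler table.
handlers = {
    'get_next_submission': self.get_next_submission,
    'save_grade': self.save_grade,
}
if dispatch not in handlers:
    return json.dumps({'error': 'Error handling action. Please try again.', 'success': False})
d = handlers[dispatch](get)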
......@@ -191,7 +189,8 @@ class PeerGradingModule(XModule):
except:
success, response = self.query_data_for_location()
if not success:
log.exception("No instance data found and could not get data from controller for loc {0} student {1}".format(
log.exception(
"No instance data found and could not get data from controller for loc {0} student {1}".format(
self.system.location.url(), self.system.anonymous_student_id
))
return None
......@@ -271,7 +270,8 @@ class PeerGradingModule(XModule):
error: if there was an error in the submission, this is the error message
"""
required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]', 'submission_flagged'])
required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]',
'submission_flagged'])
success, message = self._check_required(get, required)
if not success:
return self._err_response(message)
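An illustrative AJAX payload that satisfies the required-keys check above (all values are placeholders; the location string follows the i4x format used elsewhere in this diff):
# Hypothetical POST body for save_grade; keys mirror `required`.
get = {
    'location': 'i4x://MITx/3.091x/problem/open_ended_demo1',
    'submission_id': '1',
    'submission_key': 'abc123',
    'score': '2',
    'feedback': 'Good use of evidence.',
    'rubric_scores[]': ['1', '2'],
    'submission_flagged': 'false',
}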
......@@ -430,7 +430,9 @@ class PeerGradingModule(XModule):
return response
except GradingServiceError:
#This is a dev_facing_error
log.exception("Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(location, submission_id, submission_key, grader_id))
log.exception(
"Error saving calibration grade, location: {0}, submission_id: {1}, submission_key: {2}, grader_id: {3}".format(
location, submission_id, submission_key, grader_id))
#This is a student_facing_error
return self._err_response('There was an error saving your score. Please notify course staff.')
......@@ -508,7 +510,6 @@ class PeerGradingModule(XModule):
problem['due'] = None
problem['closed'] = False
ajax_url = self.ajax_url
html = self.system.render_template('peer_grading/peer_grading.html', {
'course_id': self.system.course_id,
......@@ -531,7 +532,8 @@ class PeerGradingModule(XModule):
if not self.use_for_single_location:
#This is an error case, because it must be set to use a single location to be called without get parameters
#This is a dev_facing_error
log.error("Peer grading problem in peer_grading_module called with no get parameters, but use_for_single_location is False.")
log.error(
"Peer grading problem in peer_grading_module called with no get parameters, but use_for_single_location is False.")
return {'html': "", 'success': False}
problem_location = self.link_to_location
......@@ -596,7 +598,9 @@ class PeerGradingDescriptor(XmlDescriptor, EditingDescriptor):
for child in expected_children:
if len(xml_object.xpath(child)) == 0:
#This is a staff_facing_error
raise ValueError("Peer grading definition must include at least one '{0}' tag. Contact the learning sciences group for assistance.".format(child))
raise ValueError(
"Peer grading definition must include at least one '{0}' tag. Contact the learning sciences group for assistance.".format(
child))
def parse_task(k):
"""Assumes that xml_object has child k"""
......
......@@ -14,6 +14,7 @@ from datetime import datetime
from . import test_system
import test_util_open_ended
"""
Tests for the various pieces of the CombinedOpenEndedGrading system
......@@ -46,9 +47,9 @@ class OpenEndedChildTest(unittest.TestCase):
'display_name': 'Name',
'accept_file_upload': False,
'close_date': None,
's3_interface' : "",
'open_ended_grading_interface' : {},
'skip_basic_checks' : False,
's3_interface': "",
'open_ended_grading_interface': {},
'skip_basic_checks': False,
}
definition = Mock()
descriptor = Mock()
......@@ -58,22 +59,18 @@ class OpenEndedChildTest(unittest.TestCase):
self.openendedchild = OpenEndedChild(self.test_system, self.location,
self.definition, self.descriptor, self.static_data, self.metadata)
def test_latest_answer_empty(self):
answer = self.openendedchild.latest_answer()
self.assertEqual(answer, "")
def test_latest_score_empty(self):
answer = self.openendedchild.latest_score()
self.assertEqual(answer, None)
def test_latest_post_assessment_empty(self):
answer = self.openendedchild.latest_post_assessment(self.test_system)
self.assertEqual(answer, "")
def test_new_history_entry(self):
new_answer = "New Answer"
self.openendedchild.new_history_entry(new_answer)
......@@ -99,7 +96,6 @@ class OpenEndedChildTest(unittest.TestCase):
score = self.openendedchild.latest_score()
self.assertEqual(score, 4)
def test_record_latest_post_assessment(self):
new_answer = "New Answer"
self.openendedchild.new_history_entry(new_answer)
......@@ -124,13 +120,11 @@ class OpenEndedChildTest(unittest.TestCase):
self.assertEqual(score['score'], new_score)
self.assertEqual(score['total'], self.static_data['max_score'])
def test_reset(self):
self.openendedchild.reset(self.test_system)
state = json.loads(self.openendedchild.get_instance_state())
self.assertEqual(state['state'], OpenEndedChild.INITIAL)
def test_is_last_response_correct(self):
new_answer = "New Answer"
self.openendedchild.new_history_entry(new_answer)
......@@ -165,11 +159,11 @@ class OpenEndedModuleTest(unittest.TestCase):
'max_score': max_score,
'display_name': 'Name',
'accept_file_upload': False,
'rewrite_content_links' : "",
'rewrite_content_links': "",
'close_date': None,
's3_interface' : test_util_open_ended.S3_INTERFACE,
'open_ended_grading_interface' : test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
'skip_basic_checks' : False,
's3_interface': test_util_open_ended.S3_INTERFACE,
'open_ended_grading_interface': test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
'skip_basic_checks': False,
}
oeparam = etree.XML('''
......@@ -188,7 +182,8 @@ class OpenEndedModuleTest(unittest.TestCase):
self.test_system.location = self.location
self.mock_xqueue = MagicMock()
self.mock_xqueue.send_to_queue.return_value = (None, "Message")
self.test_system.xqueue = {'interface': self.mock_xqueue, 'callback_url': '/', 'default_queuename': 'testqueue', 'waittime': 1}
self.test_system.xqueue = {'interface': self.mock_xqueue, 'callback_url': '/', 'default_queuename': 'testqueue',
'waittime': 1}
self.openendedmodule = OpenEndedModule(self.test_system, self.location,
self.definition, self.descriptor, self.static_data, self.metadata)
......@@ -301,12 +296,12 @@ class CombinedOpenEndedModuleTest(unittest.TestCase):
'rubric': rubric,
'max_score': max_score,
'display_name': 'Name',
'accept_file_upload' : False,
'rewrite_content_links' : "",
'close_date' : "",
's3_interface' : test_util_open_ended.S3_INTERFACE,
'open_ended_grading_interface' : test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
'skip_basic_checks' : False,
'accept_file_upload': False,
'rewrite_content_links': "",
'close_date': "",
's3_interface': test_util_open_ended.S3_INTERFACE,
'open_ended_grading_interface': test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
'skip_basic_checks': False,
}
oeparam = etree.XML('''
......@@ -344,7 +339,7 @@ class CombinedOpenEndedModuleTest(unittest.TestCase):
self.location,
self.definition,
self.descriptor,
static_data = self.static_data,
static_data=self.static_data,
metadata=self.metadata)
def test_get_tag_name(self):
......
......@@ -10,8 +10,8 @@ from . import test_system
import test_util_open_ended
class SelfAssessmentTest(unittest.TestCase):
rubric = '''<rubric><rubric>
<category>
<description>Response Quality</description>
......@@ -48,9 +48,9 @@ class SelfAssessmentTest(unittest.TestCase):
'display_name': "Name",
'accept_file_upload': False,
'close_date': None,
's3_interface' : test_util_open_ended.S3_INTERFACE,
'open_ended_grading_interface' : test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
'skip_basic_checks' : False,
's3_interface': test_util_open_ended.S3_INTERFACE,
'open_ended_grading_interface': test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE,
'skip_basic_checks': False,
}
self.module = SelfAssessmentModule(test_system(), self.location,
......@@ -64,13 +64,14 @@ class SelfAssessmentTest(unittest.TestCase):
def test_self_assessment_flow(self):
responses = {'assessment': '0', 'score_list[]': ['0', '0']}
def get_fake_item(name):
return responses[name]
def get_data_for_location(self,location,student):
def get_data_for_location(self, location, student):
return {
'count_graded' : 0,
'count_required' : 0,
'count_graded': 0,
'count_required': 0,
'student_sub_count': 0,
}
......@@ -89,7 +90,6 @@ class SelfAssessmentTest(unittest.TestCase):
self.module.save_assessment(mock_query_dict, self.module.system)
self.assertEqual(self.module.state, self.module.DONE)
d = self.module.reset({})
self.assertTrue(d['success'])
self.assertEqual(self.module.state, self.module.INITIAL)
......
OPEN_ENDED_GRADING_INTERFACE = {
'url' : 'http://127.0.0.1:3033/',
'username' : 'incorrect',
'password' : 'incorrect',
'staff_grading' : 'staff_grading',
'peer_grading' : 'peer_grading',
'grading_controller' : 'grading_controller'
'url': 'http://127.0.0.1:3033/',
'username': 'incorrect',
'password': 'incorrect',
'staff_grading': 'staff_grading',
'peer_grading': 'peer_grading',
'grading_controller': 'grading_controller'
}
S3_INTERFACE = {
'aws_access_key' : "",
'aws_secret_key' : "",
"aws_bucket_name" : "",
'aws_access_key': "",
'aws_secret_key': "",
"aws_bucket_name": "",
}
\ No newline at end of file
......@@ -130,6 +130,17 @@ def _pdf_textbooks(tab, user, course, active_page):
for index, textbook in enumerate(course.pdf_textbooks)]
return []
def _html_textbooks(tab, user, course, active_page):
"""
Generates one tab per textbook. Only displays if user is authenticated.
"""
if user.is_authenticated():
# since there can be more than one textbook, active_page is e.g. "book/0".
return [CourseTab(textbook['tab_title'], reverse('html_book', args=[course.id, index]),
active_page == "htmltextbook/{0}".format(index))
for index, textbook in enumerate(course.html_textbooks)]
return []
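The course.html_textbooks entries iterated over here look roughly like the following; the field names (tab_title, url, chapters, title) come from this function and from staticbook.views.html_index later in this diff, while the values are made up:
# Hypothetical course.html_textbooks value.
html_textbooks = [
    {
        'tab_title': 'Course Notes',
        'url': '/static/notes/index.html',
        'chapters': [
            {'title': 'Chapter 1', 'url': '/static/notes/ch1.html'},
            {'title': 'Chapter 2', 'url': '/static/notes/ch2.html'},
        ],
    },
]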
def _staff_grading(tab, user, course, active_page):
if has_access(user, course, 'staff'):
link = reverse('staff_grading', args=[course.id])
......@@ -209,6 +220,7 @@ VALID_TAB_TYPES = {
'external_link': TabImpl(key_checker(['name', 'link']), _external_link),
'textbooks': TabImpl(null_validator, _textbooks),
'pdf_textbooks': TabImpl(null_validator, _pdf_textbooks),
'html_textbooks': TabImpl(null_validator, _html_textbooks),
'progress': TabImpl(need_name, _progress),
'static_tab': TabImpl(key_checker(['name', 'url_slug']), _static_tab),
'peer_grading': TabImpl(null_validator, _peer_grading),
......
......@@ -59,7 +59,7 @@ class Score(models.Model):
scores = Score.objects \
.filter(puzzle_id__in=puzzles) \
.annotate(total_score=models.Sum('best_score')) \
.order_by('-total_score')[:n]
.order_by('total_score')[:n]
num = len(puzzles)
return [{'username': s.user.username,
......
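The ordering flip above (descending to ascending total_score) matches the convention, noted in the foldit tests below, that lower scores are better. A pure-Python analogue (data hypothetical):
# With lower-is-better scores, ascending order puts the best player first.
scores = {'player1': 0.08, 'player2': 0.02}
leaders = sorted(scores.items(), key=lambda kv: kv[1])[:10]  # analogous to [:n]
# -> [('player2', 0.02), ('player1', 0.08)]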
......@@ -143,11 +143,12 @@ class FolditTestCase(TestCase):
def test_SetPlayerPuzzleScores_manyplayers(self):
"""
Check that when we send scores from multiple users, the correct order
of scores is displayed.
of scores is displayed. Note that, before being processed by
display_score, lower scores are better.
"""
puzzle_id = ['1']
player1_score = 0.07
player2_score = 0.08
player1_score = 0.08
player2_score = 0.02
response1 = self.make_puzzle_score_request(puzzle_id, player1_score,
self.user)
......@@ -164,8 +165,12 @@ class FolditTestCase(TestCase):
self.assertEqual(len(top_10), 2)
# Top score should be player2_score. Second should be player1_score
self.assertEqual(top_10[0]['score'], Score.display_score(player2_score))
self.assertEqual(top_10[1]['score'], Score.display_score(player1_score))
self.assertAlmostEqual(top_10[0]['score'],
Score.display_score(player2_score),
delta=0.5)
self.assertAlmostEqual(top_10[1]['score'],
Score.display_score(player1_score),
delta=0.5)
# Top score user should be self.user2.username
self.assertEqual(top_10[0]['username'], self.user2.username)
......
......@@ -22,7 +22,7 @@ NOTIFICATION_TYPES = (
('staff_needs_to_grade', 'staff_grading', 'Staff Grading'),
('new_student_grading_to_view', 'open_ended_problems', 'Problems you have submitted'),
('flagged_submissions_exist', 'open_ended_flagged_problems', 'Flagged Submissions')
)
)
def staff_grading_notifications(course, user):
......@@ -46,7 +46,9 @@ def staff_grading_notifications(course, user):
#Non catastrophic error, so no real action
notifications = {}
#This is a dev_facing_error
log.info("Problem with getting notifications from staff grading service for course {0} user {1}.".format(course_id, student_id))
log.info(
"Problem with getting notifications from staff grading service for course {0} user {1}.".format(course_id,
student_id))
if pending_grading:
img_path = "/static/images/grading_notification.png"
......@@ -80,7 +82,9 @@ def peer_grading_notifications(course, user):
#Non catastrophic error, so no real action
notifications = {}
#This is a dev_facing_error
log.info("Problem with getting notifications from peer grading service for course {0} user {1}.".format(course_id, student_id))
log.info(
"Problem with getting notifications from peer grading service for course {0} user {1}.".format(course_id,
student_id))
if pending_grading:
img_path = "/static/images/grading_notification.png"
......@@ -105,7 +109,9 @@ def combined_notifications(course, user):
return notification_dict
min_time_to_query = user.last_login
last_module_seen = StudentModule.objects.filter(student=user, course_id=course_id, modified__gt=min_time_to_query).values('modified').order_by('-modified')
last_module_seen = StudentModule.objects.filter(student=user, course_id=course_id,
modified__gt=min_time_to_query).values('modified').order_by(
'-modified')
last_module_seen_count = last_module_seen.count()
if last_module_seen_count > 0:
......@@ -117,7 +123,8 @@ def combined_notifications(course, user):
img_path = ""
try:
controller_response = controller_qs.check_combined_notifications(course.id, student_id, user_is_staff, last_time_viewed)
controller_response = controller_qs.check_combined_notifications(course.id, student_id, user_is_staff,
last_time_viewed)
log.debug(controller_response)
notifications = json.loads(controller_response)
if notifications['success']:
......@@ -127,7 +134,9 @@ def combined_notifications(course, user):
#Non catastrophic error, so no real action
notifications = {}
#This is a dev_facing_error
log.exception("Problem with getting notifications from controller query service for course {0} user {1}.".format(course_id, student_id))
log.exception(
"Problem with getting notifications from controller query service for course {0} user {1}.".format(
course_id, student_id))
if pending_grading:
img_path = "/static/images/grading_notification.png"
......@@ -151,7 +160,8 @@ def set_value_in_cache(student_id, course_id, notification_type, value):
def create_key_name(student_id, course_id, notification_type):
key_name = "{prefix}{type}_{course}_{student}".format(prefix=KEY_PREFIX, type=notification_type, course=course_id, student=student_id)
key_name = "{prefix}{type}_{course}_{student}".format(prefix=KEY_PREFIX, type=notification_type, course=course_id,
student=student_id)
return key_name
......
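Example of the cache key create_key_name builds; KEY_PREFIX is defined in lines elided from the hunk, so the prefix shown is an assumption:
# Assuming KEY_PREFIX = 'open_ended_notifications_' (hypothetical):
key = create_key_name('42', 'edX/toy/2012_Fall', 'staff_needs_to_grade')
# -> 'open_ended_notifications_staff_needs_to_grade_edX/toy/2012_Fall_42'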
......@@ -15,6 +15,7 @@ class StaffGrading(object):
"""
Wrap up functionality for staff grading of submissions--interface exposes get_html, ajax views.
"""
def __init__(self, course):
self.course = course
......
......@@ -20,10 +20,12 @@ log = logging.getLogger(__name__)
STAFF_ERROR_MESSAGE = 'Could not contact the external grading server. Please contact the development team. If you do not have a point of contact, you can contact Vik at vik@edx.org.'
class MockStaffGradingService(object):
"""
A simple mockup of a staff grading service, for testing.
"""
def __init__(self):
self.cnt = 0
......@@ -45,13 +47,16 @@ class MockStaffGradingService(object):
return json.dumps({'success': True,
'problem_list': [
json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo1',
'problem_name': "Problem 1", 'num_graded': 3, 'num_pending': 5, 'min_for_ml': 10}),
'problem_name': "Problem 1", 'num_graded': 3, 'num_pending': 5,
'min_for_ml': 10}),
json.dumps({'location': 'i4x://MITx/3.091x/problem/open_ended_demo2',
'problem_name': "Problem 2", 'num_graded': 1, 'num_pending': 5, 'min_for_ml': 10})
'problem_name': "Problem 2", 'num_graded': 1, 'num_pending': 5,
'min_for_ml': 10})
]})
def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores, submission_flagged):
def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores,
submission_flagged):
return self.get_next(course_id, 'fake location', grader_id)
......@@ -59,6 +64,7 @@ class StaffGradingService(GradingService):
"""
Interface to staff grading backend.
"""
def __init__(self, config):
config['system'] = ModuleSystem(None, None, None, render_to_string, None)
super(StaffGradingService, self).__init__(config)
......@@ -114,7 +120,8 @@ class StaffGradingService(GradingService):
return json.dumps(self._render_rubric(response))
def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores, submission_flagged):
def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores,
submission_flagged):
"""
Save a score and feedback for a submission.
......@@ -297,7 +304,7 @@ def save_grade(request, course_id):
if request.method != 'POST':
raise Http404
required = set(['score', 'feedback', 'submission_id', 'location','submission_flagged', 'rubric_scores[]'])
required = set(['score', 'feedback', 'submission_id', 'location', 'submission_flagged', 'rubric_scores[]'])
actual = set(request.POST.keys())
missing = required - actual
if len(missing) > 0:
......@@ -307,7 +314,6 @@ def save_grade(request, course_id):
grader_id = unique_id_for_user(request.user)
p = request.POST
location = p['location']
skipped = 'skipped' in p
......@@ -322,7 +328,9 @@ def save_grade(request, course_id):
p['submission_flagged'])
except GradingServiceError:
#This is a dev_facing_error
log.exception("Error saving grade in the staff grading interface in open ended grading. Request: {0} Course ID: {1}".format(request, course_id))
log.exception(
"Error saving grade in the staff grading interface in open ended grading. Request: {0} Course ID: {1}".format(
request, course_id))
#This is a staff_facing_error
return _err_response(STAFF_ERROR_MESSAGE)
......@@ -330,13 +338,16 @@ def save_grade(request, course_id):
result = json.loads(result_json)
except ValueError:
#This is a dev_facing_error
log.exception("save_grade returned broken json in the staff grading interface in open ended grading: {0}".format(result_json))
log.exception(
"save_grade returned broken json in the staff grading interface in open ended grading: {0}".format(
result_json))
#This is a staff_facing_error
return _err_response(STAFF_ERROR_MESSAGE)
if not result.get('success', False):
#This is a dev_facing_error
log.warning('Got success=False from staff grading service in open ended grading. Response: {0}'.format(result_json))
log.warning(
'Got success=False from staff grading service in open ended grading. Response: {0}'.format(result_json))
return _err_response(STAFF_ERROR_MESSAGE)
# Ok, save_grade seemed to work. Get the next submission to grade.
......
......@@ -22,6 +22,7 @@ from xmodule.x_module import ModuleSystem
from mitxmako.shortcuts import render_to_string
import logging
log = logging.getLogger(__name__)
from django.test.utils import override_settings
from django.http import QueryDict
......@@ -36,6 +37,7 @@ class TestStaffGradingService(ct.PageLoader):
access control and error handling logic -- all the actual work is on the
backend.
'''
def setUp(self):
xmodule.modulestore.django._MODULESTORES = {}
......@@ -50,6 +52,7 @@ class TestStaffGradingService(ct.PageLoader):
self.course_id = "edX/toy/2012_Fall"
self.toy = modulestore().get_course(self.course_id)
def make_instructor(course):
group_name = _course_staff_group_name(course.location)
g = Group.objects.create(name=group_name)
......@@ -130,6 +133,7 @@ class TestPeerGradingService(ct.PageLoader):
access control and error handling logic -- all the actual work is on the
backend.
'''
def setUp(self):
xmodule.modulestore.django._MODULESTORES = {}
......@@ -148,11 +152,12 @@ class TestPeerGradingService(ct.PageLoader):
self.mock_service = peer_grading_service.MockPeerGradingService()
self.system = ModuleSystem(location, None, None, render_to_string, None,
s3_interface = test_util_open_ended.S3_INTERFACE,
s3_interface=test_util_open_ended.S3_INTERFACE,
open_ended_grading_interface=test_util_open_ended.OPEN_ENDED_GRADING_INTERFACE
)
self.descriptor = peer_grading_module.PeerGradingDescriptor(self.system)
self.peer_module = peer_grading_module.PeerGradingModule(self.system, location, "<peergrading/>", self.descriptor)
self.peer_module = peer_grading_module.PeerGradingModule(self.system, location, "<peergrading/>",
self.descriptor)
self.peer_module.peer_gs = self.mock_service
self.logout()
......@@ -185,8 +190,10 @@ class TestPeerGradingService(ct.PageLoader):
}
qdict = MagicMock()
def fake_get_item(key):
return data[key]
qdict.__getitem__.side_effect = fake_get_item
qdict.getlist = fake_get_item
qdict.keys = data.keys
......@@ -247,8 +254,10 @@ class TestPeerGradingService(ct.PageLoader):
}
qdict = MagicMock()
def fake_get_item(key):
return data[key]
qdict.__getitem__.side_effect = fake_get_item
qdict.getlist = fake_get_item
qdict.keys = data.keys
......
......@@ -50,22 +50,24 @@ def _reverse_without_slash(url_name, course_id):
ajax_url = reverse(url_name, kwargs={'course_id': course_id})
return ajax_url
DESCRIPTION_DICT = {
'Peer Grading': "View all problems that require peer assessment in this particular course.",
'Staff Grading': "View ungraded submissions submitted by students for the open ended problems in the course.",
'Problems you have submitted': "View open ended problems that you have previously submitted for grading.",
'Flagged Submissions': "View submissions that have been flagged by students as inappropriate."
}
}
ALERT_DICT = {
'Peer Grading': "New submissions to grade",
'Staff Grading': "New submissions to grade",
'Problems you have submitted': "New grades have been returned",
'Flagged Submissions': "Submissions have been flagged for review"
}
}
STUDENT_ERROR_MESSAGE = "Error occurred while contacting the grading service. Please notify course staff."
STAFF_ERROR_MESSAGE = "Error occurred while contacting the grading service. Please notify the development team. If you do not have a point of contact, please email Vik at vik@edx.org"
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
def staff_grading(request, course_id):
"""
......@@ -92,7 +94,7 @@ def peer_grading(request, course_id):
#Get the current course
course = get_course_with_access(request.user, course_id, 'load')
course_id_parts = course.id.split("/")
false_dict = [False,"False", "false", "FALSE"]
false_dict = [False, "False", "false", "FALSE"]
#Reverse the base course url
base_course_url = reverse('courses')
......@@ -174,7 +176,7 @@ def student_problem_list(request, course_id):
except:
#This is a student_facing_error
eta_string = "Error getting ETA."
problem_list[i].update({'eta_string' : eta_string})
problem_list[i].update({'eta_string': eta_string})
except GradingServiceError:
#This is a student_facing_error
......@@ -318,12 +320,13 @@ def take_action_on_flags(request, course_id):
if request.method != 'POST':
raise Http404
required = ['submission_id', 'action_type', 'student_id']
for key in required:
if key not in request.POST:
#This is a staff_facing_error
return HttpResponse(json.dumps({'success': False, 'error': STAFF_ERROR_MESSAGE + 'Missing key {0} from submission. Please reload and try again.'.format(key)}),
return HttpResponse(json.dumps({'success': False,
'error': STAFF_ERROR_MESSAGE + 'Missing key {0} from submission. Please reload and try again.'.format(
key)}),
mimetype="application/json")
p = request.POST
......@@ -338,5 +341,7 @@ def take_action_on_flags(request, course_id):
return HttpResponse(response, mimetype="application/json")
except GradingServiceError:
#This is a dev_facing_error
log.exception("Error taking action on flagged peer grading submissions, submission_id: {0}, action_type: {1}, grader_id: {2}".format(submission_id, action_type, grader_id))
log.exception(
"Error taking action on flagged peer grading submissions, submission_id: {0}, action_type: {1}, grader_id: {2}".format(
submission_id, action_type, grader_id))
return _err_response(STAFF_ERROR_MESSAGE)
from lxml import etree
# from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.http import Http404
from mitxmako.shortcuts import render_to_response
from courseware.access import has_access
......@@ -15,6 +15,8 @@ def index(request, course_id, book_index, page=None):
staff_access = has_access(request.user, course, 'staff')
book_index = int(book_index)
if book_index < 0 or book_index >= len(course.textbooks):
raise Http404("Invalid book index value: {0}".format(book_index))
textbook = course.textbooks[book_index]
table_of_contents = textbook.table_of_contents
......@@ -40,6 +42,8 @@ def pdf_index(request, course_id, book_index, chapter=None, page=None):
staff_access = has_access(request.user, course, 'staff')
book_index = int(book_index)
if book_index < 0 or book_index >= len(course.pdf_textbooks):
raise Http404("Invalid book index value: {0}".format(book_index))
textbook = course.pdf_textbooks[book_index]
def remap_static_url(original_url, course):
......@@ -67,3 +71,39 @@ def pdf_index(request, course_id, book_index, chapter=None, page=None):
'chapter': chapter,
'page': page,
'staff_access': staff_access})
@login_required
def html_index(request, course_id, book_index, chapter=None, anchor_id=None):
course = get_course_with_access(request.user, course_id, 'load')
staff_access = has_access(request.user, course, 'staff')
book_index = int(book_index)
if book_index < 0 or book_index >= len(course.html_textbooks):
raise Http404("Invalid book index value: {0}".format(book_index))
textbook = course.html_textbooks[book_index]
def remap_static_url(original_url, course):
input_url = "'" + original_url + "'"
output_url = replace_static_urls(
input_url,
course.metadata['data_dir'],
course_namespace=course.location
)
# strip off the quotes again...
return output_url[1:-1]
if 'url' in textbook:
textbook['url'] = remap_static_url(textbook['url'], course)
# then remap all the chapter URLs as well, if they are provided.
if 'chapters' in textbook:
for entry in textbook['chapters']:
entry['url'] = remap_static_url(entry['url'], course)
return render_to_response('static_htmlbook.html',
{'book_index': book_index,
'course': course,
'textbook': textbook,
'chapter': chapter,
'anchor_id': anchor_id,
'staff_access': staff_access})
......@@ -158,6 +158,19 @@ div.book-wrapper {
img {
max-width: 100%;
}
div {
text-align: left;
line-height: 1.6em;
margin-left: 5px;
margin-right: 5px;
margin-top: 5px;
margin-bottom: 5px;
.Paragraph, h2 {
margin-top: 10px;
}
}
}
}
......
<%inherit file="main.html" />
<%namespace name='static' file='static_content.html'/>
<%block name="title"><title>${course.number} Textbook</title>
</%block>
<%block name="headextra">
<%static:css group='course'/>
<%static:js group='courseware'/>
</%block>
<%block name="js_extra">
<script type="text/javascript">
(function($) {
$.fn.myHTMLViewer = function(options) {
var urlToLoad = null;
if (options.url) {
urlToLoad = options.url;
}
var chapterUrls = null;
if (options.chapters) {
chapterUrls = options.chapters;
}
var chapterToLoad = 1;
if (options.chapterNum) {
// TODO: this should only be specified if there are
// chapters, and it should be in-bounds.
chapterToLoad = options.chapterNum;
}
var anchorToLoad = null;
if (options.chapters) {
anchorToLoad = options.anchor_id;
}
loadUrl = function htmlViewLoadUrl(url, anchorId) {
// clear out previous load, if any:
parentElement = document.getElementById('bookpage');
while (parentElement.hasChildNodes())
parentElement.removeChild(parentElement.lastChild);
// load new URL in:
$('#bookpage').load(url);
// if there is an anchor set, then go to that location:
if (anchorId != null) {
// TODO: add implementation....
}
};
loadChapterUrl = function htmlViewLoadChapterUrl(chapterNum, anchorId) {
if (chapterNum < 1 || chapterNum > chapterUrls.length) {
return;
}
var chapterUrl = chapterUrls[chapterNum-1];
loadUrl(chapterUrl, anchorId);
};
// define navigation links for chapters:
if (chapterUrls != null) {
var loadChapterUrlHelper = function(i) {
return function(event) {
// when opening a new chapter, always open to the top:
loadChapterUrl(i, null);
};
};
for (var index = 1; index <= chapterUrls.length; index += 1) {
$("#htmlchapter-" + index).click(loadChapterUrlHelper(index));
}
}
// finally, load the appropriate url/page
if (urlToLoad != null) {
loadUrl(urlToLoad, anchorToLoad);
} else {
loadChapterUrl(chapterToLoad, anchorToLoad);
}
}
})(jQuery);
$(document).ready(function() {
var options = {};
%if 'url' in textbook:
options.url = "${textbook['url']}";
%endif
%if 'chapters' in textbook:
var chptrs = [];
%for chap in textbook['chapters']:
chptrs.push("${chap['url']}");
%endfor
options.chapters = chptrs;
%endif
%if chapter is not None:
options.chapterNum = ${chapter};
%endif
%if anchor_id is not None:
options.anchor_id = ${anchor_id};
%endif
$('#outerContainer').myHTMLViewer(options);
});
</script>
</%block>
<%include file="/courseware/course_navigation.html" args="active_page='htmltextbook/{0}'.format(book_index)" />
<div id="outerContainer">
<div id="mainContainer" class="book-wrapper">
%if 'chapters' in textbook:
<section aria-label="Textbook Navigation" class="book-sidebar">
<ul id="booknav" class="treeview-booknav">
<%def name="print_entry(entry, index_value)">
<li id="htmlchapter-${index_value}">
<a class="chapter">
${entry.get('title')}
</a>
</li>
</%def>
%for (index, entry) in enumerate(textbook['chapters']):
${print_entry(entry, index+1)}
% endfor
</ul>
</section>
%endif
<section id="viewerContainer" class="book">
<section class="page">
<div id="bookpage" />
</section>
</section>
</div>
</div>
......@@ -280,6 +280,15 @@ if settings.COURSEWARE_ENABLED:
url(r'^courses/(?P<course_id>[^/]+/[^/]+/[^/]+)/pdfbook/(?P<book_index>[^/]*)/chapter/(?P<chapter>[^/]*)/(?P<page>[^/]*)$',
'staticbook.views.pdf_index'),
url(r'^courses/(?P<course_id>[^/]+/[^/]+/[^/]+)/htmlbook/(?P<book_index>[^/]*)/$',
'staticbook.views.html_index', name="html_book"),
url(r'^courses/(?P<course_id>[^/]+/[^/]+/[^/]+)/htmlbook/(?P<book_index>[^/]*)/chapter/(?P<chapter>[^/]*)/$',
'staticbook.views.html_index'),
url(r'^courses/(?P<course_id>[^/]+/[^/]+/[^/]+)/htmlbook/(?P<book_index>[^/]*)/chapter/(?P<chapter>[^/]*)/(?P<anchor_id>[^/]*)/$',
'staticbook.views.html_index'),
url(r'^courses/(?P<course_id>[^/]+/[^/]+/[^/]+)/htmlbook/(?P<book_index>[^/]*)/(?P<anchor_id>[^/]*)/$',
'staticbook.views.html_index'),
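A hedged example of resolving the named route above (the course id and book index are placeholders):
# Hypothetical usage of the 'html_book' URL name:
from django.core.urlresolvers import reverse
url = reverse('html_book', args=['edX/toy/2012_Fall', 0])
# -> '/courses/edX/toy/2012_Fall/htmlbook/0/'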
url(r'^courses/(?P<course_id>[^/]+/[^/]+/[^/]+)/courseware/?$',
'courseware.views.index', name="courseware"),
url(r'^courses/(?P<course_id>[^/]+/[^/]+/[^/]+)/courseware/(?P<chapter>[^/]*)/$',
......