Commit 924a45fe by pmitros

Merge pull request #33 from MITx/MC

Multiple Choice support. Outstanding issues: 
* Cleanup of the way names are handled
* Figure out why $.ajax needs a stringify
* I'd prefer to keep both input types and response types as classes. This is necessary for richer, AJAXy interactions. 
parents 98f19470 72dee8ba
......@@ -14,19 +14,21 @@ from lxml.etree import Element
from mako.template import Template
from util import contextualize_text
from inputtypes import textline, schematic
from responsetypes import numericalresponse, formularesponse, customresponse, schematicresponse, StudentInputError
import inputtypes
from responsetypes import NumericalResponse, FormulaResponse, CustomResponse, SchematicResponse, MultipleChoiceResponse, StudentInputError, TrueFalseResponse
import calc
import eia
log = logging.getLogger("mitx.courseware")
response_types = {'numericalresponse':numericalresponse,
'formularesponse':formularesponse,
'customresponse':customresponse,
'schematicresponse':schematicresponse}
entry_types = ['textline', 'schematic']
response_types = {'numericalresponse':NumericalResponse,
'formularesponse':FormulaResponse,
'customresponse':CustomResponse,
'schematicresponse':SchematicResponse,
'multiplechoiceresponse':MultipleChoiceResponse,
'truefalseresponse':TrueFalseResponse}
entry_types = ['textline', 'schematic', 'choicegroup']
response_properties = ["responseparam", "answer"]
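# Illustrative standalone sketch (hypothetical names, not part of this module):
# how a tag -> class registry like response_types above dispatches grading.
# It mirrors the lookup loop used in LoncapaProblem below.
from lxml import etree

class EchoResponse(object):
    """Toy responder: marks the answer for its node id as 'correct'."""
    def __init__(self, xml, context):
        self.answer_id = xml.get('id')
    def grade(self, student_answers):
        return {self.answer_id: 'correct'}

toy_registry = {'echoresponse': EchoResponse}

def toy_grade(tree, context, student_answers):
    correct_map = {}
    for node in tree.xpath('//' + '|//'.join(toy_registry)):
        responder = toy_registry[node.tag](node, context)
        correct_map.update(responder.grade(student_answers))
    return correct_map

# toy_grade(etree.XML('<problem><echoresponse id="1_2_1"/></problem>'),
#           {}, {'1_2_1': 'anything'})   # -> {'1_2_1': 'correct'}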
# How to convert from original XML to HTML
# We should do this with XSLT later
......@@ -35,6 +37,7 @@ html_transforms = {'problem': {'tag':'div'},
"customresponse": {'tag':'span'},
"schematicresponse": {'tag':'span'},
"formularesponse": {'tag':'span'},
"multiplechoiceresponse": {'tag':'span'},
"text": {'tag':'span'}}
global_context={'random':random,
......@@ -48,27 +51,19 @@ global_context={'random':random,
html_problem_semantics = ["responseparam", "answer", "script"]
# These should be removed from HTML output, but keeping subelements
html_skip = ["numericalresponse", "customresponse", "schematicresponse", "formularesponse", "text"]
# These should be transformed
html_special_response = {"textline":textline.render,
"schematic":schematic.render}
class LoncapaProblem(object):
def __init__(self, fileobject, id=None, state=None, seed=None):
def __init__(self, fileobject, id, state=None, seed=None):
## Initialize class variables from state
self.seed = None
self.student_answers = dict()
self.correct_map = dict()
self.done = False
self.problem_id = id
if seed != None:
self.seed = seed
if id:
self.problem_id = id
else:
print "NO ID"
raise Exception("This should never happen (183)")
if state:
if 'seed' in state:
self.seed = state['seed']
......@@ -93,6 +88,9 @@ class LoncapaProblem(object):
self.preprocess_problem(self.tree, correct_map=self.correct_map, answer_map = self.student_answers)
self.context = self.extract_context(self.tree, seed=self.seed)
for response in self.tree.xpath('//'+"|//".join(response_types)):
responder = response_types[response.tag](response, self.context)
responder.preprocess_response()
def get_state(self):
''' Stored per-user session data needed to:
......@@ -130,7 +128,6 @@ class LoncapaProblem(object):
grader = response_types[response.tag](response, self.context)
results = grader.grade(answers)
self.correct_map.update(results)
return self.correct_map
def get_question_answers(self):
......@@ -168,7 +165,7 @@ class LoncapaProblem(object):
if problemtree.tag in html_problem_semantics:
return
if problemtree.tag in html_special_response:
if hasattr(inputtypes, problemtree.tag):
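# input renderers are now plain functions looked up by tag name on the
# inputtypes module, rather than entries in the old html_special_response dict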
status = "unsubmitted"
if problemtree.get('id') in self.correct_map:
status = self.correct_map[problemtree.get('id')]
......@@ -177,12 +174,12 @@ class LoncapaProblem(object):
if self.student_answers and problemtree.get('id') in self.student_answers:
value = self.student_answers[problemtree.get('id')]
return html_special_response[problemtree.tag](problemtree, value, status) #TODO
return getattr(inputtypes, problemtree.tag)(problemtree, value, status) #TODO
tree=Element(problemtree.tag)
for item in problemtree:
subitems = self.extract_html(item)
if subitems:
if subitems is not None:
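# note: an lxml Element with no children evaluates as False, so an explicit
# None check is needed here to avoid dropping a valid (childless) sub-element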
for subitem in subitems:
tree.append(subitem)
for (key,value) in problemtree.items():
......@@ -203,7 +200,6 @@ class LoncapaProblem(object):
# TODO: Fix. This loses Element().tail
#if problemtree.tag in html_skip:
# return tree
return [tree]
def preprocess_problem(self, tree, correct_map=dict(), answer_map=dict()): # private
......
from lxml.etree import Element
from lxml import etree
from mitxmako.shortcuts import render_to_response, render_to_string
from mitxmako.shortcuts import render_to_string
class textline(object):
@staticmethod
def render(element, value, state):
eid=element.get('id')
count = int(eid.split('_')[-2])-1 # HACK
size = element.get('size')
context = {'id':eid, 'value':value, 'state':state, 'count':count, 'size': size}
html=render_to_string("textinput.html", context)
return etree.XML(html)
#takes the xml tree as 'element', the student's previous answer as 'value', and the graded status as 'state'
class schematic(object):
@staticmethod
def render(element, value, state):
eid = element.get('id')
height = element.get('height')
width = element.get('width')
parts = element.get('parts')
analyses = element.get('analyses')
initial_value = element.get('initial_value')
submit_analyses = element.get('submit_analyses')
context = {
'id':eid,
'value':value,
'initial_value':initial_value,
'state':state,
'width':width,
'height':height,
'parts':parts,
'analyses':analyses,
'submit_analyses':submit_analyses,
}
html=render_to_string("schematicinput.html", context)
return etree.XML(html)
def choicegroup(element, value, state):
eid=element.get('id')
if element.get('type') == "MultipleChoice":
type="radio"
elif element.get('type') == "TrueFalse":
type="checkbox"
else:
type="radio"
choices={}
for choice in element:
assert choice.tag =="choice", "only <choice> tags should be immediate children of a <choicegroup>"
choices[choice.get("name")] = etree.tostring(choice[0])
context={'id':eid, 'value':value, 'state':state, 'type':type, 'choices':choices}
html=render_to_string("choicegroup.html", context)
return etree.XML(html)
def textline(element, value, state):
eid=element.get('id')
count = int(eid.split('_')[-2])-1 # HACK
size = element.get('size')
context = {'id':eid, 'value':value, 'state':state, 'count':count, 'size': size}
html=render_to_string("textinput.html", context)
return etree.XML(html)
def schematic(element, value, state):
eid = element.get('id')
height = element.get('height')
width = element.get('width')
parts = element.get('parts')
analyses = element.get('analyses')
initial_value = element.get('initial_value')
submit_analyses = element.get('submit_analyses')
context = {
'id':eid,
'value':value,
'initial_value':initial_value,
'state':state,
'width':width,
'height':height,
'parts':parts,
'analyses':analyses,
'submit_analyses':submit_analyses,
}
html=render_to_string("schematicinput.html", context)
return etree.XML(html)
......@@ -5,10 +5,14 @@ import numpy
import random
import scipy
import traceback
import copy
import abc
from calc import evaluator, UndefinedVariable
from django.conf import settings
from util import contextualize_text
from lxml import etree
from lxml.etree import Element
import calc
import eia
......@@ -34,7 +38,80 @@ def compare_with_tolerance(v1, v2, tol):
tolerance = evaluator(dict(),dict(),tol)
return abs(v1-v2) <= tolerance
class numericalresponse(object):
class GenericResponse(object):
__metaclass__=abc.ABCMeta
@abc.abstractmethod
def grade(self, student_answers):
pass
@abc.abstractmethod
def get_answers(self):
pass
# not an abstract method: many response types have nothing to preprocess, so they should not be required to override this
def preprocess_response(self):
pass
#Every response type needs methods "grade" and "get_answers"
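# Illustrative sketch (hypothetical, not a real capa response type): the
# minimal contract a concrete GenericResponse subclass must satisfy --
# grade() returns an {answer_id: 'correct'/'incorrect'} map, get_answers()
# returns the expected answers, and preprocess_response() is optional.
class ExactMatchResponse(GenericResponse):
    def __init__(self, xml, context):
        self.answer_id = xml.get('id')
        self.expected = contextualize_text(xml.get('answer'), context)

    def grade(self, student_answers):
        given = student_answers.get(self.answer_id)
        status = 'correct' if given == self.expected else 'incorrect'
        return {self.answer_id: status}

    def get_answers(self):
        return {self.answer_id: self.expected}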
class MultipleChoiceResponse(GenericResponse):
def __init__(self, xml, context):
self.xml = xml
self.correct_choices = xml.xpath('//*[@id=$id]//choice[@correct="true"]',
id=xml.get('id'))
self.correct_choices = [choice.get('name') for choice in self.correct_choices]
self.context = context
self.answer_id = xml.xpath('//*[@id=$id]//choicegroup/@id',
id=xml.get('id'))
if len(self.answer_id) != 1:
raise Exception("should have exactly one choice group per multiplechoiceresponse")
self.answer_id=self.answer_id[0]
def grade(self, student_answers):
if self.answer_id in student_answers and student_answers[self.answer_id] in self.correct_choices:
return {self.answer_id:'correct'}
else:
return {self.answer_id:'incorrect'}
def get_answers(self):
return {self.answer_id:self.correct_choices}
def preprocess_response(self):
i=0
for response in self.xml.xpath("choicegroup"):
response.set("type", "MultipleChoice")
for choice in list(response):
if choice.get("name") == None:
choice.set("name", "choice_"+str(i))
i+=1
else:
choice.set("name", "choice_"+choice.get("name"))
class TrueFalseResponse(MultipleChoiceResponse):
def preprocess_response(self):
i=0
for response in self.xml.xpath("choicegroup"):
response.set("type", "TrueFalse")
for choice in list(response):
if choice.get("name") == None:
choice.set("name", "choice_"+str(i))
i+=1
else:
choice.set("name", "choice_"+choice.get("name"))
def grade(self, student_answers):
correct = set(self.correct_choices)
answers = set(student_answers.get(self.answer_id, []))
if correct == answers:
return { self.answer_id : 'correct'}
return {self.answer_id : 'incorrect'}
class NumericalResponse(GenericResponse):
def __init__(self, xml, context):
self.xml = xml
self.correct_answer = contextualize_text(xml.get('answer'), context)
......@@ -63,7 +140,7 @@ class numericalresponse(object):
def get_answers(self):
return {self.answer_id:self.correct_answer}
class customresponse(object):
class CustomResponse(GenericResponse):
def __init__(self, xml, context):
self.xml = xml
## CRITICAL TODO: Should cover all entrytypes
......@@ -94,7 +171,7 @@ class customresponse(object):
class StudentInputError(Exception):
pass
class formularesponse(object):
class FormulaResponse(GenericResponse):
def __init__(self, xml, context):
self.xml = xml
self.correct_answer = contextualize_text(xml.get('answer'), context)
......@@ -164,7 +241,7 @@ class formularesponse(object):
def get_answers(self):
return {self.answer_id:self.correct_answer}
class schematicresponse(object):
class SchematicResponse(GenericResponse):
def __init__(self, xml, context):
self.xml = xml
self.answer_ids = xml.xpath('//*[@id=$id]//schematic/@id',
......
......@@ -3,9 +3,15 @@ import logging
from lxml import etree
from django.http import Http404
from django.http import HttpResponse
from django.shortcuts import redirect
from django.template import Context
from django.template import Context, loader
from fs.osfs import OSFS
from django.conf import settings
from mitxmako.shortcuts import render_to_string
from fs.osfs import OSFS
from models import StudentModule
......@@ -43,7 +49,6 @@ def make_track_function(request):
def f(event_type, event):
return track.views.server_track(request, event_type, event, page='x_module')
return f
def grade_histogram(module_id):
''' Print out a histogram of grades on a given problem.
Part of staff member debug info.
......
......@@ -371,7 +371,6 @@ class Module(XModule):
self.lcp.context=dict()
self.lcp.questions=dict() # Detailed info about questions in problem instance. TODO: Should be by id and not lid.
self.lcp.seed=None
self.lcp=LoncapaProblem(self.filestore.open(self.filename), self.item_id, self.lcp.get_state())
event_info['new_state']=self.lcp.get_state()
......
<problem>
<multiplechoiceresponse>
<choicegroup>
<choice correct="false" >
<startouttext />This is foil One.<endouttext />
</choice>
<choice correct="false" >
<startouttext />This is foil Two.<endouttext />
</choice>
<choice correct="true" >
<startouttext />This is foil Three.<endouttext />
</choice>
<choice correct="false">
<startouttext />This is foil Four.<endouttext />
</choice>
<choice correct="false">
<startouttext />This is foil Five.<endouttext />
</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
<problem>
<multiplechoiceresponse>
<choicegroup>
<choice correct="false" name="foil1">
<startouttext />This is foil One.<endouttext />
</choice>
<choice correct="false" name="foil2">
<startouttext />This is foil Two.<endouttext />
</choice>
<choice correct="true" name="foil3">
<startouttext />This is foil Three.<endouttext />
</choice>
<choice correct="false" name="foil4">
<startouttext />This is foil Four.<endouttext />
</choice>
<choice correct="false" name="foil5">
<startouttext />This is foil Five.<endouttext />
</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
<problem>
<truefalseresponse max="10" randomize="yes">
<choicegroup>
<choice location="random" correct="true" name="foil1">
<startouttext />This is foil One.<endouttext />
</choice>
<choice location="random" correct="true" name="foil2">
<startouttext />This is foil Two.<endouttext />
</choice>
<choice location="random" correct="false" name="foil3">
<startouttext />This is foil Three.<endouttext />
</choice>
<choice location="random" correct="false" name="foil4">
<startouttext />This is foil Four.<endouttext />
</choice>
<choice location="random" correct="false" name="foil5">
<startouttext />This is foil Five.<endouttext />
</choice>
</choicegroup>
</truefalseresponse>
</problem>
import unittest
import os
import numpy
import courseware.modules
import courseware.capa.calc as calc
import courseware.capa.capa_problem as lcp
import courseware.graders as graders
from courseware.graders import Score, CourseGrader, WeightedSubsectionsGrader, SingleSectionGrader, AssignmentFormatGrader
from courseware.grades import aggregate_scores
......@@ -61,6 +63,37 @@ class ModelsTest(unittest.TestCase):
exception_happened = True
self.assertTrue(exception_happened)
class MultiChoiceTest(unittest.TestCase):
def test_MC_grade(self):
multichoice_file = os.path.dirname(__file__)+"/test_files/multichoice.xml"
test_lcp = lcp.LoncapaProblem(open(multichoice_file), '1')
correct_answers = {'1_2_1':'choice_foil3'}
self.assertEquals(test_lcp.grade_answers(correct_answers)['1_2_1'], 'correct')
false_answers = {'1_2_1':'choice_foil2'}
self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
def test_MC_bare_grades(self):
multichoice_file = os.path.dirname(__file__)+"/test_files/multi_bare.xml"
test_lcp = lcp.LoncapaProblem(open(multichoice_file), '1')
correct_answers = {'1_2_1':'choice_2'}
self.assertEquals(test_lcp.grade_answers(correct_answers)['1_2_1'], 'correct')
false_answers = {'1_2_1':'choice_1'}
self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
def test_TF_grade(self):
truefalse_file = os.path.dirname(__file__)+"/test_files/truefalse.xml"
test_lcp = lcp.LoncapaProblem(open(truefalse_file), '1')
correct_answers = {'1_2_1':['choice_foil2', 'choice_foil1']}
self.assertEquals(test_lcp.grade_answers(correct_answers)['1_2_1'], 'correct')
false_answers = {'1_2_1':['choice_foil1']}
self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
false_answers = {'1_2_1':['choice_foil1', 'choice_foil3']}
self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
false_answers = {'1_2_1':['choice_foil3']}
self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
false_answers = {'1_2_1':['choice_foil1', 'choice_foil2', 'choice_foil3']}
self.assertEquals(test_lcp.grade_answers(false_answers)['1_2_1'], 'incorrect')
class GradesheetTest(unittest.TestCase):
def test_weighted_grading(self):
......
......@@ -42,7 +42,7 @@ function postJSON(url, data, callback) {
$.ajax({type:'POST',
url: url,
dataType: 'json',
data: data,
data: JSON.stringify(data),
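// jQuery would otherwise form-encode this object via $.param (arrays become
// repeated "name[]" fields); stringifying sends a single JSON body instead.
// The commit message flags the exact reason this is required as an open question.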
success: callback,
headers : {'X-CSRFToken':getCookie('csrftoken')}
});
......@@ -52,7 +52,7 @@ function postJSONAsync(url, data, callback) {
$.ajax({type:'POST',
url: url,
dataType: 'json',
data: data,
data: JSON.stringify(data),
success: callback,
headers : {'X-CSRFToken':getCookie('csrftoken')},
async:true
......
<form class="multiple-choice">
% for choice_id, choice_description in choices.items():
<label for="input_${id}_${choice_id}"> <input type="${type}" name="input_${id}" id="input_${id}_${choice_id}" value="${choice_id}"
% if choice_id in value:
checked="true"
% endif
/> ${choice_description} </label>
% endfor
<span id="answer_${id}"></span>
% if state == 'unsubmitted':
<span class="unanswered" style="display:inline-block;" id="status_${id}"></span>
% elif state == 'correct':
<span class="correct" id="status_${id}"></span>
% elif state == 'incorrect':
<span class="incorrect" id="status_${id}"></span>
% elif state == 'incomplete':
<span class="incorrect" id="status_${id}"></span>
% endif
</form>
......@@ -6,7 +6,22 @@ function ${ id }_content_updated() {
$("input.schematic").each(function(index,element){ element.schematic.update_value(); });
var submit_data={};
$.each($("[id^=input_${ id }_]"), function(index,value){
submit_data[value.id]=value.value;
if (value.type==="checkbox"){
if (value.checked) {
if (typeof submit_data[value.name] == 'undefined'){
submit_data[value.name]=[];
}
submit_data[value.name].push(value.value);
}
}
if (value.type==="radio"){
if (value.checked) {
submit_data[value.name]= value.value;
}
}
else{
submit_data[value.id]=value.value;
}
});
postJSON('${ MITX_ROOT_URL }/modx/problem/${ id }/problem_check',
submit_data,
......@@ -40,9 +55,15 @@ function ${ id }_content_updated() {
$('#show_${ id }').unbind('click').click(function() {
postJSON('${ MITX_ROOT_URL }/modx/problem/${ id }/problem_show', {}, function(data) {
for (var key in data) {
$("#answer_"+key).text(data[key]);
}
});
if ($.isArray(data[key])){
for (var ans_index in data[key]){
var choice_id = 'input_'+key+'_'+data[key][ans_index];
$("label[for="+choice_id+"]").attr("correct_answer", "true");
}
}
$("#answer_"+key).text(data[key]);
}
});
log_event('problem_show', {'problem':'${ id }'});
});
......