Commit 0e7ceb68 by Stephen Sanchez

Merge pull request #180 from edx/sanchez/peer-workflow-model-update

Optimizations to the PeerWorkflow and PeerWorkflowItem models
parents 4b6cff6c 5f1a989a
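In brief, this change replaces sentinel integer references with real foreign keys and adds completion tracking. A condensed sketch of the resulting model shape, assembled from the migrations and model diff below (imports and unrelated fields omitted; not the full source):

    from django.db import models
    from django.utils.timezone import now

    class Assessment(models.Model):
        # The FK to submissions.Submission is replaced by a plain indexed UUID,
        # decoupling assessments from the submissions app.
        submission_uuid = models.CharField(max_length=128, db_index=True)

    class PeerWorkflow(models.Model):
        course_id = models.CharField(max_length=40, db_index=True)
        submission_uuid = models.CharField(max_length=128, db_index=True, unique=True)
        created_at = models.DateTimeField(default=now, db_index=True)
        # New: stamped once the author has received enough peer assessments.
        completed_at = models.DateTimeField(null=True, db_index=True)

    class PeerWorkflowItem(models.Model):
        # Renamed from scorer_id; the reverse accessor changes from 'items' to 'graded'.
        scorer = models.ForeignKey(PeerWorkflow, related_name='graded')
        # New: the workflow of the student whose submission is being assessed.
        author = models.ForeignKey(PeerWorkflow, related_name='graded_by')
        submission_uuid = models.CharField(max_length=128, db_index=True)
        started_at = models.DateTimeField(default=now, db_index=True)
        # Previously IntegerField(default=-1); now a nullable FK to Assessment.
        assessment = models.ForeignKey(Assessment, null=True)
        # New: marks the items used to compute the final score.
        scored = models.BooleanField(default=False)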
......@@ -39,7 +39,7 @@ class Migration(SchemaMigration):
# Adding model 'Assessment'
db.create_table('assessment_assessment', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('submission', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['submissions.Submission'])),
('submission_uuid', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)),
('rubric', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['assessment.Rubric'])),
('scored_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, db_index=True)),
('scorer_id', self.gf('django.db.models.fields.CharField')(max_length=40, db_index=True)),
......@@ -56,6 +56,23 @@ class Migration(SchemaMigration):
))
db.send_create_signal('assessment', ['AssessmentPart'])
# Adding model 'AssessmentFeedback'
db.create_table('assessment_assessmentfeedback', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('submission_uuid', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128, db_index=True)),
('helpfulness', self.gf('django.db.models.fields.IntegerField')(default=2)),
('feedback', self.gf('django.db.models.fields.TextField')(default='', max_length=10000)),
))
db.send_create_signal('assessment', ['AssessmentFeedback'])
# Adding M2M table for field assessments on 'AssessmentFeedback'
db.create_table('assessment_assessmentfeedback_assessments', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('assessmentfeedback', models.ForeignKey(orm['assessment.assessmentfeedback'], null=False)),
('assessment', models.ForeignKey(orm['assessment.assessment'], null=False))
))
db.create_unique('assessment_assessmentfeedback_assessments', ['assessmentfeedback_id', 'assessment_id'])
# Adding model 'PeerWorkflow'
db.create_table('assessment_peerworkflow', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
......@@ -64,16 +81,19 @@ class Migration(SchemaMigration):
('course_id', self.gf('django.db.models.fields.CharField')(max_length=40, db_index=True)),
('submission_uuid', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128, db_index=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, db_index=True)),
('completed_at', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)),
))
db.send_create_signal('assessment', ['PeerWorkflow'])
# Adding model 'PeerWorkflowItem'
db.create_table('assessment_peerworkflowitem', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('scorer_id', self.gf('django.db.models.fields.related.ForeignKey')(related_name='items', to=orm['assessment.PeerWorkflow'])),
('scorer', self.gf('django.db.models.fields.related.ForeignKey')(related_name='graded', to=orm['assessment.PeerWorkflow'])),
('author', self.gf('django.db.models.fields.related.ForeignKey')(related_name='graded_by', to=orm['assessment.PeerWorkflow'])),
('submission_uuid', self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)),
('started_at', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now, db_index=True)),
('assessment', self.gf('django.db.models.fields.IntegerField')(default=-1)),
('assessment', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['assessment.Assessment'], null=True)),
('scored', self.gf('django.db.models.fields.BooleanField')(default=False)),
))
db.send_create_signal('assessment', ['PeerWorkflowItem'])
......@@ -94,6 +114,12 @@ class Migration(SchemaMigration):
# Deleting model 'AssessmentPart'
db.delete_table('assessment_assessmentpart')
# Deleting model 'AssessmentFeedback'
db.delete_table('assessment_assessmentfeedback')
# Removing M2M table for field assessments on 'AssessmentFeedback'
db.delete_table('assessment_assessmentfeedback_assessments')
# Deleting model 'PeerWorkflow'
db.delete_table('assessment_peerworkflow')
......@@ -110,7 +136,15 @@ class Migration(SchemaMigration):
'score_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'scored_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'scorer_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Submission']"})
'submission_uuid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
},
'assessment.assessmentfeedback': {
'Meta': {'object_name': 'AssessmentFeedback'},
'assessments': ('django.db.models.fields.related.ManyToManyField', [], {'default': 'None', 'related_name': "'assessment_feedback'", 'symmetrical': 'False', 'to': "orm['assessment.Assessment']"}),
'feedback': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '10000'}),
'helpfulness': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'})
},
'assessment.assessmentpart': {
'Meta': {'object_name': 'AssessmentPart'},
......@@ -137,6 +171,7 @@ class Migration(SchemaMigration):
},
'assessment.peerworkflow': {
'Meta': {'ordering': "['created_at', 'id']", 'object_name': 'PeerWorkflow'},
'completed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
......@@ -146,9 +181,11 @@ class Migration(SchemaMigration):
},
'assessment.peerworkflowitem': {
'Meta': {'ordering': "['started_at', 'id']", 'object_name': 'PeerWorkflowItem'},
'assessment': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'assessment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.Assessment']", 'null': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'graded_by'", 'to': "orm['assessment.PeerWorkflow']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'scorer_id': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['assessment.PeerWorkflow']"}),
'scored': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'scorer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'graded'", 'to': "orm['assessment.PeerWorkflow']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
},
......@@ -156,24 +193,6 @@ class Migration(SchemaMigration):
'Meta': {'object_name': 'Rubric'},
'content_hash': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'submissions.studentitem': {
'Meta': {'unique_together': "(('course_id', 'student_id', 'item_id'),)", 'object_name': 'StudentItem'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'item_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'student_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'submissions.submission': {
'Meta': {'ordering': "['-submitted_at', '-id']", 'object_name': 'Submission'},
'answer': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'attempt_number': ('django.db.models.fields.PositiveIntegerField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'student_item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.StudentItem']"}),
'submitted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '36', 'blank': 'True'})
}
}
......
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'AssessmentFeedback'
db.create_table('assessment_assessmentfeedback', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('submission_uuid', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128, db_index=True)),
('helpfulness', self.gf('django.db.models.fields.IntegerField')(default=2)),
('feedback', self.gf('django.db.models.fields.TextField')(default='', max_length=10000)),
))
db.send_create_signal('assessment', ['AssessmentFeedback'])
# Adding M2M table for field assessments on 'AssessmentFeedback'
db.create_table('assessment_assessmentfeedback_assessments', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('assessmentfeedback', models.ForeignKey(orm['assessment.assessmentfeedback'], null=False)),
('assessment', models.ForeignKey(orm['assessment.assessment'], null=False))
))
db.create_unique('assessment_assessmentfeedback_assessments', ['assessmentfeedback_id', 'assessment_id'])
def backwards(self, orm):
# Deleting model 'AssessmentFeedback'
db.delete_table('assessment_assessmentfeedback')
# Removing M2M table for field assessments on 'AssessmentFeedback'
db.delete_table('assessment_assessmentfeedback_assessments')
models = {
'assessment.assessment': {
'Meta': {'ordering': "['-scored_at', '-id']", 'object_name': 'Assessment'},
'feedback': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '10000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rubric': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.Rubric']"}),
'score_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'scored_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'scorer_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'submission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.Submission']"})
},
'assessment.assessmentfeedback': {
'Meta': {'object_name': 'AssessmentFeedback'},
'assessments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'assessment_feedback'", 'symmetrical': 'False', 'to': "orm['assessment.Assessment']"}),
'feedback': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '10000'}),
'helpfulness': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'})
},
'assessment.assessmentpart': {
'Meta': {'object_name': 'AssessmentPart'},
'assessment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parts'", 'to': "orm['assessment.Assessment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.CriterionOption']"})
},
'assessment.criterion': {
'Meta': {'ordering': "['rubric', 'order_num']", 'object_name': 'Criterion'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order_num': ('django.db.models.fields.PositiveIntegerField', [], {}),
'prompt': ('django.db.models.fields.TextField', [], {'max_length': '10000'}),
'rubric': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'criteria'", 'to': "orm['assessment.Rubric']"})
},
'assessment.criterionoption': {
'Meta': {'ordering': "['criterion', 'order_num']", 'object_name': 'CriterionOption'},
'criterion': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['assessment.Criterion']"}),
'explanation': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order_num': ('django.db.models.fields.PositiveIntegerField', [], {}),
'points': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'assessment.peerworkflow': {
'Meta': {'ordering': "['created_at', 'id']", 'object_name': 'PeerWorkflow'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'student_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'})
},
'assessment.peerworkflowitem': {
'Meta': {'ordering': "['started_at', 'id']", 'object_name': 'PeerWorkflowItem'},
'assessment': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'scorer_id': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['assessment.PeerWorkflow']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
},
'assessment.rubric': {
'Meta': {'object_name': 'Rubric'},
'content_hash': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'submissions.studentitem': {
'Meta': {'unique_together': "(('course_id', 'student_id', 'item_id'),)", 'object_name': 'StudentItem'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'item_type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'student_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'submissions.submission': {
'Meta': {'ordering': "['-submitted_at', '-id']", 'object_name': 'Submission'},
'answer': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'attempt_number': ('django.db.models.fields.PositiveIntegerField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'student_item': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['submissions.StudentItem']"}),
'submitted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '36', 'blank': 'True'})
}
}
complete_apps = ['assessment']
\ No newline at end of file
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Assessment.submission'
db.delete_column('assessment_assessment', 'submission_id')
# Adding field 'Assessment.submission_uuid'
db.add_column('assessment_assessment', 'submission_uuid',
self.gf('django.db.models.fields.CharField')(default="", max_length=128, db_index=True),
keep_default=False)
def backwards(self, orm):
# Adding field 'Assessment.submission'
db.add_column('assessment_assessment', 'submission',
self.gf('django.db.models.fields.related.ForeignKey')(default=0, to=orm['submissions.Submission']),
keep_default=False)
# Deleting field 'Assessment.submission_uuid'
db.delete_column('assessment_assessment', 'submission_uuid')
models = {
'assessment.assessment': {
'Meta': {'ordering': "['-scored_at', '-id']", 'object_name': 'Assessment'},
'feedback': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '10000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rubric': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.Rubric']"}),
'score_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'scored_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'scorer_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
},
'assessment.assessmentfeedback': {
'Meta': {'object_name': 'AssessmentFeedback'},
'assessments': ('django.db.models.fields.related.ManyToManyField', [], {'default': 'None', 'related_name': "'assessment_feedback'", 'symmetrical': 'False', 'to': "orm['assessment.Assessment']"}),
'feedback': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '10000'}),
'helpfulness': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'})
},
'assessment.assessmentpart': {
'Meta': {'object_name': 'AssessmentPart'},
'assessment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parts'", 'to': "orm['assessment.Assessment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.CriterionOption']"})
},
'assessment.criterion': {
'Meta': {'ordering': "['rubric', 'order_num']", 'object_name': 'Criterion'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order_num': ('django.db.models.fields.PositiveIntegerField', [], {}),
'prompt': ('django.db.models.fields.TextField', [], {'max_length': '10000'}),
'rubric': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'criteria'", 'to': "orm['assessment.Rubric']"})
},
'assessment.criterionoption': {
'Meta': {'ordering': "['criterion', 'order_num']", 'object_name': 'CriterionOption'},
'criterion': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['assessment.Criterion']"}),
'explanation': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order_num': ('django.db.models.fields.PositiveIntegerField', [], {}),
'points': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'assessment.peerworkflow': {
'Meta': {'ordering': "['created_at', 'id']", 'object_name': 'PeerWorkflow'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'student_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'})
},
'assessment.peerworkflowitem': {
'Meta': {'ordering': "['started_at', 'id']", 'object_name': 'PeerWorkflowItem'},
'assessment': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'scorer_id': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['assessment.PeerWorkflow']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
},
'assessment.rubric': {
'Meta': {'object_name': 'Rubric'},
'content_hash': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['assessment']
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'PeerWorkflow.completed_at'
db.add_column('assessment_peerworkflow', 'completed_at',
self.gf('django.db.models.fields.DateTimeField')(null=True),
keep_default=False)
# Adding field 'PeerWorkflowItem.scored'
db.add_column('assessment_peerworkflowitem', 'scored',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'PeerWorkflow.completed_at'
db.delete_column('assessment_peerworkflow', 'completed_at')
# Deleting field 'PeerWorkflowItem.scored'
db.delete_column('assessment_peerworkflowitem', 'scored')
models = {
'assessment.assessment': {
'Meta': {'ordering': "['-scored_at', '-id']", 'object_name': 'Assessment'},
'feedback': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '10000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rubric': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.Rubric']"}),
'score_type': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'scored_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'scorer_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
},
'assessment.assessmentfeedback': {
'Meta': {'object_name': 'AssessmentFeedback'},
'assessments': ('django.db.models.fields.related.ManyToManyField', [], {'default': 'None', 'related_name': "'assessment_feedback'", 'symmetrical': 'False', 'to': "orm['assessment.Assessment']"}),
'feedback': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '10000'}),
'helpfulness': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'})
},
'assessment.assessmentpart': {
'Meta': {'object_name': 'AssessmentPart'},
'assessment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parts'", 'to': "orm['assessment.Assessment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['assessment.CriterionOption']"})
},
'assessment.criterion': {
'Meta': {'ordering': "['rubric', 'order_num']", 'object_name': 'Criterion'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order_num': ('django.db.models.fields.PositiveIntegerField', [], {}),
'prompt': ('django.db.models.fields.TextField', [], {'max_length': '10000'}),
'rubric': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'criteria'", 'to': "orm['assessment.Rubric']"})
},
'assessment.criterionoption': {
'Meta': {'ordering': "['criterion', 'order_num']", 'object_name': 'CriterionOption'},
'criterion': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['assessment.Criterion']"}),
'explanation': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order_num': ('django.db.models.fields.PositiveIntegerField', [], {}),
'points': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'assessment.peerworkflow': {
'Meta': {'ordering': "['created_at', 'id']", 'object_name': 'PeerWorkflow'},
'completed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_id': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'student_id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'})
},
'assessment.peerworkflowitem': {
'Meta': {'ordering': "['started_at', 'id']", 'object_name': 'PeerWorkflowItem'},
'assessment': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'scored': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'scorer_id': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['assessment.PeerWorkflow']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'submission_uuid': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'})
},
'assessment.rubric': {
'Meta': {'object_name': 'Rubric'},
'content_hash': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['assessment']
\ No newline at end of file
......@@ -442,7 +442,7 @@ class PeerWorkflow(models.Model):
course_id = models.CharField(max_length=40, db_index=True)
submission_uuid = models.CharField(max_length=128, db_index=True, unique=True)
created_at = models.DateTimeField(default=now, db_index=True)
completed_at = models.DateTimeField(null=True)
completed_at = models.DateTimeField(null=True, db_index=True)
class Meta:
ordering = ["created_at", "id"]
......@@ -450,8 +450,8 @@ class PeerWorkflow(models.Model):
def __repr__(self):
return (
"PeerWorkflow(student_id={0.student_id}, item_id={0.item_id}, "
"course_id={0.course_id}, submission_uuid={0.submission_uuid})"
"created_at={0.created_at}"
"course_id={0.course_id}, submission_uuid={0.submission_uuid}"
"created_at={0.created_at}, completed_at={0.completed_at})"
).format(self)
def __unicode__(self):
......@@ -465,26 +465,24 @@ class PeerWorkflowItem(models.Model):
associated workflow represents the scorer of the given submission, and the
assessment represents the completed assessment for this work item.
Assessments are represented as their ID, defaulting to -1. This is done to
optimize complex queries against PeerWorkflowItems with the Assessments
indexed, whereas a Null reference would be costly.
"""
scorer_id = models.ForeignKey(PeerWorkflow, related_name='items')
scorer = models.ForeignKey(PeerWorkflow, related_name='graded')
author = models.ForeignKey(PeerWorkflow, related_name='graded_by')
submission_uuid = models.CharField(max_length=128, db_index=True)
started_at = models.DateTimeField(default=now, db_index=True)
assessment = models.IntegerField(default=-1)
assessment = models.ForeignKey(Assessment, null=True)
# This WorkflowItem was used to determine the final score for the Workflow.
scored = models.BooleanField(default=False)
@classmethod
def get_scored_assessments(cls, submission_uuid):
workflow_items = PeerWorkflowItem.objects.filter(
submission_uuid=submission_uuid, scored=True
)
return Assessment.objects.filter(
pk__in=[item.pk for item in workflow_items]
pk__in=[
item.assessment.pk for item in PeerWorkflowItem.objects.filter(
submission_uuid=submission_uuid, scored=True
)
]
)
class Meta:
......@@ -492,9 +490,10 @@ class PeerWorkflowItem(models.Model):
def __repr__(self):
return (
"PeerWorkflowItem(scorer_id={0.scorer_id}, "
"PeerWorkflowItem(scorer={0.scorer}, author={0.author}, "
"submission_uuid={0.submission_uuid}, "
"started_at={0.started_at}, assessment={0.assessment})"
"started_at={0.started_at}, assessment={0.assessment}, "
"scored={0.scored})"
).format(self)
def __unicode__(self):
......
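With assessment now a real ForeignKey and the reverse accessors renamed, scored assessments can be reached through the ORM instead of by matching sentinel IDs. A small usage sketch (the UUID is a placeholder taken from the test fixtures):

    # Hypothetical usage; assumes the workflow and its graded items already exist.
    submission_uuid = "55c6f020-a9da-11e3-976d-080027880ca6"

    # Assessments that counted toward this submission's final score.
    scored_assessments = PeerWorkflowItem.get_scored_assessments(submission_uuid)

    # Equivalent traversal through the renamed related managers.
    workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
    assessments = [item.assessment for item in workflow.graded_by.filter(scored=True)]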
......@@ -99,27 +99,25 @@ def get_score(submission_uuid, requirements):
if not is_complete(submission_uuid, requirements):
return None
workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
assessments = Assessment.objects.filter(
submission_uuid=submission_uuid, score_type=PEER_TYPE
)[:requirements["must_be_graded_by"]]
items = workflow.graded_by.filter(assessment__in=assessments)
submission_finished = assessments.count() >= requirements["must_be_graded_by"]
submission_finished = items.count() >= requirements["must_be_graded_by"]
if not submission_finished:
return None
PeerWorkflowItem.objects.filter(
assessment__in=[a.pk for a in assessments]
).update(scored=True)
items.update(scored=True)
PeerWorkflow.objects.filter(submission_uuid=submission_uuid).update(
completed_at=timezone.now()
)
workflow.completed_at = timezone.now()
workflow.save()
return {
"points_earned": sum(
get_assessment_median_scores(submission_uuid).values()
),
"points_possible": assessments[0].points_possible,
"points_possible": items[0].assessment.points_possible,
}
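Assembled, the reworked scoring flow reads as follows. A sketch of the updated get_score, assuming the workflow/graded_by lines above are the additions (imports, PEER_TYPE, and error handling as in the module):

    def get_score(submission_uuid, requirements):
        if not is_complete(submission_uuid, requirements):
            return None
        workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
        assessments = Assessment.objects.filter(
            submission_uuid=submission_uuid, score_type=PEER_TYPE
        )[:requirements["must_be_graded_by"]]
        # Work items on this author's workflow that carry one of those assessments.
        items = workflow.graded_by.filter(assessment__in=assessments)
        if items.count() < requirements["must_be_graded_by"]:
            return None
        # Mark the items that count toward the final score and close the workflow.
        items.update(scored=True)
        workflow.completed_at = timezone.now()
        workflow.save()
        return {
            "points_earned": sum(
                get_assessment_median_scores(submission_uuid).values()
            ),
            "points_possible": items[0].assessment.points_possible,
        }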
......@@ -307,7 +305,9 @@ def get_assessment_median_scores(submission_uuid):
information to form the median scores, an error is raised.
"""
try:
assessments = PeerWorkflowItem.get_scored_assessments(submission_uuid)
workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
items = workflow.graded_by.filter(scored=True)
assessments = [item.assessment for item in items]
scores = Assessment.scores_by_criterion(assessments)
return Assessment.get_median_score_dict(scores)
except DatabaseError:
......@@ -396,14 +396,14 @@ def get_assessments(submission_uuid, scored_only=True, limit=None):
'points_earned': 6,
'points_possible': 12,
'scored_at': datetime.datetime(2014, 1, 29, 17, 14, 52, 649284, tzinfo=<UTC>),
'scorer_id': u"Tim",
'scorer': u"Tim",
'feedback': u'Your submission was thrilling.'
},
{
'points_earned': 11,
'points_possible': 12,
'scored_at': datetime.datetime(2014, 1, 31, 14, 10, 17, 544214, tzinfo=<UTC>),
'scorer_id': u"Bob",
'scorer': u"Bob",
'feedback': u'Great submission.'
}
]
......@@ -556,14 +556,14 @@ def create_peer_workflow(submission_uuid):
raise PeerAssessmentInternalError(error_message)
def create_peer_workflow_item(scorer_id, submission_uuid):
def create_peer_workflow_item(scorer, submission_uuid):
"""
Begin peer-assessing a particular submission.
Note that this does NOT pick the submission from the prioritized list of available submissions.
Mainly useful for testing.
Args:
scorer_id (str): The ID of the scoring student.
scorer (str): The ID of the scoring student.
submission_uuid (str): The unique identifier of the submission being scored
Returns:
......@@ -575,7 +575,7 @@ def create_peer_workflow_item(scorer_id, submission_uuid):
"""
submission = get_submission_and_student(submission_uuid)
student_item_dict = copy.copy(submission['student_item'])
student_item_dict['student_id'] = scorer_id
student_item_dict['student_id'] = scorer
workflow = _get_latest_workflow(student_item_dict)
_create_peer_workflow_item(workflow, submission_uuid)
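A minimal usage sketch of the renamed public API (identifiers are placeholders; both students are assumed to already have peer workflows):

    # Hypothetical call: student "Bob" starts assessing the submission below.
    create_peer_workflow_item("Bob", "55c6f020-a9da-11e3-976d-080027880ca6")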
......@@ -662,8 +662,10 @@ def _create_peer_workflow_item(workflow, submission_uuid):
"""
try:
peer_workflow = PeerWorkflow.objects.get(submission_uuid=submission_uuid)
workflow_item, __ = PeerWorkflowItem.objects.get_or_create(
scorer_id=workflow,
scorer=workflow,
author=peer_workflow,
submission_uuid=submission_uuid
)
return workflow_item
......@@ -706,8 +708,8 @@ def _find_active_assessments(workflow):
"1"
"""
workflows = workflow.items.filter(
assessment=-1,
workflows = workflow.graded.filter(
assessment__isnull=True,
started_at__gt=timezone.now() - TIME_LIMIT
)
return workflows[0].submission_uuid if workflows else None
......@@ -752,6 +754,7 @@ def _get_submission_for_review(workflow, graded_by, over_grading=False):
workflow.item_id,
workflow.course_id,
workflow.student_id,
workflow.id,
timeout,
graded_by
)
......@@ -781,6 +784,7 @@ def _get_submission_for_over_grading(workflow):
workflow.item_id,
workflow.course_id,
workflow.student_id,
workflow.id,
timeout
)
......@@ -795,20 +799,17 @@ def _get_next_submission(order, workflow, *args):
For example, for a general peer assessment query, the following would be
the generated SQL query:
select pw.id, pw.submission_uuid , count(pwi.id) as c
select pw.id, pw.submission_uuid , pw.student_id, count(pwi.id) as c
from assessment_peerworkflow pw
left join assessment_peerworkflowitem pwi
on pw.submission_uuid=pwi.submission_uuid
where pw.item_id='item_one'
on pw.id=pwi.author_id
where pw.completed_at is NULL
and pw.item_id='item_one'
and pw.course_id='Demo_Course'
and pw.student_id<>'Buffy1'
and pw.submission_uuid<>'bc164f09-eb14-4b1d-9ba8-bb2c1c924fba'
and pw.submission_uuid<>'7c5e7db4-e82d-45e1-8fda-79c5deaa16d5'
and pw.submission_uuid<>'9ba64ff5-f18e-4794-b45b-cee26248a0a0'
and pw.submission_uuid<>'cdd6cf7a-2787-43ec-8d31-62fdb14d4e09'
and pw.submission_uuid<>'ebc7d4e1-1577-4443-ab58-2caad9a10837'
and (pwi.scorer_id_id is NULL or pwi.assessment<>-1 or pwi.started_at > '2014-03-04 20:09:04')
group by pw.submission_uuid having count(pwi.id) < 3
and pw.student_id<>'Tim'
and pw.id not in (select pwi.author_id from assessment_peerworkflowitem pwi where pwi.scorer_id=3159)
and (pwi.scorer_id is NULL or pwi.assessment_id is not NULL or pwi.started_at > '2014-03-04 20:09:04')
group by pw.id having count(pwi.id) < 3
order by pw.created_at, pw.id
limit 1;
......@@ -825,28 +826,25 @@ def _get_next_submission(order, workflow, *args):
"""
try:
exclude = ""
for item in workflow.items.all():
exclude += "and pw.submission_uuid<>'{}' ".format(item.submission_uuid)
raw_query = (
"select pw.id, pw.submission_uuid, pwi.scorer_id_id, count(pwi.id) as c "
"select pw.id, pw.submission_uuid, count(pwi.id) as c "
"from assessment_peerworkflow pw "
"left join assessment_peerworkflowitem pwi "
"on pw.submission_uuid=pwi.submission_uuid "
"where pw.item_id=%s "
"on pw.id=pwi.author_id "
"where pw.completed_at is NULL "
"and pw.item_id=%s "
"and pw.course_id=%s "
"and pw.student_id<>%s "
"{} "
" and (pwi.scorer_id_id is NULL or pwi.assessment<>-1 or pwi.started_at > %s) "
"group by pw.submission_uuid "
"and pw.id not in (select pwi.author_id from assessment_peerworkflowitem pwi where pwi.scorer_id=%s) "
"and (pwi.scorer_id is NULL or pwi.assessment_id is not NULL or pwi.started_at > %s) "
"group by pw.id "
"{} "
"limit 1; "
)
query = raw_query.format(exclude, order)
peer_workflows = PeerWorkflow.objects.raw(query, args)
if len(list(peer_workflows)) == 0:
query = raw_query.format(order)
peer_workflows = list(PeerWorkflow.objects.raw(query, args))
if not peer_workflows:
return None
return peer_workflows[0].submission_uuid
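The notable change here is that the per-submission exclusion list, previously string-formatted into the SQL, becomes a single parameterized NOT IN subquery keyed on the scorer's workflow id; only the ORDER BY fragment is still substituted via format(). A sketch of how the arguments from _get_submission_for_over_grading line up with the visible placeholders (the pairing comments are interpretation, not source text):

    args = (
        workflow.item_id,     # and pw.item_id=%s
        workflow.course_id,   # and pw.course_id=%s
        workflow.student_id,  # and pw.student_id<>%s
        workflow.id,          # pwi.scorer_id=%s: skip submissions this scorer already picked up
        timeout,              # pwi.started_at > %s: treat recently started items as still active
    )
    query = raw_query.format(order)
    peer_workflows = list(PeerWorkflow.objects.raw(query, args))
    return peer_workflows[0].submission_uuid if peer_workflows else None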
......@@ -859,13 +857,6 @@ def _get_next_submission(order, workflow, *args):
raise PeerAssessmentInternalError(error_message)
def _assessors_count(peer_workflow):
return PeerWorkflowItem.objects.filter(
~Q(assessment=-1) |
Q(assessment=-1, started_at__gt=timezone.now() - TIME_LIMIT),
submission_uuid=peer_workflow.submission_uuid).count()
def _close_active_assessment(workflow, submission_uuid, assessment):
"""Associate the work item with a complete assessment.
......@@ -891,8 +882,8 @@ def _close_active_assessment(workflow, submission_uuid, assessment):
"""
try:
item = workflow.items.get(submission_uuid=submission_uuid)
item.assessment = assessment.id
item = workflow.graded.get(submission_uuid=submission_uuid)
item.assessment = assessment
item.save()
except (DatabaseError, PeerWorkflowItem.DoesNotExist):
error_message = _(
......@@ -928,7 +919,7 @@ def _num_peers_graded(workflow):
>>> _num_peers_graded(workflow, 3)
True
"""
return workflow.items.all().exclude(assessment=-1).count()
return workflow.graded.filter(assessment__isnull=False).count()
def get_assessment_feedback(submission_uuid):
......
......@@ -343,5 +343,10 @@ class PeerWorkflowItemSerializer(serializers.ModelSerializer):
class Meta:
model = PeerWorkflowItem
fields = (
'scorer_id', 'submission_uuid', 'started_at', 'assessment', 'scored'
'scorer',
'author',
'submission_uuid',
'started_at',
'assessment',
'scored'
)
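For reference, a hypothetical example of what PeerWorkflowItemSerializer(item).data looks like with the expanded field list (values are illustrative; related fields are assumed to serialize as primary keys):

    {
        'scorer': 5,
        'author': 1,
        'submission_uuid': '55c6f020-a9da-11e3-976d-080027880ca6',
        'started_at': '2014-03-12T11:35:22Z',
        'assessment': 12,   # pk of the related Assessment, or None while in progress
        'scored': False
    }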
......@@ -599,7 +599,7 @@ class TestPeerApi(TestCase):
def test_create_workflow_item_error(self, mock_filter):
mock_filter.side_effect = DatabaseError("Oh no.")
tim_answer, tim = self._create_student_and_submission("Tim", "Tim's answer", MONDAY)
peer_api._create_peer_workflow_item(tim, "5")
peer_api._create_peer_workflow_item(tim, tim_answer['uuid'])
def test_get_submission_to_evaluate(self):
self._create_student_and_submission("Tim", "Tim's answer", MONDAY)
......@@ -645,14 +645,14 @@ class TestPeerApi(TestCase):
tim, _ = self._create_student_and_submission("Tim", "Tim's answer")
peer_api.get_rubric_max_scores(tim["uuid"])
@patch.object(Assessment.objects, 'filter')
@patch.object(PeerWorkflow.objects, 'get')
@raises(peer_api.PeerAssessmentInternalError)
def test_median_score_db_error(self, mock_filter):
mock_filter.side_effect = DatabaseError("Bad things happened")
tim, _ = self._create_student_and_submission("Tim", "Tim's answer")
peer_api.get_assessment_median_scores(tim["uuid"])
@patch.object(Assessment.objects, 'filter')
@patch.object(PeerWorkflowItem, 'get_scored_assessments')
@raises(peer_api.PeerAssessmentInternalError)
def test_get_assessments_db_error(self, mock_filter):
mock_filter.side_effect = DatabaseError("Bad things happened")
......@@ -673,7 +673,7 @@ class TestPeerApi(TestCase):
MONDAY
)
@patch.object(Assessment.objects, 'filter')
@patch.object(PeerWorkflowItem, 'get_scored_assessments')
@raises(peer_api.PeerAssessmentInternalError)
def test_error_on_get_assessment(self, mock_filter):
self._create_student_and_submission("Tim", "Tim's answer")
......
......@@ -478,7 +478,8 @@
"pk": 1,
"model": "assessment.peerworkflowitem",
"fields": {
"scorer_id": 5,
"scorer": 5,
"author": 1,
"started_at": "2014-03-12T11:35:22Z",
"assessment": -1,
"submission_uuid": "55c6f020-a9da-11e3-976d-080027880ca6"
......@@ -488,7 +489,8 @@
"pk": 2,
"model": "assessment.peerworkflowitem",
"fields": {
"scorer_id": 1,
"scorer": 1,
"author": 5,
"started_at": "2014-03-12T11:35:36Z",
"assessment": -1,
"submission_uuid": "65650b3e-a9da-11e3-8b23-080027880ca6"
......@@ -498,7 +500,8 @@
"pk": 3,
"model": "assessment.peerworkflowitem",
"fields": {
"scorer_id": 9,
"scorer": 9,
"author": 7,
"started_at": "2014-03-12T11:36:10Z",
"assessment": 1,
"submission_uuid": "7e8ededc-a9da-11e3-89ce-080027880ca6"
......@@ -508,7 +511,8 @@
"pk": 4,
"model": "assessment.peerworkflowitem",
"fields": {
"scorer_id": 11,
"scorer": 11,
"author": 13,
"started_at": "2014-03-12T11:37:24Z",
"assessment": 2,
"submission_uuid": "ac0539f6-a9da-11e3-af02-080027880ca6"
......@@ -518,7 +522,8 @@
"pk": 5,
"model": "assessment.peerworkflowitem",
"fields": {
"scorer_id": 15,
"scorer": 15,
"author": 17,
"started_at": "2014-03-12T11:38:01Z",
"assessment": 4,
"submission_uuid": "c18b952c-a9da-11e3-9ccf-080027880ca6"
......@@ -528,7 +533,8 @@
"pk": 6,
"model": "assessment.peerworkflowitem",
"fields": {
"scorer_id": 17,
"scorer": 17,
"author": 15,
"started_at": "2014-03-12T11:38:18Z",
"assessment": 6,
"submission_uuid": "beb581f0-a9da-11e3-9b83-080027880ca6"
......