Commit dd90c556 by Calen Pennington Committed by Kevin Falcone

Create a new CSMHExtended table to hold our new data

This is a clone (copy) of CSMH's declaration and methods with an added
id of UnsignedBigIntAutoField

We should be able to delete the save_history code, but that needs testing.

Add error logging when capa failures happen

Put StudentModuleHistory into its own database

Bump out the primary key on CSMHE

This gives us a gap to backfill as needed.
Since the new table's pk is an unsigned bigint, even for people who don't
consolidate CSMH into CSMHE, the lost rows are unlikely to matter.

Remove StudentModuleHistory cleaner
parent e0407893
......@@ -80,6 +80,14 @@ DATABASES = {
'timeout': 30,
},
'ATOMIC_REQUESTS': True,
},
'student_module_history': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / "db" / "test_student_module_history.db",
'TEST_NAME': TEST_ROOT / "db" / "test_student_module_history.db",
'OPTIONS': {
'timeout': 30,
},
}
}
......
......@@ -93,11 +93,23 @@ class CapaModule(CapaMixin, XModule):
result = handlers[dispatch](data)
except NotFoundError as err:
_, _, traceback_obj = sys.exc_info()
log.exception(
"Unable to find data when dispatching %s to %s for user %s",
dispatch,
self.scope_ids.usage_id,
self.scope_ids.user_id
)
_, _, traceback_obj = sys.exc_info() # pylint: disable=redefined-outer-name
raise ProcessingError(not_found_error_message), None, traceback_obj
except Exception as err:
_, _, traceback_obj = sys.exc_info()
log.exception(
"Unknown error when dispatching %s to %s for user %s",
dispatch,
self.scope_ids.usage_id,
self.scope_ids.user_id
)
_, _, traceback_obj = sys.exc_info() # pylint: disable=redefined-outer-name
raise ProcessingError(generic_error_message), None, traceback_obj
after = self.get_progress()
......
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -412,7 +412,7 @@ CREATE TABLE `auth_permission` (
PRIMARY KEY (`id`),
UNIQUE KEY `content_type_id` (`content_type_id`,`codename`),
CONSTRAINT `auth__content_type_id_508cf46651277a81_fk_django_content_type_id` FOREIGN KEY (`content_type_id`) REFERENCES `django_content_type` (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=746 DEFAULT CHARSET=utf8;
) ENGINE=InnoDB AUTO_INCREMENT=740 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
DROP TABLE IF EXISTS `auth_registration`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
......@@ -444,7 +444,7 @@ CREATE TABLE `auth_user` (
`date_joined` datetime(6) NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `username` (`username`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
DROP TABLE IF EXISTS `auth_user_groups`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
......@@ -1353,24 +1353,6 @@ CREATE TABLE `courseware_xmoduleuserstatesummaryfield` (
KEY `courseware_xmoduleuserstatesummaryfield_0528eb2a` (`usage_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
DROP TABLE IF EXISTS `credentials_credentialsapiconfig`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `credentials_credentialsapiconfig` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`change_date` datetime(6) NOT NULL,
`enabled` tinyint(1) NOT NULL,
`internal_service_url` varchar(200) NOT NULL,
`public_service_url` varchar(200) NOT NULL,
`enable_learner_issuance` tinyint(1) NOT NULL,
`enable_studio_authoring` tinyint(1) NOT NULL,
`cache_ttl` int(10) unsigned NOT NULL,
`changed_by_id` int(11) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `credentials_crede_changed_by_id_273a2e6b0649c861_fk_auth_user_id` (`changed_by_id`),
CONSTRAINT `credentials_crede_changed_by_id_273a2e6b0649c861_fk_auth_user_id` FOREIGN KEY (`changed_by_id`) REFERENCES `auth_user` (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
DROP TABLE IF EXISTS `credit_creditcourse`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
......@@ -1618,7 +1600,7 @@ CREATE TABLE `django_content_type` (
`model` varchar(100) NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `django_content_type_app_label_45f3b1d93ec8c61c_uniq` (`app_label`,`model`)
) ENGINE=InnoDB AUTO_INCREMENT=248 DEFAULT CHARSET=utf8;
) ENGINE=InnoDB AUTO_INCREMENT=246 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
DROP TABLE IF EXISTS `django_migrations`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
......@@ -1629,7 +1611,7 @@ CREATE TABLE `django_migrations` (
`name` varchar(255) NOT NULL,
`applied` datetime(6) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=107 DEFAULT CHARSET=utf8;
) ENGINE=InnoDB AUTO_INCREMENT=103 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
DROP TABLE IF EXISTS `django_openid_auth_association`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
......
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
......@@ -151,7 +151,10 @@ class FieldOverridePerformanceTestCase(ProceduralCourseTestMixin,
"""
return check_sum_of_calls(XBlock, ['__init__'], instantiations, instantiations, include_arguments=False)
def instrument_course_progress_render(self, course_width, enable_ccx, view_as_ccx, queries, reads, xblocks):
def instrument_course_progress_render(
self, course_width, enable_ccx, view_as_ccx,
default_queries, history_queries, reads, xblocks
):
"""
Renders the progress page, instrumenting Mongo reads and SQL queries.
"""
......@@ -173,10 +176,11 @@ class FieldOverridePerformanceTestCase(ProceduralCourseTestMixin,
# can actually take affect.
OverrideFieldData.provider_classes = None
with self.assertNumQueries(queries):
with self.assertMongoCallCount(reads):
with self.assertXBlockInstantiations(xblocks):
self.grade_course(self.course, view_as_ccx)
with self.assertNumQueries(default_queries, using='default'):
with self.assertNumQueries(history_queries, using='student_module_history'):
with self.assertMongoCallCount(reads):
with self.assertXBlockInstantiations(xblocks):
self.grade_course(self.course, view_as_ccx)
@ddt.data(*itertools.product(('no_overrides', 'ccx'), range(1, 4), (True, False), (True, False)))
@ddt.unpack
......@@ -201,8 +205,12 @@ class FieldOverridePerformanceTestCase(ProceduralCourseTestMixin,
raise SkipTest("Can't use a MongoModulestore test as a CCX course")
with self.settings(FIELD_OVERRIDE_PROVIDERS=providers[overrides]):
queries, reads, xblocks = self.TEST_DATA[(overrides, course_width, enable_ccx, view_as_ccx)]
self.instrument_course_progress_render(course_width, enable_ccx, view_as_ccx, queries, reads, xblocks)
default_queries, history_queries, reads, xblocks = self.TEST_DATA[
(overrides, course_width, enable_ccx, view_as_ccx)
]
self.instrument_course_progress_render(
course_width, enable_ccx, view_as_ccx, default_queries, history_queries, reads, xblocks
)
class TestFieldOverrideMongoPerformance(FieldOverridePerformanceTestCase):
......@@ -213,25 +221,30 @@ class TestFieldOverrideMongoPerformance(FieldOverridePerformanceTestCase):
__test__ = True
TEST_DATA = {
# (providers, course_width, enable_ccx, view_as_ccx): # of sql queries, # of mongo queries, # of xblocks
('no_overrides', 1, True, False): (48, 6, 13),
('no_overrides', 2, True, False): (135, 6, 84),
('no_overrides', 3, True, False): (480, 6, 335),
('ccx', 1, True, False): (48, 6, 13),
('ccx', 2, True, False): (135, 6, 84),
('ccx', 3, True, False): (480, 6, 335),
('ccx', 1, True, True): (48, 6, 13),
('ccx', 2, True, True): (135, 6, 84),
('ccx', 3, True, True): (480, 6, 335),
('no_overrides', 1, False, False): (48, 6, 13),
('no_overrides', 2, False, False): (135, 6, 84),
('no_overrides', 3, False, False): (480, 6, 335),
('ccx', 1, False, False): (48, 6, 13),
('ccx', 2, False, False): (135, 6, 84),
('ccx', 3, False, False): (480, 6, 335),
('ccx', 1, False, True): (48, 6, 13),
('ccx', 2, False, True): (135, 6, 84),
('ccx', 3, False, True): (480, 6, 335),
# (providers, course_width, enable_ccx, view_as_ccx): (
# # of sql queries to default,
# # sql queries to student_module_history,
# # of mongo queries,
# # of xblocks
# )
('no_overrides', 1, True, False): (23, 1, 6, 13),
('no_overrides', 2, True, False): (53, 16, 6, 84),
('no_overrides', 3, True, False): (183, 81, 6, 335),
('ccx', 1, True, False): (23, 1, 6, 13),
('ccx', 2, True, False): (53, 16, 6, 84),
('ccx', 3, True, False): (183, 81, 6, 335),
('ccx', 1, True, True): (23, 1, 6, 13),
('ccx', 2, True, True): (53, 16, 6, 84),
('ccx', 3, True, True): (183, 81, 6, 335),
('no_overrides', 1, False, False): (23, 1, 6, 13),
('no_overrides', 2, False, False): (53, 16, 6, 84),
('no_overrides', 3, False, False): (183, 81, 6, 335),
('ccx', 1, False, False): (23, 1, 6, 13),
('ccx', 2, False, False): (53, 16, 6, 84),
('ccx', 3, False, False): (183, 81, 6, 335),
('ccx', 1, False, True): (23, 1, 6, 13),
('ccx', 2, False, True): (53, 16, 6, 84),
('ccx', 3, False, True): (183, 81, 6, 335),
}
......@@ -243,22 +256,22 @@ class TestFieldOverrideSplitPerformance(FieldOverridePerformanceTestCase):
__test__ = True
TEST_DATA = {
('no_overrides', 1, True, False): (48, 4, 9),
('no_overrides', 2, True, False): (135, 19, 54),
('no_overrides', 3, True, False): (480, 84, 215),
('ccx', 1, True, False): (48, 4, 9),
('ccx', 2, True, False): (135, 19, 54),
('ccx', 3, True, False): (480, 84, 215),
('ccx', 1, True, True): (50, 4, 13),
('ccx', 2, True, True): (137, 19, 84),
('ccx', 3, True, True): (482, 84, 335),
('no_overrides', 1, False, False): (48, 4, 9),
('no_overrides', 2, False, False): (135, 19, 54),
('no_overrides', 3, False, False): (480, 84, 215),
('ccx', 1, False, False): (48, 4, 9),
('ccx', 2, False, False): (135, 19, 54),
('ccx', 3, False, False): (480, 84, 215),
('ccx', 1, False, True): (48, 4, 9),
('ccx', 2, False, True): (135, 19, 54),
('ccx', 3, False, True): (480, 84, 215),
('no_overrides', 1, True, False): (23, 1, 4, 9),
('no_overrides', 2, True, False): (53, 16, 19, 54),
('no_overrides', 3, True, False): (183, 81, 84, 215),
('ccx', 1, True, False): (23, 1, 4, 9),
('ccx', 2, True, False): (53, 16, 19, 54),
('ccx', 3, True, False): (183, 81, 84, 215),
('ccx', 1, True, True): (25, 1, 4, 13),
('ccx', 2, True, True): (55, 16, 19, 84),
('ccx', 3, True, True): (185, 81, 84, 335),
('no_overrides', 1, False, False): (23, 1, 4, 9),
('no_overrides', 2, False, False): (53, 16, 19, 54),
('no_overrides', 3, False, False): (183, 81, 84, 215),
('ccx', 1, False, False): (23, 1, 4, 9),
('ccx', 2, False, False): (53, 16, 19, 54),
('ccx', 3, False, False): (183, 81, 84, 215),
('ccx', 1, False, True): (23, 1, 4, 9),
('ccx', 2, False, True): (53, 16, 19, 54),
('ccx', 3, False, True): (183, 81, 84, 215),
}
"""
Custom fields for use in the courseware django app.
"""
from django.db.models.fields import AutoField
class UnsignedBigIntAutoField(AutoField):
    """
    An unsigned 8-byte integer for auto-incrementing primary keys.
    """

    def db_type(self, connection):
        """
        Return the database column type for this field on `connection`.

        MySQL supports unsigned 64-bit auto-increment columns directly;
        SQLite cannot, so we degrade gracefully there; any other backend
        falls back to AutoField's own mapping.
        """
        engine = connection.settings_dict['ENGINE']
        if engine == 'django.db.backends.mysql':
            return "bigint UNSIGNED AUTO_INCREMENT"
        elif engine == 'django.db.backends.sqlite3':
            # Sqlite will only auto-increment the ROWID column. Any INTEGER PRIMARY KEY column
            # is an alias for that (https://www.sqlite.org/autoinc.html). An unsigned integer
            # isn't an alias for ROWID, so we have to give up on the unsigned part.
            return "integer"
        else:
            # Fall back to the standard AutoField mapping instead of returning
            # None: a None db_type would create a typeless column on backends
            # such as PostgreSQL.
            return super(UnsignedBigIntAutoField, self).db_type(connection)
"""A command to clean the StudentModuleHistory table.
When we added XBlock storage, each field modification wrote a new history row
to the db. Now that we have bulk saves to avoid that database hammering, we
need to clean out the unnecessary rows from the database.
This command that does that.
"""
import datetime
import json
import logging
import optparse
import time
import traceback
from django.core.management.base import NoArgsCommand
from django.db import transaction
from django.db.models import Max
from courseware.models import StudentModuleHistory
class Command(NoArgsCommand):
    """Management command entry point: clean redundant StudentModuleHistory rows."""

    help = "Deletes unneeded rows from the StudentModuleHistory table."

    option_list = NoArgsCommand.option_list + (
        optparse.make_option(
            '--batch',
            type='int',
            default=100,
            help="Batch size, number of module_ids to examine in a transaction.",
        ),
        optparse.make_option(
            '--dry-run',
            action='store_true',
            default=False,
            help="Don't change the database, just show what would be done.",
        ),
        optparse.make_option(
            '--sleep',
            type='float',
            default=0,
            help="Seconds to sleep between batches.",
        ),
    )

    def handle_noargs(self, **options):
        """Build a cleaner from the parsed options and run it."""
        # Silence per-query SQL logging from the db layer; it would swamp the output.
        logging.getLogger("django.db.backends").setLevel(logging.INFO)
        cleaner = StudentModuleHistoryCleaner(dry_run=options["dry_run"])
        cleaner.main(batch_size=options["batch"], sleep=options["sleep"])
class StudentModuleHistoryCleaner(object):
    """Logic to clean rows from the StudentModuleHistory table.

    For each student module, history rows that are followed within
    DELETE_GAP_SECS seconds by a newer row are considered redundant and are
    deleted.  Progress is checkpointed to STATE_FILE after every batch so an
    interrupted run can resume where it left off.
    """

    DELETE_GAP_SECS = 0.5   # Rows this close can be discarded.
    STATE_FILE = "clean_history.json"   # Checkpoint file for resumable runs.
    BATCH_SIZE = 100    # Default number of student_module ids per transaction.

    def __init__(self, dry_run=False):
        # dry_run: report what would happen, but roll back each batch's transaction.
        self.dry_run = dry_run
        self.next_student_module_id = 0
        self.last_student_module_id = 0

    def main(self, batch_size=None, sleep=0):
        """Invoked from the management command to do all the work."""
        batch_size = batch_size or self.BATCH_SIZE
        # NOTE(review): get_last_student_module_id() returns None when the
        # table is empty; the `<=` below then skips the loop (Py2 ordering) --
        # confirm this is the intended no-op behavior.
        self.last_student_module_id = self.get_last_student_module_id()
        self.load_state()
        while self.next_student_module_id <= self.last_student_module_id:
            # One transaction per batch so a failure (or a dry run) rolls the
            # whole batch back together.
            with transaction.atomic():
                for smid in self.module_ids_to_check(batch_size):
                    try:
                        self.clean_one_student_module(smid)
                    except Exception:  # pylint: disable=broad-except
                        # Keep going: one bad student module must not stop the run.
                        trace = traceback.format_exc()
                        self.say("Couldn't clean student_module_id {}:\n{}".format(smid, trace))
                if self.dry_run:
                    transaction.set_rollback(True)
                else:
                    self.say("Committing")
            # Checkpoint after every batch, whether committed or rolled back.
            self.save_state()
            if sleep:
                time.sleep(sleep)

    def say(self, message):
        """
        Display a message to the user.

        The message will have a trailing newline added to it.
        """
        # Python 2 print statement; tests override this method to capture output.
        print message

    def load_state(self):
        """
        Load the latest state from disk.

        Sets `next_student_module_id` from the state file, or to 0 when no
        state has been stored yet.
        """
        try:
            state_file = open(self.STATE_FILE)
        except IOError:
            # No checkpoint yet: start from the beginning.
            self.say("No stored state")
            self.next_student_module_id = 0
        else:
            with state_file:
                state = json.load(state_file)
                self.say(
                    "Loaded stored state: {}".format(
                        json.dumps(state, sort_keys=True)
                    )
                )
                self.next_student_module_id = state['next_student_module_id']

    def save_state(self):
        """
        Save the state to disk.
        """
        state = {
            'next_student_module_id': self.next_student_module_id,
        }
        with open(self.STATE_FILE, "w") as state_file:
            json.dump(state, state_file)
        self.say("Saved state: {}".format(json.dumps(state, sort_keys=True)))

    def get_last_student_module_id(self):
        """
        Return the id of the last student_module.

        Note this is the max student_module id *referenced by history rows*,
        not the max id in the StudentModule table itself.
        """
        last = StudentModuleHistory.objects.all() \
            .aggregate(Max('student_module'))['student_module__max']
        self.say("Last student_module_id is {}".format(last))
        return last

    def module_ids_to_check(self, batch_size):
        """Produce a sequence of student module ids to check.

        `batch_size` is how many module ids to produce, max.

        The sequence starts with `next_student_module_id`, and goes up to
        and including `last_student_module_id`.

        `next_student_module_id` is updated as each id is yielded.
        """
        start = self.next_student_module_id
        for smid in range(start, start + batch_size):
            if smid > self.last_student_module_id:
                break
            yield smid
            # Advance the checkpoint only after the id has been consumed.
            self.next_student_module_id = smid + 1

    def get_history_for_student_modules(self, student_module_id):
        """
        Get the history rows for a student module.

        ```student_module_id```: the id of the student module we're
        interested in.

        Return a list: [(id, created), ...], all the rows of history,
        oldest first (ties on `created` broken by ascending id).
        """
        history = StudentModuleHistory.objects \
            .filter(student_module=student_module_id) \
            .order_by('created', 'id')
        return [(row.id, row.created) for row in history]

    def delete_history(self, ids_to_delete):
        """
        Delete history rows.

        ```ids_to_delete```: a non-empty list (or set...) of history row ids to delete.
        """
        assert ids_to_delete
        StudentModuleHistory.objects.filter(id__in=ids_to_delete).delete()

    def clean_one_student_module(self, student_module_id):
        """Clean one StudentModule's-worth of history.

        `student_module_id`: the id of the StudentModule to process.
        """
        delete_gap = datetime.timedelta(seconds=self.DELETE_GAP_SECS)
        history = self.get_history_for_student_modules(student_module_id)
        if not history:
            self.say("No history for student_module_id {}".format(student_module_id))
            return
        ids_to_delete = []
        next_created = None
        # Walk newest-to-oldest: a row closely followed by a newer one is
        # redundant.  On identical timestamps the row with the larger id is
        # kept, because rows are sorted by (created, id) before reversing.
        for history_id, created in reversed(history):
            if next_created is not None:
                # Compare this timestamp with the next one.
                if (next_created - created) < delete_gap:
                    # This row is followed closely by another, we can discard
                    # this one.
                    ids_to_delete.append(history_id)
            next_created = created
        verb = "Would have deleted" if self.dry_run else "Deleting"
        self.say("{verb} {to_delete} rows of {total} for student_module_id {id}".format(
            verb=verb,
            to_delete=len(ids_to_delete),
            total=len(history),
            id=student_module_id,
        ))
        if ids_to_delete and not self.dry_run:
            self.delete_history(ids_to_delete)
"""Test the clean_history management command."""
import fnmatch
from mock import Mock
from nose.plugins.attrib import attr
import os.path
import textwrap
import dateutil.parser
from django.test import TransactionTestCase
from django.db import connection
from courseware.management.commands.clean_history import StudentModuleHistoryCleaner
# In lots of places in this file, smhc == StudentModuleHistoryCleaner
def parse_date(sdate):
    """Parse a string date into a UTC-aware datetime."""
    # The file only does `import dateutil.parser`, which does not guarantee
    # that the dateutil.tz submodule is loaded; import it explicitly here so
    # the gettz() call below cannot raise AttributeError.
    from dateutil import tz
    parsed = dateutil.parser.parse(sdate)
    parsed = parsed.replace(tzinfo=tz.gettz('UTC'))
    return parsed
class SmhcSayStubbed(StudentModuleHistoryCleaner):
    """StudentModuleHistoryCleaner whose .say() output is captured for assertions."""

    def __init__(self, **kwargs):
        super(SmhcSayStubbed, self).__init__(**kwargs)
        # Every message the cleaner "says" is recorded here instead of printed.
        self.said_lines = []

    def say(self, msg):
        # Capture rather than print, so tests can assert on the output.
        self.said_lines.append(msg)
class SmhcDbMocked(SmhcSayStubbed):
    """StudentModuleHistoryCleaner with all db access replaced by Mock objects."""

    def __init__(self, **kwargs):
        super(SmhcDbMocked, self).__init__(**kwargs)
        # Both db touch-points become mocks: reads return canned rows
        # (see set_rows), deletes just record their arguments.
        self.get_history_for_student_modules = Mock()
        self.delete_history = Mock()

    def set_rows(self, rows):
        """Set the mocked history rows."""
        parsed = [(row_id, parse_date(created)) for row_id, created in rows]
        self.get_history_for_student_modules.return_value = parsed
class HistoryCleanerTest(TransactionTestCase):
    """Base class for all history cleaner tests."""

    maxDiff = None  # Always show full diffs on assertion failures.

    def setUp(self):
        super(HistoryCleanerTest, self).setUp()
        # Ensure no checkpoint file survives from one test to the next.
        self.addCleanup(self.clean_up_state_file)

    def write_state_file(self, state):
        """Write the string `state` into the state file read by StudentModuleHistoryCleaner."""
        with open(StudentModuleHistoryCleaner.STATE_FILE, "w") as state_file:
            state_file.write(state)

    def read_state_file(self):
        """Return the string contents of the state file read by StudentModuleHistoryCleaner."""
        with open(StudentModuleHistoryCleaner.STATE_FILE) as state_file:
            return state_file.read()

    def clean_up_state_file(self):
        """Remove any state file lying around."""
        if os.path.exists(StudentModuleHistoryCleaner.STATE_FILE):
            os.remove(StudentModuleHistoryCleaner.STATE_FILE)

    def assert_said(self, smhc, *msgs):
        """Fail if the `smhc` didn't say `msgs`.

        The messages passed here are `fnmatch`-style patterns: "*" means anything.
        """
        # NOTE(review): zip() stops at the shorter sequence, so extra trailing
        # messages (or missing ones) are silently ignored -- confirm intended.
        for said, pattern in zip(smhc.said_lines, msgs):
            if not fnmatch.fnmatch(said, pattern):
                fmt = textwrap.dedent("""\
                    Messages:
                    {msgs}
                    don't match patterns:
                    {patterns}
                    Failed at {said!r} and {pattern!r}
                    """)
                msg = fmt.format(
                    msgs="\n".join(smhc.said_lines),
                    patterns="\n".join(msgs),
                    said=said,
                    pattern=pattern
                )
                self.fail(msg)

    def parse_rows(self, rows):
        """Parse convenient rows into real data."""
        rows = [
            (row_id, parse_date(created), student_module_id)
            for row_id, created, student_module_id in rows
        ]
        return rows

    def write_history(self, rows):
        """Write history rows to the db.

        Each row should be (id, created, student_module_id).
        """
        # Raw SQL so the tests can control the primary key values explicitly.
        cursor = connection.cursor()
        cursor.executemany(
            """
            INSERT INTO courseware_studentmodulehistory
            (id, created, student_module_id)
            VALUES (%s, %s, %s)
            """,
            self.parse_rows(rows),
        )

    def read_history(self):
        """Read the history from the db, and return it as a list of tuples.

        Returns [(id, created, student_module_id), ...]
        """
        cursor = connection.cursor()
        cursor.execute("""
            SELECT id, created, student_module_id FROM courseware_studentmodulehistory
        """)
        return cursor.fetchall()

    def assert_history(self, rows):
        """Assert that the history rows are the same as `rows`."""
        self.assertEqual(self.parse_rows(rows), self.read_history())
@attr('shard_1')
class HistoryCleanerNoDbTest(HistoryCleanerTest):
    """Tests of StudentModuleHistoryCleaner with db access mocked."""

    def test_empty(self):
        # No history at all: report and do nothing.
        smhc = SmhcDbMocked()
        smhc.set_rows([])
        smhc.clean_one_student_module(1)
        self.assert_said(smhc, "No history for student_module_id 1")
        # Nothing to delete, so delete_history wasn't called.
        self.assertFalse(smhc.delete_history.called)

    def test_one_row(self):
        # A lone history row is always kept.
        smhc = SmhcDbMocked()
        smhc.set_rows([
            (1, "2013-07-13 12:11:10.987"),
        ])
        smhc.clean_one_student_module(1)
        self.assert_said(smhc, "Deleting 0 rows of 1 for student_module_id 1")
        # Nothing to delete, so delete_history wasn't called.
        self.assertFalse(smhc.delete_history.called)

    def test_one_row_dry_run(self):
        # Dry run only changes the reported verb.
        smhc = SmhcDbMocked(dry_run=True)
        smhc.set_rows([
            (1, "2013-07-13 12:11:10.987"),
        ])
        smhc.clean_one_student_module(1)
        self.assert_said(smhc, "Would have deleted 0 rows of 1 for student_module_id 1")
        # Nothing to delete, so delete_history wasn't called.
        self.assertFalse(smhc.delete_history.called)

    def test_two_rows_close(self):
        # Two rows within DELETE_GAP_SECS: the older one is discarded.
        smhc = SmhcDbMocked()
        smhc.set_rows([
            (7, "2013-07-13 12:34:56.789"),
            (9, "2013-07-13 12:34:56.987"),
        ])
        smhc.clean_one_student_module(1)
        self.assert_said(smhc, "Deleting 1 rows of 2 for student_module_id 1")
        smhc.delete_history.assert_called_once_with([7])

    def test_two_rows_far(self):
        # Rows separated by more than DELETE_GAP_SECS: both kept.
        smhc = SmhcDbMocked()
        smhc.set_rows([
            (7, "2013-07-13 12:34:56.789"),
            (9, "2013-07-13 12:34:57.890"),
        ])
        smhc.clean_one_student_module(1)
        self.assert_said(smhc, "Deleting 0 rows of 2 for student_module_id 1")
        self.assertFalse(smhc.delete_history.called)

    def test_a_bunch_of_rows(self):
        # Mixed gaps; deleted ids come out newest-first because the cleaner
        # walks the history backwards.
        smhc = SmhcDbMocked()
        smhc.set_rows([
            (4, "2013-07-13 16:30:00.000"),   # keep
            (8, "2013-07-13 16:30:01.100"),
            (15, "2013-07-13 16:30:01.200"),
            (16, "2013-07-13 16:30:01.300"),  # keep
            (23, "2013-07-13 16:30:02.400"),
            (42, "2013-07-13 16:30:02.500"),
            (98, "2013-07-13 16:30:02.600"),  # keep
            (99, "2013-07-13 16:30:59.000"),  # keep
        ])
        smhc.clean_one_student_module(17)
        self.assert_said(smhc, "Deleting 4 rows of 8 for student_module_id 17")
        smhc.delete_history.assert_called_once_with([42, 23, 15, 8])
@attr('shard_1')
class HistoryCleanerWitDbTest(HistoryCleanerTest):
    """Tests of StudentModuleHistoryCleaner with a real db."""
    # NOTE(review): class name is missing an "h" (WithDb); kept as-is since
    # renaming would change the test id.

    def test_no_history(self):
        # Cleaning a student_module_id with no history leaves the db unchanged.
        smhc = SmhcSayStubbed()
        self.write_history([
            (4, "2013-07-13 16:30:00.000", 11),   # keep
            (8, "2013-07-13 16:30:01.100", 11),
            (15, "2013-07-13 16:30:01.200", 11),
            (16, "2013-07-13 16:30:01.300", 11),  # keep
            (23, "2013-07-13 16:30:02.400", 11),
            (42, "2013-07-13 16:30:02.500", 11),
            (98, "2013-07-13 16:30:02.600", 11),  # keep
            (99, "2013-07-13 16:30:59.000", 11),  # keep
        ])
        smhc.clean_one_student_module(22)
        self.assert_said(smhc, "No history for student_module_id 22")
        self.assert_history([
            (4, "2013-07-13 16:30:00.000", 11),   # keep
            (8, "2013-07-13 16:30:01.100", 11),
            (15, "2013-07-13 16:30:01.200", 11),
            (16, "2013-07-13 16:30:01.300", 11),  # keep
            (23, "2013-07-13 16:30:02.400", 11),
            (42, "2013-07-13 16:30:02.500", 11),
            (98, "2013-07-13 16:30:02.600", 11),  # keep
            (99, "2013-07-13 16:30:59.000", 11),  # keep
        ])

    def test_a_bunch_of_rows(self):
        # Cleaning a student_module_id with 8 records, 4 to delete.
        smhc = SmhcSayStubbed()
        self.write_history([
            (4, "2013-07-13 16:30:00.000", 11),   # keep
            (8, "2013-07-13 16:30:01.100", 11),
            (15, "2013-07-13 16:30:01.200", 11),
            (16, "2013-07-13 16:30:01.300", 11),  # keep
            (17, "2013-07-13 16:30:01.310", 22),  # other student_module_id!
            (23, "2013-07-13 16:30:02.400", 11),
            (42, "2013-07-13 16:30:02.500", 11),
            (98, "2013-07-13 16:30:02.600", 11),  # keep
            (99, "2013-07-13 16:30:59.000", 11),  # keep
        ])
        smhc.clean_one_student_module(11)
        self.assert_said(smhc, "Deleting 4 rows of 8 for student_module_id 11")
        # Row 17 belongs to another student module and must be untouched.
        self.assert_history([
            (4, "2013-07-13 16:30:00.000", 11),   # keep
            (16, "2013-07-13 16:30:01.300", 11),  # keep
            (17, "2013-07-13 16:30:01.310", 22),  # other student_module_id!
            (98, "2013-07-13 16:30:02.600", 11),  # keep
            (99, "2013-07-13 16:30:59.000", 11),  # keep
        ])

    def test_a_bunch_of_rows_dry_run(self):
        # Cleaning a student_module_id with 8 records, 4 to delete,
        # but don't really do it.
        smhc = SmhcSayStubbed(dry_run=True)
        self.write_history([
            (4, "2013-07-13 16:30:00.000", 11),   # keep
            (8, "2013-07-13 16:30:01.100", 11),
            (15, "2013-07-13 16:30:01.200", 11),
            (16, "2013-07-13 16:30:01.300", 11),  # keep
            (23, "2013-07-13 16:30:02.400", 11),
            (42, "2013-07-13 16:30:02.500", 11),
            (98, "2013-07-13 16:30:02.600", 11),  # keep
            (99, "2013-07-13 16:30:59.000", 11),  # keep
        ])
        smhc.clean_one_student_module(11)
        self.assert_said(smhc, "Would have deleted 4 rows of 8 for student_module_id 11")
        # Dry run: nothing was actually deleted.
        self.assert_history([
            (4, "2013-07-13 16:30:00.000", 11),   # keep
            (8, "2013-07-13 16:30:01.100", 11),
            (15, "2013-07-13 16:30:01.200", 11),
            (16, "2013-07-13 16:30:01.300", 11),  # keep
            (23, "2013-07-13 16:30:02.400", 11),
            (42, "2013-07-13 16:30:02.500", 11),
            (98, "2013-07-13 16:30:02.600", 11),  # keep
            (99, "2013-07-13 16:30:59.000", 11),  # keep
        ])

    def test_a_bunch_of_rows_in_jumbled_order(self):
        # Cleaning a student_module_id with 8 records, 4 to delete.
        # Row ids are deliberately uncorrelated with timestamps: the cleaner
        # must order by created, not id.
        smhc = SmhcSayStubbed()
        self.write_history([
            (23, "2013-07-13 16:30:01.100", 11),
            (24, "2013-07-13 16:30:01.300", 11),  # keep
            (27, "2013-07-13 16:30:02.500", 11),
            (30, "2013-07-13 16:30:01.350", 22),  # other student_module_id!
            (32, "2013-07-13 16:30:59.000", 11),  # keep
            (50, "2013-07-13 16:30:02.400", 11),
            (51, "2013-07-13 16:30:02.600", 11),  # keep
            (56, "2013-07-13 16:30:00.000", 11),  # keep
            (57, "2013-07-13 16:30:01.200", 11),
        ])
        smhc.clean_one_student_module(11)
        self.assert_said(smhc, "Deleting 4 rows of 8 for student_module_id 11")
        self.assert_history([
            (24, "2013-07-13 16:30:01.300", 11),  # keep
            (30, "2013-07-13 16:30:01.350", 22),  # other student_module_id!
            (32, "2013-07-13 16:30:59.000", 11),  # keep
            (51, "2013-07-13 16:30:02.600", 11),  # keep
            (56, "2013-07-13 16:30:00.000", 11),  # keep
        ])

    def test_a_bunch_of_rows_with_timestamp_ties(self):
        # Sometimes rows are written with identical timestamps. The one with
        # the greater id is the winner in that case.
        smhc = SmhcSayStubbed()
        self.write_history([
            (21, "2013-07-13 16:30:01.100", 11),
            (24, "2013-07-13 16:30:01.100", 11),  # keep
            (22, "2013-07-13 16:30:01.100", 11),
            (23, "2013-07-13 16:30:01.100", 11),
            (27, "2013-07-13 16:30:02.500", 11),
            (30, "2013-07-13 16:30:01.350", 22),  # other student_module_id!
            (32, "2013-07-13 16:30:59.000", 11),  # keep
            (50, "2013-07-13 16:30:02.500", 11),  # keep
        ])
        smhc.clean_one_student_module(11)
        self.assert_said(smhc, "Deleting 4 rows of 7 for student_module_id 11")
        self.assert_history([
            (24, "2013-07-13 16:30:01.100", 11),  # keep
            (30, "2013-07-13 16:30:01.350", 22),  # other student_module_id!
            (32, "2013-07-13 16:30:59.000", 11),  # keep
            (50, "2013-07-13 16:30:02.500", 11),  # keep
        ])

    def test_get_last_student_module(self):
        # Can we find the last student_module_id properly?
        # (It is the max student_module_id referenced, not the max row id.)
        smhc = SmhcSayStubbed()
        self.write_history([
            (23, "2013-07-13 16:30:01.100", 11),
            (24, "2013-07-13 16:30:01.300", 44),
            (27, "2013-07-13 16:30:02.500", 11),
            (30, "2013-07-13 16:30:01.350", 22),
            (32, "2013-07-13 16:30:59.000", 11),
            (51, "2013-07-13 16:30:02.600", 33),
            (56, "2013-07-13 16:30:00.000", 11),
        ])
        last = smhc.get_last_student_module_id()
        self.assertEqual(last, 44)
        self.assert_said(smhc, "Last student_module_id is 44")

    def test_load_state_with_no_stored_state(self):
        # Missing state file means "start from id 0".
        smhc = SmhcSayStubbed()
        self.assertFalse(os.path.exists(smhc.STATE_FILE))
        smhc.load_state()
        self.assertEqual(smhc.next_student_module_id, 0)
        self.assert_said(smhc, "No stored state")

    def test_load_stored_state(self):
        self.write_state_file('{"next_student_module_id": 23}')
        smhc = SmhcSayStubbed()
        smhc.load_state()
        self.assertEqual(smhc.next_student_module_id, 23)
        self.assert_said(smhc, 'Loaded stored state: {"next_student_module_id": 23}')

    def test_save_state(self):
        smhc = SmhcSayStubbed()
        smhc.next_student_module_id = 47
        smhc.save_state()
        state = self.read_state_file()
        self.assertEqual(state, '{"next_student_module_id": 47}')
class SmhcForTestingMain(SmhcSayStubbed):
    """A StudentModuleHistoryCleaner with a few function stubbed for testing main."""

    def __init__(self, *args, **kwargs):
        # Any smid listed in exception_smids makes clean_one_student_module
        # blow up, so tests can exercise main()'s error handling.
        self.exception_smids = kwargs.pop('exception_smids', ())
        super(SmhcForTestingMain, self).__init__(*args, **kwargs)

    def clean_one_student_module(self, smid):
        """Pretend to clean one module, raising for configured smids."""
        self.say("(not really cleaning {})".format(smid))
        if smid in self.exception_smids:
            raise Exception("Something went wrong!")
@attr('shard_1')
class HistoryCleanerMainTest(HistoryCleanerTest):
    """Tests of StudentModuleHistoryCleaner.main(), using SmhcForTestingMain."""

    def test_only_one_record(self):
        # One history row: a single batch covering smids 0..1, then a checkpoint.
        smhc = SmhcForTestingMain()
        self.write_history([
            (1, "2013-07-15 11:47:00.000", 1),
        ])
        smhc.main()
        self.assert_said(
            smhc,
            'Last student_module_id is 1',
            'No stored state',
            '(not really cleaning 0)',
            '(not really cleaning 1)',
            'Committing',
            'Saved state: {"next_student_module_id": 2}',
        )

    def test_already_processed_some(self):
        # A stored checkpoint makes main() skip everything before id 25.
        smhc = SmhcForTestingMain()
        self.write_state_file('{"next_student_module_id": 25}')
        self.write_history([
            (1, "2013-07-15 15:04:00.000", 23),
            (2, "2013-07-15 15:04:11.000", 23),
            (3, "2013-07-15 15:04:01.000", 24),
            (4, "2013-07-15 15:04:00.000", 25),
            (5, "2013-07-15 15:04:00.000", 26),
        ])
        smhc.main()
        self.assert_said(
            smhc,
            'Last student_module_id is 26',
            'Loaded stored state: {"next_student_module_id": 25}',
            '(not really cleaning 25)',
            '(not really cleaning 26)',
            'Committing',
            'Saved state: {"next_student_module_id": 27}'
        )

    def test_working_in_batches(self):
        # batch_size=3 splits ids 25..29 into two batches, each committed and
        # checkpointed separately.
        smhc = SmhcForTestingMain()
        self.write_state_file('{"next_student_module_id": 25}')
        self.write_history([
            (3, "2013-07-15 15:04:01.000", 24),
            (4, "2013-07-15 15:04:00.000", 25),
            (5, "2013-07-15 15:04:00.000", 26),
            (6, "2013-07-15 15:04:00.000", 27),
            (7, "2013-07-15 15:04:00.000", 28),
            (8, "2013-07-15 15:04:00.000", 29),
        ])
        smhc.main(batch_size=3)
        self.assert_said(
            smhc,
            'Last student_module_id is 29',
            'Loaded stored state: {"next_student_module_id": 25}',
            '(not really cleaning 25)',
            '(not really cleaning 26)',
            '(not really cleaning 27)',
            'Committing',
            'Saved state: {"next_student_module_id": 28}',
            '(not really cleaning 28)',
            '(not really cleaning 29)',
            'Committing',
            'Saved state: {"next_student_module_id": 30}',
        )

    def test_something_failing_while_cleaning(self):
        # A failure on one smid is reported but does not stop the run or the
        # batch's commit.
        smhc = SmhcForTestingMain(exception_smids=[26])
        self.write_state_file('{"next_student_module_id": 25}')
        self.write_history([
            (3, "2013-07-15 15:04:01.000", 24),
            (4, "2013-07-15 15:04:00.000", 25),
            (5, "2013-07-15 15:04:00.000", 26),
            (6, "2013-07-15 15:04:00.000", 27),
            (7, "2013-07-15 15:04:00.000", 28),
            (8, "2013-07-15 15:04:00.000", 29),
        ])
        smhc.main(batch_size=3)
        self.assert_said(
            smhc,
            'Last student_module_id is 29',
            'Loaded stored state: {"next_student_module_id": 25}',
            '(not really cleaning 25)',
            '(not really cleaning 26)',
            "Couldn't clean student_module_id 26:\nTraceback*Exception: Something went wrong!\n",
            '(not really cleaning 27)',
            'Committing',
            'Saved state: {"next_student_module_id": 28}',
            '(not really cleaning 28)',
            '(not really cleaning 29)',
            'Committing',
            'Saved state: {"next_student_module_id": 30}',
        )
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import courseware.fields
from django.conf import settings
def bump_pk_start(apps, schema_editor):
    """
    Advance the starting primary key of courseware_studentmodulehistoryextended
    past the highest existing StudentModuleHistory id plus
    STUDENTMODULEHISTORYEXTENDED_OFFSET, leaving a gap of ids free for
    backfilling the old CSMH rows underneath the new table's rows.
    """
    # Only act on the dedicated history database; this migration also runs
    # against 'default', where there is nothing to bump.
    if not schema_editor.connection.alias == 'student_module_history':
        return

    import datetime  # local import: only the sqlite fallback below needs it

    StudentModuleHistory = apps.get_model("courseware", "StudentModuleHistory")
    try:
        last_id = StudentModuleHistory.objects.all().latest('id').id
    except StudentModuleHistory.DoesNotExist:
        # Fresh database: no existing history rows, so no gap is needed beyond
        # the configured offset itself.
        last_id = 0
    initial_id = settings.STUDENTMODULEHISTORYEXTENDED_OFFSET + last_id

    if schema_editor.connection.vendor == 'mysql':
        schema_editor.execute('ALTER TABLE courseware_studentmodulehistoryextended AUTO_INCREMENT=%s', [initial_id])
    elif schema_editor.connection.vendor == 'sqlite':
        # Django reports the sqlite3 backend's vendor string as 'sqlite',
        # not 'sqlite3' -- the original comparison could never match.
        # This is a hack to force sqlite to add new rows after the earlier rows we
        # want to migrate: insert a placeholder row at the target id so rowid
        # allocation continues after it.
        # NOTE(review): the field list here (course_key, usage_key, username)
        # does not match StudentModuleHistory's visible declaration
        # (student_module/version/created/...) -- confirm the intended model.
        StudentModuleHistory(
            id=initial_id,
            course_key=None,
            usage_key=None,
            username="",
            version="",
            created=datetime.datetime.now(),
        ).save()
class Migration(migrations.Migration):
    """Create the StudentModuleHistoryExtended table and bump its starting pk."""

    # Runs after the courseware app's initial migration, so the StudentModule
    # table this model points at already exists.
    dependencies = [
        ('courseware', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='StudentModuleHistoryExtended',
            fields=[
                # Unsigned BIGINT autoincrement pk: the reason this "extended"
                # clone of CSMH exists -- the old table's pk was running out.
                ('id', courseware.fields.UnsignedBigIntAutoField(serialize=False, primary_key=True)),
                ('version', models.CharField(db_index=True, max_length=255, null=True, blank=True)),
                # Populated from StudentModule.modified, hence indexed for
                # get_latest_by lookups.
                ('created', models.DateTimeField(db_index=True)),
                ('state', models.TextField(null=True, blank=True)),
                ('grade', models.FloatField(null=True, blank=True)),
                ('max_grade', models.FloatField(null=True, blank=True)),
                # db_constraint=False: StudentModule lives in a different
                # database, so a real FK constraint cannot be enforced here.
                ('student_module', models.ForeignKey(to='courseware.StudentModule', db_constraint=False)),
            ],
            options={
                'get_latest_by': 'created',
            },
        ),
        # Leave an id gap (see bump_pk_start) so old CSMH rows can be
        # backfilled beneath the new table's rows. No-op on reverse.
        migrations.RunPython(bump_pk_start, reverse_code=migrations.RunPython.noop),
    ]
......@@ -26,7 +26,7 @@ from student.models import user_by_anonymous_id
from submissions.models import score_set, score_reset
from xmodule_django.models import CourseKeyField, LocationKeyField, BlockTypeKeyField
log = logging.getLogger(__name__)
from courseware.fields import UnsignedBigIntAutoField
log = logging.getLogger("edx.courseware")
......@@ -188,6 +188,50 @@ class StudentModuleHistory(models.Model):
def __unicode__(self):
    """Python 2 unicode protocol: display the instance via its repr()."""
    return unicode(repr(self))
class StudentModuleHistoryExtended(models.Model):
    """Keeps a complete history of state changes for a given XModule for a given
    Student. Right now, we restrict this to problems so that the table doesn't
    explode in size.

    This new extended CSMH has a larger primary key that won't run out of space
    so quickly."""

    objects = ChunkingManager()
    # Only StudentModule saves with one of these module_types are recorded
    # (checked by the save_history signal handler below).
    HISTORY_SAVING_TYPES = {'problem'}

    class Meta(object):
        app_label = "courseware"
        get_latest_by = "created"

    # Unsigned BIGINT autoincrement pk -- the whole point of this clone of
    # CSMH, whose 32-bit pk was running out of room.
    id = UnsignedBigIntAutoField(primary_key=True)  # pylint: disable=invalid-name
    # db_constraint=False: NOTE(review): presumably because this table lives in
    # the separate 'student_module_history' database where a cross-database FK
    # cannot be enforced -- confirm against the router configuration.
    student_module = models.ForeignKey(StudentModule, db_index=True, db_constraint=False)
    version = models.CharField(max_length=255, null=True, blank=True, db_index=True)
    # This should be populated from the modified field in StudentModule
    created = models.DateTimeField(db_index=True)
    state = models.TextField(null=True, blank=True)
    grade = models.FloatField(null=True, blank=True)
    max_grade = models.FloatField(null=True, blank=True)

    @receiver(post_save, sender=StudentModule)
    def save_history(sender, instance, **kwargs):  # pylint: disable=no-self-argument, unused-argument
        """
        Checks the instance's module_type, and creates & saves a
        StudentModuleHistoryExtended entry if the module_type is one that
        we save.
        """
        if instance.module_type in StudentModuleHistoryExtended.HISTORY_SAVING_TYPES:
            history_entry = StudentModuleHistoryExtended(student_module=instance,
                                                         version=None,
                                                         created=instance.modified,
                                                         state=instance.state,
                                                         grade=instance.grade,
                                                         max_grade=instance.max_grade)
            history_entry.save()

    def __unicode__(self):
        """Python 2 unicode protocol: display the instance via its repr()."""
        return unicode(repr(self))
class XBlockFieldBase(models.Model):
"""
......
"""
Database Routers for use with the courseware django app.
"""
class StudentModuleHistoryRouter(object):
    """
    A Database Router that separates StudentModuleHistory into its own database.
    """
    DATABASE_NAME = 'student_module_history'

    def _is_csmh(self, model):
        """
        Return True if ``model`` is courseware.StudentModuleHistory.

        Accepts either the model class or a model instance: Django passes
        *instances* to ``allow_relation``, and ``instance.__name__`` would
        raise AttributeError (``__name__`` is only defined on classes), so
        instances are normalized to their class first.
        """
        if not isinstance(model, type):
            model = type(model)
        return (
            model._meta.app_label == 'courseware' and  # pylint: disable=protected-access
            model.__name__ == 'StudentModuleHistory'
        )

    def db_for_read(self, model, **hints):  # pylint: disable=unused-argument
        """
        Use the StudentModuleHistoryRouter.DATABASE_NAME if the model is StudentModuleHistory.

        Returning None defers the decision to the next router / the default.
        """
        if self._is_csmh(model):
            return self.DATABASE_NAME
        else:
            return None

    def db_for_write(self, model, **hints):  # pylint: disable=unused-argument
        """
        Use the StudentModuleHistoryRouter.DATABASE_NAME if the model is StudentModuleHistory.

        Returning None defers the decision to the next router / the default.
        """
        if self._is_csmh(model):
            return self.DATABASE_NAME
        else:
            return None

    def allow_relation(self, obj1, obj2, **hints):  # pylint: disable=unused-argument
        """
        Disable relations if either object is StudentModuleHistory (it lives in
        a separate database, so cross-database relations cannot be enforced).
        """
        if self._is_csmh(obj1) or self._is_csmh(obj2):
            return False
        return None

    def allow_migrate(self, db, model):  # pylint: disable=unused-argument
        """
        Only sync StudentModuleHistory to StudentModuleHistoryRouter.DATABASE_NAME,
        and keep every other model out of that database.
        """
        if self._is_csmh(model):
            return db == self.DATABASE_NAME
        elif db == self.DATABASE_NAME:
            return False
        return None
......@@ -249,8 +249,9 @@ class TestMissingStudentModule(TestCase):
# to discover if something other than the DjangoXBlockUserStateClient
# has written to the StudentModule (such as UserStateCache setting the score
# on the StudentModule).
with self.assertNumQueries(5):
self.kvs.set(user_state_key('a_field'), 'a_value')
with self.assertNumQueries(2, using='default'):
with self.assertNumQueries(1, using='student_module_history'):
self.kvs.set(user_state_key('a_field'), 'a_value')
self.assertEquals(1, sum(len(cache) for cache in self.field_data_cache.cache.values()))
self.assertEquals(1, StudentModule.objects.all().count())
......
......@@ -72,6 +72,14 @@ DATABASES = {
'timeout': 30,
},
'ATOMIC_REQUESTS': True,
},
'student_module_history': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / "db" / "test_student_module_history.db",
'TEST_NAME': TEST_ROOT / "db" / "test_student_module_history.db",
'OPTIONS': {
'timeout': 30,
},
}
}
......
......@@ -759,6 +759,11 @@ MICROSITE_DATABASE_TEMPLATE_CACHE_TTL = ENV_TOKENS.get(
# Course Content Bookmarks Settings
MAX_BOOKMARKS_PER_COURSE = ENV_TOKENS.get('MAX_BOOKMARKS_PER_COURSE', MAX_BOOKMARKS_PER_COURSE)
# Offset for pk of courseware.StudentModuleHistoryExtended
STUDENTMODULEHISTORYEXTENDED_OFFSET = ENV_TOKENS.get(
'STUDENTMODULEHISTORYEXTENDED_OFFSET', STUDENTMODULEHISTORYEXTENDED_OFFSET
)
# Cutoff date for granting audit certificates
if ENV_TOKENS.get('AUDIT_CERT_CUTOFF_DATE', None):
AUDIT_CERT_CUTOFF_DATE = dateutil.parser.parse(ENV_TOKENS.get('AUDIT_CERT_CUTOFF_DATE'))
......
......@@ -13,18 +13,25 @@ from .aws import *
import os
from django.core.exceptions import ImproperlyConfigured
DB_OVERRIDES = dict(
PASSWORD=os.environ.get('DB_MIGRATION_PASS', None),
ENGINE=os.environ.get('DB_MIGRATION_ENGINE', DATABASES['default']['ENGINE']),
USER=os.environ.get('DB_MIGRATION_USER', DATABASES['default']['USER']),
NAME=os.environ.get('DB_MIGRATION_NAME', DATABASES['default']['NAME']),
HOST=os.environ.get('DB_MIGRATION_HOST', DATABASES['default']['HOST']),
PORT=os.environ.get('DB_MIGRATION_PORT', DATABASES['default']['PORT']),
)
if DB_OVERRIDES['PASSWORD'] is None:
raise ImproperlyConfigured("No database password was provided for running "
"migrations. This is fatal.")
def get_db_overrides(db_name):
    """
    Now that we have multiple databases, we want to look up from the environment
    for both databases.

    Each connection setting comes from a DB_MIGRATION_* environment variable,
    falling back to the value already configured in DATABASES[db_name]; the
    password has no fallback and is required.

    Raises:
        ImproperlyConfigured: if DB_MIGRATION_PASS is not set.
    """
    db_overrides = dict(
        PASSWORD=os.environ.get('DB_MIGRATION_PASS', None),
        ENGINE=os.environ.get('DB_MIGRATION_ENGINE', DATABASES[db_name]['ENGINE']),
        USER=os.environ.get('DB_MIGRATION_USER', DATABASES[db_name]['USER']),
        NAME=os.environ.get('DB_MIGRATION_NAME', DATABASES[db_name]['NAME']),
        HOST=os.environ.get('DB_MIGRATION_HOST', DATABASES[db_name]['HOST']),
        PORT=os.environ.get('DB_MIGRATION_PORT', DATABASES[db_name]['PORT']),
    )
    # Note: the stale pre-refactor loop that copied the old module-level
    # DB_OVERRIDES dict into DATABASES['default'] has been removed -- it
    # referenced a deleted global and clobbered 'default' on every call.
    if db_overrides['PASSWORD'] is None:
        raise ImproperlyConfigured("No database password was provided for running "
                                   "migrations. This is fatal.")
    return db_overrides

# Apply the environment-driven overrides to every configured database.
for db in ['default', 'student_module_history']:
    DATABASES[db].update(get_db_overrides(db))
......@@ -39,6 +39,14 @@
"PASSWORD": "",
"PORT": "3306",
"USER": "root"
},
"student_module_history": {
"ENGINE": "django.db.backends.mysql",
"HOST": "localhost",
"NAME": "student_module_history_test",
"PASSWORD": "",
"PORT": "3306",
"USER": "root"
}
},
"DOC_STORE_CONFIG": {
......
......@@ -413,6 +413,12 @@ GEOIPV6_PATH = REPO_ROOT / "common/static/data/geoip/GeoIPv6.dat"
# Where to look for a status message
STATUS_MESSAGE_PATH = ENV_ROOT / "status_message.json"
############################ Global Database Configuration #####################
DATABASE_ROUTERS = [
'courseware.routers.StudentModuleHistoryRouter',
]
############################ OpenID Provider ##################################
OPENID_PROVIDER_TRUSTED_ROOTS = ['cs50.net', '*.cs50.net']
......@@ -2759,6 +2765,10 @@ MOBILE_APP_USER_AGENT_REGEXES = [
r'edX/org.edx.mobile',
]
# Offset for courseware.StudentModuleHistoryExtended which is used to
# calculate the starting primary key for the underlying table.
STUDENTMODULEHISTORYEXTENDED_OFFSET = 10000
# Deprecated xblock types
DEPRECATED_ADVANCED_COMPONENT_TYPES = []
......
......@@ -48,6 +48,11 @@ DATABASES = {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ENV_ROOT / "db" / "edx.db",
'ATOMIC_REQUESTS': True,
},
'student_module_history': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ENV_ROOT / "db" / "student_module_history.db",
'ATOMIC_REQUESTS': True,
}
}
......
......@@ -31,6 +31,15 @@ DATABASES = {
'HOST': '127.0.0.1',
'PORT': '3306',
'ATOMIC_REQUESTS': True,
},
'student_module_history': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'student_module_history',
'USER': 'root',
'PASSWORD': '',
'HOST': '127.0.0.1',
'PORT': '3306',
'ATOMIC_REQUESTS': True,
}
}
......
......@@ -27,6 +27,11 @@ DATABASES = {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ENV_ROOT / "db" / "edx.db",
'ATOMIC_REQUESTS': True,
},
'student_module_history': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ENV_ROOT / "db" / "student_module_history.db",
'ATOMIC_REQUESTS': True,
}
}
......
......@@ -186,7 +186,10 @@ DATABASES = {
'NAME': TEST_ROOT / 'db' / 'edx.db',
'ATOMIC_REQUESTS': True,
},
'student_module_history': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / 'db' / 'student_module_history.db'
},
}
if os.environ.get('DISABLE_MIGRATIONS'):
......
......@@ -19,7 +19,9 @@ DATABASES = {
'ENGINE': 'django.db.backends.sqlite3',
'ATOMIC_REQUESTS': True,
},
'student_module_history': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
# Provide a dummy XQUEUE_INTERFACE setting as LMS expects it to exist on start up
......
......@@ -24,29 +24,54 @@
DB_CACHE_DIR="common/test/db_cache"
declare -A databases
databases=(["default"]="edxtest" ["student_module_history"]="student_module_history_test")
# Ensure the test database exists.
echo "CREATE DATABASE IF NOT EXISTS edxtest;" | mysql -u root
for db in "${!databases[@]}"; do
echo "CREATE DATABASE IF NOT EXISTS ${databases[$db]};" | mysql -u root
# Clear out the test database
#
# We are using the django-extensions's reset_db command which uses "DROP DATABASE" and
# "CREATE DATABASE" in case the tests are being run in an environment (e.g. devstack
# or a jenkins worker environment) that already ran tests on another commit that had
# different migrations that created, dropped, or altered tables.
echo "Issuing a reset_db command to the bok_choy MySQL database."
./manage.py lms --settings bok_choy reset_db --traceback --noinput --router $db
# If there are cached database schemas/data, load them
if [[ ! -f $DB_CACHE_DIR/bok_choy_schema_$db.sql || ! -f $DB_CACHE_DIR/bok_choy_data_$db.json ]]; then
echo "Missing $DB_CACHE_DIR/bok_choy_schema_$db.sql or $DB_CACHE_DIR/bok_choy_data_$db.json, rebuilding cache"
REBUILD_CACHE=true
fi
done
# migrations are only stored in the default database
if [[ ! -f $DB_CACHE_DIR/bok_choy_migrations_data.sql ]]; then
REBUILD_CACHE=true
fi
# Clear out the test database
#
# We are using the django-extensions's reset_db command which uses "DROP DATABASE" and
# "CREATE DATABASE" in case the tests are being run in an environment (e.g. devstack
# or a jenkins worker environment) that already ran tests on another commit that had
# different migrations that created, dropped, or altered tables.
echo "Issuing a reset_db command to the bok_choy MySQL database."
./manage.py lms --settings bok_choy reset_db --traceback --noinput
# If there are cached database schemas/data, load them
if [[ -f $DB_CACHE_DIR/bok_choy_schema.sql && -f $DB_CACHE_DIR/bok_choy_migrations_data.sql && -f $DB_CACHE_DIR/bok_choy_data.json ]]; then
if [[ -z $REBUILD_CACHE ]]; then
echo "Found the bok_choy DB cache files. Loading them into the database..."
# Load the schema, then the data (including the migration history)
echo "Loading the schema from the filesystem into the MySQL DB."
mysql -u root edxtest < $DB_CACHE_DIR/bok_choy_schema.sql
for db in "${!databases[@]}"; do
# Load the schema, then the data (including the migration history)
echo "Loading the schema from the filesystem into the MySQL DB."
mysql -u root "${databases["$db"]}" < $DB_CACHE_DIR/bok_choy_schema_$db.sql
echo "Loading the fixture data from the filesystem into the MySQL DB."
./manage.py lms --settings bok_choy loaddata --database $db $DB_CACHE_DIR/bok_choy_data_$db.json
done
# Migrations are stored in the default database
echo "Loading the migration data from the filesystem into the MySQL DB."
mysql -u root edxtest < $DB_CACHE_DIR/bok_choy_migrations_data.sql
echo "Loading the fixture data from the filesystem into the MySQL DB."
./manage.py lms --settings bok_choy loaddata $DB_CACHE_DIR/bok_choy_data.json
mysql -u root "${databases['default']}" < $DB_CACHE_DIR/bok_choy_migrations_data.sql
# Re-run migrations to ensure we are up-to-date
echo "Running the lms migrations on the bok_choy DB."
......@@ -66,13 +91,15 @@ else
echo "Issuing a migrate command to the bok_choy MySQL database for the cms django apps."
./manage.py cms --settings bok_choy migrate --traceback --noinput
# Dump the schema and data to the cache
echo "Using the dumpdata command to save the fixture data to the filesystem."
./manage.py lms --settings bok_choy dumpdata > $DB_CACHE_DIR/bok_choy_data.json
for db in "${!databases[@]}"; do
# Dump the schema and data to the cache
echo "Using the dumpdata command to save the fixture data to the filesystem."
./manage.py lms --settings bok_choy dumpdata --database $db > $DB_CACHE_DIR/bok_choy_data_$db.json
echo "Saving the schema of the bok_choy DB to the filesystem."
mysqldump -u root --no-data --skip-comments --skip-dump-date "${databases[$db]}" > $DB_CACHE_DIR/bok_choy_schema_$db.sql
done
# dump_data does not dump the django_migrations table so we do it separately.
echo "Saving the django_migrations table of the bok_choy DB to the filesystem."
mysqldump -u root --no-create-info edxtest django_migrations > $DB_CACHE_DIR/bok_choy_migrations_data.sql
echo "Saving the schema of the bok_choy DB to the filesystem."
mysqldump -u root --no-data --skip-comments --skip-dump-date edxtest > $DB_CACHE_DIR/bok_choy_schema.sql
mysqldump -u root --no-create-info "${databases['default']}" django_migrations > $DB_CACHE_DIR/bok_choy_migrations_data.sql
fi
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment