Commit 2aa99671 by Calen Pennington

Merge pull request #12386 from cpennington/cale/concurrent-unit-tests

[EV-12] Run LMS unit tests concurrently on jenkins
parents 558281a0 927b74e7
......@@ -54,7 +54,10 @@ _NOSEID_DIR.makedirs_p()
NOSE_ARGS = [
'--id-file', _NOSEID_DIR / 'noseids',
'--xunit-file', _REPORT_DIR / 'nosetests.xml',
]
NOSE_PLUGINS = [
'openedx.core.djangolib.testing.utils.NoseDatabaseIsolation'
]
TEST_ROOT = path('test_root')
......@@ -323,3 +326,7 @@ FEATURES['CUSTOM_COURSES_EDX'] = True
# API access management -- needed for simple-history to run.
INSTALLED_APPS += ('openedx.core.djangoapps.api_admin',)
# Set the default Oauth2 Provider Model so that migrations can run in
# verbose mode
OAUTH2_PROVIDER_APPLICATION_MODEL = 'oauth2_provider.Application'
......@@ -413,6 +413,13 @@ class OpenIdProviderLiveServerTest(LiveServerTestCase):
request = factory.request()
abs_provider_url = request.build_absolute_uri(location=provider_url)
# In order for this absolute URL to work (i.e. to get xrds, then authentication)
# in the test environment, we either need a live server that works with the default
# fetcher (i.e. urllib2), or a test server that is reached through a custom fetcher.
# Here we do the latter:
fetcher = MyFetcher(self.client)
openid.fetchers.setDefaultFetcher(fetcher, wrap_exceptions=False)
# now we can begin the login process by invoking a local openid client,
# with a pointer to the (also-local) openid provider:
with self.settings(OPENID_SSO_SERVER_URL=abs_provider_url):
......
......@@ -45,6 +45,7 @@ class MongoContentStore(ContentStore):
self.fs = gridfs.GridFS(mongo_db, bucket) # pylint: disable=invalid-name
self.fs_files = mongo_db[bucket + ".files"] # the underlying collection GridFS uses
self.chunks = mongo_db[bucket + ".chunks"]
def close_connections(self):
"""
......@@ -52,13 +53,31 @@ class MongoContentStore(ContentStore):
"""
self.fs_files.database.connection.close()
def _drop_database(self):
def _drop_database(self, database=True, collections=True, connections=True):
"""
A destructive operation to drop the underlying database and close all connections.
Intended to be used by test code for cleanup.
If database is True, then this should drop the entire database.
Otherwise, if collections is True, then this should drop all of the collections used
by this modulestore.
Otherwise, the modulestore should remove all data from the collections.
If connections is True, then close the connection to the database as well.
"""
self.close_connections()
self.fs_files.database.connection.drop_database(self.fs_files.database)
connection = self.fs_files.database.connection
if database:
connection.drop_database(self.fs_files.database)
elif collections:
self.fs_files.drop()
self.chunks.drop()
else:
self.fs_files.remove({})
self.chunks.remove({})
if connections:
self.close_connections()
def save(self, content):
content_id, content_son = self.asset_db_key(content.location)
......
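As a rough sketch of the call patterns the new signature is meant to support (the `store` object below stands in for any modulestore or contentstore exposing `_drop_database`; these calls are illustrative and not part of the diff):

    # Default behaviour is unchanged: drop the whole database and close connections.
    store._drop_database()

    # Drop only the collections, keeping the database and the open connection.
    store._drop_database(database=False, connections=False)

    # Keep the collections but remove every document from them.
    store._drop_database(database=False, collections=False, connections=False)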
......@@ -10,7 +10,6 @@ import datetime
from pytz import UTC
from collections import defaultdict
import collections
from contextlib import contextmanager
import threading
from operator import itemgetter
......@@ -1144,10 +1143,17 @@ class ModuleStoreWrite(ModuleStoreRead, ModuleStoreAssetWriteInterface):
pass
@abstractmethod
def _drop_database(self):
def _drop_database(self, database=True, collections=True, connections=True):
"""
A destructive operation to drop the underlying database and close all connections.
Intended to be used by test code for cleanup.
If database is True, then this should drop the entire database.
Otherwise, if collections is True, then this should drop all of the collections used
by this modulestore.
Otherwise, the modulestore should remove all data from the collections.
If connections is True, then close the connection to the database as well.
"""
pass
......@@ -1291,7 +1297,7 @@ class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
:param category: the xblock category
:param fields: the dictionary of {fieldname: value}
"""
result = collections.defaultdict(dict)
result = defaultdict(dict)
if fields is None:
return result
cls = self.mixologist.mix(XBlock.load_class(category, select=prefer_xmodules))
......@@ -1342,14 +1348,21 @@ class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
self.contentstore.delete_all_course_assets(course_key)
super(ModuleStoreWriteBase, self).delete_course(course_key, user_id)
def _drop_database(self):
def _drop_database(self, database=True, collections=True, connections=True):
"""
A destructive operation to drop the underlying database and close all connections.
Intended to be used by test code for cleanup.
If database is True, then this should drop the entire database.
Otherwise, if collections is True, then this should drop all of the collections used
by this modulestore.
Otherwise, the modulestore should remove all data from the collections.
If connections is True, then close the connection to the database as well.
"""
if self.contentstore:
self.contentstore._drop_database() # pylint: disable=protected-access
super(ModuleStoreWriteBase, self)._drop_database()
self.contentstore._drop_database(database, collections, connections) # pylint: disable=protected-access
super(ModuleStoreWriteBase, self)._drop_database(database, collections, connections)
def create_child(self, user_id, parent_usage_key, block_type, block_id=None, fields=None, **kwargs):
"""
......
......@@ -810,15 +810,22 @@ class MixedModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase):
for modulestore in self.modulestores:
modulestore.close_connections()
def _drop_database(self):
def _drop_database(self, database=True, collections=True, connections=True):
"""
A destructive operation to drop all databases and close all db connections.
A destructive operation to drop the underlying database and close all connections.
Intended to be used by test code for cleanup.
If database is True, then this should drop the entire database.
Otherwise, if collections is True, then this should drop all of the collections used
by this modulestore.
Otherwise, the modulestore should remove all data from the collections.
If connections is True, then close the connection to the database as well.
"""
for modulestore in self.modulestores:
# drop database if the store supports it (read-only stores do not)
if hasattr(modulestore, '_drop_database'):
modulestore._drop_database() # pylint: disable=protected-access
modulestore._drop_database(database, collections, connections) # pylint: disable=protected-access
@strip_key
def create_xblock(self, runtime, course_key, block_type, block_id=None, fields=None, **kwargs):
......
......@@ -611,17 +611,32 @@ class MongoModuleStore(ModuleStoreDraftAndPublished, ModuleStoreWriteBase, Mongo
self.database.connection._ensure_connected()
return self.database.connection.max_wire_version
def _drop_database(self):
def _drop_database(self, database=True, collections=True, connections=True):
"""
A destructive operation to drop the underlying database and close all connections.
Intended to be used by test code for cleanup.
If database is True, then this should drop the entire database.
Otherwise, if collections is True, then this should drop all of the collections used
by this modulestore.
Otherwise, the modulestore should remove all data from the collections.
If connections is True, then close the connection to the database as well.
"""
# drop the assets
super(MongoModuleStore, self)._drop_database()
super(MongoModuleStore, self)._drop_database(database, collections, connections)
connection = self.collection.database.connection
connection.drop_database(self.collection.database.proxied_object)
connection.close()
if database:
connection.drop_database(self.collection.database.proxied_object)
elif collections:
self.collection.drop()
else:
self.collection.remove({})
if connections:
connection.close()
@autoretry_read()
def fill_in_run(self, course_key):
......
......@@ -556,3 +556,43 @@ class MongoConnection(object):
unique=True,
background=True
)
def close_connections(self):
"""
Closes any open connections to the underlying databases
"""
self.database.connection.close()
def mongo_wire_version(self):
"""
Returns the wire version for mongo. Only used by unit tests which instrument the connection.
"""
return self.database.connection.max_wire_version
def _drop_database(self, database=True, collections=True, connections=True):
"""
A destructive operation to drop the underlying database and close all connections.
Intended to be used by test code for cleanup.
If database is True, then this should drop the entire database.
Otherwise, if collections is True, then this should drop all of the collections used
by this modulestore.
Otherwise, the modulestore should remove all data from the collections.
If connections is True, then close the connection to the database as well.
"""
connection = self.database.connection
if database:
connection.drop_database(self.database.name)
elif collections:
self.course_index.drop()
self.structures.drop()
self.definitions.drop()
else:
self.course_index.remove({})
self.structures.remove({})
self.definitions.remove({})
if connections:
connection.close()
......@@ -663,7 +663,6 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
super(SplitMongoModuleStore, self).__init__(contentstore, **kwargs)
self.db_connection = MongoConnection(**doc_store_config)
self.db = self.db_connection.database
if default_class is not None:
module_path, __, class_name = default_class.rpartition('.')
......@@ -693,25 +692,30 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
"""
Closes any open connections to the underlying databases
"""
self.db.connection.close()
self.db_connection.close_connections()
def mongo_wire_version(self):
"""
Returns the wire version for mongo. Only used by unit tests which instrument the connection.
"""
return self.db.connection.max_wire_version
return self.db_connection.mongo_wire_version()
def _drop_database(self):
def _drop_database(self, database=True, collections=True, connections=True):
"""
A destructive operation to drop the underlying database and close all connections.
Intended to be used by test code for cleanup.
If database is True, then this should drop the entire database.
Otherwise, if collections is True, then this should drop all of the collections used
by this modulestore.
Otherwise, the modulestore should remove all data from the collections.
If connections is True, then close the connection to the database as well.
"""
# drop the assets
super(SplitMongoModuleStore, self)._drop_database()
super(SplitMongoModuleStore, self)._drop_database(database, collections, connections)
connection = self.db.connection
connection.drop_database(self.db.name)
connection.close()
self.db_connection._drop_database(database, collections, connections) # pylint: disable=protected-access
def cache_items(self, system, base_block_ids, course_key, depth=0, lazy=True):
"""
......
......@@ -4,6 +4,7 @@ Modulestore configuration for test cases.
"""
import copy
import functools
import os
from uuid import uuid4
from contextlib import contextmanager
......@@ -93,8 +94,8 @@ def draft_mongo_store_config(data_dir):
'DOC_STORE_CONFIG': {
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'db': 'test_xmodule',
'collection': 'modulestore_{0}'.format(uuid4().hex[:5]),
'db': 'test_xmodule_{}'.format(os.getpid()),
'collection': 'modulestore',
},
'OPTIONS': modulestore_options
}
......@@ -120,8 +121,8 @@ def split_mongo_store_config(data_dir):
'DOC_STORE_CONFIG': {
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'db': 'test_xmodule',
'collection': 'modulestore_{0}'.format(uuid4().hex[:5]),
'db': 'test_xmodule_{}'.format(os.getpid()),
'collection': 'modulestore',
},
'OPTIONS': modulestore_options
}
......@@ -130,6 +131,27 @@ def split_mongo_store_config(data_dir):
return store
def contentstore_config():
"""
Return a new configuration for the contentstore that is isolated
from other such configurations.
"""
return {
'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
'DOC_STORE_CONFIG': {
'host': MONGO_HOST,
'db': 'test_xcontent_{}'.format(os.getpid()),
'port': MONGO_PORT_NUM,
},
# allow for additional options that can be keyed on a name, e.g. 'trashcan'
'ADDITIONAL_OPTIONS': {
'trashcan': {
'bucket': 'trash_fs'
}
}
}
@patch('xmodule.modulestore.django.create_modulestore_instance', autospec=True)
def drop_mongo_collections(mock_create):
"""
......@@ -140,7 +162,7 @@ def drop_mongo_collections(mock_create):
module_store = modulestore()
if hasattr(module_store, '_drop_database'):
module_store._drop_database() # pylint: disable=protected-access
module_store._drop_database(database=False) # pylint: disable=protected-access
_CONTENTSTORE.clear()
if hasattr(module_store, 'close_connections'):
module_store.close_connections()
......@@ -154,17 +176,20 @@ TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
# test course into this modulestore.
# If your test needs a graded course to test against, import the common/test/data/graded
# test course into this modulestore.
TEST_DATA_MIXED_MODULESTORE = mixed_store_config(
TEST_DATA_DIR, {}
TEST_DATA_MIXED_MODULESTORE = functools.partial(
mixed_store_config,
TEST_DATA_DIR,
{}
)
# All store requests now go through mixed
# Use this modulestore if you specifically want to test mongo and not a mocked modulestore.
TEST_DATA_MONGO_MODULESTORE = mixed_store_config(mkdtemp_clean(), {})
TEST_DATA_MONGO_MODULESTORE = functools.partial(mixed_store_config, mkdtemp_clean(), {})
# All store requests now go through mixed
# Use this modulestore if you specifically want to test split-mongo and not a mocked modulestore.
TEST_DATA_SPLIT_MODULESTORE = mixed_store_config(
TEST_DATA_SPLIT_MODULESTORE = functools.partial(
mixed_store_config,
mkdtemp_clean(),
{},
store_order=[StoreConstructors.split, StoreConstructors.draft]
......@@ -191,10 +216,12 @@ class ModuleStoreIsolationMixin(CacheIsolationMixin):
"""
MODULESTORE = mixed_store_config(mkdtemp_clean(), {})
MODULESTORE = functools.partial(mixed_store_config, mkdtemp_clean(), {})
CONTENTSTORE = functools.partial(contentstore_config)
ENABLED_CACHES = ['mongo_metadata_inheritance', 'loc_cache']
__settings_overrides = []
__old_modulestores = []
__old_contentstores = []
@classmethod
def start_modulestore_isolation(cls):
......@@ -205,10 +232,12 @@ class ModuleStoreIsolationMixin(CacheIsolationMixin):
"""
cls.start_cache_isolation()
override = override_settings(
MODULESTORE=cls.MODULESTORE,
MODULESTORE=cls.MODULESTORE(),
CONTENTSTORE=cls.CONTENTSTORE(),
)
cls.__old_modulestores.append(copy.deepcopy(settings.MODULESTORE))
cls.__old_contentstores.append(copy.deepcopy(settings.CONTENTSTORE))
override.__enter__()
cls.__settings_overrides.append(override)
XMODULE_FACTORY_LOCK.enable()
......@@ -227,6 +256,7 @@ class ModuleStoreIsolationMixin(CacheIsolationMixin):
cls.__settings_overrides.pop().__exit__(None, None, None)
assert settings.MODULESTORE == cls.__old_modulestores.pop()
assert settings.CONTENTSTORE == cls.__old_contentstores.pop()
cls.end_cache_isolation()
......
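A hedged sketch of how a test class consumes these configuration callables after this change (the class name is hypothetical; the import path is assumed from the module shown above):

    from xmodule.modulestore.tests.django_utils import (
        ModuleStoreTestCase, TEST_DATA_SPLIT_MODULESTORE
    )

    class MySplitCourseTest(ModuleStoreTestCase):
        # MODULESTORE is now a callable rather than a dict: start_modulestore_isolation()
        # invokes it, so the per-process database name is computed when the test class
        # starts up instead of at module import time.
        MODULESTORE = TEST_DATA_SPLIT_MODULESTORE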
......@@ -543,10 +543,8 @@ class SplitModuleTest(unittest.TestCase):
"""
Clear persistence between each test.
"""
collection_prefix = SplitModuleTest.MODULESTORE['DOC_STORE_CONFIG']['collection'] + '.'
if SplitModuleTest.modulestore:
for collection in ('active_versions', 'structures', 'definitions'):
modulestore().db.drop_collection(collection_prefix + collection)
modulestore()._drop_database(database=False, connections=False) # pylint: disable=protected-access
# drop the modulestore to force re init
SplitModuleTest.modulestore = None
super(SplitModuleTest, self).tearDown()
......
......@@ -57,35 +57,17 @@ class SplitWMongoCourseBootstrapper(unittest.TestCase):
self.db_config,
**self.modulestore_options
)
self.addCleanup(self.split_mongo.db.connection.close)
self.addCleanup(self.tear_down_split)
self.addCleanup(self.split_mongo._drop_database) # pylint: disable=protected-access
self.draft_mongo = DraftMongoModuleStore(
None, self.db_config, branch_setting_func=lambda: ModuleStoreEnum.Branch.draft_preferred,
metadata_inheritance_cache_subsystem=MemoryCache(),
**self.modulestore_options
)
self.addCleanup(self.tear_down_mongo)
self.addCleanup(self.draft_mongo._drop_database) # pylint: disable=protected-access
self.old_course_key = None
self.runtime = None
self._create_course()
def tear_down_split(self):
"""
Remove the test collections, close the db connection
"""
split_db = self.split_mongo.db
split_db.drop_collection(split_db.course_index.proxied_object)
split_db.drop_collection(split_db.structures.proxied_object)
split_db.drop_collection(split_db.definitions.proxied_object)
def tear_down_mongo(self):
"""
Remove the test collections, close the db connection
"""
split_db = self.split_mongo.db
# old_mongo doesn't give a db attr, but all of the dbs are the same
split_db.drop_collection(self.draft_mongo.collection.proxied_object)
def _create_item(self, category, name, data, metadata, parent_category, parent_name, draft=True, split=True):
"""
Create the item of the given category and block id in split and old mongo, add it to the optional
......
......@@ -131,6 +131,9 @@ class PartitionTestCase(TestCase):
extensions, namespace=USER_PARTITION_SCHEME_NAMESPACE
)
# Be sure to clean up the global scheme_extensions after the test.
self.addCleanup(self.cleanup_scheme_extensions)
# Create a test partition
self.user_partition = UserPartition(
self.TEST_ID,
......@@ -145,6 +148,12 @@ class PartitionTestCase(TestCase):
self.user_partition.get_scheme(self.non_random_scheme.name)
self.user_partition.get_scheme(self.random_scheme.name)
def cleanup_scheme_extensions(self):
"""
Unset the UserPartition.scheme_extensions cache.
"""
UserPartition.scheme_extensions = None
class TestUserPartition(PartitionTestCase):
"""Test constructing UserPartitions"""
......
To run a single test, format the command like this.
paver test_system -t lms/djangoapps/courseware/tests/tests.py:ActivateLoginTest.test_activate_login
The ``lms`` suite of tests runs concurrently, and in randomized order, by default.
You can override these defaults by using ``--no-randomize`` to disable randomization,
and ``--processes=N`` to control how many processes run tests concurrently (``0`` will
disable concurrency). For example:
::
# This will run all tests in the order that they appear in their files, serially
paver test_system -s lms --no-randomize --processes=0
# This will run using only 2 processes for tests
paver test_system -s lms --processes=2
To re-run all failing django tests from lms or cms, use the
``--failed``,\ ``-f`` flag (see note at end of section).
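For example, to re-run only the previously failing tests (following the same invocation pattern as the commands above):
::
# Re-run only the tests that failed during the previous run
paver test_system -s lms --failed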
......
......@@ -661,10 +661,6 @@ class CcxDetailTest(CcxRestApiTest):
"""
Test for the CCX REST APIs
"""
@classmethod
def setUpClass(cls):
super(CcxDetailTest, cls).setUpClass()
def setUp(self):
"""
Set up tests
......
......@@ -4,6 +4,7 @@ Tests for wiki middleware.
from django.conf import settings
from django.test.client import Client
from nose.plugins.attrib import attr
from unittest import skip
from wiki.models import URLPath
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
......@@ -33,6 +34,7 @@ class TestComprehensiveTheming(ModuleStoreTestCase):
self.client = Client()
self.client.login(username='instructor', password='secret')
@skip("Fails when run immediately after lms.djangoapps.course_wiki.tests.test_middleware")
@with_comprehensive_theme(settings.REPO_ROOT / 'themes/red-theme')
def test_themed_footer(self):
"""
......
......@@ -430,6 +430,8 @@ class ViewsTestCase(ModuleStoreTestCase):
course = CourseFactory.create(org="new", number="unenrolled", display_name="course")
request = self.request_factory.get(reverse('about_course', args=[unicode(course.id)]))
request.user = AnonymousUser()
# Set up the edxmako middleware for this request to create the RequestContext
mako_middleware_process_request(request)
response = views.course_about(request, unicode(course.id))
self.assertEqual(response.status_code, 200)
......@@ -468,6 +470,8 @@ class ViewsTestCase(ModuleStoreTestCase):
request = self.request_factory.get(reverse('about_course', args=[unicode(course.id)]))
request.user = AnonymousUser() if is_anonymous else self.user
# Set up the edxmako middleware for this request to create the RequestContext
mako_middleware_process_request(request)
# Construct the link for each of the four possibilities:
......@@ -905,6 +909,8 @@ class ViewsTestCase(ModuleStoreTestCase):
# Middleware is not supported by the request factory. Simulate a
# logged-in user by setting request.user manually.
request.user = self.user
# Set up the edxmako middleware for this request to create the RequestContext
mako_middleware_process_request(request)
self.assertFalse(self.course.bypass_home)
......@@ -1067,6 +1073,7 @@ class TestProgressDueDate(BaseDueDateTests):
def get_text(self, course):
""" Returns the HTML for the progress page """
# Set up the edxmako middleware for this request to create the RequestContext
mako_middleware_process_request(self.request)
return views.progress(self.request, course_id=unicode(course.id), student_id=self.user.id).content
......@@ -1097,6 +1104,9 @@ class StartDateTests(ModuleStoreTestCase):
self.request_factory = RequestFactory()
self.user = UserFactory.create()
self.request = self.request_factory.get("foo")
# Set up the edxmako middleware for this request to create the RequestContext
mako_middleware_process_request(self.request)
self.request.user = self.user
def set_up_course(self):
......@@ -1157,6 +1167,7 @@ class ProgressPageTests(ModuleStoreTestCase):
self.request = self.request_factory.get("foo")
self.request.user = self.user
# Set up the edxmako middleware for this request to create the RequestContext
mako_middleware_process_request(self.request)
self.setup_course()
......@@ -1656,6 +1667,8 @@ class TestIndexView(ModuleStoreTestCase):
)
)
request.user = user
# Set up the edxmako middleware for this request to create the RequestContext
mako_middleware_process_request(request)
# Trigger the assertions embedded in the ViewCheckerBlocks
......@@ -1687,6 +1700,8 @@ class TestIndexView(ModuleStoreTestCase):
) + '?activate_block_id=test_block_id'
)
request.user = user
# Set up the edxmako middleware for this request to create the RequestContext
mako_middleware_process_request(request)
response = CoursewareIndex.as_view()(
......@@ -1736,6 +1751,8 @@ class TestIndexViewWithGating(ModuleStoreTestCase, MilestonesTestCaseMixin):
)
)
request.user = self.user
# Set up the edxmako middleware for this request to create the RequestContext
mako_middleware_process_request(request)
with self.assertRaises(Http404):
......
......@@ -7,7 +7,7 @@ import shutil
import StringIO
import subprocess
import unittest
from uuid import uuid4
from nose.plugins.attrib import attr
from django.conf import settings
......@@ -17,7 +17,14 @@ from django.test.utils import override_settings
from opaque_keys.edx.locations import SlashSeparatedCourseKey
import dashboard.git_import as git_import
from dashboard.git_import import GitImportError
from dashboard.git_import import (
GitImportError,
GitImportErrorNoDir,
GitImportErrorUrlBad,
GitImportErrorCannotPull,
GitImportErrorBadRepo,
GitImportErrorRemoteBranchMissing,
)
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
......@@ -37,7 +44,10 @@ FEATURES_WITH_SSL_AUTH['AUTH_USE_CERTIFICATES'] = True
@attr('shard_3')
@override_settings(MONGODB_LOG=TEST_MONGODB_LOG)
@override_settings(
MONGODB_LOG=TEST_MONGODB_LOG,
GIT_REPO_DIR=settings.TEST_ROOT / "course_repos_{}".format(uuid4().hex)
)
@unittest.skipUnless(settings.FEATURES.get('ENABLE_SYSADMIN_DASHBOARD'),
"ENABLE_SYSADMIN_DASHBOARD not set")
class TestGitAddCourse(SharedModuleStoreTestCase):
......@@ -49,10 +59,13 @@ class TestGitAddCourse(SharedModuleStoreTestCase):
TEST_COURSE = 'MITx/edx4edx/edx4edx'
TEST_BRANCH = 'testing_do_not_delete'
TEST_BRANCH_COURSE = SlashSeparatedCourseKey('MITx', 'edx4edx_branch', 'edx4edx')
GIT_REPO_DIR = settings.GIT_REPO_DIR
ENABLED_CACHES = ['default', 'mongo_metadata_inheritance', 'loc_cache']
def setUp(self):
super(TestGitAddCourse, self).setUp()
self.git_repo_dir = settings.GIT_REPO_DIR
def assertCommandFailureRegexp(self, regex, *args):
"""
Convenience function for testing command failures
......@@ -72,40 +85,40 @@ class TestGitAddCourse(SharedModuleStoreTestCase):
'blah', 'blah', 'blah', 'blah')
# Not a valid path.
self.assertCommandFailureRegexp(
'Path {0} doesn\'t exist, please create it,'.format(self.GIT_REPO_DIR),
'Path {0} doesn\'t exist, please create it,'.format(self.git_repo_dir),
'blah')
# Test successful import from command
if not os.path.isdir(self.GIT_REPO_DIR):
os.mkdir(self.GIT_REPO_DIR)
self.addCleanup(shutil.rmtree, self.GIT_REPO_DIR)
if not os.path.isdir(self.git_repo_dir):
os.mkdir(self.git_repo_dir)
self.addCleanup(shutil.rmtree, self.git_repo_dir)
# Make a course dir that will be replaced with a symlink
# while we are at it.
if not os.path.isdir(self.GIT_REPO_DIR / 'edx4edx'):
os.mkdir(self.GIT_REPO_DIR / 'edx4edx')
if not os.path.isdir(self.git_repo_dir / 'edx4edx'):
os.mkdir(self.git_repo_dir / 'edx4edx')
call_command('git_add_course', self.TEST_REPO,
directory_path=self.GIT_REPO_DIR / 'edx4edx_lite')
directory_path=self.git_repo_dir / 'edx4edx_lite')
# Test with all three args (branch)
call_command('git_add_course', self.TEST_REPO,
directory_path=self.GIT_REPO_DIR / 'edx4edx_lite',
directory_path=self.git_repo_dir / 'edx4edx_lite',
repository_branch=self.TEST_BRANCH)
def test_add_repo(self):
"""
Various exit path tests for test_add_repo
"""
with self.assertRaisesRegexp(GitImportError, GitImportError.NO_DIR):
with self.assertRaises(GitImportErrorNoDir):
git_import.add_repo(self.TEST_REPO, None, None)
os.mkdir(self.GIT_REPO_DIR)
self.addCleanup(shutil.rmtree, self.GIT_REPO_DIR)
os.mkdir(self.git_repo_dir)
self.addCleanup(shutil.rmtree, self.git_repo_dir)
with self.assertRaisesRegexp(GitImportError, GitImportError.URL_BAD):
with self.assertRaises(GitImportErrorUrlBad):
git_import.add_repo('foo', None, None)
with self.assertRaisesRegexp(GitImportError, GitImportError.CANNOT_PULL):
with self.assertRaises(GitImportErrorCannotPull):
git_import.add_repo('file:///foobar.git', None, None)
# Test git repo that exists, but is "broken"
......@@ -115,14 +128,14 @@ class TestGitAddCourse(SharedModuleStoreTestCase):
subprocess.check_output(['git', '--bare', 'init', ], stderr=subprocess.STDOUT,
cwd=bare_repo)
with self.assertRaisesRegexp(GitImportError, GitImportError.BAD_REPO):
with self.assertRaises(GitImportErrorBadRepo):
git_import.add_repo('file://{0}'.format(bare_repo), None, None)
def test_detached_repo(self):
"""
Test repo that is in detached head state.
"""
repo_dir = self.GIT_REPO_DIR
repo_dir = self.git_repo_dir
# Test successful import from command
try:
os.mkdir(repo_dir)
......@@ -133,21 +146,21 @@ class TestGitAddCourse(SharedModuleStoreTestCase):
subprocess.check_output(['git', 'checkout', 'HEAD~2', ],
stderr=subprocess.STDOUT,
cwd=repo_dir / 'edx4edx_lite')
with self.assertRaisesRegexp(GitImportError, GitImportError.CANNOT_PULL):
with self.assertRaises(GitImportErrorCannotPull):
git_import.add_repo(self.TEST_REPO, repo_dir / 'edx4edx_lite', None)
def test_branching(self):
"""
Exercise branching code of import
"""
repo_dir = self.GIT_REPO_DIR
repo_dir = self.git_repo_dir
# Test successful import from command
if not os.path.isdir(repo_dir):
os.mkdir(repo_dir)
self.addCleanup(shutil.rmtree, repo_dir)
# Checkout non existent branch
with self.assertRaisesRegexp(GitImportError, GitImportError.REMOTE_BRANCH_MISSING):
with self.assertRaises(GitImportErrorRemoteBranchMissing):
git_import.add_repo(self.TEST_REPO, repo_dir / 'edx4edx_lite', 'asdfasdfasdf')
# Checkout new branch
......@@ -185,13 +198,13 @@ class TestGitAddCourse(SharedModuleStoreTestCase):
cwd=bare_repo)
# Build repo dir
repo_dir = self.GIT_REPO_DIR
repo_dir = self.git_repo_dir
if not os.path.isdir(repo_dir):
os.mkdir(repo_dir)
self.addCleanup(shutil.rmtree, repo_dir)
rdir = '{0}/bare'.format(repo_dir)
with self.assertRaisesRegexp(GitImportError, GitImportError.BAD_REPO):
with self.assertRaises(GitImportErrorBadRepo):
git_import.add_repo('file://{0}'.format(bare_repo), None, None)
# Get logger for checking strings in logs
......
......@@ -346,7 +346,8 @@ class Courses(SysadminDashboardView):
# Try the data dir, then try to find it in the git import dir
if not gdir.exists():
gdir = path(git_import.GIT_REPO_DIR) / cdir
git_repo_dir = getattr(settings, 'GIT_REPO_DIR', git_import.DEFAULT_GIT_REPO_DIR)
gdir = path(git_repo_dir / cdir)
if not gdir.exists():
return info
......
......@@ -6,6 +6,7 @@ import os
import re
import shutil
import unittest
from uuid import uuid4
from util.date_utils import get_time_display, DEFAULT_DATE_TIME_FORMAT
from nose.plugins.attrib import attr
......@@ -18,7 +19,7 @@ import mongoengine
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from dashboard.models import CourseImportLog
from dashboard.git_import import GitImportError
from dashboard.git_import import GitImportErrorNoDir
from datetime import datetime
from student.roles import CourseStaffRole, GlobalStaff
from student.tests.factories import UserFactory
......@@ -109,7 +110,10 @@ class SysadminBaseTestCase(SharedModuleStoreTestCase):
@attr('shard_1')
@override_settings(MONGODB_LOG=TEST_MONGODB_LOG)
@override_settings(
MONGODB_LOG=TEST_MONGODB_LOG,
GIT_REPO_DIR=settings.TEST_ROOT / "course_repos_{}".format(uuid4().hex)
)
@unittest.skipUnless(settings.FEATURES.get('ENABLE_SYSADMIN_DASHBOARD'),
"ENABLE_SYSADMIN_DASHBOARD not set")
class TestSysAdminMongoCourseImport(SysadminBaseTestCase):
......@@ -149,7 +153,7 @@ class TestSysAdminMongoCourseImport(SysadminBaseTestCase):
# Create git loaded course
response = self._add_edx4edx()
self.assertIn(GitImportError.NO_DIR,
self.assertIn(GitImportErrorNoDir(settings.GIT_REPO_DIR).message,
response.content.decode('UTF-8'))
def test_mongo_course_add_delete(self):
......
......@@ -70,6 +70,10 @@ FEATURES['ENABLE_VERIFIED_CERTIFICATES'] = True
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
FEATURES['ALLOW_COURSE_STAFF_GRADE_DOWNLOADS'] = True
GRADES_DOWNLOAD['ROOT_PATH'] += "-{}".format(os.getpid())
FINANCIAL_REPORTS['ROOT_PATH'] += "-{}".format(os.getpid())
# Toggles embargo on for testing
FEATURES['EMBARGO'] = True
......@@ -96,7 +100,10 @@ _NOSEID_DIR.makedirs_p()
NOSE_ARGS = [
'--id-file', _NOSEID_DIR / 'noseids',
'--xunit-file', _REPORT_DIR / 'nosetests.xml',
]
NOSE_PLUGINS = [
'openedx.core.djangolib.testing.utils.NoseDatabaseIsolation'
]
# Local Directories
......@@ -164,8 +171,8 @@ update_module_store_settings(
doc_store_settings={
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'db': 'test_xmodule',
'collection': 'test_modulestore{0}'.format(THIS_UUID),
'db': 'test_xmodule_{}'.format(THIS_UUID),
'collection': 'test_modulestore',
},
)
......@@ -173,7 +180,7 @@ CONTENTSTORE = {
'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
'DOC_STORE_CONFIG': {
'host': MONGO_HOST,
'db': 'xcontent',
'db': 'test_xcontent_{}'.format(THIS_UUID),
'port': MONGO_PORT_NUM,
}
}
......@@ -181,12 +188,10 @@ CONTENTSTORE = {
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / 'db' / 'edx.db',
'ATOMIC_REQUESTS': True,
},
'student_module_history': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / 'db' / 'student_module_history.db'
},
}
......@@ -576,3 +581,7 @@ JWT_AUTH.update({
# better-performing unit tests.
from openedx.core.lib.block_structure.transformer_registry import TransformerRegistry
TransformerRegistry.USE_PLUGIN_MANAGER = False
# Set the default Oauth2 Provider Model so that migrations can run in
# verbose mode
OAUTH2_PROVIDER_APPLICATION_MODEL = 'oauth2_provider.Application'
......@@ -10,11 +10,14 @@ Utility classes for testing django applications.
import copy
from django import db
from django.core.cache import caches
from django.test import TestCase, override_settings
from django.conf import settings
from django.contrib import sites
from nose.plugins import Plugin
from request_cache.middleware import RequestCache
......@@ -138,3 +141,20 @@ class CacheIsolationTestCase(CacheIsolationMixin, TestCase):
self.clear_caches()
self.addCleanup(self.clear_caches)
class NoseDatabaseIsolation(Plugin):
"""
Nose plugin that resets Django database connections before any tests begin.
Used to make sure that tests running in multiple processes aren't sharing
a database connection.
"""
name = "database-isolation"
def begin(self):
"""
Before any tests start, reset all django database connections.
"""
for db_ in db.connections.all():
db_.close()
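For reference, the pieces that activate this plugin appear elsewhere in this same diff; roughly:

    # The test settings modules register the plugin class:
    NOSE_PLUGINS = [
        'openedx.core.djangolib.testing.utils.NoseDatabaseIsolation'
    ]

    # and the paver-built nose command enables it with the flag:
    #   --with-database-isolation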
......@@ -31,6 +31,9 @@ __test__ = False # do not collect
('extra_args=', 'e', 'adds as extra args to the test command'),
('cov_args=', 'c', 'adds as args to coverage for the test run'),
('skip_clean', 'C', 'skip cleaning repository before running tests'),
('processes=', 'p', 'number of processes to use running tests'),
make_option('-r', '--randomize', action='store_true', dest='randomize', help='run the tests in a random order'),
make_option('--no-randomize', action='store_false', dest='randomize', help="don't run the tests in a random order"),
make_option("--verbose", action="store_const", const=2, dest="verbosity"),
make_option("-q", "--quiet", action="store_const", const=0, dest="verbosity"),
make_option("-v", "--verbosity", action="count", dest="verbosity", default=1),
......@@ -59,6 +62,8 @@ def test_system(options):
'skip_clean': getattr(options, 'skip_clean', False),
'pdb': getattr(options, 'pdb', False),
'disable_migrations': getattr(options, 'disable_migrations', False),
'processes': getattr(options, 'processes', None),
'randomize': getattr(options, 'randomize', None),
}
if test_id:
......
......@@ -6,6 +6,11 @@ from pavelib.utils.test import utils as test_utils
from pavelib.utils.test.suites.suite import TestSuite
from pavelib.utils.envs import Env
try:
from pygments.console import colorize
except ImportError:
colorize = lambda color, text: text
__test__ = False # do not collect
......@@ -33,6 +38,7 @@ class NoseTestSuite(TestSuite):
self.test_ids = self.test_id_dir / 'noseids'
self.extra_args = kwargs.get('extra_args', '')
self.cov_args = kwargs.get('cov_args', '')
self.use_ids = True
def __enter__(self):
super(NoseTestSuite, self).__enter__()
......@@ -101,6 +107,9 @@ class NoseTestSuite(TestSuite):
if self.pdb:
opts += " --pdb"
if self.use_ids:
opts += " --with-id"
return opts
......@@ -113,25 +122,49 @@ class SystemTestSuite(NoseTestSuite):
self.test_id = kwargs.get('test_id', self._default_test_id)
self.fasttest = kwargs.get('fasttest', False)
self.processes = kwargs.get('processes', None)
self.randomize = kwargs.get('randomize', None)
if self.processes is None:
# Use one process per core for LMS tests, and no multiprocessing
# otherwise.
self.processes = -1 if self.root == 'lms' else 0
self.processes = int(self.processes)
if self.randomize is None:
self.randomize = self.root == 'lms'
if self.processes != 0 and self.verbosity > 1:
print colorize(
'red',
"The TestId module and multiprocessing module can't be run "
"together in verbose mode. Disabling TestId for {} tests.".format(self.root)
)
self.use_ids = False
def __enter__(self):
super(SystemTestSuite, self).__enter__()
@property
def cmd(self):
cmd = (
'./manage.py {system} test --verbosity={verbosity} '
'{test_id} {test_opts} --settings=test {extra} '
'--with-xunit --xunit-file={xunit_report}'.format(
system=self.root,
verbosity=self.verbosity,
test_id=self.test_id,
test_opts=self.test_options_flags,
extra=self.extra_args,
xunit_report=self.report_dir / "nosetests.xml",
)
)
return self._under_coverage_cmd(cmd)
cmd = [
'./manage.py', self.root, 'test',
'--verbosity={}'.format(self.verbosity),
self.test_id,
self.test_options_flags,
'--settings=test',
self.extra_args,
'--with-xunitmp',
'--xunitmp-file={}'.format(self.report_dir / "nosetests.xml"),
'--processes={}'.format(self.processes),
'--with-database-isolation',
]
if self.randomize:
cmd.append('--with-randomly')
return self._under_coverage_cmd(" ".join(cmd))
@property
def _default_test_id(self):
......
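For illustration, with the LMS defaults the joined command comes out roughly like the following (the test id, report path, and any extra options are placeholders that depend on the local checkout):

    ./manage.py lms test --verbosity=1 lms/djangoapps --settings=test \
        --with-xunitmp --xunitmp-file=reports/lms/nosetests.xml \
        --processes=-1 --with-database-isolation --with-randomly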
......@@ -157,13 +157,13 @@ moto==0.3.1
nose==1.3.7
nose-exclude
nose-ignore-docstring
nose-randomly==1.2.0
nosexcover==1.0.7
pep8==1.5.7
PyContracts==1.7.1
python-subunit==0.0.16
pyquery==1.2.9
radon==1.2
rednose==0.4.3
selenium==2.53.1
splinter==0.5.4
testtools==0.9.34
......
......@@ -99,21 +99,22 @@ case "$TEST_SUITE" in
;;
"lms-unit")
LMS_ARGS="--with-flaky"
case "$SHARD" in
"all")
paver test_system -s lms --extra_args="--with-flaky" --cov_args="-p"
paver test_system -s lms --extra_args="$LMS_ARGS" --cov_args="-p" -v
;;
"1")
paver test_system -s lms --extra_args="--attr='shard_1' --with-flaky" --cov_args="-p" -v
paver test_system -s lms --extra_args="--attr='shard_1' $LMS_ARGS" --cov_args="-p" -v
;;
"2")
paver test_system -s lms --extra_args="--attr='shard_2' --with-flaky" --cov_args="-p" -v
paver test_system -s lms --extra_args="--attr='shard_2' $LMS_ARGS" --cov_args="-p" -v
;;
"3")
paver test_system -s lms --extra_args="--attr='shard_3' --with-flaky" --cov_args="-p" -v
paver test_system -s lms --extra_args="--attr='shard_3' $LMS_ARGS" --cov_args="-p" -v
;;
"4")
paver test_system -s lms --extra_args="--attr='shard_1=False,shard_2=False,shard_3=False' --with-flaky" --cov_args="-p" -v
paver test_system -s lms --extra_args="--attr='shard_1=False,shard_2=False,shard_3=False' $LMS_ARGS" --cov_args="-p" -v
;;
*)
# If no shard is specified, rather than running all tests, create an empty xunit file. This is a
......
[nosetests]
logging-clear-handlers=1
with-xunit=1
rednose=1
with-xunitmp=1
with-ignore-docstrings=1
with-id=1
exclude-dir=lms/envs
cms/envs
......@@ -11,6 +9,8 @@ exclude-dir=lms/envs
# which shadows the xblock library (among other things)
no-path-adjustment=1
process-timeout=300
# Uncomment the following lines to open pdb when a test fails
#nocapture=1
#pdb=1
......