Commit 77a29242 by Nimisha Asthagiri Committed by GitHub

Merge pull request #14571 from edx/beryl/cbs_models

Storage-backed versioned Block Structures: Models
parents 5e21f64c 6f9b810f
...@@ -85,17 +85,10 @@ def request_cached(f): ...@@ -85,17 +85,10 @@ def request_cached(f):
""" """
Wrapper function to decorate with. Wrapper function to decorate with.
""" """
# Build our cache key based on the module the function belongs to, the functions name, and a stringified
# list of arguments and a query string-style stringified list of keyword arguments.
converted_args = map(str, args)
converted_kwargs = map(str, reduce(list.__add__, map(list, sorted(kwargs.iteritems())), []))
cache_keys = [f.__module__, f.func_name] + converted_args + converted_kwargs
cache_key = '.'.join(cache_keys)
# Check to see if we have a result in cache. If not, invoke our wrapped # Check to see if we have a result in cache. If not, invoke our wrapped
# function. Cache and return the result to the caller. # function. Cache and return the result to the caller.
rcache = RequestCache.get_request_cache() rcache = RequestCache.get_request_cache()
cache_key = func_call_cache_key(f, *args, **kwargs)
if cache_key in rcache.data: if cache_key in rcache.data:
return rcache.data.get(cache_key) return rcache.data.get(cache_key)
...@@ -105,4 +98,17 @@ def request_cached(f): ...@@ -105,4 +98,17 @@ def request_cached(f):
return result return result
wrapper.request_cached_contained_func = f
return wrapper return wrapper
def func_call_cache_key(func, *args, **kwargs):
    """
    Returns a cache key for a function call.

    The key is built from the function's module, the function's name, a
    stringified list of the positional arguments, and a query string-style
    stringified list of the (sorted) keyword arguments, all joined by '.'.
    """
    converted_args = [str(arg) for arg in args]
    # Flatten the sorted kwargs into alternating name/value entries so that
    # equivalent calls (regardless of kwarg order) share a cache key.
    converted_kwargs = [
        str(item)
        for pair in sorted(kwargs.items())
        for item in pair
    ]
    # func.__name__ (rather than the Python-2-only func.func_name) and
    # kwargs.items() work on both Python 2 and Python 3; the comprehension
    # also avoids the quadratic reduce(list.__add__, ...) list concatenation.
    cache_keys = [func.__module__, func.__name__] + converted_args + converted_kwargs
    return '.'.join(cache_keys)
...@@ -597,6 +597,9 @@ BROKER_URL = "{0}://{1}:{2}@{3}/{4}".format(CELERY_BROKER_TRANSPORT, ...@@ -597,6 +597,9 @@ BROKER_URL = "{0}://{1}:{2}@{3}/{4}".format(CELERY_BROKER_TRANSPORT,
CELERY_BROKER_VHOST) CELERY_BROKER_VHOST)
BROKER_USE_SSL = ENV_TOKENS.get('CELERY_BROKER_USE_SSL', False) BROKER_USE_SSL = ENV_TOKENS.get('CELERY_BROKER_USE_SSL', False)
# Block Structures
BLOCK_STRUCTURES_SETTINGS = ENV_TOKENS.get('BLOCK_STRUCTURES_SETTINGS', BLOCK_STRUCTURES_SETTINGS)
# upload limits # upload limits
STUDENT_FILEUPLOAD_MAX_SIZE = ENV_TOKENS.get("STUDENT_FILEUPLOAD_MAX_SIZE", STUDENT_FILEUPLOAD_MAX_SIZE) STUDENT_FILEUPLOAD_MAX_SIZE = ENV_TOKENS.get("STUDENT_FILEUPLOAD_MAX_SIZE", STUDENT_FILEUPLOAD_MAX_SIZE)
......
...@@ -80,11 +80,11 @@ BLOCK_STRUCTURES_SETTINGS = dict( ...@@ -80,11 +80,11 @@ BLOCK_STRUCTURES_SETTINGS = dict(
# We have CELERY_ALWAYS_EAGER set to True, so there's no asynchronous # We have CELERY_ALWAYS_EAGER set to True, so there's no asynchronous
# code running and the celery routing is unimportant. # code running and the celery routing is unimportant.
# It does not make sense to retry. # It does not make sense to retry.
BLOCK_STRUCTURES_TASK_MAX_RETRIES=0, TASK_MAX_RETRIES=0,
# course publish task delay is irrelevant because the task is run synchronously # course publish task delay is irrelevant because the task is run synchronously
BLOCK_STRUCTURES_COURSE_PUBLISH_TASK_DELAY=0, COURSE_PUBLISH_TASK_DELAY=0,
# retry delay is irrelevant because we never retry # retry delay is irrelevant because we never retry
BLOCK_STRUCTURES_TASK_DEFAULT_RETRY_DELAY=0, TASK_DEFAULT_RETRY_DELAY=0,
) )
###################### Grade Downloads ###################### ###################### Grade Downloads ######################
......
...@@ -1805,13 +1805,18 @@ BLOCK_STRUCTURES_SETTINGS = dict( ...@@ -1805,13 +1805,18 @@ BLOCK_STRUCTURES_SETTINGS = dict(
# for a better chance at getting the latest changes when there # for a better chance at getting the latest changes when there
# are secondary reads in sharded mongoDB clusters. See TNL-5041 # are secondary reads in sharded mongoDB clusters. See TNL-5041
# for more info. # for more info.
BLOCK_STRUCTURES_COURSE_PUBLISH_TASK_DELAY=30, COURSE_PUBLISH_TASK_DELAY=30,
# Delay, in seconds, between retry attempts if a task fails. # Delay, in seconds, between retry attempts if a task fails.
BLOCK_STRUCTURES_TASK_DEFAULT_RETRY_DELAY=30, TASK_DEFAULT_RETRY_DELAY=30,
# Maximum number of retries per task. # Maximum number of retries per task.
BLOCK_STRUCTURES_TASK_MAX_RETRIES=5, TASK_MAX_RETRIES=5,
# Backend storage
# STORAGE_CLASS='storages.backends.s3boto.S3BotoStorage',
# STORAGE_KWARGS=dict(bucket='nim-beryl-test'),
# DIRECTORY_PREFIX='/modeltest/',
) )
################################ Bulk Email ################################### ################################ Bulk Email ###################################
......
"""
Django Admin for Block Structures.
"""
from django.contrib import admin
from config_models.admin import ConfigurationModelAdmin
from .config.models import BlockStructureConfiguration
class BlockStructureAdmin(ConfigurationModelAdmin):
    """
    Django admin configuration for BlockStructureConfiguration.
    """
    def get_displayable_field_names(self):
        """
        Excludes the unused 'enabled' field from super's list of
        displayable field names.
        """
        # ConfigurationModel always defines 'enabled', but this model does
        # not use it, so hide it from the admin display.
        displayable_field_names = super(BlockStructureAdmin, self).get_displayable_field_names()
        displayable_field_names.remove('enabled')
        return displayable_field_names


admin.site.register(BlockStructureConfiguration, BlockStructureAdmin)
"""
This module contains various configuration settings via
waffle switches for the Block Structure framework.
"""
from waffle import switch_is_active
INVALIDATE_CACHE_ON_PUBLISH = u'invalidate_cache_on_publish'
STORAGE_BACKING_FOR_CACHE = u'storage_backing_for_cache'
RAISE_ERROR_WHEN_NOT_FOUND = u'raise_error_when_not_found'
def is_enabled(setting_name):
    """
    Returns whether the given setting is enabled.
    """
    switch_name = waffle_switch_name(setting_name)
    return switch_is_active(switch_name)
def waffle_switch_name(setting_name):
    """
    Returns the fully name-spaced waffle switch name for the
    given setting name.
    """
    return u'block_structure.{setting}'.format(setting=setting_name)
"""
This module contains various configuration settings via
waffle switches for the Block Structure framework.
"""
from openedx.core.djangolib.waffle_utils import is_switch_enabled
from request_cache.middleware import request_cached
from .models import BlockStructureConfiguration
INVALIDATE_CACHE_ON_PUBLISH = u'invalidate_cache_on_publish'
STORAGE_BACKING_FOR_CACHE = u'storage_backing_for_cache'
RAISE_ERROR_WHEN_NOT_FOUND = u'raise_error_when_not_found'
PRUNE_OLD_VERSIONS = u'prune_old_versions'
def is_enabled(setting_name):
    """
    Returns whether the given block_structure setting
    is enabled.
    """
    return is_switch_enabled(_bs_waffle_switch_name(setting_name))
@request_cached
def num_versions_to_keep():
    """
    Returns and request-caches the current num_versions_to_keep setting.
    """
    current_config = BlockStructureConfiguration.current()
    return current_config.num_versions_to_keep
@request_cached
def cache_timeout_in_seconds():
    """
    Returns and request-caches the current cache_timeout_in_seconds setting.
    """
    current_config = BlockStructureConfiguration.current()
    return current_config.cache_timeout_in_seconds
def _bs_waffle_switch_name(setting_name):
    """
    Returns the name-spaced waffle switch name for the
    given block structure setting.
    """
    return u'block_structure.{setting}'.format(setting=setting_name)
"""
Models for configuration of Block Structures.
"""
from django.db.models import IntegerField
from config_models.models import ConfigurationModel
class BlockStructureConfiguration(ConfigurationModel):
    """
    Configuration model for Block Structures.

    Stores the pruning keep-count and cache-timeout settings used by the
    block structure framework; current values are read elsewhere via
    BlockStructureConfiguration.current().
    """
    # Default number of versions to keep when pruning old block structure files.
    DEFAULT_PRUNE_KEEP_COUNT = 5
    # Default cache timeout for cached block structures.
    DEFAULT_CACHE_TIMEOUT_IN_SECONDS = 60 * 60 * 24  # 24 hours

    class Meta(object):
        app_label = 'block_structure'
        db_table = 'block_structure_config'

    # Both fields allow null/blank so administrators can leave them unset.
    num_versions_to_keep = IntegerField(blank=True, null=True, default=DEFAULT_PRUNE_KEEP_COUNT)
    cache_timeout_in_seconds = IntegerField(blank=True, null=True, default=DEFAULT_CACHE_TIMEOUT_IN_SECONDS)

    def __unicode__(self):
        # Human-readable representation for logging/admin display.
        return u"BlockStructureConfiguration: num_versions_to_keep: {}, cache_timeout_in_seconds: {}".format(
            self.num_versions_to_keep,
            self.cache_timeout_in_seconds,
        )
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
    """
    Initial migration: creates the BlockStructureConfiguration
    config-model table (block_structure_config).
    """

    dependencies = [
        # 'changed_by' below is a foreign key to the (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='BlockStructureConfiguration',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
                ('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
                # Defaults mirror BlockStructureConfiguration.DEFAULT_PRUNE_KEEP_COUNT
                # and DEFAULT_CACHE_TIMEOUT_IN_SECONDS (86400 == 24 hours).
                ('num_versions_to_keep', models.IntegerField(default=5, null=True, blank=True)),
                ('cache_timeout_in_seconds', models.IntegerField(default=86400, null=True, blank=True)),
                ('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
            ],
            options={
                'db_table': 'block_structure_config',
            },
        ),
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
import openedx.core.djangoapps.xmodule_django.models
import model_utils.fields
import openedx.core.djangoapps.content.block_structure.models
class Migration(migrations.Migration):
    """
    Creates the BlockStructureModel table (block_structure), which stores
    per-usage-key versioning metadata plus a file reference to the
    serialized block structure data.
    """

    dependencies = [
        ('block_structure', '0001_config'),
    ]

    operations = [
        migrations.CreateModel(
            name='BlockStructureModel',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # created/modified are supplied by TimeStampedModel.
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)),
                ('data_usage_key', openedx.core.djangoapps.xmodule_django.models.UsageKeyField(unique=True, max_length=255, verbose_name='Identifier of the data being collected.')),
                ('data_version', models.CharField(max_length=255, null=True, verbose_name='Version of the data at the time of collection.', blank=True)),
                ('data_edit_timestamp', models.DateTimeField(null=True, verbose_name='Edit timestamp of the data at the time of collection.', blank=True)),
                ('transformers_schema_version', models.CharField(max_length=255, verbose_name='Representation of the schema version of the transformers used during collection.')),
                ('block_structure_schema_version', models.CharField(max_length=255, verbose_name='Version of the block structure schema at the time of collection.')),
                # Filenames are generated by models._path_name at save time.
                ('data', models.FileField(max_length=500, upload_to=openedx.core.djangoapps.content.block_structure.models._path_name)),
            ],
            options={
                'db_table': 'block_structure',
            },
        ),
    ]
"""
Models used by the block structure framework.
"""
from datetime import datetime
from django.conf import settings
from django.core.files.base import ContentFile
from django.db import models
from logging import getLogger
from model_utils.models import TimeStampedModel
from openedx.core.djangoapps.xmodule_django.models import UsageKeyField
from openedx.core.lib.block_structure.exceptions import BlockStructureNotFound
from openedx.core.storage import get_storage
import openedx.core.djangoapps.content.block_structure.config as config
log = getLogger(__name__)
def _create_path(directory, filename):
    """
    Returns the full path for the given directory and filename.
    """
    return '/'.join((directory, filename))
def _directory_name(data_usage_key):
    """
    Returns the storage directory name for the given data_usage_key,
    prefixed by the configured DIRECTORY_PREFIX (if any).
    """
    directory_prefix = settings.BLOCK_STRUCTURES_SETTINGS.get('DIRECTORY_PREFIX', '')
    return '{}{}'.format(directory_prefix, unicode(data_usage_key))
def _path_name(bs_model, filename):  # pylint:disable=unused-argument
    """
    Returns the path name to use for the given BlockStructureModel
    instance, ignoring the given filename in favor of a fresh UTC
    timestamp so that each save creates a new file version.
    """
    timestamped_name = datetime.utcnow().strftime('%Y-%m-%d-%H:%M:%S-%f')
    directory = _directory_name(bs_model.data_usage_key)
    return _create_path(directory, timestamped_name)
def _bs_model_storage():
    """
    Get django Storage object for BlockStructureModel.
    """
    bs_settings = settings.BLOCK_STRUCTURES_SETTINGS
    storage_class = bs_settings.get('STORAGE_CLASS')
    storage_kwargs = bs_settings.get('STORAGE_KWARGS', {})
    return get_storage(storage_class, **storage_kwargs)
class BlockStructureModel(TimeStampedModel):
    """
    Model for storing Block Structure information.

    Each row identifies a versioned block structure by its usage key plus
    version fields; the serialized structure itself is stored in a file
    backend (see _bs_model_storage) through the ``data`` FileField.
    """
    # Fields that together describe the version of the collected data.
    VERSION_FIELDS = [
        u'data_version',
        u'data_edit_timestamp',
        u'transformers_schema_version',
        u'block_structure_schema_version',
    ]
    # Fields that uniquely identify an entry; used by __unicode__ for logging.
    UNIQUENESS_FIELDS = [u'data_usage_key'] + VERSION_FIELDS

    class Meta(object):
        db_table = 'block_structure'

    data_usage_key = UsageKeyField(
        u'Identifier of the data being collected.',
        blank=False,
        max_length=255,
        unique=True,
    )
    data_version = models.CharField(
        u'Version of the data at the time of collection.',
        blank=True,
        null=True,
        max_length=255,
    )
    data_edit_timestamp = models.DateTimeField(
        u'Edit timestamp of the data at the time of collection.',
        blank=True,
        null=True,
    )
    transformers_schema_version = models.CharField(
        u'Representation of the schema version of the transformers used during collection.',
        blank=False,
        max_length=255,
    )
    block_structure_schema_version = models.CharField(
        u'Version of the block structure schema at the time of collection.',
        blank=False,
        max_length=255,
    )
    # The serialized block structure; filenames come from _path_name.
    data = models.FileField(
        upload_to=_path_name,
        max_length=500,  # allocate enough for base path + prefix + usage_key + timestamp in filepath
    )

    def get_serialized_data(self):
        """
        Returns the collected data for this instance.
        """
        serialized_data = self.data.read()
        log.info("BlockStructure: Read data from store; %r, size: %d", self, len(serialized_data))
        return serialized_data

    @classmethod
    def get(cls, data_usage_key):
        """
        Returns the entry associated with the given data_usage_key.
        Raises:
            BlockStructureNotFound if an entry for data_usage_key is not found.
        """
        try:
            return cls.objects.get(data_usage_key=data_usage_key)
        except cls.DoesNotExist:
            log.info("BlockStructure: Not found in table; %r.", data_usage_key)
            raise BlockStructureNotFound(data_usage_key)

    @classmethod
    def update_or_create(cls, serialized_data, data_usage_key, **kwargs):
        """
        Updates or creates the BlockStructureModel entry
        for the given data_usage_key in the kwargs,
        uploading serialized_data as the content data.
        """
        bs_model, created = cls.objects.update_or_create(defaults=kwargs, data_usage_key=data_usage_key)
        # Saving with an empty name defers naming to the FileField's
        # upload_to (_path_name), which generates a fresh timestamped
        # filename, so each save produces a new file version.
        bs_model.data.save('', ContentFile(serialized_data))
        log.info(
            'BlockStructure: %s in store; %r, size: %d',
            'Created' if created else 'Updated',
            bs_model,
            len(serialized_data),
        )
        # On updates, older file versions may now exceed the configured
        # keep-count, so prune them.
        if not created:
            cls._prune_files(data_usage_key)
        return bs_model, created

    def __unicode__(self):
        """
        Returns a string representation of this model.
        """
        return u', '.join(
            u'{}: {}'.format(field_name, unicode(getattr(self, field_name)))
            for field_name in self.UNIQUENESS_FIELDS
        )

    @classmethod
    def _prune_files(cls, data_usage_key, num_to_keep=None):
        """
        Deletes previous file versions for data_usage_key.

        Arguments:
            data_usage_key - usage key whose stored files should be pruned.
            num_to_keep - number of most recent files to keep; defaults to
                the configured num_versions_to_keep.  A value <= 0 deletes
                all files for the key.
        """
        # Pruning is gated by a waffle switch so it can be toggled at runtime.
        if not config.is_enabled(config.PRUNE_OLD_VERSIONS):
            return
        if num_to_keep is None:
            num_to_keep = config.num_versions_to_keep()
        try:
            # Filenames embed a sortable UTC timestamp (see _path_name), so a
            # lexicographic sort orders the files oldest-first.
            all_files_by_date = sorted(cls._get_all_files(data_usage_key))
            files_to_delete = all_files_by_date[:-num_to_keep] if num_to_keep > 0 else all_files_by_date
            cls._delete_files(files_to_delete)
            log.info(
                'BlockStructure: Deleted %d out of total %d files in store; data_usage_key: %r, num_to_keep: %d.',
                len(files_to_delete),
                len(all_files_by_date),
                data_usage_key,
                num_to_keep,
            )
        except Exception as error:  # pylint: disable=broad-except
            # Pruning is best-effort cleanup; never fail the caller over it.
            log.exception(
                'BlockStructure: Exception when deleting old files; data_usage_key: %r, %r',
                data_usage_key,
                error,
            )

    @classmethod
    def _delete_files(cls, files):
        """
        Deletes the given files from storage.
        """
        storage = _bs_model_storage()
        # NOTE: Python 2 map() is eager, so the deletes execute here.
        map(storage.delete, files)

    @classmethod
    def _get_all_files(cls, data_usage_key):
        """
        Returns all filenames that exist for the given key.
        """
        directory = _directory_name(data_usage_key)
        # listdir returns (directories, files); only the files are relevant.
        _, filenames = _bs_model_storage().listdir(directory)
        return [
            _create_path(directory, filename)
            for filename in filenames
            if filename and not filename.startswith('.')  # skip hidden/placeholder files
        ]
...@@ -28,7 +28,7 @@ def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable ...@@ -28,7 +28,7 @@ def _listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable
update_course_in_cache.apply_async( update_course_in_cache.apply_async(
[unicode(course_key)], [unicode(course_key)],
countdown=settings.BLOCK_STRUCTURES_SETTINGS['BLOCK_STRUCTURES_COURSE_PUBLISH_TASK_DELAY'], countdown=settings.BLOCK_STRUCTURES_SETTINGS['COURSE_PUBLISH_TASK_DELAY'],
) )
......
...@@ -22,8 +22,8 @@ NO_RETRY_TASKS = (XMLSyntaxError, LoncapaProblemError, UnicodeEncodeError) ...@@ -22,8 +22,8 @@ NO_RETRY_TASKS = (XMLSyntaxError, LoncapaProblemError, UnicodeEncodeError)
@task( @task(
default_retry_delay=settings.BLOCK_STRUCTURES_SETTINGS['BLOCK_STRUCTURES_TASK_DEFAULT_RETRY_DELAY'], default_retry_delay=settings.BLOCK_STRUCTURES_SETTINGS['TASK_DEFAULT_RETRY_DELAY'],
max_retries=settings.BLOCK_STRUCTURES_SETTINGS['BLOCK_STRUCTURES_TASK_MAX_RETRIES'], max_retries=settings.BLOCK_STRUCTURES_SETTINGS['TASK_MAX_RETRIES'],
bind=True, bind=True,
) )
def update_course_in_cache(self, course_id): def update_course_in_cache(self, course_id):
...@@ -34,8 +34,8 @@ def update_course_in_cache(self, course_id): ...@@ -34,8 +34,8 @@ def update_course_in_cache(self, course_id):
@task( @task(
default_retry_delay=settings.BLOCK_STRUCTURES_SETTINGS['BLOCK_STRUCTURES_TASK_DEFAULT_RETRY_DELAY'], default_retry_delay=settings.BLOCK_STRUCTURES_SETTINGS['TASK_DEFAULT_RETRY_DELAY'],
max_retries=settings.BLOCK_STRUCTURES_SETTINGS['BLOCK_STRUCTURES_TASK_MAX_RETRIES'], max_retries=settings.BLOCK_STRUCTURES_SETTINGS['TASK_MAX_RETRIES'],
bind=True, bind=True,
) )
def get_course_in_cache(self, course_id): def get_course_in_cache(self, course_id):
......
...@@ -3,7 +3,10 @@ Helpers for Course Blocks tests. ...@@ -3,7 +3,10 @@ Helpers for Course Blocks tests.
""" """
from openedx.core.lib.block_structure.cache import BlockStructureCache from openedx.core.lib.block_structure.cache import BlockStructureCache
from openedx.core.djangolib.testing.waffle_utils import override_switch
from ..api import get_cache from ..api import get_cache
from ..config import _bs_waffle_switch_name
def is_course_in_block_structure_cache(course_key, store): def is_course_in_block_structure_cache(course_key, store):
...@@ -12,3 +15,15 @@ def is_course_in_block_structure_cache(course_key, store): ...@@ -12,3 +15,15 @@ def is_course_in_block_structure_cache(course_key, store):
""" """
course_usage_key = store.make_course_usage_key(course_key) course_usage_key = store.make_course_usage_key(course_key)
return BlockStructureCache(get_cache()).get(course_usage_key) is not None return BlockStructureCache(get_cache()).get(course_usage_key) is not None
class override_config_setting(override_switch):  # pylint:disable=invalid-name
    """
    Subclasses override_switch to use the block structure
    name-spaced switch names.
    """
    def __init__(self, name, active):
        namespaced_name = _bs_waffle_switch_name(name)
        super(override_config_setting, self).__init__(namespaced_name, active)
"""
Unit tests for Block Structure models.
"""
# pylint: disable=protected-access
import ddt
from django.test import TestCase
from django.utils.timezone import now
from itertools import product
from mock import patch, Mock
from uuid import uuid4
from opaque_keys.edx.locator import CourseLocator, BlockUsageLocator
from openedx.core.lib.block_structure.exceptions import BlockStructureNotFound
from ..config import PRUNE_OLD_VERSIONS
from ..models import BlockStructureModel
from .helpers import override_config_setting
@ddt.ddt
class BlockStructureModelTestCase(TestCase):
    """
    Tests for BlockStructureModel.
    """
    def setUp(self):
        super(BlockStructureModelTestCase, self).setUp()
        # Use a unique course run per test so stored files don't collide
        # across test runs against the same storage backend.
        self.course_key = CourseLocator('org', 'course', unicode(uuid4()))
        self.usage_key = BlockUsageLocator(course_key=self.course_key, block_type='course', block_id='course')
        self.params = self._create_bsm_params()

    def tearDown(self):
        # Force-delete all files created by the test (num_to_keep=0),
        # enabling the prune switch regardless of its value during the test.
        with override_config_setting(PRUNE_OLD_VERSIONS, active=True):
            BlockStructureModel._prune_files(self.usage_key, num_to_keep=0)
        super(BlockStructureModelTestCase, self).tearDown()

    def _assert_bsm_fields(self, bsm, expected_serialized_data):
        """
        Verifies that the field values and serialized data
        on the given bsm are as expected.
        """
        for field_name, field_value in self.params.iteritems():
            self.assertEqual(field_value, getattr(bsm, field_name))
        self.assertEqual(bsm.get_serialized_data(), expected_serialized_data)
        # The stored filename should live under the usage-key directory.
        self.assertIn(unicode(self.usage_key), bsm.data.name)

    def _assert_file_count_equal(self, expected_count):
        """
        Asserts the number of files for self.usage_key
        is as expected.
        """
        self.assertEqual(len(BlockStructureModel._get_all_files(self.usage_key)), expected_count)

    def _create_bsm_params(self):
        """
        Returns the parameters for creating a BlockStructureModel.
        """
        return dict(
            data_usage_key=self.usage_key,
            data_version='DV',
            data_edit_timestamp=now(),
            transformers_schema_version='TV',
            block_structure_schema_version=unicode(1),
        )

    def _verify_update_or_create_call(self, serialized_data, mock_log=None, expect_created=None):
        """
        Calls BlockStructureModel.update_or_create
        and verifies the response.
        """
        bsm, created = BlockStructureModel.update_or_create(serialized_data, **self.params)
        if mock_log:
            # Positional log args: (format, 'Created'/'Updated', model, size).
            self.assertEqual("Created" if expect_created else "Updated", mock_log.info.call_args[0][1])
            self.assertEqual(len(serialized_data), mock_log.info.call_args[0][3])
        self._assert_bsm_fields(bsm, serialized_data)
        if expect_created is not None:
            self.assertEqual(created, expect_created)
        return bsm

    @patch('openedx.core.djangoapps.content.block_structure.models.log')
    def test_update_or_create(self, mock_log):
        serialized_data = 'initial data'

        # shouldn't already exist
        with self.assertRaises(BlockStructureNotFound):
            BlockStructureModel.get(self.usage_key)
        self.assertIn("BlockStructure: Not found in table;", mock_log.info.call_args[0][0])

        # create an entry
        bsm = self._verify_update_or_create_call(serialized_data, mock_log, expect_created=True)

        # get entry
        found_bsm = BlockStructureModel.get(self.usage_key)
        self._assert_bsm_fields(found_bsm, serialized_data)
        self.assertIn("BlockStructure: Read data from store;", mock_log.info.call_args[0][0])

        # update entry
        self.params.update(dict(data_version='new version'))
        updated_serialized_data = 'updated data'
        updated_bsm = self._verify_update_or_create_call(updated_serialized_data, mock_log, expect_created=False)
        # A new file version was written rather than overwriting the old one.
        self.assertNotEqual(bsm.data.name, updated_bsm.data.name)

        # old files not pruned
        self._assert_file_count_equal(2)

    @override_config_setting(PRUNE_OLD_VERSIONS, active=True)
    @patch('openedx.core.djangoapps.content.block_structure.config.num_versions_to_keep', Mock(return_value=1))
    def test_prune_files(self):
        # With pruning enabled and keep-count 1, the update should leave
        # only the newest file behind.
        self._verify_update_or_create_call('test data', expect_created=True)
        self._verify_update_or_create_call('updated data', expect_created=False)
        self._assert_file_count_equal(1)

    @override_config_setting(PRUNE_OLD_VERSIONS, active=True)
    @patch('openedx.core.djangoapps.content.block_structure.config.num_versions_to_keep', Mock(return_value=1))
    @patch('openedx.core.djangoapps.content.block_structure.models.BlockStructureModel._delete_files')
    @patch('openedx.core.djangoapps.content.block_structure.models.log')
    def test_prune_exception(self, mock_log, mock_delete):
        # A failure during pruning is logged but does not propagate.
        mock_delete.side_effect = Exception
        self._verify_update_or_create_call('test data', expect_created=True)
        self._verify_update_or_create_call('updated data', expect_created=False)
        self.assertIn('BlockStructure: Exception when deleting old files', mock_log.exception.call_args[0][0])
        self._assert_file_count_equal(2)  # old files not pruned

    @ddt.data(
        *product(
            range(1, 3),  # prune_keep_count
            range(4),  # num_prior_edits
        )
    )
    @ddt.unpack
    def test_prune_keep_count(self, prune_keep_count, num_prior_edits):
        with patch(
            'openedx.core.djangoapps.content.block_structure.config.num_versions_to_keep',
            return_value=prune_keep_count,
        ):
            # Accumulate prior versions while pruning is disabled.
            for _ in range(num_prior_edits):
                self._verify_update_or_create_call('data')

            if num_prior_edits:
                self._assert_file_count_equal(num_prior_edits)

            # One more edit with pruning enabled prunes down to keep-count.
            with override_config_setting(PRUNE_OLD_VERSIONS, active=True):
                self._verify_update_or_create_call('data')
                self._assert_file_count_equal(min(prune_keep_count, num_prior_edits + 1))
...@@ -3,7 +3,6 @@ Unit tests for the Course Blocks signals ...@@ -3,7 +3,6 @@ Unit tests for the Course Blocks signals
""" """
import ddt import ddt
from mock import patch from mock import patch
from waffle.testutils import override_switch
from opaque_keys.edx.locator import LibraryLocator, CourseLocator from opaque_keys.edx.locator import LibraryLocator, CourseLocator
from xmodule.modulestore.exceptions import ItemNotFoundError from xmodule.modulestore.exceptions import ItemNotFoundError
...@@ -11,9 +10,9 @@ from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase ...@@ -11,9 +10,9 @@ from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory from xmodule.modulestore.tests.factories import CourseFactory
from ..api import get_block_structure_manager from ..api import get_block_structure_manager
from ..config import INVALIDATE_CACHE_ON_PUBLISH
from ..signals import _listen_for_course_publish from ..signals import _listen_for_course_publish
from ..config import INVALIDATE_CACHE_ON_PUBLISH, waffle_switch_name from .helpers import is_course_in_block_structure_cache, override_config_setting
from .helpers import is_course_in_block_structure_cache
@ddt.ddt @ddt.ddt
...@@ -55,7 +54,7 @@ class CourseBlocksSignalTest(ModuleStoreTestCase): ...@@ -55,7 +54,7 @@ class CourseBlocksSignalTest(ModuleStoreTestCase):
def test_cache_invalidation(self, invalidate_cache_enabled, mock_bs_manager_clear): def test_cache_invalidation(self, invalidate_cache_enabled, mock_bs_manager_clear):
test_display_name = "Jedi 101" test_display_name = "Jedi 101"
with override_switch(waffle_switch_name(INVALIDATE_CACHE_ON_PUBLISH), active=invalidate_cache_enabled): with override_config_setting(INVALIDATE_CACHE_ON_PUBLISH, active=invalidate_cache_enabled):
self.course.display_name = test_display_name self.course.display_name = test_display_name
self.store.update_item(self.course, self.user.id) self.store.update_item(self.course, self.user.id)
......
...@@ -5,9 +5,9 @@ from django.contrib.auth import get_user_model ...@@ -5,9 +5,9 @@ from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser from django.contrib.auth.models import AnonymousUser
from django.http.request import HttpRequest from django.http.request import HttpRequest
from django.test import TestCase from django.test import TestCase
from waffle.models import Switch
from ..utils import get_mock_request, toggle_switch from ..utils import get_mock_request
USER_MODEL = get_user_model() USER_MODEL = get_user_model()
...@@ -28,27 +28,3 @@ class TestGetMockRequest(TestCase): ...@@ -28,27 +28,3 @@ class TestGetMockRequest(TestCase):
def test_mock_request_without_user(self): def test_mock_request_without_user(self):
request = get_mock_request() request = get_mock_request()
self.assertIsInstance(request.user, AnonymousUser) self.assertIsInstance(request.user, AnonymousUser)
class TestToggleSwitch(TestCase):
    """
    Verify that the toggle_switch utility can be used to turn Waffle Switches
    on and off.
    """
    def test_toggle_switch(self):
        """Verify that a new switch can be turned on and off."""
        name = 'foo'
        switch = toggle_switch(name)

        # Verify that the switch was saved.
        self.assertEqual(switch, Switch.objects.get())

        # Verify that the switch has the right name and is active
        # (newly created switches default to active=True).
        self.assertEqual(switch.name, name)
        self.assertTrue(switch.active)

        # Toggling an existing switch flips its active flag.
        switch = toggle_switch(name)

        # Verify that the switch has been turned off.
        self.assertFalse(switch.active)
...@@ -190,25 +190,3 @@ def skip_unless_lms(func): ...@@ -190,25 +190,3 @@ def skip_unless_lms(func):
Only run the decorated test in the LMS test suite Only run the decorated test in the LMS test suite
""" """
return skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in LMS')(func) return skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in LMS')(func)
def toggle_switch(name, active=True):
    """
    Activate or deactivate a Waffle switch. The switch is created if it does not exist.

    Arguments:
        name (str): Name of the switch to be toggled.

    Keyword Arguments:
        active (bool): Whether a newly created switch should be on or off.

    Returns:
        Switch
    """
    defaults = {'active': active}
    switch, created = Switch.objects.get_or_create(name=name, defaults=defaults)
    if created:
        return switch
    # The switch already existed: flip its state and persist the change.
    switch.active = not switch.active
    switch.save()
    return switch
"""
Test utilities when using waffle.
"""
from waffle.testutils import override_switch as waffle_override_switch
from request_cache.middleware import RequestCache, func_call_cache_key
from ..waffle_utils import is_switch_enabled
class override_switch(waffle_override_switch):  # pylint:disable=invalid-name
    """
    Subclasses waffle's override_switch in order to clear the cache
    used on the is_switch_enabled function.
    """
    def _clear_cache(self):
        """
        Clears the requestcached values on the is_switch_enabled function.
        """
        cache_key = func_call_cache_key(is_switch_enabled.request_cached_contained_func, self.name)
        request_cache_data = RequestCache.get_request_cache().data
        request_cache_data.pop(cache_key, None)

    def __enter__(self):
        self._clear_cache()
        super(override_switch, self).__enter__()

    def __exit__(self, *args, **kwargs):
        self._clear_cache()
        super(override_switch, self).__exit__(*args, **kwargs)
"""
Utilities for waffle usage.
"""
from request_cache.middleware import request_cached
from waffle import switch_is_active
@request_cached
def is_switch_enabled(waffle_name):
    """
    Returns and caches whether the given waffle switch is enabled.

    See testing.waffle_utils.override_switch for a helper that overrides
    the switch and clears this request cache during tests.
    """
    return switch_is_active(waffle_name)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment