Commit 090fc12b by Nimisha Asthagiri, committed by GitHub

Merge pull request #14512 from edx/beryl/cbs_waffled_manager

Storage-backed Versioned Block Structures: Manager and Store
parents 45eff7ef 02c24cb2
......@@ -4,8 +4,12 @@ Tests for Blocks api.py
import ddt
from django.test.client import RequestFactory
from itertools import product
from openedx.core.djangoapps.content.block_structure.api import clear_course_from_cache
from openedx.core.djangoapps.content.block_structure.config import STORAGE_BACKING_FOR_CACHE
from openedx.core.djangoapps.content.block_structure.tests.helpers import override_config_setting
from student.tests.factories import UserFactory
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
......@@ -121,26 +125,45 @@ class TestGetBlocksQueryCounts(SharedModuleStoreTestCase):
with self.store.default_store(store_type):
return SampleCourseFactory.create()
def _get_blocks(self, course, expected_mongo_queries):
def _get_blocks(self, course, expected_mongo_queries, expected_sql_queries):
"""
Verifies the number of expected queries when calling
get_blocks on the given course.
"""
with check_mongo_calls(expected_mongo_queries):
with self.assertNumQueries(2):
with self.assertNumQueries(expected_sql_queries):
get_blocks(self.request, course.location, self.user)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_query_counts_cached(self, store_type):
course = self._create_course(store_type)
self._get_blocks(course, expected_mongo_queries=0)
@ddt.data(
*product(
(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split),
(True, False),
)
)
@ddt.unpack
def test_query_counts_cached(self, store_type, with_storage_backing):
with override_config_setting(STORAGE_BACKING_FOR_CACHE, active=with_storage_backing):
course = self._create_course(store_type)
self._get_blocks(
course,
expected_mongo_queries=0,
expected_sql_queries=4 if with_storage_backing else 3,
)
@ddt.data(
(ModuleStoreEnum.Type.mongo, 5),
(ModuleStoreEnum.Type.split, 3),
*product(
((ModuleStoreEnum.Type.mongo, 5), (ModuleStoreEnum.Type.split, 3)),
(True, False),
)
)
@ddt.unpack
def test_query_counts_uncached(self, store_type, expected_mongo_queries):
course = self._create_course(store_type)
clear_course_from_cache(course.id)
self._get_blocks(course, expected_mongo_queries)
def test_query_counts_uncached(self, store_type_tuple, with_storage_backing):
store_type, expected_mongo_queries = store_type_tuple
with override_config_setting(STORAGE_BACKING_FOR_CACHE, active=with_storage_backing):
course = self._create_course(store_type)
clear_course_from_cache(course.id)
self._get_blocks(
course,
expected_mongo_queries,
expected_sql_queries=9 if with_storage_backing else 3,
)
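
For readers following the test changes above, the ddt product() expansion is small enough to spell out; this sketch is illustrative only and not part of the diff:

# The ddt.data(*product(...)) decorators above expand into four parameter sets, so
# each query-count test runs once per (store type, storage backing) combination.
from itertools import product
store_types = ('mongo', 'split')   # stand-ins for the ModuleStoreEnum.Type values
with_storage = (True, False)
print(list(product(store_types, with_storage)))
# [('mongo', True), ('mongo', False), ('split', True), ('split', False)]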
......@@ -158,7 +158,7 @@ class MilestonesTransformerTestCase(CourseStructureTestCase, MilestonesTestCaseM
self.course.enable_subsection_gating = True
self.setup_gated_section(self.blocks[gated_block_ref], self.blocks[gating_block_ref])
with self.assertNumQueries(3):
with self.assertNumQueries(6):
self.get_blocks_and_check_against_expected(self.user, expected_blocks_before_completion)
# clear the request cache to simulate a new request
......@@ -174,7 +174,7 @@ class MilestonesTransformerTestCase(CourseStructureTestCase, MilestonesTestCaseM
self.blocks[gating_block_child],
self.user.id,
)
with self.assertNumQueries(2):
with self.assertNumQueries(5):
self.get_blocks_and_check_against_expected(self.user, self.ALL_BLOCKS_EXCEPT_SPECIAL)
def test_staff_access(self):
......
......@@ -8,7 +8,7 @@ from xmodule.modulestore.django import modulestore
import openedx.core.djangoapps.content.block_structure.api as api
import openedx.core.djangoapps.content.block_structure.tasks as tasks
import openedx.core.lib.block_structure.cache as cache
import openedx.core.lib.block_structure.store as store
from openedx.core.lib.command_utils import (
get_mutually_exclusive_required_option,
validate_dependent_option,
......@@ -113,7 +113,7 @@ class Command(BaseCommand):
cache_log_level = logging.INFO
log.setLevel(log_level)
cache.logger.setLevel(cache_log_level)
store.logger.setLevel(cache_log_level)
def _generate_course_blocks(self, options, course_keys):
"""
......
......@@ -71,11 +71,11 @@ class TestGradeIteration(SharedModuleStoreTestCase):
"""
with patch.object(
BlockStructureFactory,
'create_from_cache',
wraps=BlockStructureFactory.create_from_cache
) as mock_create_from_cache:
'create_from_store',
wraps=BlockStructureFactory.create_from_store
) as mock_create_from_store:
all_course_grades, all_errors = self._course_grades_and_errors_for(self.course, self.students)
self.assertEquals(mock_create_from_cache.call_count, 1)
self.assertEquals(mock_create_from_store.call_count, 1)
self.assertEqual(len(all_errors), 0)
for course_grade in all_course_grades.values():
......@@ -100,7 +100,8 @@ class TestGradeIteration(SharedModuleStoreTestCase):
else mock_course_grade.return_value
for student in self.students
]
all_course_grades, all_errors = self._course_grades_and_errors_for(self.course, self.students)
with self.assertNumQueries(4):
all_course_grades, all_errors = self._course_grades_and_errors_for(self.course, self.students)
self.assertEqual(
all_errors,
{
......
......@@ -136,8 +136,8 @@ class RecalculateSubsectionGradeTest(ModuleStoreTestCase):
self.set_up_course()
self.assertTrue(PersistentGradesEnabledFlag.feature_enabled(self.course.id))
with patch(
'openedx.core.lib.block_structure.factory.BlockStructureFactory.create_from_cache',
side_effect=BlockStructureNotFound,
'openedx.core.lib.block_structure.factory.BlockStructureFactory.create_from_store',
side_effect=BlockStructureNotFound(self.course.location),
) as mock_block_structure_create:
self._apply_recalculate_subsection_grade()
self.assertEquals(mock_block_structure_create.call_count, 1)
......
......@@ -1773,7 +1773,7 @@ class TestCertificateGeneration(InstructorTaskModuleTestCase):
'failed': 3,
'skipped': 2
}
with self.assertNumQueries(166):
with self.assertNumQueries(169):
self.assertCertificatesGenerated(task_input, expected_results)
expected_results = {
......
......@@ -25,7 +25,7 @@ def update_course_in_cache(course_key):
block_structure.update_collected_if_needed function that updates the block
structure in the cache for the given course_key.
"""
return get_block_structure_manager(course_key).update_collected()
return get_block_structure_manager(course_key).update_collected_if_needed()
def clear_course_from_cache(course_key):
......
"""
Helpers for Course Blocks tests.
"""
from openedx.core.lib.block_structure.cache import BlockStructureCache
from openedx.core.djangolib.testing.waffle_utils import override_switch
from openedx.core.lib.block_structure.exceptions import BlockStructureNotFound
from openedx.core.lib.block_structure.store import BlockStructureStore
from ..api import get_cache
from ..config import _bs_waffle_switch_name
......@@ -14,7 +14,11 @@ def is_course_in_block_structure_cache(course_key, store):
Returns whether the given course is in the Block Structure cache.
"""
course_usage_key = store.make_course_usage_key(course_key)
return BlockStructureCache(get_cache()).get(course_usage_key) is not None
try:
BlockStructureStore(get_cache()).get(course_usage_key)
return True
except BlockStructureNotFound:
return False
class override_config_setting(override_switch): # pylint:disable=invalid-name
......
"""
Module for the Cache class for BlockStructure objects.
"""
# pylint: disable=protected-access
from logging import getLogger
from openedx.core.lib.cache_utils import zpickle, zunpickle
from .block_structure import BlockStructureBlockData
from .factory import BlockStructureFactory
logger = getLogger(__name__) # pylint: disable=C0103
class BlockStructureCache(object):
"""
Cache for BlockStructure objects.
"""
def __init__(self, cache):
"""
Arguments:
cache (django.core.cache.backends.base.BaseCache) - The
cache into which cacheable data of the block structure
is to be serialized.
"""
self._cache = cache
def add(self, block_structure):
"""
Store a compressed and pickled serialization of the given
block structure into the given cache.
The key in the cache is 'root.key.<root_block_usage_key>'.
The data stored in the cache includes the structure's
block relations, transformer data, and block data.
Arguments:
block_structure (BlockStructure) - The block structure
that is to be serialized to the given cache.
"""
data_to_cache = (
block_structure._block_relations,
block_structure.transformer_data,
block_structure._block_data_map,
)
zp_data_to_cache = zpickle(data_to_cache)
# Set the timeout value for the cache to 1 day as a fail-safe
# in case the signal to invalidate the cache doesn't come through.
timeout_in_seconds = 60 * 60 * 24
self._cache.set(
self._encode_root_cache_key(block_structure.root_block_usage_key),
zp_data_to_cache,
timeout=timeout_in_seconds,
)
logger.info(
"Wrote BlockStructure %s to cache, size: %s",
block_structure.root_block_usage_key,
len(zp_data_to_cache),
)
def get(self, root_block_usage_key):
"""
Deserializes and returns the block structure starting at
root_block_usage_key from the given cache, if it's found in the cache.
The given root_block_usage_key must match the root_block_usage_key
previously passed to serialize_to_cache.
Arguments:
root_block_usage_key (UsageKey) - The usage_key for the root
of the block structure that is to be deserialized from
the given cache.
Returns:
BlockStructure - The deserialized block structure starting
at root_block_usage_key, if found in the cache.
NoneType - If the root_block_usage_key is not found in the cache.
"""
# Find root_block_usage_key in the cache.
zp_data_from_cache = self._cache.get(self._encode_root_cache_key(root_block_usage_key))
if not zp_data_from_cache:
logger.info(
"Did not find BlockStructure %r in the cache.",
root_block_usage_key,
)
return None
else:
logger.info(
"Read BlockStructure %r from cache, size: %s",
root_block_usage_key,
len(zp_data_from_cache),
)
# Deserialize and construct the block structure.
block_relations, transformer_data, block_data_map = zunpickle(zp_data_from_cache)
return BlockStructureFactory.create_new(
root_block_usage_key,
block_relations,
transformer_data,
block_data_map,
)
def delete(self, root_block_usage_key):
"""
Deletes the block structure for the given root_block_usage_key
from the given cache.
Arguments:
root_block_usage_key (UsageKey) - The usage_key for the root
of the block structure that is to be removed from
the cache.
"""
self._cache.delete(self._encode_root_cache_key(root_block_usage_key))
logger.info(
"Deleted BlockStructure %r from the cache.",
root_block_usage_key,
)
@classmethod
def _encode_root_cache_key(cls, root_block_usage_key):
"""
Returns the cache key to use for storing the block structure
for the given root_block_usage_key.
"""
return "v{version}.root.key.{root_usage_key}".format(
version=unicode(BlockStructureBlockData.VERSION),
root_usage_key=unicode(root_block_usage_key),
)
......@@ -36,4 +36,7 @@ class BlockStructureNotFound(BlockStructureException):
"""
Exception for when a Block Structure is not found.
"""
pass
def __init__(self, root_block_usage_key):
super(BlockStructureNotFound, self).__init__(
'Block structure not found; data_usage_key: {}'.format(root_block_usage_key)
)
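
A minimal sketch of how the reworked exception reads to a caller; the usage key value is made up and the snippet is not part of the diff:

from opaque_keys.edx.keys import UsageKey
from openedx.core.lib.block_structure.exceptions import BlockStructureNotFound

# BlockStructureNotFound now takes the root usage key and builds its own message,
# so call sites (see the factory.py change below) no longer format one themselves.
root_block_usage_key = UsageKey.from_string(
    'block-v1:org+course+run+type@course+block@course'  # illustrative key only
)
try:
    raise BlockStructureNotFound(root_block_usage_key)
except BlockStructureNotFound as error:
    print(error)  # Block structure not found; data_usage_key: <that key>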
......@@ -2,7 +2,6 @@
Module for factory class for BlockStructure objects.
"""
from .block_structure import BlockStructureModulestoreData, BlockStructureBlockData
from .exceptions import BlockStructureNotFound
class BlockStructureFactory(object):
......@@ -59,10 +58,10 @@ class BlockStructureFactory(object):
return block_structure
@classmethod
def create_from_cache(cls, root_block_usage_key, block_structure_cache):
def create_from_store(cls, root_block_usage_key, block_structure_store):
"""
Deserializes and returns the block structure starting at
root_block_usage_key from the given cache, if it's found in the cache.
root_block_usage_key from the given store, if it's found in the store.
The given root_block_usage_key must match the root_block_usage_key
previously passed to serialize_to_cache.
......@@ -72,8 +71,8 @@ class BlockStructureFactory(object):
of the block structure that is to be deserialized from
the given cache.
block_structure_cache (BlockStructureCache) - The
cache from which the block structure is to be
block_structure_store (BlockStructureStore) - The
store from which the block structure is to be
deserialized.
Returns:
......@@ -82,12 +81,9 @@ class BlockStructureFactory(object):
Raises:
BlockStructureNotFound - If the root_block_usage_key is not found
in the cache.
in the store.
"""
block_structure = block_structure_cache.get(root_block_usage_key)
if block_structure is None:
raise BlockStructureNotFound('Block structure for {} not found in the cache.'.format(root_block_usage_key))
return block_structure
return block_structure_store.get(root_block_usage_key)
@classmethod
def create_new(cls, root_block_usage_key, block_relations, transformer_data, block_data_map):
......
......@@ -4,9 +4,11 @@ BlockStructures.
"""
from contextlib import contextmanager
from .cache import BlockStructureCache
from .factory import BlockStructureFactory
from openedx.core.djangoapps.content.block_structure import config
from .exceptions import UsageKeyNotInBlockStructure, TransformerDataIncompatible, BlockStructureNotFound
from .factory import BlockStructureFactory
from .store import BlockStructureStore
from .transformers import BlockStructureTransformers
......@@ -31,7 +33,7 @@ class BlockStructureManager(object):
"""
self.root_block_usage_key = root_block_usage_key
self.modulestore = modulestore
self.block_structure_cache = BlockStructureCache(cache)
self.store = BlockStructureStore(cache)
def get_transformed(self, transformers, starting_block_usage_key=None, collected_block_structure=None):
"""
......@@ -90,22 +92,32 @@ class BlockStructureManager(object):
from each registered transformer.
"""
try:
block_structure = BlockStructureFactory.create_from_cache(
block_structure = BlockStructureFactory.create_from_store(
self.root_block_usage_key,
self.block_structure_cache
self.store,
)
BlockStructureTransformers.verify_versions(block_structure)
except (BlockStructureNotFound, TransformerDataIncompatible):
block_structure = self.update_collected()
if config.is_enabled(config.RAISE_ERROR_WHEN_NOT_FOUND):
raise
else:
block_structure = self._update_collected()
return block_structure
def update_collected(self):
def update_collected_if_needed(self):
"""
Updates the collected Block Structure for the root_block_usage_key.
The store is updated with newly collected transformers data from
the modulestore, only if the data in the store is outdated.
"""
with self._bulk_operations():
if not self.store.is_up_to_date(self.root_block_usage_key, self.modulestore):
self._update_collected()
Details: The cache is updated by collecting transformers data from
def _update_collected(self):
"""
The store is updated with newly collected transformers data from
the modulestore.
"""
with self._bulk_operations():
......@@ -114,15 +126,15 @@ class BlockStructureManager(object):
self.modulestore,
)
BlockStructureTransformers.collect(block_structure)
self.block_structure_cache.add(block_structure)
self.store.add(block_structure)
return block_structure
def clear(self):
"""
Removes cached data for the block structure associated with the given
Removes data for the block structure associated with the given
root block key.
"""
self.block_structure_cache.delete(self.root_block_usage_key)
self.store.delete(self.root_block_usage_key)
@contextmanager
def _bulk_operations(self):
......
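
To summarize the control flow introduced above, a hedged sketch of the intended call pattern; the helper name is made up, and the constructor arguments mirror the manager tests further down:

from openedx.core.lib.block_structure.manager import BlockStructureManager

def load_collected_structure(root_block_usage_key, modulestore, cache):
    """Sketch of the intended call pattern; this helper is illustrative only."""
    manager = BlockStructureManager(root_block_usage_key, modulestore, cache)
    # Write path: update_collected_if_needed() re-collects from the modulestore
    # only when store.is_up_to_date() reports the stored version data as stale.
    manager.update_collected_if_needed()
    # Read path: get_collected() loads via BlockStructureFactory.create_from_store();
    # on BlockStructureNotFound or TransformerDataIncompatible it re-raises when
    # config.RAISE_ERROR_WHEN_NOT_FOUND is enabled, otherwise rebuilds in place.
    return manager.get_collected()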
"""
Module for the Storage of BlockStructure objects.
"""
# pylint: disable=protected-access
from logging import getLogger
import openedx.core.djangoapps.content.block_structure.config as config
from openedx.core.djangoapps.content.block_structure.models import BlockStructureModel
from openedx.core.lib.cache_utils import zpickle, zunpickle
from .block_structure import BlockStructureBlockData
from .exceptions import BlockStructureNotFound
from .factory import BlockStructureFactory
from .transformer_registry import TransformerRegistry
logger = getLogger(__name__) # pylint: disable=C0103
class StubModel(object):
"""
Stub model to use when storage backing is disabled.
By using this stub, we eliminate the need for extra
conditional statements in the code.
"""
def __init__(self, root_block_usage_key):
self.data_usage_key = root_block_usage_key
def __unicode__(self):
return unicode(self.data_usage_key)
def delete(self):
"""
Noop delete method.
"""
pass
class BlockStructureStore(object):
"""
Storage for BlockStructure objects.
"""
def __init__(self, cache):
"""
Arguments:
cache (django.core.cache.backends.base.BaseCache) - The
cache into which cacheable data of the block structure
is to be serialized.
"""
self._cache = cache
def add(self, block_structure):
"""
Stores and caches a compressed and pickled serialization of
the given block structure.
The data stored includes the structure's
block relations, transformer data, and block data.
Arguments:
block_structure (BlockStructure) - The block structure
that is to be cached and stored.
"""
serialized_data = self._serialize(block_structure)
bs_model = self._update_or_create_model(block_structure, serialized_data)
self._add_to_cache(serialized_data, bs_model)
def get(self, root_block_usage_key):
"""
Deserializes and returns the block structure starting at
root_block_usage_key, if found in the cache or storage.
The given root_block_usage_key must match the
root_block_usage_key previously passed to the `add` method.
Arguments:
root_block_usage_key (UsageKey) - The usage_key for the
root of the block structure that is to be retrieved
from the store.
Returns:
BlockStructure - The deserialized block structure starting
at root_block_usage_key, if found.
Raises:
BlockStructureNotFound if the root_block_usage_key is not
found.
"""
bs_model = self._get_model(root_block_usage_key)
try:
serialized_data = self._get_from_cache(bs_model)
except BlockStructureNotFound:
serialized_data = self._get_from_store(bs_model)
return self._deserialize(serialized_data, root_block_usage_key)
def delete(self, root_block_usage_key):
"""
Deletes the block structure for the given root_block_usage_key
from the cache and storage.
Arguments:
root_block_usage_key (UsageKey) - The usage_key for the root
of the block structure that is to be removed.
"""
bs_model = self._get_model(root_block_usage_key)
self._cache.delete(self._encode_root_cache_key(bs_model))
bs_model.delete()
logger.info("BlockStructure: Deleted from cache and store; %r.", bs_model)
def is_up_to_date(self, root_block_usage_key, modulestore):
"""
Returns whether the data in storage for the given key is
already up-to-date with the version in the given modulestore.
"""
if _is_storage_backing_enabled():
try:
bs_model = self._get_model(root_block_usage_key)
root_block = modulestore.get_item(root_block_usage_key)
return self._version_data_of_model(bs_model) == self._version_data_of_block(root_block)
except BlockStructureNotFound:
pass
return False
def _get_model(self, root_block_usage_key):
"""
Returns the model associated with the given key.
"""
if _is_storage_backing_enabled():
return BlockStructureModel.get(root_block_usage_key)
else:
return StubModel(root_block_usage_key)
def _update_or_create_model(self, block_structure, serialized_data):
"""
Updates or creates the model for the given block_structure
and serialized_data.
"""
if _is_storage_backing_enabled():
root_block = block_structure[block_structure.root_block_usage_key]
bs_model, _ = BlockStructureModel.update_or_create(
serialized_data,
data_usage_key=block_structure.root_block_usage_key,
**self._version_data_of_block(root_block)
)
return bs_model
else:
return StubModel(block_structure.root_block_usage_key)
def _add_to_cache(self, serialized_data, bs_model):
"""
Adds the given serialized_data for the given BlockStructureModel
to the cache.
"""
cache_key = self._encode_root_cache_key(bs_model)
self._cache.set(cache_key, serialized_data, timeout=config.cache_timeout_in_seconds())
logger.info("BlockStructure: Added to cache; %r, size: %d", bs_model, len(serialized_data))
def _get_from_cache(self, bs_model):
"""
Returns the serialized data for the given BlockStructureModel
from the cache.
Raises:
BlockStructureNotFound if not found.
"""
cache_key = self._encode_root_cache_key(bs_model)
serialized_data = self._cache.get(cache_key)
if not serialized_data:
logger.info("BlockStructure: Not found in cache; %r.", bs_model)
raise BlockStructureNotFound(bs_model.data_usage_key)
else:
logger.info("BlockStructure: Read from cache; %r, size: %d", bs_model, len(serialized_data))
return serialized_data
def _get_from_store(self, bs_model):
"""
Returns the serialized data for the given BlockStructureModel
from storage.
Raises:
BlockStructureNotFound if not found.
"""
if not _is_storage_backing_enabled():
raise BlockStructureNotFound(bs_model.data_usage_key)
return bs_model.get_serialized_data()
def _serialize(self, block_structure):
"""
Serializes the data for the given block_structure.
"""
data_to_cache = (
block_structure._block_relations,
block_structure.transformer_data,
block_structure._block_data_map,
)
return zpickle(data_to_cache)
def _deserialize(self, serialized_data, root_block_usage_key):
"""
Deserializes the given data and returns the parsed block_structure.
"""
block_relations, transformer_data, block_data_map = zunpickle(serialized_data)
return BlockStructureFactory.create_new(
root_block_usage_key,
block_relations,
transformer_data,
block_data_map,
)
@staticmethod
def _encode_root_cache_key(bs_model):
"""
Returns the cache key to use for the given
BlockStructureModel or StubModel.
"""
if _is_storage_backing_enabled():
return unicode(bs_model)
else:
return "v{version}.root.key.{root_usage_key}".format(
version=unicode(BlockStructureBlockData.VERSION),
root_usage_key=unicode(bs_model.data_usage_key),
)
@staticmethod
def _version_data_of_block(root_block):
"""
Returns the version-relevant data for the given block, including the
current schema state of the Transformers and BlockStructure classes.
"""
return dict(
data_version=getattr(root_block, 'course_version', None),
data_edit_timestamp=getattr(root_block, 'subtree_edited_on', None),
transformers_schema_version=TransformerRegistry.get_write_version_hash(),
block_structure_schema_version=unicode(BlockStructureBlockData.VERSION),
)
@staticmethod
def _version_data_of_model(bs_model):
"""
Returns the version-relevant data for the given BlockStructureModel.
"""
return {
field_name: getattr(bs_model, field_name, None)
for field_name in BlockStructureModel.VERSION_FIELDS
}
def _is_storage_backing_enabled():
"""
Returns whether storage backing for Block Structures is enabled.
"""
return config.is_enabled(config.STORAGE_BACKING_FOR_CACHE)
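
As a reading aid for the new module, a hedged sketch of the lookup order BlockStructureStore.get() implements; the helper name is made up, and the behavior described is only what the code above already shows:

from openedx.core.lib.block_structure.exceptions import BlockStructureNotFound
from openedx.core.lib.block_structure.store import BlockStructureStore

def fetch_structure_or_none(cache, root_block_usage_key):
    """Sketch: try the cache first, then the BlockStructureModel table, else give up."""
    store = BlockStructureStore(cache)
    try:
        # get() tries _get_from_cache(); on a cache miss it falls back to
        # _get_from_store(), which only succeeds when STORAGE_BACKING_FOR_CACHE
        # is enabled and a BlockStructureModel row exists for the key.
        return store.get(root_block_usage_key)
    except BlockStructureNotFound:
        return None  # caller decides whether to re-collect from the modulestore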
......@@ -4,6 +4,9 @@ Common utilities for tests in block_structure module
from contextlib import contextmanager
from mock import patch
from xmodule.modulestore.exceptions import ItemNotFoundError
from uuid import uuid4
from opaque_keys.edx.locator import CourseLocator, BlockUsageLocator
from ..block_structure import BlockStructureBlockData
from ..transformer import BlockStructureTransformer, FilteringTransformerMixin
......@@ -115,7 +118,7 @@ class MockModulestoreFactory(object):
A factory for creating MockModulestore objects.
"""
@classmethod
def create(cls, children_map):
def create(cls, children_map, block_key_factory):
"""
Creates and returns a MockModulestore from the given
children_map.
......@@ -127,7 +130,11 @@ class MockModulestoreFactory(object):
"""
modulestore = MockModulestore()
modulestore.set_blocks({
block_key: MockXBlock(block_key, children=children, modulestore=modulestore)
block_key_factory(block_key): MockXBlock(
block_key_factory(block_key),
children=[block_key_factory(child) for child in children],
modulestore=modulestore,
)
for block_key, children in enumerate(children_map)
})
return modulestore
......@@ -216,18 +223,26 @@ class ChildrenMapTestMixin(object):
# 5 6
DAG_CHILDREN_MAP = [[1, 2], [3], [3, 4], [5, 6], [], [], []]
def block_key_factory(self, block_id):
"""
Returns a block key object for the given block_id.
Override this method if the block_key should be anything
different from the index integer values in the Children Maps.
"""
return block_id
def create_block_structure(self, children_map, block_structure_cls=BlockStructureBlockData):
"""
Factory method for creating and returning a block structure
for the given children_map.
"""
# create empty block structure
block_structure = block_structure_cls(root_block_usage_key=0)
block_structure = block_structure_cls(root_block_usage_key=self.block_key_factory(0))
# _add_relation
for parent, children in enumerate(children_map):
for child in children:
block_structure._add_relation(parent, child) # pylint: disable=protected-access
block_structure._add_relation(self.block_key_factory(parent), self.block_key_factory(child)) # pylint: disable=protected-access
return block_structure
def get_parents_map(self, children_map):
......@@ -253,8 +268,8 @@ class ChildrenMapTestMixin(object):
for block_key, children in enumerate(children_map):
# Verify presence
self.assertEquals(
block_key in block_structure,
self.assertEqual(
self.block_key_factory(block_key) in block_structure,
block_key not in missing_blocks,
'Expected presence in block_structure for block_key {} to match absence in missing_blocks.'.format(
unicode(block_key)
......@@ -263,16 +278,33 @@ class ChildrenMapTestMixin(object):
# Verify children
if block_key not in missing_blocks:
self.assertEquals(
set(block_structure.get_children(block_key)),
set(children),
self.assertEqual(
set(block_structure.get_children(self.block_key_factory(block_key))),
set(self.block_key_factory(child) for child in children),
)
# Verify parents
parents_map = self.get_parents_map(children_map)
for block_key, parents in enumerate(parents_map):
if block_key not in missing_blocks:
self.assertEquals(
set(block_structure.get_parents(block_key)),
set(parents),
self.assertEqual(
set(block_structure.get_parents(self.block_key_factory(block_key))),
set(self.block_key_factory(parent) for parent in parents),
)
class UsageKeyFactoryMixin(object):
"""
Test Mixin that provides a block_key_factory to create OpaqueKey objects
for block_ids rather than simple integers. By default, the children maps in
ChildrenMapTestMixin use integers for block_ids.
"""
def setUp(self):
super(UsageKeyFactoryMixin, self).setUp()
self.course_key = CourseLocator('org', 'course', unicode(uuid4()))
def block_key_factory(self, block_id):
"""
Returns a block key object for the given block_id.
"""
return BlockUsageLocator(course_key=self.course_key, block_type='course', block_id=unicode(block_id))
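
For concreteness, a small illustration (values are made up) of the keys this mixin substitutes for the integer ids in ChildrenMapTestMixin's children maps:

from opaque_keys.edx.locator import BlockUsageLocator, CourseLocator

# block_key_factory(1) under UsageKeyFactoryMixin yields an opaque key rather than
# the bare integer 1, e.g. for an illustrative course key:
course_key = CourseLocator('org', 'course', 'a-test-run')
block_key = BlockUsageLocator(course_key=course_key, block_type='course', block_id=u'1')
print(unicode(block_key))  # e.g. block-v1:org+course+a-test-run+type@course+block@1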
"""
Tests for block_structure/cache.py
"""
from nose.plugins.attrib import attr
from unittest import TestCase
from ..cache import BlockStructureCache
from .helpers import ChildrenMapTestMixin, MockCache, MockTransformer
@attr(shard=2)
class TestBlockStructureCache(ChildrenMapTestMixin, TestCase):
"""
Tests for BlockStructureCache
"""
def setUp(self):
super(TestBlockStructureCache, self).setUp()
self.children_map = self.SIMPLE_CHILDREN_MAP
self.block_structure = self.create_block_structure(self.children_map)
self.mock_cache = MockCache()
self.block_structure_cache = BlockStructureCache(self.mock_cache)
def add_transformers(self):
"""
Add each registered transformer to the block structure.
Mimic collection by setting test transformer block data.
"""
for transformer in [MockTransformer]:
self.block_structure._add_transformer(transformer) # pylint: disable=protected-access
self.block_structure.set_transformer_block_field(
usage_key=0, transformer=transformer, key='test', value='{} val'.format(transformer.name())
)
def test_add_and_get(self):
self.assertEquals(self.mock_cache.timeout_from_last_call, 0)
self.add_transformers()
self.block_structure_cache.add(self.block_structure)
self.assertEquals(self.mock_cache.timeout_from_last_call, 60 * 60 * 24)
cached_value = self.block_structure_cache.get(self.block_structure.root_block_usage_key)
self.assertIsNotNone(cached_value)
self.assert_block_structure(cached_value, self.children_map)
def test_get_none(self):
self.assertIsNone(
self.block_structure_cache.get(self.block_structure.root_block_usage_key)
)
def test_delete(self):
self.add_transformers()
self.block_structure_cache.add(self.block_structure)
self.block_structure_cache.delete(self.block_structure.root_block_usage_key)
self.assertIsNone(
self.block_structure_cache.get(self.block_structure.root_block_usage_key)
)
......@@ -5,7 +5,7 @@ from nose.plugins.attrib import attr
from unittest import TestCase
from xmodule.modulestore.exceptions import ItemNotFoundError
from ..cache import BlockStructureCache
from ..store import BlockStructureStore
from ..exceptions import BlockStructureNotFound
from ..factory import BlockStructureFactory
from .helpers import (
......@@ -21,7 +21,7 @@ class TestBlockStructureFactory(TestCase, ChildrenMapTestMixin):
def setUp(self):
super(TestBlockStructureFactory, self).setUp()
self.children_map = self.SIMPLE_CHILDREN_MAP
self.modulestore = MockModulestoreFactory.create(self.children_map)
self.modulestore = MockModulestoreFactory.create(self.children_map, self.block_key_factory)
def test_from_modulestore(self):
block_structure = BlockStructureFactory.create_from_modulestore(
......@@ -37,21 +37,21 @@ class TestBlockStructureFactory(TestCase, ChildrenMapTestMixin):
)
def test_from_cache(self):
cache = BlockStructureCache(MockCache())
store = BlockStructureStore(MockCache())
block_structure = self.create_block_structure(self.children_map)
cache.add(block_structure)
from_cache_block_structure = BlockStructureFactory.create_from_cache(
store.add(block_structure)
from_cache_block_structure = BlockStructureFactory.create_from_store(
block_structure.root_block_usage_key,
cache,
store,
)
self.assert_block_structure(from_cache_block_structure, self.children_map)
def test_from_cache_none(self):
cache = BlockStructureCache(MockCache())
store = BlockStructureStore(MockCache())
with self.assertRaises(BlockStructureNotFound):
BlockStructureFactory.create_from_cache(
BlockStructureFactory.create_from_store(
root_block_usage_key=0,
block_structure_cache=cache,
block_structure_store=store,
)
def test_new(self):
......
"""
Tests for manager.py
"""
import ddt
from nose.plugins.attrib import attr
from unittest import TestCase
from openedx.core.djangoapps.content.block_structure.config import RAISE_ERROR_WHEN_NOT_FOUND, STORAGE_BACKING_FOR_CACHE
from openedx.core.djangoapps.content.block_structure.tests.helpers import override_config_setting
from ..block_structure import BlockStructureBlockData
from ..exceptions import UsageKeyNotInBlockStructure
from ..exceptions import UsageKeyNotInBlockStructure, BlockStructureNotFound
from ..manager import BlockStructureManager
from ..transformers import BlockStructureTransformers
from .helpers import (
MockModulestoreFactory, MockCache, MockTransformer, ChildrenMapTestMixin, mock_registered_transformers
MockModulestoreFactory, MockCache, MockTransformer,
ChildrenMapTestMixin, UsageKeyFactoryMixin,
mock_registered_transformers,
)
......@@ -86,7 +92,8 @@ class TestTransformer1(MockTransformer):
@attr(shard=2)
class TestBlockStructureManager(TestCase, ChildrenMapTestMixin):
@ddt.ddt
class TestBlockStructureManager(UsageKeyFactoryMixin, ChildrenMapTestMixin, TestCase):
"""
Test class for BlockStructureManager.
"""
......@@ -99,13 +106,9 @@ class TestBlockStructureManager(TestCase, ChildrenMapTestMixin):
self.transformers = BlockStructureTransformers(self.registered_transformers)
self.children_map = self.SIMPLE_CHILDREN_MAP
self.modulestore = MockModulestoreFactory.create(self.children_map)
self.modulestore = MockModulestoreFactory.create(self.children_map, self.block_key_factory)
self.cache = MockCache()
self.bs_manager = BlockStructureManager(
root_block_usage_key=0,
modulestore=self.modulestore,
cache=self.cache,
)
self.bs_manager = BlockStructureManager(self.block_key_factory(0), self.modulestore, self.cache)
def collect_and_verify(self, expect_modulestore_called, expect_cache_updated):
"""
......@@ -133,7 +136,10 @@ class TestBlockStructureManager(TestCase, ChildrenMapTestMixin):
def test_get_transformed_with_starting_block(self):
with mock_registered_transformers(self.registered_transformers):
block_structure = self.bs_manager.get_transformed(self.transformers, starting_block_usage_key=1)
block_structure = self.bs_manager.get_transformed(
self.transformers,
starting_block_usage_key=self.block_key_factory(1),
)
substructure_of_children_map = [[], [3, 4], [], [], []]
self.assert_block_structure(block_structure, substructure_of_children_map, missing_blocks=[0, 2])
TestTransformer1.assert_collected(block_structure)
......@@ -152,7 +158,7 @@ class TestBlockStructureManager(TestCase, ChildrenMapTestMixin):
]:
block_structure = self.bs_manager.get_transformed(
self.transformers,
starting_block_usage_key=starting_block,
starting_block_usage_key=self.block_key_factory(starting_block),
collected_block_structure=collected_block_structure,
)
self.assert_block_structure(block_structure, expected_structure, missing_blocks=expected_missing_blocks)
......@@ -167,6 +173,26 @@ class TestBlockStructureManager(TestCase, ChildrenMapTestMixin):
self.collect_and_verify(expect_modulestore_called=False, expect_cache_updated=False)
self.assertEquals(TestTransformer1.collect_call_count, 1)
def test_get_collected_error_raised(self):
with override_config_setting(RAISE_ERROR_WHEN_NOT_FOUND, active=True):
with mock_registered_transformers(self.registered_transformers):
with self.assertRaises(BlockStructureNotFound):
self.bs_manager.get_collected()
@ddt.data(True, False)
def test_update_collected_if_needed(self, with_storage_backing):
with override_config_setting(STORAGE_BACKING_FOR_CACHE, active=with_storage_backing):
with mock_registered_transformers(self.registered_transformers):
self.assertEquals(TestTransformer1.collect_call_count, 0)
self.bs_manager.update_collected_if_needed()
self.assertEquals(TestTransformer1.collect_call_count, 1)
self.bs_manager.update_collected_if_needed()
self.assertEquals(TestTransformer1.collect_call_count, 1 if with_storage_backing else 2)
self.collect_and_verify(expect_modulestore_called=False, expect_cache_updated=False)
def test_get_collected_transformer_version(self):
self.collect_and_verify(expect_modulestore_called=True, expect_cache_updated=True)
......
"""
Tests for block_structure/store.py
"""
import ddt
from nose.plugins.attrib import attr
from openedx.core.djangoapps.content.block_structure.config import STORAGE_BACKING_FOR_CACHE
from openedx.core.djangoapps.content.block_structure.config.models import BlockStructureConfiguration
from openedx.core.djangoapps.content.block_structure.tests.helpers import override_config_setting
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase
from ..store import BlockStructureStore
from ..exceptions import BlockStructureNotFound
from .helpers import ChildrenMapTestMixin, UsageKeyFactoryMixin, MockCache, MockTransformer
@attr(shard=2)
@ddt.ddt
class TestBlockStructureStore(UsageKeyFactoryMixin, ChildrenMapTestMixin, CacheIsolationTestCase):
"""
Tests for BlockStructureStore
"""
ENABLED_CACHES = ['default']
def setUp(self):
super(TestBlockStructureStore, self).setUp()
self.children_map = self.SIMPLE_CHILDREN_MAP
self.block_structure = self.create_block_structure(self.children_map)
self.add_transformers()
self.mock_cache = MockCache()
self.store = BlockStructureStore(self.mock_cache)
def add_transformers(self):
"""
Add each registered transformer to the block structure.
Mimic collection by setting test transformer block data.
"""
for transformer in [MockTransformer]:
self.block_structure._add_transformer(transformer) # pylint: disable=protected-access
self.block_structure.set_transformer_block_field(
self.block_key_factory(0),
transformer,
key='test',
value='{} val'.format(transformer.name()),
)
@ddt.data(True, False)
def test_get_none(self, with_storage_backing):
with override_config_setting(STORAGE_BACKING_FOR_CACHE, active=with_storage_backing):
with self.assertRaises(BlockStructureNotFound):
self.store.get(self.block_structure.root_block_usage_key)
@ddt.data(True, False)
def test_add_and_get(self, with_storage_backing):
with override_config_setting(STORAGE_BACKING_FOR_CACHE, active=with_storage_backing):
self.store.add(self.block_structure)
stored_value = self.store.get(self.block_structure.root_block_usage_key)
self.assertIsNotNone(stored_value)
self.assert_block_structure(stored_value, self.children_map)
@ddt.data(True, False)
def test_delete(self, with_storage_backing):
with override_config_setting(STORAGE_BACKING_FOR_CACHE, active=with_storage_backing):
self.store.add(self.block_structure)
self.store.delete(self.block_structure.root_block_usage_key)
with self.assertRaises(BlockStructureNotFound):
self.store.get(self.block_structure.root_block_usage_key)
def test_uncached_without_storage(self):
self.store.add(self.block_structure)
self.mock_cache.map.clear()
with self.assertRaises(BlockStructureNotFound):
self.store.get(self.block_structure.root_block_usage_key)
def test_uncached_with_storage(self):
with override_config_setting(STORAGE_BACKING_FOR_CACHE, active=True):
self.store.add(self.block_structure)
self.mock_cache.map.clear()
stored_value = self.store.get(self.block_structure.root_block_usage_key)
self.assert_block_structure(stored_value, self.children_map)
@ddt.data(1, 5, None)
def test_cache_timeout(self, timeout):
if timeout is not None:
BlockStructureConfiguration.objects.create(enabled=True, cache_timeout_in_seconds=timeout)
else:
timeout = BlockStructureConfiguration.DEFAULT_CACHE_TIMEOUT_IN_SECONDS
self.assertEquals(self.mock_cache.timeout_from_last_call, 0)
self.store.add(self.block_structure)
self.assertEquals(self.mock_cache.timeout_from_last_call, timeout)