Commit b1765d0a by Eugeny Kolpakov

Merge pull request #7641 from open-craft/eugeny/problem-type-search

Capa problem type filtering using edx-search
parents 25b0264a d4f85d87
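The pull request summarized above records capa response types in the search index so that callers can filter problems by type through edx-search. A minimal sketch of such a query, using the index/document names and fields introduced in this diff (the helper name and the example problem type are illustrative, not part of the change):
# Illustrative only: look up capa blocks of a given problem type in the library index.
from search.search_engine_base import SearchEngine

def find_capa_problems(library_key, capa_type):
    """ Return ids of capa blocks in the given library matching capa_type (e.g. "multiplechoiceresponse"). """
    searcher = SearchEngine.get_search_engine("library_index")
    if searcher is None:  # search is disabled
        return []
    response = searcher.search(
        doc_type="library_content",
        field_dictionary={
            "library": unicode(library_key),
            "content_type": "CAPA",
            "problem_types": capa_type,
        },
    )
    return [result["data"]["id"] for result in response["results"]]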
""" Code to allow module store to interface with courseware index """ """ Code to allow module store to interface with courseware index """
from __future__ import absolute_import from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
from datetime import timedelta from datetime import timedelta
import logging import logging
from six import add_metaclass
from django.conf import settings from django.conf import settings
from django.utils.translation import ugettext as _ from django.utils.translation import ugettext as _
from eventtracking import tracker from eventtracking import tracker
from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore import ModuleStoreEnum
from xmodule.library_tools import normalize_key_for_search
from search.search_engine_base import SearchEngine from search.search_engine_base import SearchEngine
# Use default index and document names for now
INDEX_NAME = "courseware_index"
DOCUMENT_TYPE = "courseware_content"
# REINDEX_AGE is the default amount of time that we look back for changes # REINDEX_AGE is the default amount of time that we look back for changes
# that might have happened. If we are provided with a time at which the # that might have happened. If we are provided with a time at which the
# indexing is triggered, then we know it is safe to only index items # indexing is triggered, then we know it is safe to only index items
...@@ -25,13 +22,6 @@ REINDEX_AGE = timedelta(0, 60) # 60 seconds ...@@ -25,13 +22,6 @@ REINDEX_AGE = timedelta(0, 60) # 60 seconds
log = logging.getLogger('edx.modulestore') log = logging.getLogger('edx.modulestore')
def indexing_is_enabled():
"""
Checks to see if the indexing feature is enabled
"""
return settings.FEATURES.get('ENABLE_COURSEWARE_INDEX', False)
class SearchIndexingError(Exception): class SearchIndexingError(Exception):
""" Indicates some error(s) occured during indexing """ """ Indicates some error(s) occured during indexing """
...@@ -40,18 +30,71 @@ class SearchIndexingError(Exception): ...@@ -40,18 +30,71 @@ class SearchIndexingError(Exception):
self.error_list = error_list self.error_list = error_list
@add_metaclass(ABCMeta)
class SearchIndexerBase(object):
"""
Base class to perform indexing for courseware or library search from different modulestores
"""
__metaclass__ = ABCMeta
INDEX_NAME = None
DOCUMENT_TYPE = None
ENABLE_INDEXING_KEY = None
INDEX_EVENT = {
'name': None,
'category': None
}
@classmethod
def indexing_is_enabled(cls):
"""
Checks to see if the indexing feature is enabled
"""
return settings.FEATURES.get(cls.ENABLE_INDEXING_KEY, False)
@classmethod
@abstractmethod
def normalize_structure_key(cls, structure_key):
""" Normalizes structure key for use in indexing """
@classmethod
@abstractmethod
def _fetch_top_level(cls, modulestore, structure_key):
""" Fetch the item from the modulestore location """
@classmethod
@abstractmethod
def _get_location_info(cls, normalized_structure_key):
""" Builds location info dictionary """
@classmethod
def _id_modifier(cls, usage_id):
""" Modifies usage_id to submit to index """
return usage_id
@classmethod
def remove_deleted_items(cls, searcher, structure_key, exclude_items):
"""
Remove any item that is present in the search index but not in the updated list of indexed items.
As items are found, the set of items to keep can be shortened.
"""
response = searcher.search(
doc_type=cls.DOCUMENT_TYPE,
field_dictionary=cls._get_location_info(structure_key),
exclude_ids=exclude_items
)
result_ids = [result["data"]["id"] for result in response["results"]]
for result_id in result_ids:
searcher.remove(cls.DOCUMENT_TYPE, result_id)
@classmethod
def index(cls, modulestore, structure_key, triggered_at=None, reindex_age=REINDEX_AGE):
""" """
Process course for indexing Process course for indexing
Arguments: Arguments:
course_key (CourseKey) - course identifier structure_key (CourseKey|LibraryKey) - course or library identifier
triggered_at (datetime) - provides time at which indexing was triggered; triggered_at (datetime) - provides time at which indexing was triggered;
useful for index updates - only things changed recently from that date useful for index updates - only things changed recently from that date
...@@ -64,13 +107,12 @@ class CoursewareSearchIndexer(object): ...@@ -64,13 +107,12 @@ class CoursewareSearchIndexer(object):
Number of items that have been added to the index Number of items that have been added to the index
""" """
error_list = [] error_list = []
searcher = SearchEngine.get_search_engine(INDEX_NAME) searcher = SearchEngine.get_search_engine(cls.INDEX_NAME)
if not searcher: if not searcher:
return return
location_info = { structure_key = cls.normalize_structure_key(structure_key)
"course": unicode(course_key), location_info = cls._get_location_info(structure_key)
}
# Wrap counter in dictionary - otherwise we seem to lose scope inside the embedded function `index_item` # Wrap counter in dictionary - otherwise we seem to lose scope inside the embedded function `index_item`
indexed_count = { indexed_count = {
...@@ -101,7 +143,7 @@ class CoursewareSearchIndexer(object): ...@@ -101,7 +143,7 @@ class CoursewareSearchIndexer(object):
if not item_index_dictionary and not item.has_children: if not item_index_dictionary and not item.has_children:
return return
item_id = unicode(item.scope_ids.usage_id) item_id = unicode(cls._id_modifier(item.scope_ids.usage_id))
indexed_items.add(item_id)
if item.has_children:
# determine if it's okay to skip adding the children herein based upon how recently any may have changed
...@@ -122,38 +164,24 @@ class CoursewareSearchIndexer(object):
if item.start:
item_index['start_date'] = item.start
searcher.index(cls.DOCUMENT_TYPE, item_index)
indexed_count["count"] += 1
except Exception as err: # pylint: disable=broad-except
# broad exception so that index operation does not fail on one item of many
log.warning('Could not index item: %s - %r', item.location, err)
error_list.append(_('Could not index item: {}').format(item.location))
try:
with modulestore.branch_setting(ModuleStoreEnum.RevisionOption.published_only):
structure = cls._fetch_top_level(modulestore, structure_key)
for item in structure.get_children():
index_item(item)
cls.remove_deleted_items(searcher, structure_key, indexed_items)
except Exception as err: # pylint: disable=broad-except
# broad exception so that index operation does not prevent the rest of the application from working
log.exception(
"Indexing error encountered, courseware index may be out of date %s - %r",
structure_key,
err
)
error_list.append(_('General indexing error occurred'))
...@@ -164,31 +192,111 @@ class CoursewareSearchIndexer(object):
return indexed_count["count"]
@classmethod
def _do_reindex(cls, modulestore, structure_key):
"""
(Re)index all content within the given structure (course or library),
tracking the fact that a full reindex has taken place
"""
indexed_count = cls.index(modulestore, structure_key)
if indexed_count:
cls._track_index_request(cls.INDEX_EVENT['name'], cls.INDEX_EVENT['category'], indexed_count)
return indexed_count
@classmethod
def _track_index_request(cls, event_name, category, indexed_count):
"""Track content index requests.
Arguments:
event_name (str): Name of the event to be logged.
category (str): category of indexed items
indexed_count (int): number of indexed items
Returns:
None
"""
data = {
"indexed_count": indexed_count,
'category': category,
}
tracker.emit(
event_name,
data
)
class CoursewareSearchIndexer(SearchIndexerBase):
"""
Class to perform indexing for courseware search from different modulestores
"""
INDEX_NAME = "courseware_index"
DOCUMENT_TYPE = "courseware_content"
ENABLE_INDEXING_KEY = 'ENABLE_COURSEWARE_INDEX'
INDEX_EVENT = {
'name': 'edx.course.index.reindexed',
'category': 'courseware_index'
}
@classmethod
def normalize_structure_key(cls, structure_key):
""" Normalizes structure key for use in indexing """
return structure_key
@classmethod
def _fetch_top_level(cls, modulestore, structure_key):
""" Fetch the item from the modulestore location """
return modulestore.get_course(structure_key, depth=None)
@classmethod
def _get_location_info(cls, normalized_structure_key):
""" Builds location info dictionary """
return {"course": unicode(normalized_structure_key)}
@classmethod
def do_course_reindex(cls, modulestore, course_key):
"""
(Re)index all content within the given course, tracking the fact that a full reindex has taken place
"""
return cls._do_reindex(modulestore, course_key)
class LibrarySearchIndexer(SearchIndexerBase):
"""
Class to perform indexing for library search from different modulestores
"""
INDEX_NAME = "library_index"
DOCUMENT_TYPE = "library_content"
ENABLE_INDEXING_KEY = 'ENABLE_LIBRARY_INDEX'
INDEX_EVENT = {
'name': 'edx.library.index.reindexed',
'category': 'library_index'
}
@classmethod
def normalize_structure_key(cls, structure_key):
""" Normalizes structure key for use in indexing """
return normalize_key_for_search(structure_key)
@classmethod
def _fetch_top_level(cls, modulestore, structure_key):
""" Fetch the item from the modulestore location """
return modulestore.get_library(structure_key, depth=None)
@classmethod
def _get_location_info(cls, normalized_structure_key):
""" Builds location info dictionary """
return {"library": unicode(normalized_structure_key)}
@classmethod
def _id_modifier(cls, usage_id):
""" Modifies usage_id to submit to index """
return usage_id.replace(library_key=(usage_id.library_key.replace(version_guid=None, branch=None)))
@classmethod
def do_library_reindex(cls, modulestore, library_key):
"""
(Re)index all content within the given library, tracking the fact that a full reindex has taken place
"""
return cls._do_reindex(modulestore, library_key)
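For orientation, either indexer above can also be driven directly (the management command and celery tasks below do exactly this); a short usage sketch in which the keys are examples only:
# Illustrative usage of the indexers defined above; the keys are examples, not part of the diff.
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore

store = modulestore()
course_key = CourseKey.from_string("course-v1:TestX+Course+Run")  # example course key
CoursewareSearchIndexer.do_course_reindex(store, course_key)
library_key = CourseKey.from_string("library-v1:TestX+lib1")  # example library key
LibrarySearchIndexer.do_library_reindex(store, library_key)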
""" Management command to update libraries' search index """
from django.core.management import BaseCommand, CommandError
from optparse import make_option
from textwrap import dedent
from contentstore.courseware_index import LibrarySearchIndexer
from opaque_keys.edx.keys import CourseKey
from opaque_keys import InvalidKeyError
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import LibraryLocator
from .prompt import query_yes_no
from xmodule.modulestore.django import modulestore
class Command(BaseCommand):
"""
Command to reindex content libraries (single, multiple or all available)
Examples:
./manage.py reindex_library lib1 lib2 - reindexes libraries with keys lib1 and lib2
./manage.py reindex_library --all - reindexes all available libraries
"""
help = dedent(__doc__)
can_import_settings = True
args = "<library_id library_id ...>"
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Reindex all libraries'
),)
CONFIRMATION_PROMPT = u"Reindexing all libraries might be a time consuming operation. Do you want to continue?"
def _parse_library_key(self, raw_value):
""" Parses library key from string """
try:
result = CourseKey.from_string(raw_value)
except InvalidKeyError:
result = SlashSeparatedCourseKey.from_deprecated_string(raw_value)
if not isinstance(result, LibraryLocator):
raise CommandError(u"Argument {0} is not a library key".format(raw_value))
return result
def handle(self, *args, **options):
"""
By convention set by Django developers, this method actually executes the command's actions.
"""
if len(args) == 0 and not options.get('all', False):
raise CommandError(u"reindex_library requires one or more arguments: <library_id>")
store = modulestore()
if options.get('all', False):
if query_yes_no(self.CONFIRMATION_PROMPT, default="no"):
library_keys = [library.location.library_key.replace(branch=None) for library in store.get_libraries()]
else:
return
else:
library_keys = map(self._parse_library_key, args)
for library_key in library_keys:
LibrarySearchIndexer.do_library_reindex(store, library_key)
""" Tests for library reindex command """
import sys
import contextlib
import ddt
from django.core.management import call_command, CommandError
import mock
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory
from opaque_keys import InvalidKeyError
from contentstore.management.commands.reindex_library import Command as ReindexCommand
from contentstore.courseware_index import SearchIndexingError
@contextlib.contextmanager
def nostderr():
"""
ContextManager to suppress stderr messages
http://stackoverflow.com/a/1810086/882918
"""
savestderr = sys.stderr
class Devnull(object):
""" /dev/null incarnation as output-stream-like object """
def write(self, _):
""" Write method - just does nothing"""
pass
sys.stderr = Devnull()
try:
yield
finally:
sys.stderr = savestderr
@ddt.ddt
class TestReindexLibrary(ModuleStoreTestCase):
""" Tests for library reindex command """
def setUp(self):
""" Setup method - create libraries and courses """
super(TestReindexLibrary, self).setUp()
self.store = modulestore()
self.first_lib = LibraryFactory.create(
org="test", library="lib1", display_name="run1", default_store=ModuleStoreEnum.Type.split
)
self.second_lib = LibraryFactory.create(
org="test", library="lib2", display_name="run2", default_store=ModuleStoreEnum.Type.split
)
self.first_course = CourseFactory.create(
org="test", course="course1", display_name="run1", default_store=ModuleStoreEnum.Type.split
)
self.second_course = CourseFactory.create(
org="test", course="course2", display_name="run1", default_store=ModuleStoreEnum.Type.split
)
REINDEX_PATH_LOCATION = 'contentstore.management.commands.reindex_library.LibrarySearchIndexer.do_library_reindex'
MODULESTORE_PATCH_LOCATION = 'contentstore.management.commands.reindex_library.modulestore'
YESNO_PATCH_LOCATION = 'contentstore.management.commands.reindex_library.query_yes_no'
def _get_lib_key(self, library):
""" Get's library key as it is passed to indexer """
return library.location.library_key
def _build_calls(self, *libraries):
""" BUilds a list of mock.call instances representing calls to reindexing method """
return [mock.call(self.store, self._get_lib_key(lib)) for lib in libraries]
def test_given_no_arguments_raises_command_error(self):
""" Test that raises CommandError for incorrect arguments """
with self.assertRaises(SystemExit), nostderr():
with self.assertRaisesRegexp(CommandError, ".* requires one or more arguments .*"):
call_command('reindex_library')
@ddt.data('qwerty', 'invalid_key', 'xblock-v1:qwe+rty')
def test_given_invalid_lib_key_raises_not_found(self, invalid_key):
""" Test that raises InvalidKeyError for invalid keys """
with self.assertRaises(InvalidKeyError):
call_command('reindex_library', invalid_key)
def test_given_course_key_raises_command_error(self):
""" Test that raises CommandError if course key is passed """
with self.assertRaises(SystemExit), nostderr():
with self.assertRaisesRegexp(CommandError, ".* is not a library key"):
call_command('reindex_library', unicode(self.first_course.id))
with self.assertRaises(SystemExit), nostderr():
with self.assertRaisesRegexp(CommandError, ".* is not a library key"):
call_command('reindex_library', unicode(self.second_course.id))
with self.assertRaises(SystemExit), nostderr():
with self.assertRaisesRegexp(CommandError, ".* is not a library key"):
call_command(
'reindex_library',
unicode(self.second_course.id),
unicode(self._get_lib_key(self.first_lib))
)
def test_given_id_list_indexes_libraries(self):
""" Test that reindexes libraries when given single library key or a list of library keys """
with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \
mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)):
call_command('reindex_library', unicode(self._get_lib_key(self.first_lib)))
self.assertEqual(patched_index.mock_calls, self._build_calls(self.first_lib))
patched_index.reset_mock()
call_command('reindex_library', unicode(self._get_lib_key(self.second_lib)))
self.assertEqual(patched_index.mock_calls, self._build_calls(self.second_lib))
patched_index.reset_mock()
call_command(
'reindex_library',
unicode(self._get_lib_key(self.first_lib)),
unicode(self._get_lib_key(self.second_lib))
)
expected_calls = self._build_calls(self.first_lib, self.second_lib)
self.assertEqual(patched_index.mock_calls, expected_calls)
def test_given_all_key_prompts_and_reindexes_all_libraries(self):
""" Test that reindexes all libraries when --all key is given and confirmed """
with mock.patch(self.YESNO_PATCH_LOCATION) as patched_yes_no:
patched_yes_no.return_value = True
with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \
mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)):
call_command('reindex_library', all=True)
patched_yes_no.assert_called_once_with(ReindexCommand.CONFIRMATION_PROMPT, default='no')
expected_calls = self._build_calls(self.first_lib, self.second_lib)
self.assertEqual(patched_index.mock_calls, expected_calls)
def test_given_all_key_prompts_and_reindexes_all_libraries_cancelled(self):
""" Test that does not reindex anything when --all key is given and cancelled """
with mock.patch(self.YESNO_PATCH_LOCATION) as patched_yes_no:
patched_yes_no.return_value = False
with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index, \
mock.patch(self.MODULESTORE_PATCH_LOCATION, mock.Mock(return_value=self.store)):
call_command('reindex_library', all=True)
patched_yes_no.assert_called_once_with(ReindexCommand.CONFIRMATION_PROMPT, default='no')
patched_index.assert_not_called()
def test_fail_fast_if_reindex_fails(self):
""" Test that fails on first reindexing exception """
with mock.patch(self.REINDEX_PATH_LOCATION) as patched_index:
patched_index.side_effect = SearchIndexingError("message", [])
with self.assertRaises(SearchIndexingError):
call_command('reindex_library', unicode(self._get_lib_key(self.second_lib)))
""" receiver of course_published events in order to trigger indexing task """ """ receivers of course_published and library_updated events in order to trigger indexing task """
from datetime import datetime from datetime import datetime
from pytz import UTC from pytz import UTC
from django.dispatch import receiver from django.dispatch import receiver
from xmodule.modulestore.django import SignalHandler from xmodule.modulestore.django import SignalHandler
from contentstore.courseware_index import indexing_is_enabled from contentstore.courseware_index import CoursewareSearchIndexer, LibrarySearchIndexer
@receiver(SignalHandler.course_published) @receiver(SignalHandler.course_published)
...@@ -15,5 +15,16 @@ def listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable= ...@@ -15,5 +15,16 @@ def listen_for_course_publish(sender, course_key, **kwargs): # pylint: disable=
""" """
# import here, because signal is registered at startup, but items in tasks are not yet able to be loaded # import here, because signal is registered at startup, but items in tasks are not yet able to be loaded
from .tasks import update_search_index from .tasks import update_search_index
if indexing_is_enabled(): if CoursewareSearchIndexer.indexing_is_enabled():
update_search_index.delay(unicode(course_key), datetime.now(UTC).isoformat()) update_search_index.delay(unicode(course_key), datetime.now(UTC).isoformat())
@receiver(SignalHandler.library_updated)
def listen_for_library_update(sender, library_key, **kwargs): # pylint: disable=unused-argument
"""
Receives signal and kicks off celery task to update search index
"""
# import here, because signal is registered at startup, but items in tasks are not yet able to be loaded
from .tasks import update_library_index
if LibrarySearchIndexer.indexing_is_enabled():
update_library_index.delay(unicode(library_key), datetime.now(UTC).isoformat())
...@@ -10,7 +10,7 @@ from pytz import UTC
from django.contrib.auth.models import User
from contentstore.courseware_index import CoursewareSearchIndexer, LibrarySearchIndexer, SearchIndexingError
from contentstore.utils import initialize_permissions
from course_action_state.models import CourseRerunState
from opaque_keys.edx.keys import CourseKey
...@@ -82,19 +82,36 @@ def deserialize_fields(json_fields):
return fields
def _parse_time(time_isoformat):
""" Parses time from iso format """
return datetime.strptime(
# remove the +00:00 from the end of the formats generated within the system
time_isoformat.split('+')[0],
"%Y-%m-%dT%H:%M:%S.%f"
).replace(tzinfo=UTC)
@task()
def update_search_index(course_id, triggered_time_isoformat):
""" Updates course search index. """
try:
course_key = CourseKey.from_string(course_id)
CoursewareSearchIndexer.index(modulestore(), course_key, triggered_at=(_parse_time(triggered_time_isoformat)))
except SearchIndexingError as exc:
LOGGER.error('Search indexing error for complete course %s - %s', course_id, unicode(exc))
else:
LOGGER.debug('Search indexing successful for complete course %s', course_id)
@task()
def update_library_index(library_id, triggered_time_isoformat):
""" Updates course search index. """
try:
library_key = CourseKey.from_string(library_id)
LibrarySearchIndexer.index(modulestore(), library_key, triggered_at=(_parse_time(triggered_time_isoformat)))
except SearchIndexingError as exc:
LOGGER.error('Search indexing error for library %s - %s', library_id, unicode(exc))
else:
LOGGER.debug('Search indexing successful for library %s', library_id)
...@@ -2,6 +2,7 @@
Testing indexing of the courseware as it is changed
"""
import ddt
from lazy.lazy import lazy
import time
from datetime import datetime
from mock import patch
...@@ -9,22 +10,24 @@ from pytz import UTC
from uuid import uuid4
from unittest import skip
from xmodule.library_tools import normalize_key_for_search
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import SignalHandler
from xmodule.modulestore.edit_info import EditInfoMixin
from xmodule.modulestore.inheritance import InheritanceMixin
from xmodule.modulestore.mixed import MixedModuleStore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, LibraryFactory
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
from xmodule.modulestore.tests.test_cross_modulestore_import_export import MongoContentstoreBuilder
from xmodule.modulestore.tests.utils import create_modulestore_instance, LocationMixin, MixedSplitTestCase
from xmodule.tests import DATA_DIR
from xmodule.x_module import XModuleMixin
from search.search_engine_base import SearchEngine
from contentstore.courseware_index import CoursewareSearchIndexer, LibrarySearchIndexer, SearchIndexingError
from contentstore.signals import listen_for_course_publish, listen_for_library_update
COURSE_CHILD_STRUCTURE = {
...@@ -116,6 +119,8 @@ class MixedWithOptionsTestCase(MixedSplitTestCase):
'xblock_mixins': modulestore_options['xblock_mixins'],
}
INDEX_NAME = None
def setUp(self):
super(MixedWithOptionsTestCase, self).setUp()
...@@ -123,9 +128,19 @@ class MixedWithOptionsTestCase(MixedSplitTestCase):
""" base version of setup_course_base is a no-op """
pass
@lazy
def searcher(self):
""" Centralized call to getting the search engine for the test """
return SearchEngine.get_search_engine(self.INDEX_NAME)
def _get_default_search(self):
""" Returns field_dictionary for default search """
return {}
def search(self, field_dictionary=None):
""" Performs index search according to passed parameters """
fields = field_dictionary if field_dictionary else self._get_default_search()
return self.searcher.search(field_dictionary=fields)
def _perform_test_using_store(self, store_type, test_to_perform):
""" Helper method to run a test function that uses a specific store """
...@@ -162,6 +177,8 @@ class MixedWithOptionsTestCase(MixedSplitTestCase):
class TestCoursewareSearchIndexer(MixedWithOptionsTestCase):
""" Tests the operation of the CoursewareSearchIndexer """
WORKS_WITH_STORES = (ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def setUp(self):
super(TestCoursewareSearchIndexer, self).setUp()
...@@ -210,6 +227,8 @@ class TestCoursewareSearchIndexer(MixedWithOptionsTestCase):
publish_item=False,
)
INDEX_NAME = CoursewareSearchIndexer.INDEX_NAME
def reindex_course(self, store):
""" kick off complete reindex of the course """
return CoursewareSearchIndexer.do_course_reindex(store, self.course.id)
...@@ -217,38 +236,39 @@ class TestCoursewareSearchIndexer(MixedWithOptionsTestCase):
def index_recent_changes(self, store, since_time):
""" index course using recent changes """
trigger_time = datetime.now(UTC)
return CoursewareSearchIndexer.index(
store,
self.course.id,
triggered_at=trigger_time,
reindex_age=(trigger_time - since_time)
)
def _get_default_search(self):
return {"course": unicode(self.course.id)}
def _test_indexing_course(self, store):
""" indexing course tests """
response = self.search()
self.assertEqual(response["total"], 0)
# Only published modules should be in the index
added_to_index = self.reindex_course(store)
self.assertEqual(added_to_index, 3)
response = self.search()
self.assertEqual(response["total"], 3)
# Publish the vertical as is, and any unpublished children should now be available
self.publish_item(store, self.vertical.location)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 4)
def _test_not_indexing_unpublished_content(self, store):
""" add a new one, only appears in index once added """
# Publish the vertical to start with
self.publish_item(store, self.vertical.location)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 4)
# Now add a new unit to the existing vertical
...@@ -260,74 +280,71 @@ class TestCoursewareSearchIndexer(MixedWithOptionsTestCase):
modulestore=store,
)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 4)
# Now publish it and we should find it
# Publish the vertical as is, and everything should be available
self.publish_item(store, self.vertical.location)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 5)
def _test_deleting_item(self, store):
""" test deleting an item """
# Publish the vertical to start with
self.publish_item(store, self.vertical.location)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 4)
# just a delete should not change anything
self.delete_item(store, self.html_unit.location)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 4)
# but after publishing, we should no longer find the html_unit
self.publish_item(store, self.vertical.location)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 3)
def _test_not_indexable(self, store):
""" test not indexable items """
# Publish the vertical to start with
self.publish_item(store, self.vertical.location)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 4)
# Add a non-indexable item
ItemFactory.create(
parent_location=self.vertical.location,
category="openassessment",
display_name="Some other content",
publish_item=False,
modulestore=store,
)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 4)
# even after publishing, we should not find the non-indexable item
self.publish_item(store, self.vertical.location)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 4)
def _test_start_date_propagation(self, store):
""" make sure that the start date is applied at the right level """
early_date = self.course.start
later_date = self.vertical.start
# Publish the vertical
self.publish_item(store, self.vertical.location)
self.reindex_course(store)
response = self.search()
self.assertEqual(response["total"], 4)
results = response["results"]
...@@ -397,35 +414,35 @@ class TestCoursewareSearchIndexer(MixedWithOptionsTestCase):
with self.assertRaises(SearchIndexingError):
self.reindex_course(store)
@ddt.data(*WORKS_WITH_STORES)
def test_indexing_course(self, store_type):
self._perform_test_using_store(store_type, self._test_indexing_course)
@ddt.data(*WORKS_WITH_STORES)
def test_not_indexing_unpublished_content(self, store_type):
self._perform_test_using_store(store_type, self._test_not_indexing_unpublished_content)
@ddt.data(*WORKS_WITH_STORES)
def test_deleting_item(self, store_type):
self._perform_test_using_store(store_type, self._test_deleting_item)
@ddt.data(*WORKS_WITH_STORES)
def test_not_indexable(self, store_type):
self._perform_test_using_store(store_type, self._test_not_indexable)
@ddt.data(*WORKS_WITH_STORES)
def test_start_date_propagation(self, store_type):
self._perform_test_using_store(store_type, self._test_start_date_propagation)
@ddt.data(*WORKS_WITH_STORES)
def test_search_disabled(self, store_type):
self._perform_test_using_store(store_type, self._test_search_disabled)
@ddt.data(*WORKS_WITH_STORES)
def test_time_based_index(self, store_type):
self._perform_test_using_store(store_type, self._test_time_based_index)
@ddt.data(*WORKS_WITH_STORES)
def test_exception(self, store_type):
self._perform_test_using_store(store_type, self._test_exception)
...@@ -435,16 +452,18 @@ class TestCoursewareSearchIndexer(MixedWithOptionsTestCase):
class TestLargeCourseDeletions(MixedWithOptionsTestCase):
""" Tests to exercise deleting items from a course """
WORKS_WITH_STORES = (ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def _clean_course_id(self):
""" Clean all documents from the index that have a specific course provided """
if self.course_id:
response = self.searcher.search(field_dictionary={"course": self.course_id})
while response["total"] > 0:
for item in response["results"]:
self.searcher.remove(CoursewareSearchIndexer.DOCUMENT_TYPE, item["data"]["id"])
self.searcher.remove(CoursewareSearchIndexer.DOCUMENT_TYPE, item["data"]["id"])
response = self.searcher.search(field_dictionary={"course": self.course_id})
self.course_id = None
def setUp(self):
...@@ -457,8 +476,8 @@ class TestLargeCourseDeletions(MixedWithOptionsTestCase):
def assert_search_count(self, expected_count):
""" Check that the search within this course will yield the expected number of results """
response = self.searcher.search(field_dictionary={"course": self.course_id})
self.assertEqual(response["total"], expected_count)
def _do_test_large_course_deletion(self, store, load_factor):
...@@ -504,7 +523,7 @@ class TestLargeCourseDeletions(MixedWithOptionsTestCase):
@skip(("This test is to see how we handle very large courses, to ensure that the delete"
"procedure works smoothly - too long to run during the normal course of things"))
@ddt.data(*WORKS_WITH_STORES)
def test_large_course_deletion(self, store_type):
self._perform_test_using_store(store_type, self._test_large_course_deletion)
...@@ -512,7 +531,7 @@ class TestLargeCourseDeletions(MixedWithOptionsTestCase):
class TestTaskExecution(ModuleStoreTestCase):
"""
Set of tests to ensure that the task code will do the right thing when
executed directly. The test course and library get created without the listeners
being present, which allows us to ensure that when the listener is
executed, it is done as expected.
"""
...@@ -520,6 +539,7 @@ class TestTaskExecution(ModuleStoreTestCase):
def setUp(self):
super(TestTaskExecution, self).setUp()
SignalHandler.course_published.disconnect(listen_for_course_publish)
SignalHandler.library_updated.disconnect(listen_for_library_update)
self.course = CourseFactory.create(start=datetime(2015, 3, 1, tzinfo=UTC))
self.chapter = ItemFactory.create(
...@@ -551,15 +571,215 @@ class TestTaskExecution(ModuleStoreTestCase):
publish_item=False,
)
self.library = LibraryFactory.create()
self.library_block1 = ItemFactory.create(
parent_location=self.library.location,
category="html",
display_name="Html Content",
publish_item=False,
)
self.library_block2 = ItemFactory.create(
parent_location=self.library.location,
category="html",
display_name="Html Content 2",
publish_item=False,
)
def test_task_indexing_course(self):
""" Making sure that the receiver correctly fires off the task when invoked by signal """
searcher = SearchEngine.get_search_engine(CoursewareSearchIndexer.INDEX_NAME)
response = searcher.search(field_dictionary={"course": unicode(self.course.id)})
self.assertEqual(response["total"], 0)
listen_for_course_publish(self, self.course.id)
# Note that this test will only succeed if celery is working in inline mode
response = searcher.search(field_dictionary={"course": unicode(self.course.id)})
self.assertEqual(response["total"], 3)
def test_task_library_update(self):
""" Making sure that the receiver correctly fires off the task when invoked by signal """
searcher = SearchEngine.get_search_engine(LibrarySearchIndexer.INDEX_NAME)
library_search_key = unicode(normalize_key_for_search(self.library.location.library_key))
response = searcher.search(field_dictionary={"library": library_search_key})
self.assertEqual(response["total"], 0)
listen_for_library_update(self, self.library.location.library_key)
# Note that this test will only succeed if celery is working in inline mode
response = searcher.search(field_dictionary={"library": library_search_key})
self.assertEqual(response["total"], 2)
@ddt.ddt
class TestLibrarySearchIndexer(MixedWithOptionsTestCase):
""" Tests the operation of the CoursewareSearchIndexer """
# libraries work only with split, so do library indexer
WORKS_WITH_STORES = (ModuleStoreEnum.Type.split, )
def setUp(self):
super(TestLibrarySearchIndexer, self).setUp()
self.library = None
self.html_unit1 = None
self.html_unit2 = None
def setup_course_base(self, store):
"""
Set up the library and its content for the tests.
"""
self.library = LibraryFactory.create(modulestore=store)
self.html_unit1 = ItemFactory.create(
parent_location=self.library.location,
category="html",
display_name="Html Content",
modulestore=store,
publish_item=False,
)
self.html_unit2 = ItemFactory.create(
parent_location=self.library.location,
category="html",
display_name="Html Content 2",
modulestore=store,
publish_item=False,
)
INDEX_NAME = LibrarySearchIndexer.INDEX_NAME
def _get_default_search(self):
""" Returns field_dictionary for default search """
return {"library": unicode(self.library.location.library_key.replace(version_guid=None, branch=None))}
def reindex_library(self, store):
""" kick off complete reindex of the course """
return LibrarySearchIndexer.do_library_reindex(store, self.library.location.library_key)
def _get_contents(self, response):
""" Extracts contents from search response """
return [item['data']['content'] for item in response['results']]
def _test_indexing_library(self, store):
""" indexing course tests """
self.reindex_library(store)
response = self.search()
self.assertEqual(response["total"], 2)
added_to_index = self.reindex_library(store)
self.assertEqual(added_to_index, 2)
response = self.search()
self.assertEqual(response["total"], 2)
def _test_creating_item(self, store):
""" test updating an item """
self.reindex_library(store)
response = self.search()
self.assertEqual(response["total"], 2)
# updating a library item causes immediate reindexing
data = "Some data"
ItemFactory.create(
parent_location=self.library.location,
category="html",
display_name="Html Content 3",
data=data,
modulestore=store,
publish_item=False,
)
self.reindex_library(store)
response = self.search()
self.assertEqual(response["total"], 3)
html_contents = [cont['html_content'] for cont in self._get_contents(response)]
self.assertIn(data, html_contents)
def _test_updating_item(self, store):
""" test updating an item """
self.reindex_library(store)
response = self.search()
self.assertEqual(response["total"], 2)
# updating a library item causes immediate reindexing
new_data = "I'm new data"
self.html_unit1.data = new_data
self.update_item(store, self.html_unit1)
self.reindex_library(store)
response = self.search()
# TODO: MockSearchEngine never updates existing item: returns 3 items here - uncomment when it's fixed
# self.assertEqual(response["total"], 2)
html_contents = [cont['html_content'] for cont in self._get_contents(response)]
self.assertIn(new_data, html_contents)
def _test_deleting_item(self, store):
""" test deleting an item """
self.reindex_library(store)
response = self.search()
self.assertEqual(response["total"], 2)
# deleting a library item causes immediate reindexing
self.delete_item(store, self.html_unit1.location)
self.reindex_library(store)
response = self.search()
self.assertEqual(response["total"], 1)
def _test_not_indexable(self, store):
""" test not indexable items """
self.reindex_library(store)
response = self.search()
self.assertEqual(response["total"], 2)
# Add a non-indexable item
ItemFactory.create(
parent_location=self.library.location,
category="openassessment",
display_name="Assessment",
publish_item=False,
modulestore=store,
)
self.reindex_library(store)
response = self.search()
self.assertEqual(response["total"], 2)
@patch('django.conf.settings.SEARCH_ENGINE', None)
def _test_search_disabled(self, store):
""" if search setting has it as off, confirm that nothing is indexed """
indexed_count = self.reindex_library(store)
self.assertFalse(indexed_count)
@patch('django.conf.settings.SEARCH_ENGINE', 'search.tests.utils.ErroringIndexEngine')
def _test_exception(self, store):
""" Test that exception within indexing yields a SearchIndexingError """
with self.assertRaises(SearchIndexingError):
self.reindex_library(store)
@ddt.data(*WORKS_WITH_STORES)
def test_indexing_library(self, store_type):
self._perform_test_using_store(store_type, self._test_indexing_library)
@ddt.data(*WORKS_WITH_STORES)
def test_updating_item(self, store_type):
self._perform_test_using_store(store_type, self._test_updating_item)
@ddt.data(*WORKS_WITH_STORES)
def test_creating_item(self, store_type):
self._perform_test_using_store(store_type, self._test_creating_item)
@ddt.data(*WORKS_WITH_STORES)
def test_deleting_item(self, store_type):
self._perform_test_using_store(store_type, self._test_deleting_item)
@ddt.data(*WORKS_WITH_STORES)
def test_not_indexable(self, store_type):
self._perform_test_using_store(store_type, self._test_not_indexable)
@ddt.data(*WORKS_WITH_STORES)
def test_search_disabled(self, store_type):
self._perform_test_using_store(store_type, self._test_search_disabled)
@ddt.data(*WORKS_WITH_STORES)
def test_exception(self, store_type):
self._perform_test_using_store(store_type, self._test_exception)
...@@ -386,6 +386,7 @@ class TestLibraries(LibraryTestCase):
html_block = modulestore().get_item(lc_block.children[0])
self.assertEqual(html_block.data, data2)
@patch("xmodule.library_tools.SearchEngine.get_search_engine", Mock(return_value=None))
def test_refreshes_children_if_capa_type_change(self):
""" Tests that children are automatically refreshed if capa type field changes """
name1, name2 = "Option Problem", "Multiple Choice Problem"
...
...@@ -328,7 +328,7 @@ API_DATE_FORMAT = ENV_TOKENS.get('API_DATE_FORMAT', API_DATE_FORMAT)
# Example: {'CN': 'http://api.xuetangx.com/edx/video?s3_url='}
VIDEO_CDN_URL = ENV_TOKENS.get('VIDEO_CDN_URL', {})
if FEATURES['ENABLE_COURSEWARE_INDEX'] or FEATURES['ENABLE_LIBRARY_INDEX']:
# Use ElasticSearch for the search engine
SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"
...
...@@ -78,6 +78,7 @@ YOUTUBE['TEST_URL'] = "127.0.0.1:{0}/test_youtube/".format(YOUTUBE_PORT)
YOUTUBE['TEXT_API']['url'] = "127.0.0.1:{0}/test_transcripts_youtube/".format(YOUTUBE_PORT)
FEATURES['ENABLE_COURSEWARE_INDEX'] = True
FEATURES['ENABLE_LIBRARY_INDEX'] = True
SEARCH_ENGINE = "search.tests.mock_search_engine.MockSearchEngine" SEARCH_ENGINE = "search.tests.mock_search_engine.MockSearchEngine"
# Path at which to store the mock index # Path at which to store the mock index
MOCK_SEARCH_BACKING_FILE = ( MOCK_SEARCH_BACKING_FILE = (
......
...@@ -140,6 +140,9 @@ FEATURES = { ...@@ -140,6 +140,9 @@ FEATURES = {
# Enable the courseware search functionality # Enable the courseware search functionality
'ENABLE_COURSEWARE_INDEX': False, 'ENABLE_COURSEWARE_INDEX': False,
# Enable content libraries search functionality
'ENABLE_LIBRARY_INDEX': False,
# Enable course reruns, which will always use the split modulestore
'ALLOW_COURSE_RERUNS': True,
...
...@@ -80,6 +80,7 @@ FEATURES['ENTRANCE_EXAMS'] = True
################################ SEARCH INDEX ################################
FEATURES['ENABLE_COURSEWARE_INDEX'] = True
FEATURES['ENABLE_LIBRARY_INDEX'] = True
SEARCH_ENGINE = "search.elastic.ElasticSearchEngine" SEARCH_ENGINE = "search.elastic.ElasticSearchEngine"
############################################################################### ###############################################################################
......
...@@ -267,6 +267,7 @@ VIDEO_CDN_URL = { ...@@ -267,6 +267,7 @@ VIDEO_CDN_URL = {
# Courseware Search Index # Courseware Search Index
FEATURES['ENABLE_COURSEWARE_INDEX'] = True FEATURES['ENABLE_COURSEWARE_INDEX'] = True
FEATURES['ENABLE_LIBRARY_INDEX'] = True
SEARCH_ENGINE = "search.tests.mock_search_engine.MockSearchEngine" SEARCH_ENGINE = "search.tests.mock_search_engine.MockSearchEngine"
# Dummy secret key for dev/test # Dummy secret key for dev/test
......
...@@ -113,6 +113,7 @@ class CapaDescriptor(CapaFields, RawDescriptor): ...@@ -113,6 +113,7 @@ class CapaDescriptor(CapaFields, RawDescriptor):
Module implementing problems in the LON-CAPA format, Module implementing problems in the LON-CAPA format,
as implemented by capa.capa_problem as implemented by capa.capa_problem
""" """
INDEX_CONTENT_TYPE = 'CAPA'
module_class = CapaModule module_class = CapaModule
...@@ -186,6 +187,21 @@ class CapaDescriptor(CapaFields, RawDescriptor): ...@@ -186,6 +187,21 @@ class CapaDescriptor(CapaFields, RawDescriptor):
registered_tags = responsetypes.registry.registered_tags() registered_tags = responsetypes.registry.registered_tags()
return set([node.tag for node in tree.iter() if node.tag in registered_tags]) return set([node.tag for node in tree.iter() if node.tag in registered_tags])
def index_dictionary(self):
"""
Return dictionary prepared with module content and type for indexing.
"""
result = super(CapaDescriptor, self).index_dictionary()
if not result:
result = {}
index = {
'content_type': self.INDEX_CONTENT_TYPE,
'problem_types': list(self.problem_types),
"display_name": self.display_name
}
result.update(index)
return result
# Proxy to CapaModule for access to any of its attributes # Proxy to CapaModule for access to any of its attributes
answer_available = module_attr('answer_available') answer_available = module_attr('answer_available')
check_button_name = module_attr('check_button_name') check_button_name = module_attr('check_button_name')
......
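Taken on its own, the new index_dictionary boils down to the three fields asserted in the unit tests further down; a rough sketch of the value produced for a problem holding a single <optionresponse>, assuming the base implementation contributes nothing (as in those tests) and using a made-up display name:
# Sketch only: expected shape of CapaDescriptor.index_dictionary() output
# for a problem containing one <optionresponse>.
expected_index_entry = {
    'content_type': 'CAPA',               # CapaDescriptor.INDEX_CONTENT_TYPE
    'problem_types': ['optionresponse'],  # list(self.problem_types)
    'display_name': 'Dropdown Problem',   # hypothetical display name
}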
...@@ -2,13 +2,19 @@ ...@@ -2,13 +2,19 @@
XBlock runtime services for LibraryContentModule XBlock runtime services for LibraryContentModule
""" """
from django.core.exceptions import PermissionDenied from django.core.exceptions import PermissionDenied
from opaque_keys.edx.locator import LibraryLocator from opaque_keys.edx.locator import LibraryLocator, LibraryUsageLocator
from search.search_engine_base import SearchEngine
from xmodule.library_content_module import ANY_CAPA_TYPE_VALUE from xmodule.library_content_module import ANY_CAPA_TYPE_VALUE
from xmodule.modulestore import ModuleStoreEnum from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.exceptions import ItemNotFoundError from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.capa_module import CapaDescriptor from xmodule.capa_module import CapaDescriptor
def normalize_key_for_search(library_key):
""" Normalizes library key for use with search indexing """
return library_key.replace(version_guid=None, branch=None)
class LibraryToolsService(object): class LibraryToolsService(object):
""" """
Service that allows LibraryContentModule to interact with libraries in the Service that allows LibraryContentModule to interact with libraries in the
...@@ -86,13 +92,25 @@ class LibraryToolsService(object): ...@@ -86,13 +92,25 @@ class LibraryToolsService(object):
result_json.append(info) result_json.append(info)
return result_json return result_json
def _problem_type_filter(self, library, capa_type):
""" Filters library children by capa type"""
search_engine = SearchEngine.get_search_engine(index="library_index")
if search_engine:
filter_clause = {
"library": unicode(normalize_key_for_search(library.location.library_key)),
"content_type": CapaDescriptor.INDEX_CONTENT_TYPE,
"problem_types": capa_type
}
search_result = search_engine.search(field_dictionary=filter_clause)
results = search_result.get('results', [])
return [LibraryUsageLocator.from_string(item['data']['id']) for item in results]
else:
return [key for key in library.children if self._filter_child(key, capa_type)]
def _filter_child(self, usage_key, capa_type): def _filter_child(self, usage_key, capa_type):
""" """
Filters children by CAPA problem type, if configured Filters children by CAPA problem type, if configured
""" """
if capa_type == ANY_CAPA_TYPE_VALUE:
return True
if usage_key.block_type != "problem": if usage_key.block_type != "problem":
return False return False
...@@ -137,7 +155,7 @@ class LibraryToolsService(object): ...@@ -137,7 +155,7 @@ class LibraryToolsService(object):
filter_children = (dest_block.capa_type != ANY_CAPA_TYPE_VALUE) filter_children = (dest_block.capa_type != ANY_CAPA_TYPE_VALUE)
if filter_children: if filter_children:
# Apply simple filtering based on CAPA problem types: # Apply simple filtering based on CAPA problem types:
source_blocks.extend([key for key in library.children if self._filter_child(key, dest_block.capa_type)]) source_blocks.extend(self._problem_type_filter(library, dest_block.capa_type))
else: else:
source_blocks.extend(library.children) source_blocks.extend(library.children)
......
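With a search engine configured, _problem_type_filter amounts to one field-dictionary query against the library_index plus a conversion of the hits back into usage keys; the standalone sketch below mirrors that call, with a made-up library key and problem type (without an engine the service keeps falling back to _filter_child, as before):
# Sketch of the query _problem_type_filter issues (illustrative values only).
from opaque_keys.edx.locator import LibraryLocator, LibraryUsageLocator
from search.search_engine_base import SearchEngine
library_key = LibraryLocator(org="DemoOrg", library="problem-bank")  # made up
searcher = SearchEngine.get_search_engine(index="library_index")
if searcher:
    response = searcher.search(field_dictionary={
        "library": unicode(library_key.replace(version_guid=None, branch=None)),
        "content_type": "CAPA",                     # CapaDescriptor.INDEX_CONTENT_TYPE
        "problem_types": "multiplechoiceresponse",  # the capa_type being filtered for
    })
    selected = [LibraryUsageLocator.from_string(hit['data']['id'])
                for hit in response.get('results', [])]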
...@@ -120,6 +120,7 @@ class BulkOpsRecord(object): ...@@ -120,6 +120,7 @@ class BulkOpsRecord(object):
def __init__(self): def __init__(self):
self._active_count = 0 self._active_count = 0
self.has_publish_item = False self.has_publish_item = False
self.has_library_updated_item = False
@property @property
def active(self): def active(self):
...@@ -248,7 +249,7 @@ class BulkOperationsMixin(object): ...@@ -248,7 +249,7 @@ class BulkOperationsMixin(object):
if bulk_ops_record.is_root: if bulk_ops_record.is_root:
self._start_outermost_bulk_operation(bulk_ops_record, course_key) self._start_outermost_bulk_operation(bulk_ops_record, course_key)
def _end_outermost_bulk_operation(self, bulk_ops_record, course_key, emit_signals=True): def _end_outermost_bulk_operation(self, bulk_ops_record, structure_key, emit_signals=True):
""" """
The outermost nested bulk_operation call: do the actual end of the bulk operation. The outermost nested bulk_operation call: do the actual end of the bulk operation.
...@@ -256,12 +257,12 @@ class BulkOperationsMixin(object): ...@@ -256,12 +257,12 @@ class BulkOperationsMixin(object):
""" """
pass pass
def _end_bulk_operation(self, course_key, emit_signals=True): def _end_bulk_operation(self, structure_key, emit_signals=True):
""" """
End the active bulk operation on course_key. End the active bulk operation on structure_key (course or library key).
""" """
# If no bulk op is active, return # If no bulk op is active, return
bulk_ops_record = self._get_bulk_ops_record(course_key) bulk_ops_record = self._get_bulk_ops_record(structure_key)
if not bulk_ops_record.active: if not bulk_ops_record.active:
return return
...@@ -272,9 +273,9 @@ class BulkOperationsMixin(object): ...@@ -272,9 +273,9 @@ class BulkOperationsMixin(object):
if bulk_ops_record.active: if bulk_ops_record.active:
return return
self._end_outermost_bulk_operation(bulk_ops_record, course_key, emit_signals) self._end_outermost_bulk_operation(bulk_ops_record, structure_key, emit_signals)
self._clear_bulk_ops_record(course_key) self._clear_bulk_ops_record(structure_key)
def _is_in_bulk_operation(self, course_key, ignore_case=False): def _is_in_bulk_operation(self, course_key, ignore_case=False):
""" """
...@@ -291,6 +292,15 @@ class BulkOperationsMixin(object): ...@@ -291,6 +292,15 @@ class BulkOperationsMixin(object):
signal_handler.send("course_published", course_key=course_id) signal_handler.send("course_published", course_key=course_id)
bulk_ops_record.has_publish_item = False bulk_ops_record.has_publish_item = False
def send_bulk_library_updated_signal(self, bulk_ops_record, library_id):
"""
Sends out the signal that a library has been updated.
"""
signal_handler = getattr(self, 'signal_handler', None)
if signal_handler and bulk_ops_record.has_library_updated_item:
signal_handler.send("library_updated", library_key=library_id)
bulk_ops_record.has_library_updated_item = False
class EditInfo(object): class EditInfo(object):
""" """
...@@ -1326,6 +1336,23 @@ class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite): ...@@ -1326,6 +1336,23 @@ class ModuleStoreWriteBase(ModuleStoreReadBase, ModuleStoreWrite):
else: else:
signal_handler.send("course_published", course_key=course_key) signal_handler.send("course_published", course_key=course_key)
def _flag_library_updated_event(self, library_key):
"""
Wrapper around calls to fire the library_updated signal
Unless we're nested in an active bulk operation, this simply fires the signal;
otherwise the library_updated signal is sent once the outermost bulk operation ends.
Arguments:
library_key - library key to which the signal applies
"""
signal_handler = getattr(self, 'signal_handler', None)
if signal_handler:
bulk_record = self._get_bulk_ops_record(library_key) if isinstance(self, BulkOperationsMixin) else None
if bulk_record and bulk_record.active:
bulk_record.has_library_updated_item = True
else:
signal_handler.send("library_updated", library_key=library_key)
def only_xmodules(identifier, entry_points): def only_xmodules(identifier, entry_points):
"""Only use entry_points that are supplied by the xmodule package""" """Only use entry_points that are supplied by the xmodule package"""
......
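The net effect of has_library_updated_item is that writes made inside a bulk operation are coalesced into a single library_updated emission when the outermost operation ends, rather than one per write; a hedged sketch of that behaviour (the store access, user id and library key are assumed, not shown in this hunk):
# Sketch: three library writes inside one bulk operation yield one signal.
from xmodule.modulestore.django import modulestore
store = modulestore()
with store.bulk_operations(library_key):  # library_key: a LibraryLocator (assumed)
    store.create_item(user_id, library_key, "problem", block_id="p1")
    store.create_item(user_id, library_key, "problem", block_id="p2")
    store.create_item(user_id, library_key, "problem", block_id="p3")
# send_bulk_library_updated_signal fires "library_updated" exactly once here,
# because each create_item call only flagged has_library_updated_item.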
...@@ -80,9 +80,11 @@ class SignalHandler(object): ...@@ -80,9 +80,11 @@ class SignalHandler(object):
""" """
course_published = django.dispatch.Signal(providing_args=["course_key"]) course_published = django.dispatch.Signal(providing_args=["course_key"])
library_updated = django.dispatch.Signal(providing_args=["library_key"])
_mapping = { _mapping = {
"course_published": course_published "course_published": course_published,
"library_updated": library_updated
} }
def __init__(self, modulestore_class): def __init__(self, modulestore_class):
......
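On the consumer side the new signal is subscribed to exactly like course_published; a minimal sketch of a receiver (the handler below and what it does are assumptions, not part of this commit):
# Sketch: a hypothetical consumer of the new library_updated signal.
import logging
from django.dispatch import receiver
from xmodule.modulestore.django import SignalHandler
log = logging.getLogger(__name__)
@receiver(SignalHandler.library_updated)
def listen_for_library_update(sender, library_key, **kwargs):  # pylint: disable=unused-argument
    """ React to a library change, e.g. by queueing a reindex of library_key. """
    log.info("library_updated received for %s", library_key)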
...@@ -466,16 +466,17 @@ class MongoBulkOpsMixin(BulkOperationsMixin): ...@@ -466,16 +466,17 @@ class MongoBulkOpsMixin(BulkOperationsMixin):
# ensure it starts clean # ensure it starts clean
bulk_ops_record.dirty = False bulk_ops_record.dirty = False
def _end_outermost_bulk_operation(self, bulk_ops_record, course_id, emit_signals=True): def _end_outermost_bulk_operation(self, bulk_ops_record, structure_key, emit_signals=True):
""" """
Restart updating the meta-data inheritance cache for the given course. Restart updating the meta-data inheritance cache for the given course or library.
Refresh the meta-data inheritance cache now since it was temporarily disabled. Refresh the meta-data inheritance cache now since it was temporarily disabled.
""" """
if bulk_ops_record.dirty: if bulk_ops_record.dirty:
self.refresh_cached_metadata_inheritance_tree(course_id) self.refresh_cached_metadata_inheritance_tree(structure_key)
if emit_signals: if emit_signals:
self.send_bulk_published_signal(bulk_ops_record, course_id) self.send_bulk_published_signal(bulk_ops_record, structure_key)
self.send_bulk_library_updated_signal(bulk_ops_record, structure_key)
bulk_ops_record.dirty = False # brand spanking clean now bulk_ops_record.dirty = False # brand spanking clean now
......
...@@ -229,9 +229,9 @@ class SplitBulkWriteMixin(BulkOperationsMixin): ...@@ -229,9 +229,9 @@ class SplitBulkWriteMixin(BulkOperationsMixin):
# Ensure that any edits to the index don't pollute the initial_index # Ensure that any edits to the index don't pollute the initial_index
bulk_write_record.index = copy.deepcopy(bulk_write_record.initial_index) bulk_write_record.index = copy.deepcopy(bulk_write_record.initial_index)
def _end_outermost_bulk_operation(self, bulk_write_record, course_key, emit_signals=True): def _end_outermost_bulk_operation(self, bulk_write_record, structure_key, emit_signals=True):
""" """
End the active bulk write operation on course_key. End the active bulk write operation on structure_key (course or library key).
""" """
dirty = False dirty = False
...@@ -268,7 +268,8 @@ class SplitBulkWriteMixin(BulkOperationsMixin): ...@@ -268,7 +268,8 @@ class SplitBulkWriteMixin(BulkOperationsMixin):
self.db_connection.update_course_index(bulk_write_record.index, from_index=bulk_write_record.initial_index) self.db_connection.update_course_index(bulk_write_record.index, from_index=bulk_write_record.initial_index)
if dirty and emit_signals: if dirty and emit_signals:
self.send_bulk_published_signal(bulk_write_record, course_key) self.send_bulk_published_signal(bulk_write_record, structure_key)
self.send_bulk_library_updated_signal(bulk_write_record, structure_key)
def get_course_index(self, course_key, ignore_case=False): def get_course_index(self, course_key, ignore_case=False):
""" """
...@@ -1536,6 +1537,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1536,6 +1537,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
block_id=block_key.id, block_id=block_key.id,
) )
if isinstance(course_key, LibraryLocator):
self._flag_library_updated_event(course_key)
# reconstruct the new_item from the cache # reconstruct the new_item from the cache
return self.get_item(item_loc) return self.get_item(item_loc)
...@@ -1891,6 +1895,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -1891,6 +1895,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
else: else:
course_key = CourseLocator(version_guid=new_id) course_key = CourseLocator(version_guid=new_id)
if isinstance(course_key, LibraryLocator):
self._flag_library_updated_event(course_key)
# fetch and return the new item--fetching is unnecessary but a good qc step # fetch and return the new item--fetching is unnecessary but a good qc step
new_locator = course_key.make_usage_key(block_key.type, block_key.id) new_locator = course_key.make_usage_key(block_key.type, block_key.id)
return self.get_item(new_locator, **kwargs) return self.get_item(new_locator, **kwargs)
...@@ -2392,6 +2399,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase): ...@@ -2392,6 +2399,9 @@ class SplitMongoModuleStore(SplitBulkWriteMixin, ModuleStoreWriteBase):
else: else:
result = CourseLocator(version_guid=new_id) result = CourseLocator(version_guid=new_id)
if isinstance(usage_locator.course_key, LibraryLocator):
self._flag_library_updated_event(usage_locator.course_key)
return result return result
@contract(block_key=BlockKey, blocks='dict(BlockKey: BlockData)') @contract(block_key=BlockKey, blocks='dict(BlockKey: BlockData)')
......
...@@ -1659,18 +1659,26 @@ class CapaModuleTest(unittest.TestCase): ...@@ -1659,18 +1659,26 @@ class CapaModuleTest(unittest.TestCase):
@ddt.ddt @ddt.ddt
class CapaDescriptorTest(unittest.TestCase): class CapaDescriptorTest(unittest.TestCase):
def _create_descriptor(self, xml): def _create_descriptor(self, xml, name=None):
""" Creates a CapaDescriptor to run test against """ """ Creates a CapaDescriptor to run test against """
descriptor = CapaDescriptor(get_test_system(), scope_ids=1) descriptor = CapaDescriptor(get_test_system(), scope_ids=1)
descriptor.data = xml descriptor.data = xml
if name:
descriptor.display_name = name
return descriptor return descriptor
@ddt.data(*responsetypes.registry.registered_tags()) @ddt.data(*responsetypes.registry.registered_tags())
def test_all_response_types(self, response_tag): def test_all_response_types(self, response_tag):
""" Tests that every registered response tag is correctly returned """ """ Tests that every registered response tag is correctly returned """
xml = "<problem><{response_tag}></{response_tag}></problem>".format(response_tag=response_tag) xml = "<problem><{response_tag}></{response_tag}></problem>".format(response_tag=response_tag)
descriptor = self._create_descriptor(xml) name = "Some Capa Problem"
descriptor = self._create_descriptor(xml, name=name)
self.assertEquals(descriptor.problem_types, {response_tag}) self.assertEquals(descriptor.problem_types, {response_tag})
self.assertEquals(descriptor.index_dictionary(), {
'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
'display_name': name,
'problem_types': [response_tag]
})
def test_response_types_ignores_non_response_tags(self): def test_response_types_ignores_non_response_tags(self):
xml = textwrap.dedent(""" xml = textwrap.dedent("""
...@@ -1687,8 +1695,14 @@ class CapaDescriptorTest(unittest.TestCase): ...@@ -1687,8 +1695,14 @@ class CapaDescriptorTest(unittest.TestCase):
</multiplechoiceresponse> </multiplechoiceresponse>
</problem> </problem>
""") """)
descriptor = self._create_descriptor(xml) name = "Test Capa Problem"
descriptor = self._create_descriptor(xml, name=name)
self.assertEquals(descriptor.problem_types, {"multiplechoiceresponse"}) self.assertEquals(descriptor.problem_types, {"multiplechoiceresponse"})
self.assertEquals(descriptor.index_dictionary(), {
'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
'display_name': name,
'problem_types': ["multiplechoiceresponse"]
})
def test_response_types_multiple_tags(self): def test_response_types_multiple_tags(self):
xml = textwrap.dedent(""" xml = textwrap.dedent("""
...@@ -1710,8 +1724,16 @@ class CapaDescriptorTest(unittest.TestCase): ...@@ -1710,8 +1724,16 @@ class CapaDescriptorTest(unittest.TestCase):
</optionresponse> </optionresponse>
</problem> </problem>
""") """)
descriptor = self._create_descriptor(xml) name = "Other Test Capa Problem"
descriptor = self._create_descriptor(xml, name=name)
self.assertEquals(descriptor.problem_types, {"multiplechoiceresponse", "optionresponse"}) self.assertEquals(descriptor.problem_types, {"multiplechoiceresponse", "optionresponse"})
self.assertEquals(
descriptor.index_dictionary(), {
'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
'display_name': name,
'problem_types': ["optionresponse", "multiplechoiceresponse"]
}
)
class ComplexEncoderTest(unittest.TestCase): class ComplexEncoderTest(unittest.TestCase):
......
...@@ -18,6 +18,7 @@ from xmodule.modulestore.tests.utils import MixedSplitTestCase ...@@ -18,6 +18,7 @@ from xmodule.modulestore.tests.utils import MixedSplitTestCase
from xmodule.tests import get_test_system from xmodule.tests import get_test_system
from xmodule.validation import StudioValidationMessage from xmodule.validation import StudioValidationMessage
from xmodule.x_module import AUTHOR_VIEW from xmodule.x_module import AUTHOR_VIEW
from search.search_engine_base import SearchEngine
dummy_render = lambda block, _: Fragment(block.data) # pylint: disable=invalid-name dummy_render = lambda block, _: Fragment(block.data) # pylint: disable=invalid-name
...@@ -66,10 +67,17 @@ class LibraryContentTest(MixedSplitTestCase): ...@@ -66,10 +67,17 @@ class LibraryContentTest(MixedSplitTestCase):
module.xmodule_runtime = module_system module.xmodule_runtime = module_system
class TestLibraryContentModule(LibraryContentTest): class LibraryContentModuleTestMixin(object):
""" """
Basic unit tests for LibraryContentModule Basic unit tests for LibraryContentModule
""" """
problem_types = [
["multiplechoiceresponse"], ["optionresponse"], ["optionresponse", "coderesponse"],
["coderesponse", "optionresponse"]
]
problem_type_lookup = {}
def _get_capa_problem_type_xml(self, *args): def _get_capa_problem_type_xml(self, *args):
""" Helper function to create empty CAPA problem definition """ """ Helper function to create empty CAPA problem definition """
problem = "<problem>" problem = "<problem>"
...@@ -84,12 +92,10 @@ class TestLibraryContentModule(LibraryContentTest): ...@@ -84,12 +92,10 @@ class TestLibraryContentModule(LibraryContentTest):
Creates four blocks total. Creates four blocks total.
""" """
problem_types = [ self.problem_type_lookup = {}
["multiplechoiceresponse"], ["optionresponse"], ["optionresponse", "coderesponse"], for problem_type in self.problem_types:
["coderesponse", "optionresponse"] block = self.make_block("problem", self.library, data=self._get_capa_problem_type_xml(*problem_type))
] self.problem_type_lookup[block.location] = problem_type
for problem_type in problem_types:
self.make_block("problem", self.library, data=self._get_capa_problem_type_xml(*problem_type))
def test_lib_content_block(self): def test_lib_content_block(self):
""" """
...@@ -236,6 +242,42 @@ class TestLibraryContentModule(LibraryContentTest): ...@@ -236,6 +242,42 @@ class TestLibraryContentModule(LibraryContentTest):
self.assertNotIn(LibraryContentDescriptor.display_name, non_editable_metadata_fields) self.assertNotIn(LibraryContentDescriptor.display_name, non_editable_metadata_fields)
@patch('xmodule.library_tools.SearchEngine.get_search_engine', Mock(return_value=None))
class TestLibraryContentModuleNoSearchIndex(LibraryContentModuleTestMixin, LibraryContentTest):
"""
Tests for library container when no search index is available.
Tests the fallback to low-level CAPA problem introspection.
"""
pass
search_index_mock = Mock(spec=SearchEngine) # pylint: disable=invalid-name
@patch('xmodule.library_tools.SearchEngine.get_search_engine', Mock(return_value=search_index_mock))
class TestLibraryContentModuleWithSearchIndex(LibraryContentModuleTestMixin, LibraryContentTest):
"""
Tests for library container with mocked search engine response.
"""
def _get_search_response(self, field_dictionary=None):
""" Mocks search response as returned by search engine """
target_type = field_dictionary.get('problem_types')
matched_block_locations = [
key for key, problem_types in
self.problem_type_lookup.items() if target_type in problem_types
]
return {
'results': [
{'data': {'id': str(location)}} for location in matched_block_locations
]
}
def setUp(self):
""" Sets up search engine mock """
super(TestLibraryContentModuleWithSearchIndex, self).setUp()
search_index_mock.search = Mock(side_effect=self._get_search_response)
@patch( @patch(
'xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.render', VanillaRuntime.render 'xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.render', VanillaRuntime.render
) )
......
...@@ -417,3 +417,17 @@ def create_user_partition_json(partition_id, name, description, groups, scheme=" ...@@ -417,3 +417,17 @@ def create_user_partition_json(partition_id, name, description, groups, scheme="
return UserPartition( return UserPartition(
partition_id, name, description, groups, MockUserPartitionScheme(scheme) partition_id, name, description, groups, MockUserPartitionScheme(scheme)
).to_json() ).to_json()
class TestWithSearchIndexMixin(object):
""" Mixin encapsulating search index creation """
TEST_INDEX_FILENAME = "test_root/index_file.dat"
def _create_search_index(self):
""" Creates search index backing file """
with open(self.TEST_INDEX_FILENAME, "w+") as index_file:
json.dump({}, index_file)
def _cleanup_index_file(self):
""" Removes search index backing file """
os.remove(self.TEST_INDEX_FILENAME)
...@@ -6,7 +6,7 @@ import ddt ...@@ -6,7 +6,7 @@ import ddt
import textwrap import textwrap
from nose.plugins.attrib import attr from nose.plugins.attrib import attr
from ..helpers import UniqueCourseTest from ..helpers import UniqueCourseTest, TestWithSearchIndexMixin
from ...pages.studio.auto_auth import AutoAuthPage from ...pages.studio.auto_auth import AutoAuthPage
from ...pages.studio.overview import CourseOutlinePage from ...pages.studio.overview import CourseOutlinePage
from ...pages.studio.library import StudioLibraryContentEditor, StudioLibraryContainerXBlockWrapper from ...pages.studio.library import StudioLibraryContentEditor, StudioLibraryContainerXBlockWrapper
...@@ -196,10 +196,19 @@ class LibraryContentTest(LibraryContentTestBase): ...@@ -196,10 +196,19 @@ class LibraryContentTest(LibraryContentTestBase):
@ddt.ddt @ddt.ddt
@attr('shard_3') @attr('shard_3')
class StudioLibraryContainerCapaFilterTest(LibraryContentTestBase): class StudioLibraryContainerCapaFilterTest(LibraryContentTestBase, TestWithSearchIndexMixin):
""" """
Test Library Content block in LMS Test Library Content block in LMS
""" """
def setUp(self):
""" SetUp method """
self._create_search_index()
super(StudioLibraryContainerCapaFilterTest, self).setUp()
def tearDown(self):
self._cleanup_index_file()
super(StudioLibraryContainerCapaFilterTest, self).tearDown()
def _get_problem_choice_group_text(self, name, items): def _get_problem_choice_group_text(self, name, items):
""" Generates Choice Group CAPA problem XML """ """ Generates Choice Group CAPA problem XML """
items_text = "\n".join([ items_text = "\n".join([
...@@ -231,7 +240,7 @@ class StudioLibraryContainerCapaFilterTest(LibraryContentTestBase): ...@@ -231,7 +240,7 @@ class StudioLibraryContainerCapaFilterTest(LibraryContentTestBase):
""" """
Populates library fixture with XBlock Fixtures Populates library fixture with XBlock Fixtures
""" """
library_fixture.add_children( items = (
XBlockFixtureDesc( XBlockFixtureDesc(
"problem", "Problem Choice Group 1", "problem", "Problem Choice Group 1",
data=self._get_problem_choice_group_text("Problem Choice Group 1 Text", [("1", False), ('2', True)]) data=self._get_problem_choice_group_text("Problem Choice Group 1 Text", [("1", False), ('2', True)])
...@@ -249,6 +258,7 @@ class StudioLibraryContainerCapaFilterTest(LibraryContentTestBase): ...@@ -249,6 +258,7 @@ class StudioLibraryContainerCapaFilterTest(LibraryContentTestBase):
data=self._get_problem_select_text("Problem Select 2 Text", ["Option 3", "Option 4"], "Option 4") data=self._get_problem_select_text("Problem Select 2 Text", ["Option 3", "Option 4"], "Option 4")
), ),
) )
library_fixture.add_children(*items)
@property @property
def _problem_headers(self): def _problem_headers(self):
......
...@@ -7,7 +7,7 @@ import textwrap ...@@ -7,7 +7,7 @@ import textwrap
from .base_studio_test import StudioLibraryTest from .base_studio_test import StudioLibraryTest
from ...fixtures.course import CourseFixture from ...fixtures.course import CourseFixture
from ..helpers import UniqueCourseTest from ..helpers import UniqueCourseTest, TestWithSearchIndexMixin
from ...pages.studio.library import StudioLibraryContentEditor, StudioLibraryContainerXBlockWrapper from ...pages.studio.library import StudioLibraryContentEditor, StudioLibraryContainerXBlockWrapper
from ...pages.studio.overview import CourseOutlinePage from ...pages.studio.overview import CourseOutlinePage
from ...fixtures.course import XBlockFixtureDesc from ...fixtures.course import XBlockFixtureDesc
...@@ -18,7 +18,7 @@ UNIT_NAME = 'Test Unit' ...@@ -18,7 +18,7 @@ UNIT_NAME = 'Test Unit'
@ddt.ddt @ddt.ddt
class StudioLibraryContainerTest(StudioLibraryTest, UniqueCourseTest): class StudioLibraryContainerTest(StudioLibraryTest, UniqueCourseTest, TestWithSearchIndexMixin):
""" """
Test Library Content block in LMS Test Library Content block in LMS
""" """
...@@ -26,6 +26,7 @@ class StudioLibraryContainerTest(StudioLibraryTest, UniqueCourseTest): ...@@ -26,6 +26,7 @@ class StudioLibraryContainerTest(StudioLibraryTest, UniqueCourseTest):
""" """
Install library with some content and a course using fixtures Install library with some content and a course using fixtures
""" """
self._create_search_index()
super(StudioLibraryContainerTest, self).setUp() super(StudioLibraryContainerTest, self).setUp()
# Also create a course: # Also create a course:
self.course_fixture = CourseFixture( self.course_fixture = CourseFixture(
...@@ -42,6 +43,11 @@ class StudioLibraryContainerTest(StudioLibraryTest, UniqueCourseTest): ...@@ -42,6 +43,11 @@ class StudioLibraryContainerTest(StudioLibraryTest, UniqueCourseTest):
subsection = self.outline.section(SECTION_NAME).subsection(SUBSECTION_NAME) subsection = self.outline.section(SECTION_NAME).subsection(SUBSECTION_NAME)
self.unit_page = subsection.expand_subsection().unit(UNIT_NAME).go_to() self.unit_page = subsection.expand_subsection().unit(UNIT_NAME).go_to()
def tearDown(self):
""" Tear down method: remove search index backing file """
self._cleanup_index_file()
super(StudioLibraryContainerTest, self).tearDown()
def populate_library_fixture(self, library_fixture): def populate_library_fixture(self, library_fixture):
""" """
Populate the children of the test course fixture. Populate the children of the test course fixture.
......