Commit 3aba9da8 by Renzo Lucioni

Support reading of programs exclusively from the cache

When the read_cached_programs waffle switch is enabled, these changes prevent LMS application code from hitting the discovery service in-process to retrieve programs. Instead, program data is pulled from cache entries populated by the cache_programs management command.

LEARNER-382
parent ed5412eb
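
The cache_programs management command referenced in the message above is not part of the hunks shown below. As a rough sketch of what it presumably does, under the assumption that it simply writes one cache entry per program plus the list of all program UUIDs (the two cache key constants are the ones imported by the code in this commit; fetch_programs_from_discovery is a hypothetical stand-in for however the real command talks to the discovery service):

# Illustrative sketch only; not the actual cache_programs implementation.
from django.core.cache import cache
from django.core.management.base import BaseCommand

from openedx.core.djangoapps.catalog.cache import PROGRAM_CACHE_KEY_TPL, PROGRAM_UUIDS_CACHE_KEY


def fetch_programs_from_discovery():
    # Hypothetical helper standing in for however the real command pulls
    # program data from the discovery service; returns a list of program dicts.
    return []


class Command(BaseCommand):
    help = 'Cache program data so get_programs() can avoid in-process calls to discovery.'

    def handle(self, *args, **options):
        programs = fetch_programs_from_discovery()

        # A timeout of None tells Django's cache to keep the entries until they
        # are overwritten or evicted, matching how get_programs() reads them.
        cache.set(PROGRAM_UUIDS_CACHE_KEY, [program['uuid'] for program in programs], None)
        cache.set_many(
            {PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid']): program for program in programs},
            None
        )
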
openedx/core/djangoapps/catalog/tests/test_utils.py
@@ -5,9 +5,11 @@ import uuid
import mock
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.test import TestCase
from waffle.models import Switch
from openedx.core.djangoapps.catalog.cache import PROGRAM_CACHE_KEY_TPL, PROGRAM_UUIDS_CACHE_KEY
from openedx.core.djangoapps.catalog.models import CatalogIntegration
from openedx.core.djangoapps.catalog.tests.factories import CourseRunFactory, ProgramFactory, ProgramTypeFactory
from openedx.core.djangoapps.catalog.tests.mixins import CatalogIntegrationMixin
@@ -17,14 +19,108 @@ from openedx.core.djangoapps.catalog.utils import (
    get_programs_with_type,
    get_course_runs,
)
from openedx.core.djangolib.testing.utils import skip_unless_lms
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from student.tests.factories import UserFactory
UTILS_MODULE = 'openedx.core.djangoapps.catalog.utils'

User = get_user_model()  # pylint: disable=invalid-name


@skip_unless_lms
@mock.patch(UTILS_MODULE + '.logger.warning')
class TestGetCachedPrograms(CacheIsolationTestCase):
    ENABLED_CACHES = ['default']

    def setUp(self):
        super(TestGetCachedPrograms, self).setUp()

        Switch.objects.create(name='read_cached_programs', active=True)

    def test_get_many(self, mock_warning):
        programs = ProgramFactory.create_batch(3)

        # Cache details for 2 of 3 programs.
        partial_programs = {
            PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid']): program for program in programs[:2]
        }
        cache.set_many(partial_programs, None)

        # When called before UUIDs are cached, the function should return an empty
        # list and log a warning.
        self.assertEqual(get_programs(), [])
        mock_warning.assert_called_once_with('Program UUIDs are not cached.')
        mock_warning.reset_mock()

        # Cache UUIDs for all 3 programs.
        cache.set(
            PROGRAM_UUIDS_CACHE_KEY,
            [program['uuid'] for program in programs],
            None
        )

        actual_programs = get_programs()

        # The 2 cached programs should be returned while a warning should be logged
        # for the missing one.
        self.assertEqual(
            set(program['uuid'] for program in actual_programs),
            set(program['uuid'] for program in partial_programs.values())
        )
        mock_warning.assert_called_with(
            'Details for program {uuid} are not cached.'.format(uuid=programs[2]['uuid'])
        )
        mock_warning.reset_mock()

        # We can't use a set comparison here because these values are dictionaries
        # and aren't hashable. We've already verified that all programs came out
        # of the cache above, so all we need to do here is verify the accuracy of
        # the data itself.
        for program in actual_programs:
            key = PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid'])
            self.assertEqual(program, partial_programs[key])

        # Cache details for all 3 programs.
        all_programs = {
            PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid']): program for program in programs
        }
        cache.set_many(all_programs, None)

        actual_programs = get_programs()

        # All 3 programs should be returned.
        self.assertEqual(
            set(program['uuid'] for program in actual_programs),
            set(program['uuid'] for program in all_programs.values())
        )
        self.assertFalse(mock_warning.called)

        for program in actual_programs:
            key = PROGRAM_CACHE_KEY_TPL.format(uuid=program['uuid'])
            self.assertEqual(program, all_programs[key])

    def test_get_one(self, mock_warning):
        expected_program = ProgramFactory()
        expected_uuid = expected_program['uuid']

        self.assertEqual(get_programs(uuid=expected_uuid), None)
        mock_warning.assert_called_once_with(
            'Details for program {uuid} are not cached.'.format(uuid=expected_uuid)
        )
        mock_warning.reset_mock()

        cache.set(
            PROGRAM_CACHE_KEY_TPL.format(uuid=expected_uuid),
            expected_program,
            None
        )

        actual_program = get_programs(uuid=expected_uuid)
        self.assertEqual(actual_program, expected_program)
        self.assertFalse(mock_warning.called)
@skip_unless_lms
@mock.patch(UTILS_MODULE + '.get_edx_api_data')
class TestGetPrograms(CatalogIntegrationMixin, TestCase):
"""Tests covering retrieval of programs from the catalog service."""
@@ -89,15 +185,6 @@ class TestGetPrograms(CatalogIntegrationMixin, TestCase):
        self.assert_contract(mock_get_edx_api_data.call_args, program_uuid=self.uuid)
        self.assertEqual(data, program)

    def test_get_programs_by_types(self, mock_get_edx_api_data):
        programs = ProgramFactory.create_batch(2)
        mock_get_edx_api_data.return_value = programs

        data = get_programs(types=self.types)

        self.assert_contract(mock_get_edx_api_data.call_args, types=self.types)
        self.assertEqual(data, programs)

    def test_programs_unavailable(self, mock_get_edx_api_data):
        mock_get_edx_api_data.return_value = []
@@ -222,7 +309,7 @@ class TestGetCourseRuns(CatalogIntegrationMixin, TestCase):
        self.assertFalse(mock_get_edx_api_data.called)
        self.assertEqual(data, [])

    @mock.patch(UTILS_MODULE + '.log.error')
    @mock.patch(UTILS_MODULE + '.logger.error')
    def test_service_user_missing(self, mock_log_error, mock_get_edx_api_data):
        """
        Verify that no errors occur when the catalog service user is missing.
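Both the tests above and the utils changes below import PROGRAM_CACHE_KEY_TPL and PROGRAM_UUIDS_CACHE_KEY from openedx.core.djangoapps.catalog.cache, which is not included in the hunks shown. Judging from how the constants are used, that module amounts to little more than the two names sketched here; the exact string values are assumptions for illustration:

# Assumed shape of openedx/core/djangoapps/catalog/cache.py (not shown in this diff).
# What is certain from the usage in this commit: PROGRAM_CACHE_KEY_TPL is formatted
# with a program UUID, and PROGRAM_UUIDS_CACHE_KEY stores the list of all cached UUIDs.
PROGRAM_CACHE_KEY_TPL = 'program-{uuid}'  # illustrative value
PROGRAM_UUIDS_CACHE_KEY = 'program-uuids'  # illustrative value
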
openedx/core/djangoapps/catalog/utils.py
@@ -2,16 +2,19 @@
import copy
import logging
import waffle
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.cache import cache
from edx_rest_api_client.client import EdxRestApiClient
from openedx.core.djangoapps.catalog.cache import PROGRAM_CACHE_KEY_TPL, PROGRAM_UUIDS_CACHE_KEY
from openedx.core.djangoapps.catalog.models import CatalogIntegration
from openedx.core.lib.edx_api_utils import get_edx_api_data
from openedx.core.lib.token_utils import JwtBuilder
log = logging.getLogger(__name__)
logger = logging.getLogger(__name__)
User = get_user_model() # pylint: disable=invalid-name
@@ -24,55 +27,74 @@ def create_catalog_api_client(user, catalog_integration):
    return EdxRestApiClient(catalog_integration.internal_api_url, jwt=jwt)


-def get_programs(uuid=None, types=None):  # pylint: disable=redefined-builtin
-    """Retrieve marketable programs from the catalog service.
+def get_programs(uuid=None):
+    """Read programs from the cache.
+
+    The cache is populated by a management command, cache_programs.

    Keyword Arguments:
-        uuid (string): UUID identifying a specific program.
-        types (list of string): List of program type names used to filter programs by type
-            (e.g., ["MicroMasters"] will only return MicroMasters programs,
-            ["MicroMasters", "XSeries"] will return MicroMasters and XSeries programs).
+        uuid (string): UUID identifying a specific program to read from the cache.

    Returns:
        list of dict, representing programs.
        dict, if a specific program is requested.
    """
-    catalog_integration = CatalogIntegration.current()
-    if catalog_integration.enabled:
-        try:
-            user = User.objects.get(username=catalog_integration.service_username)
-        except User.DoesNotExist:
-            return []
+    if waffle.switch_is_active('read_cached_programs'):
+        missing_details_msg_tpl = 'Details for program {uuid} are not cached.'

-        api = create_catalog_api_client(user, catalog_integration)
-        types_param = ','.join(types) if types else None
+        if uuid:
+            program = cache.get(PROGRAM_CACHE_KEY_TPL.format(uuid=uuid))
+            if not program:
+                logger.warning(missing_details_msg_tpl.format(uuid=uuid))

-        cache_key = '{base}.programs{types}'.format(
-            base=catalog_integration.CACHE_KEY,
-            types='.' + types_param if types_param else ''
-        )
+            return program

-        querystring = {
-            'exclude_utm': 1,
-            'status': ('active', 'retired',),
-        }
+        uuids = cache.get(PROGRAM_UUIDS_CACHE_KEY, [])
+        if not uuids:
+            logger.warning('Program UUIDs are not cached.')

-        if uuid:
-            querystring['use_full_course_serializer'] = 1
-        if types_param:
-            querystring['types'] = types_param
-        return get_edx_api_data(
-            catalog_integration,
-            'programs',
-            api=api,
-            resource_id=uuid,
-            cache_key=cache_key if catalog_integration.is_cache_enabled else None,
-            querystring=querystring,
-        )
+        programs = cache.get_many(PROGRAM_CACHE_KEY_TPL.format(uuid=uuid) for uuid in uuids)
+        programs = list(programs.values())

+        missing_uuids = set(uuids) - set(program['uuid'] for program in programs)
+        for uuid in missing_uuids:
+            logger.warning(missing_details_msg_tpl.format(uuid=uuid))

+        return programs
    else:
-        return []
+        # Old implementation which may request programs in-process. To be removed
+        # as part of LEARNER-382.
+        catalog_integration = CatalogIntegration.current()
+        if catalog_integration.enabled:
+            try:
+                user = User.objects.get(username=catalog_integration.service_username)
+            except User.DoesNotExist:
+                return []

+            api = create_catalog_api_client(user, catalog_integration)

+            cache_key = '{base}.programs'.format(
+                base=catalog_integration.CACHE_KEY
+            )

+            querystring = {
+                'exclude_utm': 1,
+                'status': ('active', 'retired',),
+            }

+            if uuid:
+                querystring['use_full_course_serializer'] = 1

+            return get_edx_api_data(
+                catalog_integration,
+                'programs',
+                api=api,
+                resource_id=uuid,
+                cache_key=cache_key if catalog_integration.is_cache_enabled else None,
+                querystring=querystring,
+            )
+        else:
+            return []
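
To make the new read path concrete, here is a minimal usage sketch, assuming the cache has already been populated by cache_programs and the read_cached_programs switch is active:

# Minimal usage sketch of the cached read path (assumes a populated cache and an
# active 'read_cached_programs' waffle switch).
from openedx.core.djangoapps.catalog.utils import get_programs

programs = get_programs()  # list of program dicts from the cache; [] if UUIDs aren't cached
for program in programs:
    # get_programs(uuid=...) returns a single program dict, or None if its details aren't cached.
    assert get_programs(uuid=program['uuid']) == program
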
def get_program_types(name=None):
@@ -121,11 +143,19 @@ def get_programs_with_type(types=None):
        list of dict, representing the active programs.
    """
    programs_with_type = []
    programs = get_programs(types=types)
    programs = get_programs()
    if programs:
        program_types = {program_type['name']: program_type for program_type in get_program_types()}

        for program in programs:
            # This limited type filtering is sufficient for current needs and
            # helps us avoid additional complexity when caching program data.
            # However, if the need for additional filtering of programs should
            # arise, consider using the cache_programs management command to
            # cache the filtered lists of UUIDs.
            if types and program['type'] not in types:
                continue

            # deepcopy the program dict here so we are not adding
            # the type to the cached object
            program_with_type = copy.deepcopy(program)
@@ -148,7 +178,7 @@ def get_course_runs():
        try:
            user = User.objects.get(username=catalog_integration.service_username)
        except User.DoesNotExist:
            log.error(
            logger.error(
                'Catalog service user with username [%s] does not exist. Course runs will not be retrieved.',
                catalog_integration.service_username,
            )