Commit ec2e90f5 by Calen Pennington

Improve the structure cache metrics with a course context, more standardized from_cache tag, and compression sizes
parent c63ac868
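
The diff below threads a course_context through CourseStructureCache and records cache metrics through a module-level TIMER whose timer() context manager yields a tagger with tag() and measure() methods. As orientation, here is a minimal sketch of that interface as the diff appears to assume it; the stub classes and their in-memory bookkeeping are illustrative, not the project's actual metrics helper:

from contextlib import contextmanager


class _Tagger(object):
    """Hypothetical tagger: collects tags and measurements in memory."""
    def __init__(self):
        self.tags = {}
        self.measures = {}

    def tag(self, **tags):
        # e.g. tagger.tag(from_cache='true')
        self.tags.update(tags)

    def measure(self, name, value):
        # e.g. tagger.measure('compressed_size', 1024)
        self.measures[name] = value


class _Timer(object):
    @contextmanager
    def timer(self, metric_name, course_context=None):
        # A real implementation would also time the block and emit the
        # duration plus the collected tags/measurements to a metrics
        # backend, keyed by metric_name and the course context.
        yield _Tagger()


TIMER = _Timer()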
@@ -21,7 +21,6 @@ from mongodb_proxy import autoretry_read, MongoProxy
 from xmodule.exceptions import HeartbeatFailure
 from xmodule.modulestore import BlockData
 from xmodule.modulestore.split_mongo import BlockKey
-import dogstats_wrapper as dog_stats_api
 
 new_contract('BlockData', BlockData)
@@ -222,33 +221,40 @@ class CourseStructureCache(object):
         except InvalidCacheBackendError:
             self.no_cache_found = True
 
-    def get(self, key):
+    def get(self, key, course_context=None):
         """Pull the compressed, pickled struct data from cache and deserialize."""
         if self.no_cache_found:
             return None
 
-        compressed_pickled_data = self.cache.get(key)
-        if compressed_pickled_data is None:
-            return None
-        return pickle.loads(zlib.decompress(compressed_pickled_data))
+        with TIMER.timer("CourseStructureCache.get", course_context) as tagger:
+            compressed_pickled_data = self.cache.get(key)
+            tagger.tag(from_cache=str(compressed_pickled_data is not None).lower())
+
+            if compressed_pickled_data is None:
+                return None
+
+            tagger.measure('compressed_size', len(compressed_pickled_data))
+
+            pickled_data = zlib.decompress(compressed_pickled_data)
+            tagger.measure('uncompressed_size', len(pickled_data))
+            return pickle.loads(pickled_data)
 
-    def set(self, key, structure):
+    def set(self, key, structure, course_context=None):
         """Given a structure, will pickle, compress, and write to cache."""
         if self.no_cache_found:
             return None
 
-        pickled_data = pickle.dumps(structure, pickle.HIGHEST_PROTOCOL)
-        # 1 = Fastest (slightly larger results)
-        compressed_pickled_data = zlib.compress(pickled_data, 1)
-
-        # record compressed course structure sizes
-        dog_stats_api.histogram(
-            'compressed_course_structure.size',
-            len(compressed_pickled_data),
-            tags=[key]
-        )
-        # Stuctures are immutable, so we set a timeout of "never"
-        self.cache.set(key, compressed_pickled_data, None)
+        with TIMER.timer("CourseStructureCache.set", course_context) as tagger:
+            pickled_data = pickle.dumps(structure, pickle.HIGHEST_PROTOCOL)
+            tagger.measure('uncompressed_size', len(pickled_data))
+
+            # 1 = Fastest (slightly larger results)
+            compressed_pickled_data = zlib.compress(pickled_data, 1)
+            tagger.measure('compressed_size', len(compressed_pickled_data))
+
+            # Structures are immutable, so we set a timeout of "never"
+            self.cache.set(key, compressed_pickled_data, None)
 
 
 class MongoConnection(object):
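
A usage sketch for the new signatures (assuming the stub TIMER above; the cache key, structure payload, and course-context string are hypothetical):

cache = CourseStructureCache()
course_context = 'course-v1:edX+DemoX+Demo'  # hypothetical course identifier

structure = cache.get('structure:abc123', course_context)
if structure is None:
    structure = {'blocks': {}}  # hypothetical structure payload
    cache.set('structure:abc123', structure, course_context)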
@@ -311,14 +317,14 @@ class MongoConnection(object):
         with TIMER.timer("get_structure", course_context) as tagger_get_structure:
             cache = CourseStructureCache()
 
-            structure = cache.get(key)
-            tagger_get_structure.tag(from_cache='true' if structure else 'false')
+            structure = cache.get(key, course_context)
+            tagger_get_structure.tag(from_cache=str(bool(structure)).lower())
             if not structure:
                 with TIMER.timer("get_structure.find_one", course_context) as tagger_find_one:
                     doc = self.structures.find_one({'_id': key})
                     tagger_find_one.measure("blocks", len(doc['blocks']))
                     structure = structure_from_mongo(doc, course_context)
-                    cache.set(key, structure)
+                    cache.set(key, structure, course_context)
 
             return structure
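
Note the standardization: both call sites now derive the from_cache tag the same way, producing the lowercase strings 'true'/'false' rather than hand-written literals. A quick check of the values involved (illustrative data only):

# In get(), the tag reflects a cache hit (value is not None); in
# get_structure(), it reflects truthiness of the loaded structure.
assert str(None is not None).lower() == 'false'
assert str(b'x\x9c' is not None).lower() == 'true'
assert str(bool(None)).lower() == 'false'
assert str(bool({'blocks': {}})).lower() == 'true'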