Add cache value logging
Added logging facilities to :class:`.CacheRegion`, to indicate key
events such as cache keys missing or regeneration of values.  As these can
be very high volume log messages, ``logging.DEBUG`` is used as the log
level for the events.  Pull request courtesy Stéphane Brunner.

Closes: #165
Pull-request: #165
Pull-request-sha: 9e519cf

Change-Id: I8266ee0302f6bd6768ab1f158152d2ce2cbc6c60
sbrunner authored and zzzeek committed Oct 24, 2019
1 parent 896139b commit 33e5dcd
Showing 8 changed files with 211 additions and 22 deletions.
2 changes: 1 addition & 1 deletion docs/build/changelog.rst
@@ -3,7 +3,7 @@ Changelog
==============

.. changelog::
    :version: 0.8.1
    :version: 0.9.0
    :include_notes_from: unreleased

.. changelog::
9 changes: 9 additions & 0 deletions docs/build/unreleased/logging.rst
@@ -0,0 +1,9 @@
.. change::
    :tags: feature

    Added logging facilities to :class:`.CacheRegion`, to indicate key
    events such as cache keys missing or regeneration of values.  As these can
    be very high volume log messages, ``logging.DEBUG`` is used as the log
    level for the events.  Pull request courtesy Stéphane Brunner.


23 changes: 23 additions & 0 deletions docs/build/usage.rst
@@ -268,3 +268,26 @@ requests on behalf of the original dogpile.cache.pylibmc backend.

.. versionadded:: 0.4.4 Added support for the :class:`.ProxyBackend` class.


Configuring Logging
====================

.. versionadded:: 0.9.0

:class:`.CacheRegion` includes logging facilities that will emit debug log
messages when key cache events occur, including when keys are regenerated as
well as when hard invalidations occur. Using the `Python logging
<https://docs.python.org/3/library/logging.html>`_ module, set the log level of
the ``dogpile.cache`` logger to ``logging.DEBUG``::

    logging.basicConfig()
    logging.getLogger("dogpile.cache").setLevel(logging.DEBUG)

Debug logging will indicate time spent regenerating keys as well as when
keys are missing::

    DEBUG:dogpile.cache.region:No value present for key: '__main__:load_user_info|2'
    DEBUG:dogpile.cache.region:No value present for key: '__main__:load_user_info|1'
    DEBUG:dogpile.cache.region:Cache value generated in 0.501 seconds for keys: ['__main__:load_user_info|2', '__main__:load_user_info|3', '__main__:load_user_info|4', '__main__:load_user_info|5']
    DEBUG:dogpile.cache.region:Hard invalidation detected for key: '__main__:load_user_info|3'
    DEBUG:dogpile.cache.region:Hard invalidation detected for key: '__main__:load_user_info|2'
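
A rough end-to-end sketch that would produce output along these lines (hypothetical names: the memory backend configuration and the ``load_user_info`` function here are illustrative, not part of this change)::

    import logging

    from dogpile.cache import make_region

    logging.basicConfig()
    logging.getLogger("dogpile.cache").setLevel(logging.DEBUG)

    region = make_region().configure(
        "dogpile.cache.memory", expiration_time=60
    )

    @region.cache_on_arguments()
    def load_user_info(user_id):
        # stands in for an expensive lookup
        return {"id": user_id}

    load_user_info(2)  # cache miss: logs "No value present" and times the creator
    load_user_info(2)  # cache hit: served from the backend, nothing regenerated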
2 changes: 1 addition & 1 deletion dogpile/__init__.py
@@ -1,4 +1,4 @@
__version__ = '0.8.1'
__version__ = "0.9.0"

from .lock import Lock # noqa
from .lock import NeedRegenerationException # noqa
60 changes: 41 additions & 19 deletions dogpile/cache/region.py
@@ -1,8 +1,10 @@
from __future__ import with_statement

import contextlib
import datetime
from functools import partial
from functools import wraps
import logging
from numbers import Number
import threading
import time
@@ -17,6 +19,7 @@
from .proxy import ProxyBackend
from .util import function_key_generator
from .util import function_multi_key_generator
from .util import repr_obj
from .. import Lock
from .. import NeedRegenerationException
from ..util import coerce_string_conf
@@ -32,6 +35,8 @@
"""

log = logging.getLogger(__name__)


class RegionInvalidationStrategy(object):
"""Region invalidation strategy interface
@@ -777,6 +782,29 @@ def get_multi(self, keys, expiration_time=None, ignore_expiration=False):
            )
        ]

    @contextlib.contextmanager
    def _log_time(self, keys):
        start_time = time.time()
        yield
        seconds = time.time() - start_time
        log.debug(
            "Cache value generated in %(seconds).3f seconds for key(s): "
            "%(keys)r",
            {"seconds": seconds, "keys": repr_obj(keys)},
        )

    def _is_cache_miss(self, value, orig_key):
        if value is NO_VALUE:
            log.debug("No value present for key: %r", orig_key)
        elif value.metadata["v"] != value_version:
            log.debug("Dogpile version update for key: %r", orig_key)
        elif self.region_invalidator.is_hard_invalidated(value.metadata["ct"]):
            log.debug("Hard invalidation detected for key: %r", orig_key)
        else:
            return False

        return True

    def get_or_create(
        self,
        key,
@@ -872,25 +900,23 @@ def dont_cache_none(value):

        def get_value():
            value = self.backend.get(key)
            if (
                value is NO_VALUE
                or value.metadata["v"] != value_version
                or self.region_invalidator.is_hard_invalidated(
                    value.metadata["ct"]
                )
            ):
            if self._is_cache_miss(value, orig_key):
                raise NeedRegenerationException()

            ct = value.metadata["ct"]
            if self.region_invalidator.is_soft_invalidated(ct):
                ct = time.time() - expiration_time - 0.0001

            return value.payload, ct

        def gen_value():
            if creator_args:
                created_value = creator(*creator_args[0], **creator_args[1])
            else:
                created_value = creator()
            with self._log_time(orig_key):
                if creator_args:
                    created_value = creator(
                        *creator_args[0], **creator_args[1]
                    )
                else:
                    created_value = creator()
            value = self._value(created_value)

            if not should_cache_fn or should_cache_fn(created_value):
@@ -988,13 +1014,7 @@ def get_or_create_multi(
        def get_value(key):
            value = values.get(key, NO_VALUE)

            if (
                value is NO_VALUE
                or value.metadata["v"] != value_version
                or self.region_invalidator.is_hard_invalidated(
                    value.metadata["ct"]
                )
            ):
            if self._is_cache_miss(value, orig_key):
                # dogpile.core understands a 0 here as
                # "the value is not available", e.g.
                # _has_value() will return False.
@@ -1053,7 +1073,9 @@ def async_creator(key, mutex):
                # sort the keys, the idea is to prevent deadlocks.
                # though haven't been able to simulate one anyway.
                keys_to_get = sorted(mutexes)
                new_values = creator(*keys_to_get)

                with self._log_time(keys_to_get):
                    new_values = creator(*keys_to_get)

                values_w_created = dict(
                    (orig_to_mangled[k], self._value(v))
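
The ``_log_time`` helper added above is the standard ``contextlib.contextmanager`` timing pattern. A self-contained sketch of that pattern, independent of dogpile.cache (the names here are illustrative)::

    import contextlib
    import logging
    import time

    log = logging.getLogger(__name__)

    @contextlib.contextmanager
    def log_time(keys):
        # time whatever runs inside the ``with`` block, then log it at DEBUG
        start_time = time.time()
        yield
        seconds = time.time() - start_time
        log.debug(
            "Cache value generated in %(seconds).3f seconds for key(s): %(keys)r",
            {"seconds": seconds, "keys": keys},
        )

    with log_time(["user:1", "user:2"]):
        time.sleep(0.1)  # stands in for the creator function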
27 changes: 27 additions & 0 deletions dogpile/cache/util.py
@@ -160,3 +160,30 @@ def mangle(key):
memoized_property = langhelpers.memoized_property
PluginLoader = langhelpers.PluginLoader
to_list = langhelpers.to_list


class repr_obj(object):

    __slots__ = ("value", "max_chars")

    def __init__(self, value, max_chars=300):
        self.value = value
        self.max_chars = max_chars

    def __eq__(self, other):
        return other.value == self.value

    def __repr__(self):
        rep = repr(self.value)
        lenrep = len(rep)
        if lenrep > self.max_chars:
            segment_length = self.max_chars // 2
            rep = (
                rep[0:segment_length]
                + (
                    " ... (%d characters truncated) ... "
                    % (lenrep - self.max_chars)
                )
                + rep[-segment_length:]
            )
        return rep
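
A usage note on ``repr_obj``: because the ``repr()`` call is deferred to ``__repr__``, the potentially large key list is only stringified if the ``DEBUG`` record is actually emitted, and anything beyond ``max_chars`` is elided from the middle. A small sketch, assuming dogpile.cache 0.9.0 or later::

    from dogpile.cache.util import repr_obj

    print(repr(repr_obj(["a", "b"])))
    # short reprs pass through unchanged: ['a', 'b']

    long_keys = ["some_big_long_name"] * 200
    print(repr(repr_obj(long_keys)))
    # first 150 characters ... (4100 characters truncated) ... last 150 characters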
2 changes: 1 addition & 1 deletion dogpile/lock.py
@@ -125,7 +125,7 @@ def _enter_create(self, value, createdtime):
            has_value = True
            if not self.mutex.acquire(False):
                log.debug(
                    "creation function in progress " "elsewhere, returning"
                    "creation function in progress elsewhere, returning"
                )
                return NOT_REGENERATED
        else:
108 changes: 108 additions & 0 deletions tests/cache/test_region.py
@@ -9,12 +9,14 @@
from dogpile.cache import CacheRegion
from dogpile.cache import exception
from dogpile.cache import make_region
from dogpile.cache import util
from dogpile.cache.api import CacheBackend
from dogpile.cache.api import CachedValue
from dogpile.cache.api import NO_VALUE
from dogpile.cache.proxy import ProxyBackend
from dogpile.cache.region import _backend_loader
from dogpile.cache.region import RegionInvalidationStrategy
from dogpile.cache.region import value_version
from dogpile.util import compat
from . import assert_raises_message
from . import configparser
@@ -839,3 +841,109 @@ def test_actual_backend_noproxy(self):
        reg = self._region()
        assert isinstance(reg.backend, CacheBackend)
        assert isinstance(reg.actual_backend, CacheBackend)


class LoggingTest(TestCase):
    def _region(self, init_args={}, config_args={}, backend="mock"):
        reg = CacheRegion(**init_args)
        reg.configure(backend, **config_args)
        return reg

    def test_log_time(self):
        reg = self._region()

        times = [50, 55, 60]

        def mock_time():
            return times.pop(0)

        with mock.patch("dogpile.cache.region.log") as mock_log, mock.patch(
            "dogpile.cache.region.time", mock.Mock(time=mock_time)
        ):
            with reg._log_time(["foo", "bar", "bat"]):
                pass

            eq_(
                mock_log.mock_calls,
                [
                    mock.call.debug(
                        "Cache value generated in %(seconds).3f "
                        "seconds for key(s): %(keys)r",
                        {
                            "seconds": 5,
                            "keys": util.repr_obj(["foo", "bar", "bat"]),
                        },
                    )
                ],
            )

    def test_repr_obj_truncated(self):

        eq_(
            repr(util.repr_obj(["some_big_long_name" for i in range(200)])),
            "['some_big_long_name', 'some_big_long_name', "
            "'some_big_long_name', 'some_big_long_name', 'some_big_long_name',"
            " 'some_big_long_name', 'some_big_long_na ... "
            "(4100 characters truncated) ... me_big_long_name', "
            "'some_big_long_name', 'some_big_long_name', 'some_big_long_"
            "name', 'some_big_long_name', 'some_big_long_name', "
            "'some_big_long_name']",
        )

    def test_log_is_cache_miss(self):
        reg = self._region()

        with mock.patch("dogpile.cache.region.log") as mock_log:
            is_(reg._is_cache_miss(NO_VALUE, "some key"), True)
            eq_(
                mock_log.mock_calls,
                [mock.call.debug("No value present for key: %r", "some key")],
            )

    def test_log_is_value_version_miss(self):

        reg = self._region()
        inv = mock.Mock(is_hard_invalidated=lambda val: True)
        with mock.patch(
            "dogpile.cache.region.log"
        ) as mock_log, mock.patch.object(reg, "region_invalidator", inv):
            is_(
                reg._is_cache_miss(
                    CachedValue(
                        "some value", {"v": value_version - 5, "ct": 500}
                    ),
                    "some key",
                ),
                True,
            )
            eq_(
                mock_log.mock_calls,
                [
                    mock.call.debug(
                        "Dogpile version update for key: %r", "some key"
                    )
                ],
            )

    def test_log_is_hard_invalidated(self):

        reg = self._region()
        inv = mock.Mock(is_hard_invalidated=lambda val: True)
        with mock.patch(
            "dogpile.cache.region.log"
        ) as mock_log, mock.patch.object(reg, "region_invalidator", inv):
            is_(
                reg._is_cache_miss(
                    CachedValue("some value", {"v": value_version, "ct": 500}),
                    "some key",
                ),
                True,
            )
            eq_(
                mock_log.mock_calls,
                [
                    mock.call.debug(
                        "Hard invalidation detected for key: %r", "some key"
                    )
                ],
            )
