From b78c44e42892a190c470384f21533a145b3bd854 Mon Sep 17 00:00:00 2001 From: Venkata Reddy M Date: Fri, 27 May 2016 14:46:46 -0400 Subject: [PATCH 1/9] Added timer for cache get. --- redis_cache/rediscache.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/redis_cache/rediscache.py b/redis_cache/rediscache.py index 090171f..b534e56 100644 --- a/redis_cache/rediscache.py +++ b/redis_cache/rediscache.py @@ -344,7 +344,11 @@ def func(*args, **kwargs): key=cache_key) try: - return fetcher(cache_key) + import time + start = time.time() + result = fetcher(cache_key) + print "Cache took {0} to retrieve data from the Redis Server".format(float(time.time() - start)) + return result except (ExpiredKeyException, CacheMissException) as e: ## Add some sort of cache miss handing here. pass From 0c4aa6021efa446bc9d6acdf1fde27a629a5b689 Mon Sep 17 00:00:00 2001 From: Venkata Reddy M Date: Wed, 1 Jun 2016 17:31:39 -0400 Subject: [PATCH 2/9] Created New Cache manager This may have some issues. Not tested completely. --- redis_cache/cache_manager.py | 179 +++++++++++++++++++++++++++++++++++ 1 file changed, 179 insertions(+) create mode 100644 redis_cache/cache_manager.py diff --git a/redis_cache/cache_manager.py b/redis_cache/cache_manager.py new file mode 100644 index 0000000..e988e8e --- /dev/null +++ b/redis_cache/cache_manager.py @@ -0,0 +1,179 @@ +import cPickle +from functools import wraps +import inspect + +from .connection import get_current_connection + + +class CacheManager(object): + """This is the base class for cache managers. + + *key_base* is used to prefix cache keys generated by this instance. + + *ttl* is cache key TTL in seconds. If it is omitted (or *None*) then keys + stored by this instance won't expire. + + *connection* allows binding the instance to an explicit Redis connection. + If it is omitted global connection defined with + :func:`redis_cache.connection.use_connection` will be used. 
+ + If no connection is defined then no caching will happen.""" + + def __init__(self, key_base=u'cache', ttl=None, connection=None): + self._key_base = key_base + self._ttl = ttl + self._connection = connection + + @property + def connection(self): + """Connection property.""" + if self._connection: + return self._connection + else: + return get_current_connection() + + def key(self, f, args): + """Key generator for function *f* with positional arguments *args*. + + Example key: ``key_base:func_name:arg1:arg2``. + + **Instance and class methods** + + If the first argument of *f* is either *self* or *cls* it won't be used + while creating the key.""" + f_args = inspect.getargspec(f).args + + nameparts = [f.__name__] + if inspect.ismethod(f): + f_class = None + if inspect.isclass(f.im_self): + f_class = f.im_self # f is a class method + else: + f_class = f.im_self.__class__ # f is an instance method + + nameparts = [f_class.__name__] + nameparts + + argparts = [self._key_base, '.'.join(nameparts)] + if args and len(f_args) > 0: + idx = 0 + if f_args[0] in ('cls', 'self'): + idx = 1 + + for arg in args[idx:]: + argparts.append(unicode(arg)) + + key = u':'.join(argparts) + return key + + def after_load(self, data, f_args=None, f_kwargs=None): + """Process and return *data* after loading it from Redis. *f_args* and + *f_kwargs* contain positional and keywords args passed to decorated + function. + + Default implementation uses cPickle to unserialize data.""" + return cPickle.loads(data) + + def before_save(self, data, f_args=None, f_kwargs=None): + """Process and return *data* before saving it to Redis. *f_args* and + *f_kwargs* contain positional and keywords args passed to decorated + function. + + Default implementation uses cPickle to serialize data.""" + return cPickle.dumps(data) + + def load(self, key, f_args=None, f_kwargs=None): + """Load data for *key* from Redis. *f_args* and *f_kwargs* contain + positional and keywords args passed to decorated function. 
+ + Default implementation uses GET command.""" + return self.connection.get(key) + + def save(self, key, data, f_args=None, f_kwargs=None): + """Save data into Redis *key*. *f_args* and *f_kwargs* contain + positional and keywords args passed to decorated function. + + Default implementation uses SET command.""" + self.connection.set(key, data) + + def cache(self, f): + """Decorate *f* function to enable caching it. + + If the function returns *None* then it won't be cached.""" + @wraps(f) + def wrapper(*args, **kwargs): + key = self.key(f, args) + + data = None + if self.connection: + data = self.load(key, f_args=args, f_kwargs=kwargs) + + if data: + data = self.after_load(data, f_args=args, f_kwargs=kwargs) + else: + data = f(*args, **kwargs) + + if data is not None: + cached = self.before_save(data, f_args=args, + f_kwargs=kwargs) + + if self.connection: + self.save(key, cached, f_args=args, f_kwargs=kwargs) + + if self._ttl: + self.connection.expire(key, self._ttl) + + return data + + return wrapper + + +class DefaultCacheManager(CacheManager): + """This is default cache manager for simple caching of generic + functions. + + Basically it's equivalent to :py:class:`CacheManager` with default + settings.""" + + def cache(self, *args, **kwargs): + """Decorate *f* function to enable caching it. + + Use *ttl* keyword arg to override default infinite TTL. 
+ + If the function returns *None* then it won't be cached.""" + ttl = kwargs.get('ttl', self._ttl) + + def decorator(f): + @wraps(f) + def wrapper(*args, **kwargs): + key = self.key(f, args) + + data = None + if self.connection: + data = self.load(key, f_args=args, f_kwargs=kwargs) + + if data: + data = self.after_load(data, f_args=args, f_kwargs=kwargs) + else: + data = f(*args, **kwargs) + + if data is not None: + cached = self.before_save(data, f_args=args, + f_kwargs=kwargs) + + if self.connection: + self.save(key, cached, f_args=args, + f_kwargs=kwargs) + + if ttl: + self.connection.expire(key, ttl) + + return data + + return wrapper + + if args: + return decorator(args[0]) + else: + return decorator + +default_cache = DefaultCacheManager() From a392dcdc1a38e7378a168fb6e2b81abcf218dead Mon Sep 17 00:00:00 2001 From: Venkata Reddy M Date: Wed, 1 Jun 2016 17:34:18 -0400 Subject: [PATCH 3/9] Added Connection.py --- redis_cache/connection.py | 73 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 redis_cache/connection.py diff --git a/redis_cache/connection.py b/redis_cache/connection.py new file mode 100644 index 0000000..0793da7 --- /dev/null +++ b/redis_cache/connection.py @@ -0,0 +1,73 @@ +from contextlib import contextmanager +from redis import Redis +from .local import LocalStack, release_local + + +class NoRedisConnectionException(Exception): + pass + + +@contextmanager +def Connection(connection=None): + if connection is None: + connection = Redis() + push_connection(connection) + try: + yield + finally: + popped = pop_connection() + assert popped == connection, \ + 'Unexpected Redis connection was popped off the stack. ' \ + 'Check your Redis connection setup.' 
+ + +def push_connection(redis): + """Pushes the given connection on the stack.""" + _connection_stack.push(redis) + + +def pop_connection(): + """Pops the topmost connection from the stack.""" + return _connection_stack.pop() + + +def use_connection(redis=None): + """Clears the stack and uses the given connection. Protects against mixed + use of use_connection() and stacked connection contexts. + """ + assert len(_connection_stack) <= 1, \ + 'You should not mix Connection contexts with use_connection().' + release_local(_connection_stack) + + if redis is None: + redis = Redis() + push_connection(redis) + + +def get_current_connection(): + """Returns the current Redis connection (i.e. the topmost on the + connection stack). + """ + return _connection_stack.top + + +def resolve_connection(connection=None): + """Convenience function to resolve the given or the current connection. + Raises an exception if it cannot resolve a connection now. + """ + if connection is not None: + return connection + + connection = get_current_connection() + if connection is None: + raise NoRedisConnectionException( + 'Could not resolve a Redis connection.') + return connection + + +_connection_stack = LocalStack() + +__all__ = [ + 'Connection', 'get_current_connection', 'push_connection', + 'pop_connection', 'use_connection' +] From 3994890c07f813148cb21857679aa04b6909beb5 Mon Sep 17 00:00:00 2001 From: Venkata Reddy M Date: Wed, 1 Jun 2016 17:36:06 -0400 Subject: [PATCH 4/9] Added Some sample json example. 
--- redis_cache/json_example.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 redis_cache/json_example.py diff --git a/redis_cache/json_example.py b/redis_cache/json_example.py new file mode 100644 index 0000000..e4cd4f4 --- /dev/null +++ b/redis_cache/json_example.py @@ -0,0 +1,33 @@ +"""Example of using JSON to serialize cached objects.""" + +import json + +from cache_manager import CacheManager, get_current_connection, use_connection + + +class JsonCacheManager(CacheManager): + def before_save(self, data, **kwargs): + return json.dumps(data) + + def after_load(self, data, **kwargs): + return json.loads(data) + +json_cache_manager = JsonCacheManager() + + +def test_json_cache_manager(): + @json_cache_manager.cache + def cached(spam, eggs): + return {'spam': spam, 'eggs': eggs} + + result = cached('spam', 'eggs') + + connection = get_current_connection() + + print connection.get('cache:cached:spam:eggs') + connection.delete('cache:cached:spam:eggs') + + +if __name__ == '__main__': + use_connection() + test_json_cache_manager() From 84c5098b15b13653f13d9be5a31e28ee67b20b8a Mon Sep 17 00:00:00 2001 From: Venkata Reddy M Date: Fri, 3 Jun 2016 11:10:54 -0400 Subject: [PATCH 5/9] Added django cache manager --- redis_cache/django_cache_manager.py | 148 ++++++++++++++++++++++++++++ 1 file changed, 148 insertions(+) create mode 100644 redis_cache/django_cache_manager.py diff --git a/redis_cache/django_cache_manager.py b/redis_cache/django_cache_manager.py new file mode 100644 index 0000000..d207f72 --- /dev/null +++ b/redis_cache/django_cache_manager.py @@ -0,0 +1,148 @@ +''' +Created on May 11, 2016 + +@author: Venkata Reddy Mulam +''' +import logging +from importlib import import_module + +import redis +from django.conf import settings + +logger = logging.getLogger(getattr(settings, 'LOGGER_NAME', '')) +timeout = getattr(settings, 'DEFAULT_CACHE_TIMEOUT', None) + + +class CacheUpdateManager(object): + ''' + Object with 
pre-defined (dynamic changes are not allowed) Managers as its' properties + the keys that get passed in in the init are the only ones for this Update Manager + ''' + + def __init__(self, keys, cache): + managers = {key: CacheManager(key, cache, settings.CACHE_KEYS[key]) for key in keys} + self.__dict__.update(managers) + + def add_to_manager(self, namespace, ignore_args, view, func, *args, **kwargs): + manager = self.get(namespace) + if isinstance(manager, CacheManager): + logger.debug('Adding {0} to {1}'.format(func, manager)) +# func_str = '{0}.{1}'.format(func.__module__, func.__name__) + manager.append((func, ignore_args, view, namespace, args, kwargs)) +# manager.cache.set(namespace, manager, timeout) + return True + return False + + def update_namespace(self, namespace): + manager = self.get(namespace) + if isinstance(manager, CacheManager): + return manager.update(self) + + raise AttributeError('No Manager for namespace: {}'.format(namespace)) + + def update_all(self): + result = [] + for namespace in self.namespaces: + result.append(self.update_namespace(namespace)) + return result + + def flush_app(self, app_name, cache): + ''' + flushes all keys for the given app_name + ''' + pattern = '*.{0}.*'.format(app_name) + if isinstance(cache, redis.StrictRedis): + keys = cache.keys(pattern) + return cache.delete(*keys) + else: + return cache.delete_pattern(pattern) + + @property + def namespaces(self): + return self.__dict__.keys() + + def __getattr__(self, name): + if name in self.__dict__: + return name + raise AttributeError + + def get(self, name, default=None): + return self.__dict__.get(name, default) + + def __setattr__(self, name, value): + raise AttributeError + + def __delattr__(self, *args, **kwargs): + raise AttributeError + + @staticmethod + def get_cache_update_manager(): + from cache_utils.decorators import cache_update_manager + return cache_update_manager + + +class CacheManager(list): + ''' + A (glorified) list of functions and args which gets 
updated via the UpdateManager + ''' + views = list() + + def __init__(self, namespace, cache, app_name): + self.namespace = namespace + self.cache = cache + self.app_name = app_name + + def append(self, tup): + if isinstance(tup, tuple) and len(tup) == 6: + if tup[2]: + CacheManager.views.append(tup) + + return list.append(self, tup) + raise ValueError('CacheKey only takes objects of type: tuple, with (func, ignore_args, args, kwargs)') + + def update(self, update_manager): + ''' + reruns and resets the cache for all funcs in this Manager + ''' + from cache_utils.decorators import cache_create_key + logger.debug('Updating CacheManager - {0}'.format(self)) + num_updated = 0 + for func, ignore_args, view, namespace, args, kwargs in self: + try: + # =============================================================== + # import_module(func.split('.')[0]) + # real_func = eval(func) + # =============================================================== + result = func(*args, **kwargs) + except Exception as e: + logger.error('Cache Update for {0} failed for {1} '.format(self, func), exc_info=True) + continue + key = cache_create_key(self.namespace, ignore_args, func.__name__, *args, **kwargs) + self.cache.set(key, result, timeout) + + logger.debug('Updated {0}'.format(func)) + num_updated += 1 + update_manager.flush_app(self.app_name, self.cache) + + # Re caching all Views + for func, ignore_args, view, namespace, args, kwargs in CacheManager.views: + msg = "Updating {0}({1}{2})".format(func, args, kwargs) + print msg + view_key = cache_create_key(namespace, ignore_args, func.__name__, *args, **kwargs) + self.cache.delete(view_key) + func(*args, **kwargs) + + logger.debug('Finished updating CacheManager - {0}'.format(self)) + return num_updated + + def flush(self): + ''' flushes all keys for this namespace''' + pattern = '*{0}*'.format(self.namespace) + return self.cache.delete_pattern(pattern) + + @property + def list(self): + return self.__repr__() + + def __str__(self, *args, 
**kwargs): + return '{} : {}'.format(self.app_name, self.namespace) From 93856d638ac9d14bb14e7a23cdb68b851d485c54 Mon Sep 17 00:00:00 2001 From: Venkata Reddy M Date: Sun, 6 May 2018 08:55:48 -0400 Subject: [PATCH 6/9] Update test_rediscache.py --- redis_cache/test_rediscache.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/redis_cache/test_rediscache.py b/redis_cache/test_rediscache.py index f961d8c..071a08f 100644 --- a/redis_cache/test_rediscache.py +++ b/redis_cache/test_rediscache.py @@ -32,6 +32,19 @@ def test_expire(self): time.sleep(1.1) self.assertRaises(ExpiredKeyException, quick_c.get, "foo") quick_c.flush() + + def test_expire_new(self): + quick_c = SimpleCache() + + quick_c.store("foo", "bar", expire=2) + time.sleep(1.1) + self.assertRaises(ExpiredKeyException, quick_c.get, "foo") + quick_c.flush() + + quick_c.store("foo", "bar", expire=timedelta(seconds=2)) + time.sleep(1.2) + self.assertRaises(ExpiredKeyException, quick_c.get, "foo") + quick_c.flush() def test_miss(self): self.assertRaises(CacheMissException, self.c.get, "blablabla") From c8bea09b275243515ccc34f9044b317dfdbae06d Mon Sep 17 00:00:00 2001 From: Venkata Reddy M Date: Sun, 6 May 2018 08:56:10 -0400 Subject: [PATCH 7/9] Update requirements.txt --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 628b986..be75005 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ +python 2.7.* redis>=2.7.1 -Django \ No newline at end of file +Django From 67b2168e28492204b4f3babade58accd65c89806 Mon Sep 17 00:00:00 2001 From: Venkata Reddy M Date: Sun, 6 May 2018 08:57:25 -0400 Subject: [PATCH 8/9] Update README.md --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 3fee874..d7ed72e 100644 --- a/README.md +++ b/README.md @@ -10,12 +10,12 @@ redis-py 2.7.1 (see requirements.txt file) Installation: ------------- - pip install 
redis-simple-cache - + pip install redis-cache-python-layer + or to get the latest version - git clone git://github.com/vivekn/redis-simple-cache.git - cd redis-simple-cache + git clone https://github.com/MVReddy/redis_cache_python_layer + cd redis_cache_python_layer python setup.py install Usage: From b9dc4431f6c1293ecfb01f498a7d96b332ff5123 Mon Sep 17 00:00:00 2001 From: Venkata Reddy M Date: Sun, 6 May 2018 08:58:44 -0400 Subject: [PATCH 9/9] Update setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ac2a2ea..e0b1427 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ def openf(fname): return open(os.path.join(os.path.dirname(__file__), fname)) setup( - name="redis-simple-cache", + name="redis-cache-python-layer", version="0.0.6", author="Vivek Narayanan, Flávio Juvenal, Sam Zaydel", author_email="flaviojuvenal@gmail.com",