Skip to content

Commit

Permalink
Extend and improve usage of Cache class (#446)
Browse files Browse the repository at this point in the history
  • Loading branch information
argaen authored Mar 8, 2019
1 parent cc65b3b commit 0698823
Show file tree
Hide file tree
Showing 16 changed files with 114 additions and 85 deletions.
4 changes: 2 additions & 2 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -77,15 +77,15 @@ Or as a decorator
from collections import namedtuple
from aiocache import cached, Cache, RedisCache
from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer
# With this we can store python objects in backends like Redis!
Result = namedtuple('Result', "content, status")
@cached(
ttl=10, cache=RedisCache, key="key", serializer=PickleSerializer(), port=6379, namespace="main")
ttl=10, cache=Cache.REDIS, key="key", serializer=PickleSerializer(), port=6379, namespace="main")
async def cached_call():
print("Sleeping for three seconds zzzz.....")
await asyncio.sleep(3)
Expand Down
6 changes: 3 additions & 3 deletions aiocache/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

logger = logging.getLogger(__name__)

AIOCACHE_CACHES = {"memory": SimpleMemoryCache}
AIOCACHE_CACHES = {SimpleMemoryCache.NAME: SimpleMemoryCache}


try:
Expand All @@ -16,7 +16,7 @@
else:
from aiocache.backends.redis import RedisCache

AIOCACHE_CACHES["redis"] = RedisCache
AIOCACHE_CACHES[RedisCache.NAME] = RedisCache
del aioredis

try:
Expand All @@ -26,7 +26,7 @@
else:
from aiocache.backends.memcached import MemcachedCache

AIOCACHE_CACHES["memcached"] = MemcachedCache
AIOCACHE_CACHES[MemcachedCache.NAME] = MemcachedCache
del aiomcache


Expand Down
10 changes: 6 additions & 4 deletions aiocache/backends/memcached.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,10 +118,6 @@ async def _redlock_release(self, key, _):
async def _close(self, *args, _conn=None, **kwargs):
await self.client.close()

@classmethod
def parse_uri_path(self, path):
return {}


class MemcachedCache(MemcachedBackend, BaseCache):
"""
Expand All @@ -142,10 +138,16 @@ class MemcachedCache(MemcachedBackend, BaseCache):
:param pool_size: int size for memcached connections pool. Default is 2.
"""

NAME = "memcached"

def __init__(self, serializer=None, **kwargs):
super().__init__(**kwargs)
self.serializer = serializer or JsonSerializer()

@classmethod
def parse_uri_path(self, path):
return {}

def _build_key(self, key, namespace=None):
ns_key = super()._build_key(key, namespace=namespace).replace(" ", "_")
return str.encode(ns_key)
Expand Down
10 changes: 6 additions & 4 deletions aiocache/backends/memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,10 +106,6 @@ def __delete(cls, key):

return 0

@classmethod
def parse_uri_path(cls, path):
return {}


class SimpleMemoryCache(SimpleMemoryBackend, BaseCache):
"""
Expand All @@ -127,6 +123,12 @@ class SimpleMemoryCache(SimpleMemoryBackend, BaseCache):
By default its 5.
"""

NAME = "memory"

def __init__(self, serializer=None, **kwargs):
super().__init__(**kwargs)
self.serializer = serializer or NullSerializer()

@classmethod
def parse_uri_path(cls, path):
return {}
34 changes: 18 additions & 16 deletions aiocache/backends/redis.py
Original file line number Diff line number Diff line change
Expand Up @@ -220,22 +220,6 @@ async def _get_pool(self):

return self._pool

@classmethod
def parse_uri_path(self, path):
"""
Given a uri path, return the Redis specific configuration
options in that path string according to iana definition
http://www.iana.org/assignments/uri-schemes/prov/redis
:param path: string containing the path. Example: "/0"
:return: mapping containing the options. Example: {"db": "0"}
"""
options = {}
db, *_ = path[1:].split("/")
if db:
options["db"] = db
return options


class RedisCache(RedisBackend, BaseCache):
"""
Expand All @@ -261,10 +245,28 @@ class RedisCache(RedisBackend, BaseCache):
only for aioredis>=1. Default is None
"""

NAME = "redis"

def __init__(self, serializer=None, **kwargs):
super().__init__(**kwargs)
self.serializer = serializer or JsonSerializer()

@classmethod
def parse_uri_path(self, path):
"""
Given a uri path, return the Redis specific configuration
options in that path string according to iana definition
http://www.iana.org/assignments/uri-schemes/prov/redis
:param path: string containing the path. Example: "/0"
:return: mapping containing the options. Example: {"db": "0"}
"""
options = {}
db, *_ = path[1:].split("/")
if db:
options["db"] = db
return options

def _build_key(self, key, namespace=None):
if namespace is not None:
return "{}{}{}".format(namespace, ":" if namespace else "", key)
Expand Down
18 changes: 9 additions & 9 deletions aiocache/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import functools
import logging

from aiocache import SimpleMemoryCache, caches
from aiocache import Cache, caches
from aiocache.base import SENTINEL
from aiocache.lock import RedLock

Expand All @@ -16,7 +16,7 @@ class cached:
The cache is available in the function object as ``<function_name>.cache``.
In some cases you will need to send more args to configure the cache object.
An example would be endpoint and port for the RedisCache. You can send those args as
An example would be endpoint and port for the Redis cache. You can send those args as
kwargs and they will be propagated accordingly.
Only one cache instance is created per decorated call. If you expect high concurrency of calls
Expand All @@ -32,7 +32,7 @@ class cached:
:param key_builder: Callable that allows to build the function dynamically. It receives
the function plus same args and kwargs passed to the function.
:param cache: cache class to use when calling the ``set``/``get`` operations.
Default is ``aiocache.SimpleMemoryCache``.
Default is :class:`aiocache.SimpleMemoryCache`.
:param serializer: serializer instance to use when calling the ``dumps``/``loads``.
If its None, default one from the cache backend is used.
:param plugins: list plugins to use when calling the cmd hooks
Expand All @@ -50,7 +50,7 @@ def __init__(
ttl=SENTINEL,
key=None,
key_builder=None,
cache=SimpleMemoryCache,
cache=Cache.MEMORY,
serializer=None,
plugins=None,
alias=None,
Expand Down Expand Up @@ -139,7 +139,7 @@ class cached_stampede(cached):
while avoids for cache stampede effects.
In some cases you will need to send more args to configure the cache object.
An example would be endpoint and port for the RedisCache. You can send those args as
An example would be endpoint and port for the Redis cache. You can send those args as
kwargs and they will be propagated accordingly.
Only one cache instance is created per decorated function. If you expect high concurrency
Expand All @@ -154,7 +154,7 @@ class cached_stampede(cached):
+ function_name + args + kwargs
:param key_from_attr: str arg or kwarg name from the function to use as a key.
:param cache: cache class to use when calling the ``set``/``get`` operations.
Default is ``aiocache.SimpleMemoryCache``.
Default is :class:`aiocache.SimpleMemoryCache`.
:param serializer: serializer instance to use when calling the ``dumps``/``loads``.
Default is JsonSerializer.
:param plugins: list plugins to use when calling the cmd hooks
Expand Down Expand Up @@ -189,7 +189,7 @@ async def decorator(self, f, *args, **kwargs):
return result


def _get_cache(cache=SimpleMemoryCache, serializer=None, plugins=None, **cache_kwargs):
def _get_cache(cache=Cache.MEMORY, serializer=None, plugins=None, **cache_kwargs):
return cache(serializer=serializer, plugins=plugins, **cache_kwargs)


Expand Down Expand Up @@ -231,7 +231,7 @@ class multi_cached:
Receives the key the function and same args and kwargs as the called function.
:param ttl: int seconds to store the keys. Default is 0 which means no expiration.
:param cache: cache class to use when calling the ``multi_set``/``multi_get`` operations.
Default is ``aiocache.SimpleMemoryCache``.
Default is :class:`aiocache.SimpleMemoryCache`.
:param serializer: serializer instance to use when calling the ``dumps``/``loads``.
If its None, default one from the cache backend is used.
:param plugins: plugins to use when calling the cmd hooks
Expand All @@ -246,7 +246,7 @@ def __init__(
keys_from_attr,
key_builder=None,
ttl=SENTINEL,
cache=SimpleMemoryCache,
cache=Cache.MEMORY,
serializer=None,
plugins=None,
alias=None,
Expand Down
29 changes: 20 additions & 9 deletions aiocache/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
import warnings

from aiocache.exceptions import InvalidCacheType
from aiocache import AIOCACHE_CACHES
from aiocache import AIOCACHE_CACHES, SimpleMemoryCache, RedisCache, MemcachedCache
from aiocache.base import BaseCache


logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -55,20 +56,29 @@ class Cache:
:class:`aiocache.exceptions.InvalidCacheType` exception.
"""

MEMORY = "memory"
REDIS = "redis"
MEMCACHED = "memcached"
MEMORY = SimpleMemoryCache
REDIS = RedisCache
MEMCACHED = MemcachedCache

def __new__(cls, cache_type=MEMORY, **kwargs):
cache_class = cls.get_scheme_class(cache_type)
def __new__(cls, cache_class=MEMORY, **kwargs):
try:
assert issubclass(cache_class, BaseCache)
except AssertionError as e:
raise InvalidCacheType(
"Invalid cache type, you can only use {}".format(list(AIOCACHE_CACHES.keys()))
) from e
instance = cache_class.__new__(cache_class, **kwargs)
instance.__init__(**kwargs)
return instance

@classmethod
def _get_cache_class(cls, scheme):
return AIOCACHE_CACHES[scheme]

@classmethod
def get_scheme_class(cls, scheme):
try:
return AIOCACHE_CACHES[scheme]
return cls._get_cache_class(scheme)
except KeyError as e:
raise InvalidCacheType(
"Invalid cache type, you can only use {}".format(list(AIOCACHE_CACHES.keys()))
Expand Down Expand Up @@ -99,9 +109,10 @@ def from_url(cls, url):
"""
parsed_url = urllib.parse.urlparse(url)
kwargs = dict(urllib.parse.parse_qsl(parsed_url.query))
cache_class = Cache.get_scheme_class(parsed_url.scheme)

if parsed_url.path:
kwargs.update(Cache.get_scheme_class(parsed_url.scheme).parse_uri_path(parsed_url.path))
kwargs.update(cache_class.parse_uri_path(parsed_url.path))

if parsed_url.hostname:
kwargs["endpoint"] = parsed_url.hostname
Expand All @@ -112,7 +123,7 @@ def from_url(cls, url):
if parsed_url.password:
kwargs["password"] = parsed_url.password

return Cache(parsed_url.scheme, **kwargs)
return Cache(cache_class, **kwargs)


class CacheHandler:
Expand Down
3 changes: 2 additions & 1 deletion aiocache/serializers/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@ class NullSerializer(BaseSerializer):
"""
This serializer does nothing. It's only recommended to be used by
:class:`aiocache.SimpleMemoryCache` because for other backends it will
produce incompatible data unless you work only with str types.
produce incompatible data unless you work only with str types, because it
stores data as is.
DISCLAIMER: Be careful with mutable types and memory storage. The following
behavior is considered normal (same as ``functools.lru_cache``)::
Expand Down
4 changes: 2 additions & 2 deletions docs/serializers.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@ Serializers can be attached to backends in order to serialize/deserialize data s

To use a specific serializer::

>>> from aiocache import SimpleMemoryCache
>>> from aiocache import Cache
>>> from aiocache.serializers import PickleSerializer
cache = SimpleMemoryCache(serializer=PickleSerializer())
cache = Cache(Cache.MEMORY, serializer=PickleSerializer())

Currently the following are built in:

Expand Down
8 changes: 4 additions & 4 deletions examples/cached_alias_config.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import asyncio

from aiocache import caches, SimpleMemoryCache, RedisCache
from aiocache import caches, Cache
from aiocache.serializers import StringSerializer, PickleSerializer

caches.set_config({
Expand Down Expand Up @@ -31,7 +31,7 @@ async def default_cache():
await cache.set("key", "value")

assert await cache.get("key") == "value"
assert isinstance(cache, SimpleMemoryCache)
assert isinstance(cache, Cache.MEMORY)
assert isinstance(cache.serializer, StringSerializer)


Expand All @@ -42,7 +42,7 @@ async def alt_cache():
await cache.set("key", "value")

assert await cache.get("key") == "value"
assert isinstance(cache, RedisCache)
assert isinstance(cache, Cache.REDIS)
assert isinstance(cache.serializer, PickleSerializer)
assert len(cache.plugins) == 2
assert cache.endpoint == "127.0.0.1"
Expand All @@ -56,7 +56,7 @@ def test_alias():
loop.run_until_complete(default_cache())
loop.run_until_complete(alt_cache())

cache = RedisCache()
cache = Cache(Cache.REDIS)
loop.run_until_complete(cache.delete("key"))
loop.run_until_complete(cache.close())

Expand Down
6 changes: 3 additions & 3 deletions examples/cached_decorator.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,21 @@

from collections import namedtuple

from aiocache import cached, RedisCache
from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer

Result = namedtuple('Result', "content, status")


@cached(
ttl=10, cache=RedisCache, key="key", serializer=PickleSerializer(),
ttl=10, cache=Cache.REDIS, key="key", serializer=PickleSerializer(),
port=6379, namespace="main")
async def cached_call():
return Result("content", 200)


def test_cached():
cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")
cache = Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main")
loop = asyncio.get_event_loop()
loop.run_until_complete(cached_call())
assert loop.run_until_complete(cache.exists("key")) is True
Expand Down
Loading

0 comments on commit 0698823

Please sign in to comment.