Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Cache factory #430

Merged
merged 5 commits into from
Jan 4, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 8 additions & 7 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@ Using a cache is as simple as

>>> import asyncio
>>> loop = asyncio.get_event_loop()
>>> from aiocache import SimpleMemoryCache # Here you can also use RedisCache and MemcachedCache
>>> cache = SimpleMemoryCache()
>>> from aiocache import Cache
>>> cache = Cache(Cache.MEMORY) # Here you can also use Cache.REDIS and Cache.MEMCACHED, default is Cache.MEMORY
>>> loop.run_until_complete(cache.set('key', 'value'))
True
>>> loop.run_until_complete(cache.get('key'))
Expand All @@ -77,15 +77,15 @@ Or as a decorator

from collections import namedtuple

from aiocache import cached, RedisCache
from aiocache import cached, Cache
from aiocache.serializers import PickleSerializer
# With this we can store python objects in backends like Redis!

Result = namedtuple('Result', "content, status")


@cached(
ttl=10, cache=RedisCache, key="key", serializer=PickleSerializer(), port=6379, namespace="main")
ttl=10, cache=Cache.REDIS, key="key", serializer=PickleSerializer(), port=6379, namespace="main")
async def cached_call():
print("Sleeping for three seconds zzzz.....")
await asyncio.sleep(3)
Expand All @@ -97,21 +97,22 @@ Or as a decorator
loop.run_until_complete(cached_call())
loop.run_until_complete(cached_call())
loop.run_until_complete(cached_call())
cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")
cache = Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main")
loop.run_until_complete(cache.delete("key"))

if __name__ == "__main__":
run()

The recommended approach to instantiate a new cache is using the `Cache` constructor. However you can also instantiate directly using `aiocache.RedisCache`, `aiocache.SimpleMemoryCache` or `aiocache.MemcachedCache`.


You can also set up cache aliases so it's easy to reuse configurations

.. code-block:: python

import asyncio

from aiocache import caches, SimpleMemoryCache, RedisCache
from aiocache.serializers import StringSerializer, PickleSerializer
from aiocache import caches

# You can use either classes or strings for referencing classes
caches.set_config({
Expand Down
16 changes: 13 additions & 3 deletions aiocache/__init__.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import logging

from .backends.memory import SimpleMemoryCache
from .factory import caches
from .decorators import cached, cached_stampede, multi_cached
from ._version import __version__


Expand Down Expand Up @@ -30,4 +28,16 @@
del aiomcache


__all__ = ("caches", "cached", "cached_stampede", "multi_cached", *__cache_types, "__version__")
from .factory import caches, Cache # noqa: E402
from .decorators import cached, cached_stampede, multi_cached # noqa: E402


__all__ = (
"caches",
"Cache",
"cached",
"cached_stampede",
"multi_cached",
*__cache_types,
"__version__",
)
2 changes: 2 additions & 0 deletions aiocache/exceptions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
class InvalidCacheType(Exception):
    """Raised when an unknown cache type string is passed to the ``Cache`` factory."""
37 changes: 37 additions & 0 deletions aiocache/factory.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
from copy import deepcopy
import logging


from aiocache import SimpleMemoryCache, RedisCache, MemcachedCache
from aiocache.exceptions import InvalidCacheType


logger = logging.getLogger(__name__)


def _class_from_string(class_path):
Expand Down Expand Up @@ -26,6 +34,35 @@ def _create_cache(cache, serializer=None, plugins=None, **kwargs):
return instance


class Cache:
    """Factory for cache instances.

    ``Cache(Cache.REDIS, **kwargs)`` builds and returns an instance of the
    backend class registered for the given cache type string. The default
    type is ``Cache.MEMORY``.
    """

    MEMORY = "memory"
    REDIS = "redis"
    MEMCACHED = "memcached"

    # Maps a cache type string to its concrete backend class.
    _PROTOCOL_MAPPING = {
        "memory": SimpleMemoryCache,
        "redis": RedisCache,
        "memcached": MemcachedCache,
    }

    def __new__(cls, cache_type=MEMORY, **kwargs):
        """Return a new backend instance for ``cache_type``.

        ``kwargs`` are forwarded to the backend's constructor.

        :raises InvalidCacheType: if ``cache_type`` is not a known type.
        """
        try:
            backend_cls = cls.get_protocol_class(cache_type)
        except KeyError as e:
            valid_types = list(cls._PROTOCOL_MAPPING.keys())
            raise InvalidCacheType(
                "Invalid cache type, you can only use {}".format(valid_types)
            ) from e

        # ``__new__`` returns an instance of the backend class (not of
        # ``Cache``), so Python will not call ``__init__`` automatically —
        # do both steps explicitly.
        obj = backend_cls.__new__(backend_cls, **kwargs)
        obj.__init__(**kwargs)
        return obj

    @classmethod
    def get_protocol_class(cls, protocol):
        """Return the backend class for ``protocol``; raises KeyError if unknown."""
        return cls._PROTOCOL_MAPPING[protocol]


class CacheHandler:

_config = {
Expand Down
8 changes: 4 additions & 4 deletions aiocache/lock.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,10 +44,10 @@ class RedLock:

Example usage::

from aiocache import RedisCache
from aiocache import Cache
from aiocache.lock import RedLock

cache = RedisCache()
cache = Cache(Cache.REDIS)
async with RedLock(cache, 'key', lease=1): # Calls will wait here
result = await cache.get('key')
if result is not None:
Expand Down Expand Up @@ -112,7 +112,7 @@ class OptimisticLock:

Example usage::

cache = RedisCache()
cache = Cache(Cache.REDIS)

# The value stored in 'key' will be checked here
async with OptimisticLock(cache, 'key') as lock:
Expand All @@ -123,7 +123,7 @@ class OptimisticLock:
an :class:`aiocache.lock.OptimisticLockError` will be raised. A way to make
the same call crash would be to change the value inside the lock like::

cache = RedisCache()
cache = Cache(Cache.REDIS)

# The value stored in 'key' will be checked here
async with OptimisticLock(cache, 'key') as lock:
Expand Down
2 changes: 1 addition & 1 deletion aiocache/serializers.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ class NullSerializer(BaseSerializer):
DISCLAIMER: Be careful with mutable types and memory storage. The following
behavior is considered normal (same as ``functools.lru_cache``)::

cache = SimpleMemoryCache()
cache = Cache()
my_list = [1]
await cache.set("key", my_list)
my_list.append(2)
Expand Down
4 changes: 2 additions & 2 deletions docs/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ Using a cache is as simple as

>>> import asyncio
>>> loop = asyncio.get_event_loop()
>>> from aiocache import SimpleMemoryCache
>>> cache = SimpleMemoryCache()
>>> from aiocache import Cache
>>> cache = Cache()
>>> loop.run_until_complete(cache.set('key', 'value'))
True
>>> loop.run_until_complete(cache.get('key'))
Expand Down
4 changes: 2 additions & 2 deletions docs/plugins.rst
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,9 @@ Plugins

Plugins can be used to enrich the behavior of the cache. By default all caches are configured without any plugin but can add new ones in the constructor or after initializing the cache class::

>>> from aiocache import SimpleMemoryCache
>>> from aiocache import Cache
>>> from aiocache.plugins import TimingPlugin
cache = SimpleMemoryCache(plugins=[HitMissRatioPlugin()])
cache = Cache(plugins=[HitMissRatioPlugin()])
cache.plugins += [TimingPlugin()]

You can define your custom plugin by inheriting from `BasePlugin`_ and overriding the needed methods (the overrides NEED to be async). All commands have ``pre_<command_name>`` and ``post_<command_name>`` hooks.
Expand Down
3 changes: 2 additions & 1 deletion examples/cached_decorator.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,8 @@


@cached(
ttl=10, cache=RedisCache, key="key", serializer=PickleSerializer(), port=6379, namespace="main")
ttl=10, cache=RedisCache, key="key", serializer=PickleSerializer(),
port=6379, namespace="main")
async def cached_call():
return Result("content", 200)

Expand Down
4 changes: 2 additions & 2 deletions examples/frameworks/sanic_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from sanic import Sanic
from sanic.response import json
from sanic.log import log
from aiocache import cached, SimpleMemoryCache
from aiocache import cached, Cache
from aiocache.serializers import JsonSerializer

app = Sanic(__name__)
Expand All @@ -24,7 +24,7 @@ async def expensive_call():


async def reuse_data():
cache = SimpleMemoryCache(serializer=JsonSerializer()) # Not ideal to define here
cache = Cache(serializer=JsonSerializer()) # Not ideal to define here
data = await cache.get("my_custom_key") # Note the key is defined in `cached` decorator
return data

Expand Down
4 changes: 2 additions & 2 deletions examples/marshmallow_serializer_class.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from marshmallow import fields, Schema, post_load

from aiocache import SimpleMemoryCache
from aiocache import Cache
from aiocache.serializers import BaseSerializer


Expand Down Expand Up @@ -47,7 +47,7 @@ class Meta:
strict = True


cache = SimpleMemoryCache(serializer=MarshmallowSerializer(), namespace="main")
cache = Cache(serializer=MarshmallowSerializer(), namespace="main")


async def serializer():
Expand Down
4 changes: 2 additions & 2 deletions examples/optimistic_lock.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,12 @@
import logging
import random

from aiocache import RedisCache
from aiocache import Cache
from aiocache.lock import OptimisticLock, OptimisticLockError


logger = logging.getLogger(__name__)
cache = RedisCache(endpoint='127.0.0.1', port=6379, namespace='main')
cache = Cache(Cache.REDIS, endpoint='127.0.0.1', port=6379, namespace='main')


async def expensive_function():
Expand Down
4 changes: 2 additions & 2 deletions examples/plugins.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import random
import logging

from aiocache import SimpleMemoryCache
from aiocache import Cache
from aiocache.plugins import HitMissRatioPlugin, TimingPlugin, BasePlugin


Expand All @@ -18,7 +18,7 @@ async def post_set(self, *args, **kwargs):
logger.info("I'm the post_set hook being called with %s %s" % (args, kwargs))


cache = SimpleMemoryCache(
cache = Cache(
plugins=[HitMissRatioPlugin(), TimingPlugin(), MyCustomPlugin()],
namespace="main")

Expand Down
4 changes: 2 additions & 2 deletions examples/python_object.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
import asyncio

from collections import namedtuple
from aiocache import RedisCache
from aiocache import Cache
from aiocache.serializers import PickleSerializer


MyObject = namedtuple("MyObject", ["x", "y"])
cache = RedisCache(serializer=PickleSerializer(), namespace="main")
cache = Cache(Cache.REDIS, serializer=PickleSerializer(), namespace="main")


async def complex_object():
Expand Down
4 changes: 2 additions & 2 deletions examples/redlock.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
import asyncio
import logging

from aiocache import RedisCache
from aiocache import Cache
from aiocache.lock import RedLock


logger = logging.getLogger(__name__)
cache = RedisCache(endpoint='127.0.0.1', port=6379, namespace='main')
cache = Cache(Cache.REDIS, endpoint='127.0.0.1', port=6379, namespace='main')


async def expensive_function():
Expand Down
4 changes: 2 additions & 2 deletions examples/serializer_class.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import asyncio
import zlib

from aiocache import RedisCache
from aiocache import Cache
from aiocache.serializers import BaseSerializer


Expand All @@ -25,7 +25,7 @@ def loads(self, value):
return decompressed


cache = RedisCache(serializer=CompressionSerializer(), namespace="main")
cache = Cache(Cache.REDIS, serializer=CompressionSerializer(), namespace="main")


async def serializer():
Expand Down
4 changes: 2 additions & 2 deletions examples/serializer_function.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import json

from marshmallow import Schema, fields, post_load
from aiocache import RedisCache
from aiocache import Cache


class MyType:
Expand All @@ -28,7 +28,7 @@ def loads(value):
return MyTypeSchema().loads(value).data


cache = RedisCache(namespace="main")
cache = Cache(Cache.REDIS, namespace="main")


async def serializer_function():
Expand Down
4 changes: 2 additions & 2 deletions examples/simple_redis.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import asyncio

from aiocache import RedisCache
from aiocache import Cache


cache = RedisCache(endpoint="127.0.0.1", port=6379, namespace="main")
cache = Cache(Cache.REDIS, endpoint="127.0.0.1", port=6379, namespace="main")


async def redis():
Expand Down
8 changes: 4 additions & 4 deletions tests/acceptance/conftest.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import pytest

from aiocache import SimpleMemoryCache, RedisCache, MemcachedCache, caches
from aiocache import Cache, caches
from aiocache.backends.redis import RedisBackend


Expand Down Expand Up @@ -28,7 +28,7 @@ def reset_redis_pools():

@pytest.fixture
def redis_cache(event_loop):
cache = RedisCache(namespace="test")
cache = Cache(Cache.REDIS, namespace="test")
yield cache

event_loop.run_until_complete(cache.delete(pytest.KEY))
Expand All @@ -39,7 +39,7 @@ def redis_cache(event_loop):

@pytest.fixture
def memory_cache(event_loop):
cache = SimpleMemoryCache(namespace="test")
cache = Cache(namespace="test")
yield cache

event_loop.run_until_complete(cache.delete(pytest.KEY))
Expand All @@ -50,7 +50,7 @@ def memory_cache(event_loop):

@pytest.fixture
def memcached_cache(event_loop):
cache = MemcachedCache(namespace="test")
cache = Cache(Cache.MEMCACHED, namespace="test")
yield cache

event_loop.run_until_complete(cache.delete(pytest.KEY))
Expand Down
Loading