
Commit 1518d19

Solved bug where brotli caused huge delays when storing cached data in Redis
maartenplieger committed Sep 24, 2024
1 parent 560c888 commit 1518d19
Showing 2 changed files with 5 additions and 8 deletions.
5 changes: 2 additions & 3 deletions python/lib/adaguc/runAdaguc.py
@@ -2,7 +2,6 @@
 import os
 from PIL import Image
 from io import BytesIO
-import brotli
 import shutil
 import random
 import string
@@ -314,7 +313,7 @@ async def response_to_cache(redis_pool, key, headers: str, data):
         entrytime
         + f"{len(cacheable_headers_json):06d}".encode("utf-8")
         + cacheable_headers_json
-        + brotli.compress(data.getvalue()),
+        + data.getvalue(),
         ex=ttl,
     )
     await redis_client.aclose()
@@ -335,5 +334,5 @@ async def get_cached_response(redis_pool, key):
     headers = json.loads(cached[16 : 16 + headers_len].decode("utf-8"))
     headers.append(f"age: {age}")
 
-    data = brotli.decompress(cached[16 + headers_len :])
+    data = cached[16 + headers_len :]
     return age, headers, BytesIO(data)
8 changes: 3 additions & 5 deletions python/python_fastapi_server/routers/caching_middleware.py
@@ -10,7 +10,6 @@
 import redis.asyncio as redis # This can also be used to connect to a Redis cluster
 
 import json
-import brotli
 
 ADAGUC_REDIS = os.environ.get("ADAGUC_REDIS")
 
@@ -32,7 +31,7 @@ async def get_cached_response(redis_pool, request):
     headers_len = int(cached[10:16].decode("utf-8"))
     headers = json.loads(cached[16 : 16 + headers_len].decode("utf-8"))
 
-    data = brotli.decompress(cached[16 + headers_len :])
+    data = cached[16 + headers_len :]
     return age, headers, data


@@ -51,15 +50,14 @@ async def response_to_cache(redis_pool, request, headers, data, ex: int):
     entrytime = f"{calendar.timegm(datetime.utcnow().utctimetuple()):10d}".encode(
         "utf-8"
     )
-    compressed_data = brotli.compress(data)
-    if len(compressed_data) < MAX_SIZE_FOR_CACHING:
+    if len(data) < MAX_SIZE_FOR_CACHING:
         redis_client = redis.Redis(connection_pool=redis_pool)
         await redis_client.set(
             key,
             entrytime
             + f"{len(headers_json):06d}".encode("utf-8")
             + headers_json
-            + brotli.compress(data),
+            + data,
             ex=ex,
         )
         await redis_client.aclose()
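
Note that the MAX_SIZE_FOR_CACHING guard now applies to the uncompressed body (len(data) instead of len(compressed_data)). To illustrate the kind of delay the commit message refers to, here is a rough, standalone timing sketch; it is not part of the commit, the payload is a synthetic stand-in, and results will vary by machine and data.

# Rough timing sketch (not part of the commit): measure how long brotli's
# default settings take on a large, hard-to-compress payload.
import os
import time

import brotli  # the dependency this commit removes from the caching path

payload = os.urandom(8 * 1024 * 1024)  # synthetic stand-in for a large response body

start = time.perf_counter()
compressed = brotli.compress(payload)
elapsed = time.perf_counter() - start

print(f"brotli: {len(payload)} -> {len(compressed)} bytes in {elapsed:.2f}s")

A lower brotli quality setting would reduce this cost, but the commit opts to skip compression on the caching path entirely.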
