+# coding=utf-8
+"""
+    @project: MaxKB
+    @Author:虎
+    @file: gzip.py
+    @date:2025/2/27 10:03
+    @desc: gzip response middleware adapted from django.middleware.gzip.GZipMiddleware;
+           only GET responses outside the /api prefix are compressed.
+"""
+from django.utils.cache import patch_vary_headers
+from django.utils.deprecation import MiddlewareMixin
+from django.utils.regex_helper import _lazy_re_compile
+from django.utils.text import compress_sequence, compress_string
+
+re_accepts_gzip = _lazy_re_compile(r"\bgzip\b")
+
+
+class GZipMiddleware(MiddlewareMixin):
+    """
+    Compress content if the browser allows gzip compression.
+    Set the Vary header accordingly, so that caches will base their storage
+    on the Accept-Encoding header.
+    """
+
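+    # Maximum number of random bytes that compress_string()/compress_sequence()
+    # mix into each gzip stream to make BREACH-style attacks harder; this is the
+    # same default used by Django's built-in GZipMiddleware.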
+    max_random_bytes = 100
+
+    def process_response(self, request, response):
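+        # MaxKB-specific guard (not part of Django's stock middleware): leave
+        # non-GET requests and anything under /api uncompressed.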
+        if request.method != 'GET' or request.path.startswith('/api'):
+            return response
+        # It's not worth attempting to compress really short responses.
+        if not response.streaming and len(response.content) < 200:
+            return response
+
+        # Avoid gzipping if we've already got a content-encoding.
+        if response.has_header("Content-Encoding"):
+            return response
+
+        patch_vary_headers(response, ("Accept-Encoding",))
+
+        ae = request.META.get("HTTP_ACCEPT_ENCODING", "")
+        if not re_accepts_gzip.search(ae):
+            return response
+
+        if response.streaming:
+            if response.is_async:
+                # Pull to lexical scope to capture a fixed reference in case
+                # streaming_content is set again later.
+                original_iterator = response.streaming_content
+
+                async def gzip_wrapper():
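+                    # Each chunk is emitted as its own gzip member; concatenated
+                    # gzip members still decode as one valid gzip stream.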
+                    async for chunk in original_iterator:
+                        yield compress_string(
+                            chunk,
+                            max_random_bytes=self.max_random_bytes,
+                        )
+
+                response.streaming_content = gzip_wrapper()
+            else:
+                response.streaming_content = compress_sequence(
+                    response.streaming_content,
+                    max_random_bytes=self.max_random_bytes,
+                )
+            # Delete the `Content-Length` header for streaming content, because
+            # we won't know the compressed size until we stream it.
+            del response.headers["Content-Length"]
+        else:
+            # Return the compressed content only if it's actually shorter.
+            compressed_content = compress_string(
+                response.content,
+                max_random_bytes=self.max_random_bytes,
+            )
+            if len(compressed_content) >= len(response.content):
+                return response
+            response.content = compressed_content
+            response.headers["Content-Length"] = str(len(response.content))
+
+        # If there is a strong ETag, make it weak to fulfill the requirements
+        # of RFC 9110 Section 8.8.1 while also allowing conditional request
+        # matches on ETags.
+        etag = response.get("ETag")
+        if etag and etag.startswith('"'):
+            response.headers["ETag"] = "W/" + etag
+        response.headers["Content-Encoding"] = "gzip"
+
+        return response
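+
+# Usage sketch (the dotted path below is illustrative, not taken from the MaxKB
+# settings; point it at wherever this module actually lives): enable the
+# middleware by listing it in MIDDLEWARE in settings.py, e.g.
+#
+#   MIDDLEWARE = [
+#       ...
+#       "common.middleware.gzip.GZipMiddleware",  # hypothetical module path
+#       ...
+#   ]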