diff --git a/docs/my-website/docs/caching/redis_cache.md b/docs/my-website/docs/caching/redis_cache.md index 3d70c5e3dbb6..8a580f087ca7 100644 --- a/docs/my-website/docs/caching/redis_cache.md +++ b/docs/my-website/docs/caching/redis_cache.md @@ -204,6 +204,7 @@ def __init__( s3_bucket_name: Optional[str] = None, s3_region_name: Optional[str] = None, s3_api_version: Optional[str] = None, + s3_path: Optional[str] = None, # if you wish to save to a specific path s3_use_ssl: Optional[bool] = True, s3_verify: Optional[Union[bool, str]] = None, s3_endpoint_url: Optional[str] = None, diff --git a/litellm/integrations/s3.py b/litellm/integrations/s3.py index 0187d13d6a67..98614949ea98 100644 --- a/litellm/integrations/s3.py +++ b/litellm/integrations/s3.py @@ -16,6 +16,7 @@ class S3Logger: def __init__( self, s3_bucket_name=None, + s3_path=None, s3_region_name=None, s3_api_version=None, s3_use_ssl=True, @@ -57,6 +58,7 @@ def __init__( # done reading litellm.s3_callback_params self.bucket_name = s3_bucket_name + self.s3_path = s3_path # Create an S3 client with custom endpoint URL self.s3_client = boto3.client( "s3", @@ -122,7 +124,10 @@ def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose): pass s3_object_key = ( (self.s3_path.rstrip("/") + "/" if self.s3_path else "") + payload["id"] + "-time=" + str(start_time) ) # we need the s3 key to include the time, so we log cache hits too import json