Skip to content

Commit

Permalink
Revert "(feat) Allow using include to include external YAML files in a config.yaml (#6922)"
Browse files Browse the repository at this point in the history

This reverts commit 68e5982.
  • Loading branch information
ishaan-jaff committed Nov 27, 2024
1 parent 07223bd commit 5d13302
Show file tree
Hide file tree
Showing 13 changed files with 23 additions and 225 deletions.
59 changes: 0 additions & 59 deletions docs/my-website/docs/proxy/config_management.md

This file was deleted.

2 changes: 1 addition & 1 deletion docs/my-website/docs/proxy/configs.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import Image from '@theme/IdealImage';
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';

# Overview
# Proxy Config.yaml
Set model list, `api_base`, `api_key`, `temperature` & proxy server settings (`master-key`) on the config.yaml.

| Param Name | Description |
Expand Down
2 changes: 1 addition & 1 deletion docs/my-website/sidebars.js
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ const sidebars = {
{
"type": "category",
"label": "Config.yaml",
"items": ["proxy/configs", "proxy/config_management", "proxy/config_settings"]
"items": ["proxy/configs", "proxy/config_settings"]
},
{
type: "category",
Expand Down
10 changes: 0 additions & 10 deletions litellm/proxy/model_config.yaml

This file was deleted.

23 changes: 21 additions & 2 deletions litellm/proxy/proxy_config.yaml
Original file line number Diff line number Diff line change
@@ -1,5 +1,24 @@
include:
- model_config.yaml
model_list:
- model_name: gpt-4o
litellm_params:
model: openai/gpt-4o
api_base: https://exampleopenaiendpoint-production.up.railway.app/
- model_name: fake-anthropic-endpoint
litellm_params:
model: anthropic/fake
api_base: https://exampleanthropicendpoint-production.up.railway.app/

router_settings:
provider_budget_config:
openai:
budget_limit: 0.3 # float of $ value budget for time period
time_period: 1d # can be 1d, 2d, 30d
anthropic:
budget_limit: 5
time_period: 1d
redis_host: os.environ/REDIS_HOST
redis_port: os.environ/REDIS_PORT
redis_password: os.environ/REDIS_PASSWORD

litellm_settings:
callbacks: ["datadog"]
55 changes: 0 additions & 55 deletions litellm/proxy/proxy_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -1377,16 +1377,6 @@ def is_yaml(self, config_file_path: str) -> bool:
_, file_extension = os.path.splitext(config_file_path)
return file_extension.lower() == ".yaml" or file_extension.lower() == ".yml"

def _load_yaml_file(self, file_path: str) -> dict:
    """Read ``file_path`` and return its parsed YAML content as a dict.

    An empty or all-comments document (which PyYAML parses to a falsy
    value) is normalized to ``{}`` so callers can always treat the
    result as a mapping.
    """
    try:
        with open(file_path, "r") as config_file:
            parsed = yaml.safe_load(config_file)
    except Exception as e:
        # Wrap any open/parse failure with the offending path for easier debugging.
        raise Exception(f"Error loading yaml file {file_path}: {str(e)}")
    return parsed or {}

async def _get_config_from_file(
self, config_file_path: Optional[str] = None
) -> dict:
Expand Down Expand Up @@ -1417,51 +1407,6 @@ async def _get_config_from_file(
"litellm_settings": {},
}

# Process includes
config = self._process_includes(
config=config, base_dir=os.path.dirname(os.path.abspath(file_path or ""))
)

verbose_proxy_logger.debug(f"loaded config={json.dumps(config, indent=4)}")
return config

def _process_includes(self, config: dict, base_dir: str) -> dict:
    """
    Process includes by appending their contents to the main config

    Handles nested config.yamls with `include` section

    Example config: This will get the contents from files in `include` and append it
    ```yaml
    include:
        - model_config.yaml
    litellm_settings:
        callbacks: ["prometheus"]
    ```

    Args:
        config: the parsed root config, possibly containing an ``include`` key.
        base_dir: directory relative include paths are resolved against.

    Returns:
        ``config`` with every included file merged in and the ``include``
        key removed.

    Raises:
        ValueError: if ``include`` is present but not a list.
        FileNotFoundError: if a referenced include file does not exist.
    """
    if "include" not in config:
        return config

    if not isinstance(config["include"], list):
        raise ValueError("'include' must be a list of file paths")

    # Iterate over a snapshot: the merge below mutates `config`, and a
    # nested `include` key must never grow the list we are walking
    # (previously the loop iterated the live list while extend() could
    # append to it).
    for include_file in list(config["include"]):
        file_path = os.path.join(base_dir, include_file)
        if not os.path.exists(file_path):
            raise FileNotFoundError(f"Included file not found: {file_path}")

        included_config = self._load_yaml_file(file_path)
        # Bug fix: resolve includes declared *inside* an included file
        # relative to that file's own directory (they were previously
        # resolved against the root config's directory).
        included_config = self._process_includes(
            config=included_config, base_dir=os.path.dirname(file_path)
        )

        # Merge policy: list values append onto an existing list; any
        # other value (or a list colliding with a non-list) overwrites.
        for key, value in included_config.items():
            if isinstance(value, list) and isinstance(config.get(key), list):
                config[key].extend(value)
            else:
                config[key] = value

    # Remove the include directive now that its contents are merged
    del config["include"]
    return config

async def save_config(self, new_config: dict):
Expand Down

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

4 changes: 0 additions & 4 deletions tests/proxy_unit_tests/example_config_yaml/models_file_1.yaml

This file was deleted.

4 changes: 0 additions & 4 deletions tests/proxy_unit_tests/example_config_yaml/models_file_2.yaml

This file was deleted.

69 changes: 0 additions & 69 deletions tests/proxy_unit_tests/test_proxy_config_unit_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,6 @@

from litellm.proxy.proxy_server import ProxyConfig

INVALID_FILES = ["config_with_missing_include.yaml"]


@pytest.mark.asyncio
async def test_basic_reading_configs_from_files():
Expand All @@ -40,9 +38,6 @@ async def test_basic_reading_configs_from_files():
print(files)

for file in files:
if file in INVALID_FILES: # these are intentionally invalid files
continue
print("reading file=", file)
config_path = os.path.join(example_config_yaml_path, file)
config = await proxy_config_instance.get_config(config_file_path=config_path)
print(config)
Expand Down Expand Up @@ -120,67 +115,3 @@ async def test_read_config_file_with_os_environ_vars():
os.environ[key] = _old_env_vars[key]
else:
del os.environ[key]


@pytest.mark.asyncio
async def test_basic_include_directive():
    """
    The `include` directive should load the referenced file and merge it
    into the main config without losing the root config's own settings.
    """
    cfg_loader = ProxyConfig()
    base_dir = os.path.dirname(os.path.abspath(__file__))
    cfg_file = os.path.join(
        base_dir, "example_config_yaml", "config_with_include.yaml"
    )

    merged = await cfg_loader.get_config(config_file_path=cfg_file)

    # The included model list was merged in.
    model_names = [entry["model_name"] for entry in merged["model_list"]]
    assert len(model_names) > 0
    assert "included-model" in model_names

    # Settings declared in the root config survive the merge.
    assert merged["litellm_settings"]["callbacks"] == ["prometheus"]


@pytest.mark.asyncio
async def test_missing_include_file():
    """
    Referencing a non-existent file under `include` must surface as a
    FileNotFoundError instead of being silently ignored.
    """
    cfg_loader = ProxyConfig()
    cfg_file = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        "example_config_yaml",
        "config_with_missing_include.yaml",
    )

    with pytest.raises(FileNotFoundError):
        await cfg_loader.get_config(config_file_path=cfg_file)


@pytest.mark.asyncio
async def test_multiple_includes():
    """
    Every file listed under `include` should contribute its models to the
    merged config, and root-level settings should be preserved.
    """
    cfg_loader = ProxyConfig()
    base_dir = os.path.dirname(os.path.abspath(__file__))
    cfg_file = os.path.join(
        base_dir, "example_config_yaml", "config_with_multiple_includes.yaml"
    )

    merged = await cfg_loader.get_config(config_file_path=cfg_file)

    # Models from both included files are present — and nothing else.
    model_names = {entry["model_name"] for entry in merged["model_list"]}
    assert len(merged["model_list"]) == 2
    assert "included-model-1" in model_names
    assert "included-model-2" in model_names

    # Settings declared in the root config survive the merge.
    assert merged["litellm_settings"]["callbacks"] == ["prometheus"]

0 comments on commit 5d13302

Please sign in to comment.