Skip to content

Commit

Permalink
(Feat) Add StructuredOutputs support for Fireworks.AI (BerriAI#7085)
Browse files Browse the repository at this point in the history
* fix model cost map fireworks ai "supports_response_schema": true,

* fix supports_response_schema

* fix map openai params fireworks ai

* test_map_response_format

* test_map_response_format
  • Loading branch information
ishaan-jaff authored and rajatvig committed Jan 15, 2025
1 parent 291f313 commit e6ba917
Show file tree
Hide file tree
Showing 5 changed files with 88 additions and 2 deletions.
7 changes: 7 additions & 0 deletions litellm/llms/fireworks_ai/chat/fireworks_ai_transformation.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,13 @@ def map_openai_params(
else:
# pass through the value of tool choice
optional_params["tool_choice"] = value
elif (
param == "response_format" and value.get("type", None) == "json_schema"
):
optional_params["response_format"] = {
"type": "json_object",
"schema": value["json_schema"]["schema"],
}
elif param == "max_completion_tokens":
optional_params["max_tokens"] = value
elif param in supported_openai_params:
Expand Down
10 changes: 10 additions & 0 deletions litellm/model_prices_and_context_window_backup.json
Original file line number Diff line number Diff line change
Expand Up @@ -6817,6 +6817,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/llama-v3p2-3b-instruct": {
Expand All @@ -6828,6 +6829,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct": {
Expand All @@ -6840,6 +6842,7 @@
"mode": "chat",
"supports_function_calling": true,
"supports_vision": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"accounts/fireworks/models/llama-v3p2-90b-vision-instruct": {
Expand All @@ -6852,6 +6855,7 @@
"mode": "chat",
"supports_function_calling": true,
"supports_vision": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/firefunction-v2": {
Expand All @@ -6863,6 +6867,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct-hf": {
Expand All @@ -6874,6 +6879,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/qwen2-72b-instruct": {
Expand All @@ -6885,6 +6891,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct": {
Expand All @@ -6896,6 +6903,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/yi-large": {
Expand All @@ -6907,6 +6915,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-instruct": {
Expand All @@ -6918,6 +6927,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/nomic-ai/nomic-embed-text-v1.5": {
Expand Down
9 changes: 7 additions & 2 deletions litellm/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1763,9 +1763,14 @@ def supports_response_schema(model: str, custom_llm_provider: Optional[str]) ->
model=model, custom_llm_provider=custom_llm_provider
)

if custom_llm_provider == "predibase": # predibase supports this globally
return True
# providers that globally support response schema
PROVIDERS_GLOBALLY_SUPPORT_RESPONSE_SCHEMA = [
litellm.LlmProviders.PREDIBASE,
litellm.LlmProviders.FIREWORKS_AI,
]

if custom_llm_provider in PROVIDERS_GLOBALLY_SUPPORT_RESPONSE_SCHEMA:
return True
try:
## GET MODEL INFO
model_info = litellm.get_model_info(
Expand Down
10 changes: 10 additions & 0 deletions model_prices_and_context_window.json
Original file line number Diff line number Diff line change
Expand Up @@ -6817,6 +6817,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/llama-v3p2-3b-instruct": {
Expand All @@ -6828,6 +6829,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct": {
Expand All @@ -6840,6 +6842,7 @@
"mode": "chat",
"supports_function_calling": true,
"supports_vision": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"accounts/fireworks/models/llama-v3p2-90b-vision-instruct": {
Expand All @@ -6852,6 +6855,7 @@
"mode": "chat",
"supports_function_calling": true,
"supports_vision": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/firefunction-v2": {
Expand All @@ -6863,6 +6867,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct-hf": {
Expand All @@ -6874,6 +6879,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/qwen2-72b-instruct": {
Expand All @@ -6885,6 +6891,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct": {
Expand All @@ -6896,6 +6903,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/yi-large": {
Expand All @@ -6907,6 +6915,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-instruct": {
Expand All @@ -6918,6 +6927,7 @@
"litellm_provider": "fireworks_ai",
"mode": "chat",
"supports_function_calling": true,
"supports_response_schema": true,
"source": "https://fireworks.ai/pricing"
},
"fireworks_ai/nomic-ai/nomic-embed-text-v1.5": {
Expand Down
54 changes: 54 additions & 0 deletions tests/llm_translation/test_fireworks_ai_translation.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
) # Adds the parent directory to the system path

from litellm.llms.fireworks_ai.chat.fireworks_ai_transformation import FireworksAIConfig
from base_llm_unit_tests import BaseLLMChatTest

fireworks = FireworksAIConfig()

Expand All @@ -30,3 +31,56 @@ def test_map_openai_params_tool_choice():
# Test case 4: tool_choice is None
result = fireworks.map_openai_params({"tool_choice": None}, {}, "some_model")
assert result == {"tool_choice": None}


def test_map_response_format():
    """
    Verify that an OpenAI-style ``json_schema`` response_format is rewritten
    into the ``json_object`` + ``schema`` shape that Fireworks AI expects.

    h/t to https://github.com/DaveDeCaprio (@DaveDeCaprio) for the test case
    Relevant Issue: https://github.com/BerriAI/litellm/issues/6797
    Fireworks AI Ref: https://docs.fireworks.ai/structured-responses/structured-response-formatting#step-1-import-libraries
    """
    # JSON schema for a single required boolean field.
    boolean_schema = {
        "properties": {"result": {"type": "boolean"}},
        "required": ["result"],
        "type": "object",
    }
    openai_response_format = {
        "type": "json_schema",
        "json_schema": {
            "schema": boolean_schema,
            "name": "BooleanResponse",
            "strict": True,
        },
    }

    mapped = fireworks.map_openai_params(
        {"response_format": openai_response_format}, {}, "some_model"
    )

    # Fireworks drops the wrapper metadata (name/strict) and nests the raw
    # schema under a "json_object" response format.
    expected = {
        "response_format": {
            "type": "json_object",
            "schema": boolean_schema,
        }
    }
    assert mapped == expected


class TestFireworksAIChatCompletion(BaseLLMChatTest):
    """Run the shared chat-completion test suite against a Fireworks AI model."""

    def get_base_completion_call_args(self) -> dict:
        # Vision-capable Llama 3.2 model routed through the fireworks_ai provider.
        base_model = (
            "fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct"
        )
        return {"model": base_model}

    def test_tool_call_no_arguments(self, tool_call_no_arguments):
        """Test that tool calls with no arguments is translated correctly. Relevant issue: https://github.com/BerriAI/litellm/issues/6833"""
        pass

    def test_multilingual_requests(self):
        """
        Skipped for this provider: Fireworks AI raises a 500 BadRequest error
        when the request contains invalid utf-8 sequences.
        """
        pass

0 comments on commit e6ba917

Please sign in to comment.