Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(api/core/model_runtime/model_providers/__base/large_language_model.py): Add TEXT type checker #7407

Merged
merged 1 commit
Aug 19, 2024
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ def _code_block_mode_wrapper(self, model: str, credentials: dict, prompt_message
stream=stream,
user=user
)

model_parameters.pop("response_format")
stop = stop or []
stop.extend(["\n```", "```\n"])
Expand Down Expand Up @@ -249,10 +249,10 @@ def new_generator():
prompt_messages=prompt_messages,
input_generator=new_generator()
)

return response

def _code_block_mode_stream_processor(self, model: str, prompt_messages: list[PromptMessage],
def _code_block_mode_stream_processor(self, model: str, prompt_messages: list[PromptMessage],
input_generator: Generator[LLMResultChunk, None, None]
) -> Generator[LLMResultChunk, None, None]:
"""
Expand Down Expand Up @@ -310,7 +310,7 @@ def _code_block_mode_stream_processor(self, model: str, prompt_messages: list[Pr
)
)

def _code_block_mode_stream_processor_with_backtick(self, model: str, prompt_messages: list,
def _code_block_mode_stream_processor_with_backtick(self, model: str, prompt_messages: list,
input_generator: Generator[LLMResultChunk, None, None]) \
-> Generator[LLMResultChunk, None, None]:
"""
Expand Down Expand Up @@ -470,7 +470,7 @@ def _invoke(self, model: str, credentials: dict,
:return: full response or stream response chunk generator result
"""
raise NotImplementedError

@abstractmethod
def get_num_tokens(self, model: str, credentials: dict, prompt_messages: list[PromptMessage],
tools: Optional[list[PromptMessageTool]] = None) -> int:
Expand Down Expand Up @@ -792,6 +792,13 @@ def _validate_and_filter_model_parameters(self, model: str, model_parameters: di
if not isinstance(parameter_value, str):
raise ValueError(f"Model Parameter {parameter_name} should be string.")

# validate options
if parameter_rule.options and parameter_value not in parameter_rule.options:
raise ValueError(f"Model Parameter {parameter_name} should be one of {parameter_rule.options}.")
elif parameter_rule.type == ParameterType.TEXT:
if not isinstance(parameter_value, str):
raise ValueError(f"Model Parameter {parameter_name} should be text.")

# validate options
if parameter_rule.options and parameter_value not in parameter_rule.options:
raise ValueError(f"Model Parameter {parameter_name} should be one of {parameter_rule.options}.")
Expand Down