diff --git a/src/backend/base/langflow/components/models/anthropic.py b/src/backend/base/langflow/components/models/anthropic.py
index 0de4a4a9e9c4..3a963ccc88ec 100644
--- a/src/backend/base/langflow/components/models/anthropic.py
+++ b/src/backend/base/langflow/components/models/anthropic.py
@@ -63,7 +63,7 @@ class AnthropicModelComponent(LCModelComponent):
                 "Select if you want to use models that can work with tools. If yes, only those models will be shown."
             ),
             advanced=False,
-            value=True,
+            value=False,
             real_time_refresh=True,
         ),
         MessageTextInput(
diff --git a/src/backend/base/langflow/components/models/google_generative_ai.py b/src/backend/base/langflow/components/models/google_generative_ai.py
index a907af731cc5..51a1f676249d 100644
--- a/src/backend/base/langflow/components/models/google_generative_ai.py
+++ b/src/backend/base/langflow/components/models/google_generative_ai.py
@@ -70,6 +70,7 @@ class GoogleGenerativeAIComponent(LCModelComponent):
             name="tool_model_enabled",
             display_name="Tool Model Enabled",
             info="Whether to use the tool model.",
+            value=False,
         ),
     ]

diff --git a/src/backend/base/langflow/components/models/groq.py b/src/backend/base/langflow/components/models/groq.py
index db913ac152e8..ebaa27228ae8 100644
--- a/src/backend/base/langflow/components/models/groq.py
+++ b/src/backend/base/langflow/components/models/groq.py
@@ -70,7 +70,7 @@ class GroqModel(LCModelComponent):
                 "Select if you want to use models that can work with tools. If yes, only those models will be shown."
             ),
             advanced=False,
-            value=True,
+            value=False,
             real_time_refresh=True,
         ),
     ]
diff --git a/src/backend/base/langflow/components/models/nvidia.py b/src/backend/base/langflow/components/models/nvidia.py
index 64940a6f3cb8..3b3063559089 100644
--- a/src/backend/base/langflow/components/models/nvidia.py
+++ b/src/backend/base/langflow/components/models/nvidia.py
@@ -43,7 +43,7 @@ class NVIDIAModelComponent(LCModelComponent):
                 "Select if you want to use models that can work with tools. If yes, only those models will be shown."
             ),
             advanced=False,
-            value=True,
+            value=False,
             real_time_refresh=True,
         ),
         SecretStrInput(
diff --git a/src/backend/base/langflow/components/models/ollama.py b/src/backend/base/langflow/components/models/ollama.py
index 6d159697d98f..b5b8f52a8061 100644
--- a/src/backend/base/langflow/components/models/ollama.py
+++ b/src/backend/base/langflow/components/models/ollama.py
@@ -118,7 +118,7 @@ class ChatOllamaComponent(LCModelComponent):
             name="tool_model_enabled",
             display_name="Tool Model Enabled",
             info="Whether to enable tool calling in the model.",
-            value=True,
+            value=False,
             real_time_refresh=True,
         ),
         MessageTextInput(
diff --git a/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json
index 2669112af32c..ff968513c329 100644
--- a/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json
+++ b/src/backend/base/langflow/initial_setup/starter_projects/Custom Component Maker.json
@@ -1932,7 +1932,7 @@
                 "show": true,
                 "title_case": false,
                 "type": "code",
-                "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import ANTHROPIC_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n    display_name = \"Anthropic\"\n    description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n    icon = \"Anthropic\"\n    name = \"AnthropicModel\"\n\n    inputs = [\n        *LCModelComponent._base_inputs,\n        IntInput(\n            name=\"max_tokens\",\n            display_name=\"Max Tokens\",\n            advanced=True,\n            value=4096,\n            info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n        ),\n        DropdownInput(\n            name=\"model_name\",\n            display_name=\"Model Name\",\n            options=ANTHROPIC_MODELS,\n            refresh_button=True,\n            value=ANTHROPIC_MODELS[0],\n            combobox=True,\n        ),\n        SecretStrInput(\n            name=\"api_key\",\n            display_name=\"Anthropic API Key\",\n            info=\"Your Anthropic API key.\",\n            value=None,\n            required=True,\n            real_time_refresh=True,\n        ),\n        SliderInput(\n            name=\"temperature\",\n            display_name=\"Temperature\",\n            value=0.1,\n            info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n            range_spec=RangeSpec(min=0, max=1, step=0.01),\n        ),\n        MessageTextInput(\n            name=\"base_url\",\n            display_name=\"Anthropic API URL\",\n            info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n            value=\"https://api.anthropic.com\",\n            real_time_refresh=True,\n        ),\n        BoolInput(\n            name=\"tool_model_enabled\",\n            display_name=\"Enable Tool Models\",\n            info=(\n                \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n            ),\n            advanced=False,\n            value=True,\n            real_time_refresh=True,\n        ),\n        MessageTextInput(\n            name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n        ),\n    ]\n\n    def build_model(self) -> LanguageModel:  # type: ignore[type-var]\n        try:\n            from langchain_anthropic.chat_models import ChatAnthropic\n        except ImportError as e:\n            msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n            raise ImportError(msg) from e\n        try:\n            output = ChatAnthropic(\n                model=self.model_name,\n                anthropic_api_key=self.api_key,\n                max_tokens_to_sample=self.max_tokens,\n                temperature=self.temperature,\n                anthropic_api_url=self.base_url,\n                streaming=self.stream,\n            )\n        except Exception as e:\n            msg = \"Could not connect to Anthropic API.\"\n            raise ValueError(msg) from e\n\n        return output\n\n    def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n        try:\n            import anthropic\n\n            client = anthropic.Anthropic(api_key=self.api_key)\n            models = client.models.list(limit=20).data\n            model_ids = [model.id for model in models]\n        except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n            logger.exception(f\"Error getting model names: {e}\")\n            model_ids = ANTHROPIC_MODELS\n        if tool_model_enabled:\n            try:\n                from langchain_anthropic.chat_models import ChatAnthropic\n            except ImportError as e:\n                msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n                raise ImportError(msg) from e\n            for model in model_ids:\n                model_with_tool = ChatAnthropic(\n                    model=self.model_name,\n                    anthropic_api_key=self.api_key,\n                    anthropic_api_url=self.base_url,\n                )\n                if not self.supports_tool_calling(model_with_tool):\n                    model_ids.remove(model)\n        return model_ids\n\n    def _get_exception_message(self, exception: Exception) -> str | None:\n        \"\"\"Get a message from an Anthropic exception.\n\n        Args:\n            exception (Exception): The exception to get the message from.\n\n        Returns:\n            str: The message from the exception.\n        \"\"\"\n        try:\n            from anthropic import BadRequestError\n        except ImportError:\n            return None\n        if isinstance(exception, BadRequestError):\n            message = exception.body.get(\"error\", {}).get(\"message\")\n            if message:\n                return message\n        return None\n\n    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n        if field_name in (\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\") and field_value:\n            try:\n                if len(self.api_key) == 0:\n                    ids = ANTHROPIC_MODELS\n                else:\n                    try:\n                        ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n                    except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n                        logger.exception(f\"Error getting model names: {e}\")\n                        ids = ANTHROPIC_MODELS\n                build_config[\"model_name\"][\"options\"] = ids\n                build_config[\"model_name\"][\"value\"] = ids[0]\n            except Exception as e:\n                msg = f\"Error getting model names: {e}\"\n                raise ValueError(msg) from e\n        return build_config\n"
+                "value": "from typing import Any\n\nimport requests\nfrom loguru import logger\n\nfrom langflow.base.models.anthropic_constants import ANTHROPIC_MODELS\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.io import BoolInput, DropdownInput, IntInput, MessageTextInput, SecretStrInput, SliderInput\nfrom langflow.schema.dotdict import dotdict\n\n\nclass AnthropicModelComponent(LCModelComponent):\n    display_name = \"Anthropic\"\n    description = \"Generate text using Anthropic Chat&Completion LLMs with prefill support.\"\n    icon = \"Anthropic\"\n    name = \"AnthropicModel\"\n\n    inputs = [\n        *LCModelComponent._base_inputs,\n        IntInput(\n            name=\"max_tokens\",\n            display_name=\"Max Tokens\",\n            advanced=True,\n            value=4096,\n            info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n        ),\n        DropdownInput(\n            name=\"model_name\",\n            display_name=\"Model Name\",\n            options=ANTHROPIC_MODELS,\n            refresh_button=True,\n            value=ANTHROPIC_MODELS[0],\n            combobox=True,\n        ),\n        SecretStrInput(\n            name=\"api_key\",\n            display_name=\"Anthropic API Key\",\n            info=\"Your Anthropic API key.\",\n            value=None,\n            required=True,\n            real_time_refresh=True,\n        ),\n        SliderInput(\n            name=\"temperature\",\n            display_name=\"Temperature\",\n            value=0.1,\n            info=\"Run inference with this temperature. Must by in the closed interval [0.0, 1.0].\",\n            range_spec=RangeSpec(min=0, max=1, step=0.01),\n        ),\n        MessageTextInput(\n            name=\"base_url\",\n            display_name=\"Anthropic API URL\",\n            info=\"Endpoint of the Anthropic API. Defaults to 'https://api.anthropic.com' if not specified.\",\n            value=\"https://api.anthropic.com\",\n            real_time_refresh=True,\n        ),\n        BoolInput(\n            name=\"tool_model_enabled\",\n            display_name=\"Enable Tool Models\",\n            info=(\n                \"Select if you want to use models that can work with tools. If yes, only those models will be shown.\"\n            ),\n            advanced=False,\n            value=False,\n            real_time_refresh=True,\n        ),\n        MessageTextInput(\n            name=\"prefill\", display_name=\"Prefill\", info=\"Prefill text to guide the model's response.\", advanced=True\n        ),\n    ]\n\n    def build_model(self) -> LanguageModel:  # type: ignore[type-var]\n        try:\n            from langchain_anthropic.chat_models import ChatAnthropic\n        except ImportError as e:\n            msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n            raise ImportError(msg) from e\n        try:\n            output = ChatAnthropic(\n                model=self.model_name,\n                anthropic_api_key=self.api_key,\n                max_tokens_to_sample=self.max_tokens,\n                temperature=self.temperature,\n                anthropic_api_url=self.base_url,\n                streaming=self.stream,\n            )\n        except Exception as e:\n            msg = \"Could not connect to Anthropic API.\"\n            raise ValueError(msg) from e\n\n        return output\n\n    def get_models(self, tool_model_enabled: bool | None = None) -> list[str]:\n        try:\n            import anthropic\n\n            client = anthropic.Anthropic(api_key=self.api_key)\n            models = client.models.list(limit=20).data\n            model_ids = [model.id for model in models]\n        except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n            logger.exception(f\"Error getting model names: {e}\")\n            model_ids = ANTHROPIC_MODELS\n        if tool_model_enabled:\n            try:\n                from langchain_anthropic.chat_models import ChatAnthropic\n            except ImportError as e:\n                msg = \"langchain_anthropic is not installed. Please install it with `pip install langchain_anthropic`.\"\n                raise ImportError(msg) from e\n            for model in model_ids:\n                model_with_tool = ChatAnthropic(\n                    model=self.model_name,\n                    anthropic_api_key=self.api_key,\n                    anthropic_api_url=self.base_url,\n                )\n                if not self.supports_tool_calling(model_with_tool):\n                    model_ids.remove(model)\n        return model_ids\n\n    def _get_exception_message(self, exception: Exception) -> str | None:\n        \"\"\"Get a message from an Anthropic exception.\n\n        Args:\n            exception (Exception): The exception to get the message from.\n\n        Returns:\n            str: The message from the exception.\n        \"\"\"\n        try:\n            from anthropic import BadRequestError\n        except ImportError:\n            return None\n        if isinstance(exception, BadRequestError):\n            message = exception.body.get(\"error\", {}).get(\"message\")\n            if message:\n                return message\n        return None\n\n    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None):\n        if field_name in (\"base_url\", \"model_name\", \"tool_model_enabled\", \"api_key\") and field_value:\n            try:\n                if len(self.api_key) == 0:\n                    ids = ANTHROPIC_MODELS\n                else:\n                    try:\n                        ids = self.get_models(tool_model_enabled=self.tool_model_enabled)\n                    except (ImportError, ValueError, requests.exceptions.RequestException) as e:\n                        logger.exception(f\"Error getting model names: {e}\")\n                        ids = ANTHROPIC_MODELS\n                build_config[\"model_name\"][\"options\"] = ids\n                build_config[\"model_name\"][\"value\"] = ids[0]\n            except Exception as e:\n                msg = f\"Error getting model names: {e}\"\n                raise ValueError(msg) from e\n        return build_config\n"
               },
               "input_value": {
                 "_input_type": "MessageInput",