diff --git a/camel/configs/__init__.py b/camel/configs/__init__.py
index fcd9b7a9c4..a075d368e0 100644
--- a/camel/configs/__init__.py
+++ b/camel/configs/__init__.py
@@ -35,6 +35,7 @@
 from .vllm_config import VLLM_API_PARAMS, VLLMConfig
 from .yi_config import YI_API_PARAMS, YiConfig
 from .zhipuai_config import ZHIPUAI_API_PARAMS, ZhipuAIConfig
+from .linkup_config import LINKUP_API_PARAMS, LinkupConfig
 
 __all__ = [
     'BaseConfig',
@@ -54,6 +55,8 @@
     'ZHIPUAI_API_PARAMS',
     'GeminiConfig',
     'Gemini_API_PARAMS',
+    'LinkupConfig',
+    'LINKUP_API_PARAMS',
     'VLLMConfig',
     'VLLM_API_PARAMS',
     'SGLangConfig',
diff --git a/camel/configs/linkup_config.py b/camel/configs/linkup_config.py
new file mode 100644
index 0000000000..b3c371c1ff
--- /dev/null
+++ b/camel/configs/linkup_config.py
@@ -0,0 +1,51 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+from __future__ import annotations
+
+from typing import Any, Literal, Type, Union
+
+from pydantic import BaseModel
+
+from camel.configs.base_config import BaseConfig
+
+
+class LinkupConfig(BaseConfig):
+    r"""Defines the parameters for generating chat completions using the
+    Linkup API.
+
+    Reference: https://docs.linkup.so/pages/get-started/introduction
+
+    Args:
+        depth: The depth of the search. Can be either "standard", for a
+            straightforward and fast search, or "deep" for a more powerful
+            agentic workflow.
+        output_type: The type of output expected: "searchResults" returns
+            the raw search results, "sourcedAnswer" returns the answer to
+            the query along with the sources supporting it, and "structured"
+            bases the output on the format provided in
+            structured_output_schema.
+        structured_output_schema: If output_type is "structured", the schema
+            of the output. Supported formats are a pydantic.BaseModel or a
+            string representing a valid object JSON schema.
+        include_images: If output_type is "searchResults", specifies whether
+            the response can include images. (default: :obj:`False`)
+    """
+
+    depth: Literal["standard", "deep"] = "deep"
+    output_type: Literal[
+        "searchResults", "sourcedAnswer", "structured"
+    ] = "sourcedAnswer"
+    structured_output_schema: Union[Type[BaseModel], str, None] = None
+    include_images: bool = False
+
+    def as_dict(self) -> dict[str, Any]:
+        config_dict = super().as_dict()
+        # Linkup does not support tool calling, so drop any `tools` entry.
+        config_dict.pop("tools", None)
+        return config_dict
+
+
+LINKUP_API_PARAMS = set(LinkupConfig.model_fields.keys())
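
Reviewer note: a minimal sketch (not part of the patch) of how this config is
meant to be consumed, assuming `BaseConfig.as_dict` returns the model's fields
as a plain dict, which is what the `tools` deletion above implies:

from camel.configs import LINKUP_API_PARAMS, LinkupConfig

# Serialize a config for the backend; as_dict() drops the inherited
# `tools` field because Linkup does not support tool calling.
config_dict = LinkupConfig(depth="standard").as_dict()
assert "tools" not in config_dict

# LINKUP_API_PARAMS is the whitelist LinkupModel.check_model_config uses.
assert set(config_dict).issubset(LINKUP_API_PARAMS)
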
+ """ + + depth: Literal["standard", "deep"] = "deep" + output_type: Literal["sourcedAnswer", "structured"] = "sourcedAnswer" + structured_output_schema: Union[Type[BaseModel], str, None] = None + include_images: bool = False, + + def as_dict(self) -> dict[str, Any]: + config_dict = super().as_dict() + if "tools" in config_dict: + del config_dict["tools"] # Linkup does not support tool calling + return config_dict + +LINKUP_API_PARAMS = {param for param in LinkupConfig.model_fields.keys()} diff --git a/camel/models/__init__.py b/camel/models/__init__.py index a80a80d924..9ecec57765 100644 --- a/camel/models/__init__.py +++ b/camel/models/__init__.py @@ -38,6 +38,7 @@ from .vllm_model import VLLMModel from .yi_model import YiModel from .zhipuai_model import ZhipuAIModel +from .linkup_model import LinkupModel __all__ = [ 'BaseModelBackend', @@ -68,4 +69,5 @@ 'ModelProcessingError', 'DeepSeekModel', 'FishAudioModel', + 'LinkupModel', ] diff --git a/camel/models/linkup_model.py b/camel/models/linkup_model.py new file mode 100644 index 0000000000..9f6459ac0f --- /dev/null +++ b/camel/models/linkup_model.py @@ -0,0 +1,175 @@ +import os +from typing import Any, Dict, Optional, Union, List +from camel.models.base_model import BaseModelBackend +from camel.types import ChatCompletion, ModelType +from camel.utils import ( + BaseTokenCounter, + OpenAITokenCounter, + dependencies_required, +) +from camel.messages import OpenAIMessage +from camel.configs import LINKUP_API_PARAMS, LinkupConfig + +class LinkupModel(BaseModelBackend): + r"""Linkup API integrated into a unified BaseModelBackend interface. + + Args: + model_type (Union[ModelType, str]): Model for which a backend is + created, typically a search model. + model_config_dict (Optional[Dict[str, Any]], optional): A dictionary + that will be fed into Linkup's search API. If :obj:`None`, a default + configuration will be used. (default: :obj:`None`) + api_key (Optional[str], optional): The API key for authenticating with + the Linkup service. (default: :obj:`None`) + url (Optional[str], optional): The url to the Linkup service. + (default: :obj:`None`) + token_counter (Optional[BaseTokenCounter], optional): Token counter to + use for the model. If not provided, a default token counter will be used. + (default: :obj:`None`) + """ + def __init__( + self, + model_type: Union[ModelType, str], + model_config_dict: Optional[Dict[str, Any]] = None, + api_key: Optional[str] = None, + url: Optional[str] = None, + token_counter: Optional[BaseTokenCounter] = None, + ) -> None: + from linkup import LinkupClient + + # Initialize LinkupClient + if model_config_dict is None: + model_config_dict = LinkupConfig.as_dict() + + api_key = api_key or os.environ.get("LINKUP_API_KEY") + url = url or os.environ.get("LINKUP_API_BASE_URL") + super().__init__( + model_type, model_config_dict, api_key, url, token_counter + ) + + self.client = LinkupClient(api_key=self._api_key) + + def _convert_response_from_linkup_to_openai( + self, + response: str) -> ChatCompletion: + r"""Converts a Linkup `ChatResponse` to an OpenAI-style `ChatCompletion` + response. + + Args: + response (str): The response object from the Linkup API. + + Returns: + ChatCompletion: An OpenAI-compatible chat completion response. 
+ """ + openai_response = ChatCompletion.construct( + id=None, + choices=[ + dict( + index=0, + message={ + "role": "assistant", + "content": response, + }, + finish_reason=None, + ) + ], + created=None, + model=self.model_type, + object="chat.completion", + ) + + return openai_response + + @property + def token_counter(self) -> BaseTokenCounter: + r"""Initialize the token counter for the model backend. + + Returns: + BaseTokenCounter: The token counter following the model's + tokenization style. + """ + if not self._token_counter: + self._token_counter = OpenAITokenCounter(ModelType.GPT_4O_MINI) + return self._token_counter + + def _convert_openai_to_linkup_messages( + self, + messages: List[OpenAIMessage], + ) -> List[str]: + r"""Converts OpenAI API messages to Linkup API messages. + + Args: + messages (List[OpenAIMessage]): A list of messages in OpenAI + format. + + Returns: + List[str]: A list of messages converted to str format. + """ + linkup_messages = [] + + for msg in messages: + role = msg.get("role") + if role == "system": + continue + elif role == "user": + content = str(msg.get("content")) + linkup_messages.append(content) + elif role == "assistant": + content = str(msg.get("content")) + linkup_messages.append(content) + else: + raise ValueError(f"Unsupported message role: {role}") + + return linkup_messages + + @dependencies_required('LINKUP_API_KEY') + def run( + self, + messages: List[OpenAIMessage], + ) -> ChatCompletion: + r"""Run inference of Anthropic chat completion. + + Args: + messages (List[OpenAIMessage]): Message list with the chat history + in OpenAI API format. + + Returns: + ChatCompletion: Response in the OpenAI API format. + """ + linkup_response = "" + linkup_messages = self._convert_openai_to_linkup_messages(messages) + for query in linkup_messages: + item_response = self.client.search( + query = query, + **self.model_config_dict, + ) + if self.model_config_dict.get('output_type') == "sourcedAnswer": + linkup_response += item_response.answer + '\n' + + return self._convert_response_from_linkup_to_openai(linkup_response) + + def check_model_config(self): + r"""Check whether the model configuration contains any unexpected + arguments to Linkup API. But Linkup API does not have any additional + arguments to check. + + Raises: + ValueError: If the model configuration dictionary contains any + unexpected arguments to Linkup API. + """ + for param in self.model_config_dict: + if param not in LINKUP_API_PARAMS: + raise ValueError( + f"Unexpected argument `{param}` is " + "input into Linkup model backend." + ) + + @property + def stream(self) -> bool: + r"""Returns whether the model is in stream mode, which sends partial + results each time. + + Returns: + bool: Whether the model is in stream mode. 
+ """ + return False diff --git a/camel/models/model_factory.py b/camel/models/model_factory.py index 309c3dce67..3f00d237c8 100644 --- a/camel/models/model_factory.py +++ b/camel/models/model_factory.py @@ -28,6 +28,7 @@ from camel.models.openai_model import OpenAIModel from camel.models.qwen_model import QwenModel from camel.models.reka_model import RekaModel +from camel.models.linkup_model import LinkupModel from camel.models.samba_model import SambaModel from camel.models.sglang_model import SGLangModel from camel.models.stub_model import StubModel @@ -116,6 +117,8 @@ def create( model_class = MistralModel elif model_platform.is_reka and model_type.is_reka: model_class = RekaModel + elif model_platform.is_linkup and model_type.is_linkup: + model_class = LinkupModel elif model_platform.is_cohere and model_type.is_cohere: model_class = CohereModel elif model_platform.is_yi and model_type.is_yi: diff --git a/camel/types/enums.py b/camel/types/enums.py index 5e2a04474d..cb3d770cbf 100644 --- a/camel/types/enums.py +++ b/camel/types/enums.py @@ -142,6 +142,9 @@ class ModelType(UnifiedModelType, Enum): # DeepSeek models DEEPSEEK_CHAT = "deepseek-chat" + # Linkup + LINKUP = "linkup" + def __str__(self): return self.value @@ -293,6 +296,17 @@ def is_reka(self) -> bool: ModelType.REKA_EDGE, ModelType.REKA_FLASH, } + + @property + def is_linkup(self) -> bool: + r"""Returns whether this type of models is Linkup model. + + Returns: + bool: Whether this type of models is Linkup. + """ + return self in { + ModelType.LINKUP, + } @property def is_cohere(self) -> bool: @@ -634,6 +648,7 @@ class ModelPlatformType(Enum): NVIDIA = "nvidia" DEEPSEEK = "deepseek" SGLANG = "sglang" + LINKUP = "linkup" @property def is_openai(self) -> bool: @@ -644,6 +659,11 @@ def is_openai(self) -> bool: def is_azure(self) -> bool: r"""Returns whether this platform is azure.""" return self is ModelPlatformType.AZURE + + @property + def is_linkup(self) -> bool: + r"""Returns whether this platform is linkup.""" + return self is ModelPlatformType.LINKUP @property def is_anthropic(self) -> bool: diff --git a/camel/types/unified_model_type.py b/camel/types/unified_model_type.py index 631ab623cb..a40d984a20 100644 --- a/camel/types/unified_model_type.py +++ b/camel/types/unified_model_type.py @@ -97,6 +97,11 @@ def is_mistral(self) -> bool: def is_reka(self) -> bool: r"""Returns whether the model is a Reka model.""" return True + + @property + def is_linkup(self) -> bool: + r"""Returns whether the model is a Linkup model.""" + return True @property def is_cohere(self) -> bool: diff --git a/docs/key_modules/models.md b/docs/key_modules/models.md index 1e3e5aab42..783feff114 100644 --- a/docs/key_modules/models.md +++ b/docs/key_modules/models.md @@ -81,6 +81,7 @@ The following table lists currently supported model platforms by CAMEL. | vLLM | https://docs.vllm.ai/en/latest/models/supported_models.html | ----- | | Together AI | https://docs.together.ai/docs/chat-models | ----- | | LiteLLM | https://docs.litellm.ai/docs/providers | ----- | +| Linkup | https://docs.linkup.so/pages/api-reference/endpoint/post-search | ----- | ## 3. Using Models by API calling diff --git a/examples/models/linkup_model_example.py b/examples/models/linkup_model_example.py new file mode 100644 index 0000000000..4ff07791af --- /dev/null +++ b/examples/models/linkup_model_example.py @@ -0,0 +1,46 @@ +# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. 
diff --git a/examples/models/linkup_model_example.py b/examples/models/linkup_model_example.py
new file mode 100644
index 0000000000..4ff07791af
--- /dev/null
+++ b/examples/models/linkup_model_example.py
@@ -0,0 +1,46 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+from camel.agents import ChatAgent
+from camel.configs import LinkupConfig
+from camel.models import ModelFactory
+from camel.types import ModelPlatformType, ModelType
+
+# The API key is read from the LINKUP_API_KEY environment variable; never
+# hard-code credentials in example code.
+model = ModelFactory.create(
+    model_platform=ModelPlatformType.LINKUP,
+    model_type=ModelType.LINKUP,
+    model_config_dict=LinkupConfig().as_dict(),
+)
+
+# Set agent
+camel_agent = ChatAgent(model=model)
+
+user_msg = """
+Can you tell me which women have been awarded the Nobel Prize in Physics?
+"""
+
+# Get response information
+response = camel_agent.step(user_msg)
+print(response.msgs[0].content)
+'''
+===============================================================================
+Sample output (illustrative; live answers depend on the Linkup search):
+The Nobel Prize in Physics has been awarded to five women: Marie Curie
+(1903), Maria Goeppert Mayer (1963), Donna Strickland (2018), Andrea Ghez
+(2020), and Anne L'Huillier (2023).
+===============================================================================
+'''
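
Beyond the sourced-answer flow in the example above, a hedged sketch of the
"structured" output type (not part of the patch; the schema name and fields
are illustrative, and schema handling follows
`LinkupConfig.structured_output_schema` as documented in the config):

from typing import List

from pydantic import BaseModel

from camel.configs import LinkupConfig
from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType


class NobelLaureates(BaseModel):
    # Illustrative schema: laureate names and their award years.
    names: List[str]
    years: List[int]


structured_model = ModelFactory.create(
    model_platform=ModelPlatformType.LINKUP,
    model_type=ModelType.LINKUP,
    model_config_dict=LinkupConfig(
        output_type="structured",
        structured_output_schema=NobelLaureates,
    ).as_dict(),
)
# Note: with output_type="structured", LinkupModel.run falls back to the
# raw Linkup response string, so downstream parsing is left to the caller.
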