From b21816654c986f43dc324f34ef6e7df2b46010a9 Mon Sep 17 00:00:00 2001 From: Sarah Wooders Date: Tue, 11 Jun 2024 22:52:29 -0700 Subject: [PATCH 1/6] add mkdocs autodocs --- docs/docs/index.md | 20 +++ docs/mkdocs.yml | 23 +++ memgpt/client/client.py | 135 +++++++++++------ memgpt/config.py | 29 ++++ memgpt/data_types.py | 16 +++ poetry.lock | 310 +++++++++++++++++++++++++++++++++++++++- pyproject.toml | 3 + 7 files changed, 486 insertions(+), 50 deletions(-) create mode 100644 docs/docs/index.md create mode 100644 docs/mkdocs.yml diff --git a/docs/docs/index.md b/docs/docs/index.md new file mode 100644 index 0000000000..4ecf4769fa --- /dev/null +++ b/docs/docs/index.md @@ -0,0 +1,20 @@ +# Reference +# Components +::: memgpt.data_types.AgentState + +# Client + +## `AbstractClient` +::: memgpt.client.client.AbstractClient + +## `LocalClient` +::: memgpt.client.client.LocalClient + +## `RESTClient` +::: memgpt.client.client.RESTClient + +# Configuration + + + + diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml new file mode 100644 index 0000000000..d610d38a47 --- /dev/null +++ b/docs/mkdocs.yml @@ -0,0 +1,23 @@ +site_name: "MemGPT API Reference" + +theme: + name: "material" + features: + - navigation.instant + - navigation.footer + - navigation.indexes + - navigation.sections + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + +nav: + - Home: index.md +plugins: +- mkdocstrings + # custom_templates: templates + # default_handler: python + # handlers: + # python: + # options: + # show_source: false diff --git a/memgpt/client/client.py b/memgpt/client/client.py index e519a8673b..a2430ab662 100644 --- a/memgpt/client/client.py +++ b/memgpt/client/client.py @@ -1,6 +1,7 @@ import datetime import time import uuid +from abc import ABC, abstractmethod from typing import Dict, List, Optional, Tuple, Union import requests @@ -63,25 +64,50 @@ def create_client(base_url: Optional[str] = None, token: Optional[str] = None): return RESTClient(base_url, token) -class AbstractClient(object): - def __init__( - self, - auto_save: bool = False, - debug: bool = False, - ): - self.auto_save = auto_save - self.debug = debug +# """ Python clients for MemGPT. +# +# Classes: +# Abtractclient: Description for `foo`. +# LocalClient: Description for `bar`. +# """ + + +class AbstractClient(ABC): + """The abstract class for MemGPT client. + + Attributes: + auto_save (bool): Description of `attr1`. + debug (bool): Description of `attr2`. + + args: + auto_save (bool): Description of `attr1`. + debug (bool): Description of `attr2`. + """ # agents - def list_agents(self): - """List all agents associated with a given user.""" + def list_agents(self) -> List[AgentState]: + """List all agents associated with a given user. + + Returns: + List[AgentState]: List of agent configurations. + """ raise NotImplementedError def agent_exists(self, agent_id: Optional[str] = None, agent_name: Optional[str] = None) -> bool: - """Check if an agent with the specified ID or name exists.""" + """Check if an agent with the specified ID or name exists. + + Args: + agent_id (str): The ID of the agent. + agent_name (str): The name of the agent. + + Returns: + bool: True if the agent exists, False otherwise. 
+ + """ raise NotImplementedError + @abstractmethod def create_agent( self, name: Optional[str] = None, @@ -91,7 +117,19 @@ def create_agent( embedding_config: Optional[EmbeddingConfig] = None, llm_config: Optional[LLMConfig] = None, ) -> AgentState: - """Create a new agent with the specified configuration.""" + """Create a new agent with the specified configuration. + + Note: + Do not include the `self` parameter in the ``Args`` section. + + Args: + name (str): The first parameter. + preset (str): The second parameter. + + Returns: + AgentState: The state of the created agent. + + """ raise NotImplementedError def rename_agent(self, agent_id: uuid.UUID, new_name: str): @@ -230,6 +268,25 @@ def get_config(self): class RESTClient(AbstractClient): + """Client for the MemGPT REST API. + + The RESTAPI client corresponds to a single `user_id` and `token` pair. + + Attributes: + base_url (str): The base URL of the MemGPT server. + token (str): The user authentication token for the MemGPT server. + debug (bool): Print debug logs. + + args: + base_url (str): The base URL of the MemGPT server. + token (str): The user authentication token for the MemGPT server. + debug (bool): Print debug logs. + + Examples: + >>> from memgpt import RESTClient + >>> client = RESTClient(base_url="memgpt.localhost", token="user_token") + """ + def __init__( self, base_url: str, @@ -322,7 +379,6 @@ def rename_agent(self, agent_id: uuid.UUID, new_name: str): return self.get_agent_response_to_state(response_obj) def delete_agent(self, agent_id: uuid.UUID): - """Delete the agent.""" response = requests.delete(f"{self.base_url}/api/agents/{str(agent_id)}", headers=self.headers) assert response.status_code == 200, f"Failed to delete agent: {response.text}" @@ -365,23 +421,6 @@ def create_preset( tools: Optional[List[ToolModel]] = None, default_tools: bool = True, ) -> PresetModel: - """Create an agent preset - - :param name: Name of the preset - :type name: str - :param system: System prompt (text) - :type system: str - :param persona: Persona prompt (text) - :type persona: Optional[str] - :param human: Human prompt (text) - :type human: Optional[str] - :param tools: List of tools to connect, defaults to None - :type tools: Optional[List[Tool]], optional - :param default_tools: Whether to automatically include default tools, defaults to True - :type default_tools: bool, optional - :return: Preset object - :rtype: PresetModel - """ # provided tools schema = [] if tools: @@ -448,7 +487,6 @@ def save(self): def get_agent_archival_memory( self, agent_id: uuid.UUID, before: Optional[uuid.UUID] = None, after: Optional[uuid.UUID] = None, limit: Optional[int] = 1000 ): - """Paginated get for the archival memory for an agent""" params = {"limit": limit} if before: params["before"] = str(before) @@ -517,13 +555,11 @@ def create_persona(self, name: str, persona: str) -> PersonaModel: # sources def list_sources(self): - """List loaded sources""" response = requests.get(f"{self.base_url}/api/sources", headers=self.headers) response_json = response.json() return ListSourcesResponse(**response_json) def delete_source(self, source_id: uuid.UUID): - """Delete a source and associated data (including attached to agents)""" response = requests.delete(f"{self.base_url}/api/sources/{str(source_id)}", headers=self.headers) assert response.status_code == 200, f"Failed to delete source: {response.text}" @@ -532,7 +568,6 @@ def get_job_status(self, job_id: uuid.UUID): return JobModel(**response.json()) def load_file_into_source(self, filename: str, 
source_id: uuid.UUID, blocking=True): - """Load {filename} and insert into source""" files = {"file": open(filename, "rb")} # create job @@ -553,7 +588,6 @@ def load_file_into_source(self, filename: str, source_id: uuid.UUID, blocking=Tr return job def create_source(self, name: str) -> Source: - """Create a new source""" payload = {"name": name} response = requests.post(f"{self.base_url}/api/sources", json=payload, headers=self.headers) response_json = response.json() @@ -568,13 +602,11 @@ def create_source(self, name: str) -> Source: ) def attach_source_to_agent(self, source_id: uuid.UUID, agent_id: uuid.UUID): - """Attach a source to an agent""" params = {"agent_id": agent_id} response = requests.post(f"{self.base_url}/api/sources/{source_id}/attach", params=params, headers=self.headers) assert response.status_code == 200, f"Failed to attach source to agent: {response.text}" def detach_source(self, source_id: uuid.UUID, agent_id: uuid.UUID): - """Detach a source from an agent""" params = {"agent_id": str(agent_id)} response = requests.post(f"{self.base_url}/api/sources/{source_id}/detach", params=params, headers=self.headers) assert response.status_code == 200, f"Failed to detach source from agent: {response.text}" @@ -591,19 +623,32 @@ def get_config(self) -> ConfigResponse: class LocalClient(AbstractClient): + """Local Python client for MemGPT. + + The `LocalClient` is a Python client for the MemGPT server that runs locally. All instances of the the client use the same, default `user_id` (generated and placed in the `~/.memgpt/config` file.). + + Attributes: + auto_save (bool): Automatically save changes to the server. + user_id (str): The user ID for the MemGPT server. + debug (bool): Print debug logs. + + args: + auto_save (bool): Automatically save changes to the server. + user_id (str): The user ID for the MemGPT server. + debug (bool): Print debug logs. + + Examples: + >>> from memgpt import LocalClient + >>> client = LocalClient() + """ + def __init__( self, auto_save: bool = False, - user_id: Optional[str] = None, + user_id: Optional[str] = None, # TODO: this need to be a uuid.UUID debug: bool = False, ): - """ - Initializes a new instance of Client class. - :param auto_save: indicates whether to automatically save after every message. - :param quickstart: allows running quickstart on client init. - :param config: optional config settings to apply after quickstart - :param debug: indicates whether to display debug messages. - """ + self.auto_save = auto_save # determine user_id (pulled from local config) diff --git a/memgpt/config.py b/memgpt/config.py index 4d0bb9711a..a3df605512 100644 --- a/memgpt/config.py +++ b/memgpt/config.py @@ -32,6 +32,22 @@ def set_field(config, section, field, value): @dataclass class MemGPTConfig: + """The summary line for a class docstring should fit on one line. + + If the class has public attributes, they may be documented here + in an ``Attributes`` section and follow the same formatting as a + function's ``Args`` section. Alternatively, attributes may be documented + inline with the attribute's declaration (see __init__ method below). + + Properties created with the ``@property`` decorator should be documented + in the property's getter method. + + Attributes: + attr1 (str): Description of `attr1`. + attr2 (:obj:`int`, optional): Description of `attr2`. 
+ + """ + config_path: str = os.getenv("MEMGPT_CONFIG_PATH") or os.path.join(MEMGPT_DIR, "config") anon_clientid: str = str(uuid.UUID(int=0)) @@ -87,6 +103,19 @@ def generate_uuid() -> str: @classmethod def load(cls) -> "MemGPTConfig": + """Returns a list of :class:`bluepy.blte.Service` objects representing + the services offered by the device. This will perform Bluetooth service + discovery if this has not already been done; otherwise it will return a + cached list of services immediately.. + + :param uuids: A list of string service UUIDs to be discovered, + defaults to None + :type uuids: list, optional + :return: A list of the discovered :class:`bluepy.blte.Service` objects, + which match the provided ``uuids`` + :rtype: list On Python 3.x, this returns a dictionary view object, + not a list + """ # avoid circular import from memgpt.migrate import VERSION_CUTOFF, config_is_compatible from memgpt.utils import printd diff --git a/memgpt/data_types.py b/memgpt/data_types.py index 4d60e3ba68..09acdb48ed 100644 --- a/memgpt/data_types.py +++ b/memgpt/data_types.py @@ -750,6 +750,22 @@ def __init__( class AgentState: + """The summary line for a class docstring should fit on one line. + + If the class has public attributes, they may be documented here + in an ``Attributes`` section and follow the same formatting as a + function's ``Args`` section. Alternatively, attributes may be documented + inline with the attribute's declaration (see __init__ method below). + + Properties created with the ``@property`` decorator should be documented + in the property's getter method. + + Attributes: + attr1 (str): Description of `attr1`. + attr2 (:obj:`int`, optional): Description of `attr2`. + + """ + def __init__( self, name: str, diff --git a/poetry.lock b/poetry.lock index 3f6b50b8ed..8977561d12 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aiohttp" @@ -269,6 +269,20 @@ cryptography = ">=2.5" msal = ">=1.24.0" msal-extensions = ">=0.3.0" +[[package]] +name = "babel" +version = "2.15.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "backoff" version = "1.11.1" @@ -1253,6 +1267,23 @@ smb = ["smbprotocol"] ssh = ["paramiko"] tqdm = ["tqdm"] +[[package]] +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." 
+optional = false +python-versions = "*" +files = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["flake8", "markdown", "twine", "wheel"] + [[package]] name = "google-auth" version = "2.29.0" @@ -1364,6 +1395,20 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "griffe" +version = "0.45.3" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." +optional = false +python-versions = ">=3.8" +files = [ + {file = "griffe-0.45.3-py3-none-any.whl", hash = "sha256:ed1481a680ae3e28f91a06e0d8a51a5c9b97555aa2527abc2664447cc22337d6"}, + {file = "griffe-0.45.3.tar.gz", hash = "sha256:02ee71cc1a5035864b97bd0dbfff65c33f6f2c8854d3bd48a791905c2b8a44b9"}, +] + +[package.dependencies] +colorama = ">=0.4" + [[package]] name = "grpcio" version = "1.63.0" @@ -1853,7 +1898,7 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, @@ -2286,6 +2331,21 @@ files = [ httpx = ">=0.20.0" pydantic = ">=1.10" +[[package]] +name = "markdown" +version = "3.6" +description = "Python implementation of John Gruber's Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, +] + +[package.extras] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] +testing = ["coverage", "pyyaml"] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -2314,7 +2374,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." -optional = true +optional = false python-versions = ">=3.7" files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, @@ -2423,6 +2483,17 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] + [[package]] name = "milvus-lite" version = "2.4.6" @@ -2452,6 +2523,150 @@ files = [ {file = "minijinja-2.0.1.tar.gz", hash = "sha256:e774beffebfb8a1ad17e638ef70917cf5e94593f79acb8a8fff7d983169f3a4e"}, ] +[[package]] +name = "mkdocs" +version = "1.6.0" +description = "Project documentation with Markdown." +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs-1.6.0-py3-none-any.whl", hash = "sha256:1eb5cb7676b7d89323e62b56235010216319217d4af5ddc543a91beb8d125ea7"}, + {file = "mkdocs-1.6.0.tar.gz", hash = "sha256:a73f735824ef83a4f3bcb7a231dcab23f5a838f88b7efc54a0eef5fbdbc3c512"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +jinja2 = ">=2.11.1" +markdown = ">=3.3.6" +markupsafe = ">=2.0.1" +mergedeep = ">=1.3.4" +mkdocs-get-deps = ">=0.2.0" +packaging = ">=20.5" +pathspec = ">=0.11.1" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.4)", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] + +[[package]] +name = "mkdocs-autorefs" +version = "1.0.1" +description = "Automatically link across pages in MkDocs." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_autorefs-1.0.1-py3-none-any.whl", hash = "sha256:aacdfae1ab197780fb7a2dac92ad8a3d8f7ca8049a9cbe56a4218cd52e8da570"}, + {file = "mkdocs_autorefs-1.0.1.tar.gz", hash = "sha256:f684edf847eced40b570b57846b15f0bf57fb93ac2c510450775dcf16accb971"}, +] + +[package.dependencies] +Markdown = ">=3.3" +markupsafe = ">=2.0.1" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-get-deps" +version = "0.2.0" +description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, + {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, +] + +[package.dependencies] +mergedeep = ">=1.3.4" +platformdirs = ">=2.2.0" +pyyaml = ">=5.1" + +[[package]] +name = "mkdocs-material" +version = "9.5.26" +description = "Documentation that simply works" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_material-9.5.26-py3-none-any.whl", hash = "sha256:5d01fb0aa1c7946a1e3ae8689aa2b11a030621ecb54894e35aabb74c21016312"}, + {file = "mkdocs_material-9.5.26.tar.gz", hash = "sha256:56aeb91d94cffa43b6296fa4fbf0eb7c840136e563eecfd12c2d9e92e50ba326"}, +] + +[package.dependencies] +babel = ">=2.10,<3.0" +colorama = ">=0.4,<1.0" +jinja2 = ">=3.0,<4.0" +markdown = ">=3.2,<4.0" +mkdocs = ">=1.6,<2.0" +mkdocs-material-extensions = ">=1.3,<2.0" +paginate = ">=0.5,<1.0" +pygments = ">=2.16,<3.0" +pymdown-extensions = ">=10.2,<11.0" +regex = ">=2022.4" +requests = ">=2.26,<3.0" + +[package.extras] +git = ["mkdocs-git-committers-plugin-2 (>=1.1,<2.0)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"] +imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"] +recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"] + +[[package]] +name = "mkdocs-material-extensions" +version = "1.3.1" +description = "Extension pack for Python Markdown and MkDocs Material." +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, +] + +[[package]] +name = "mkdocstrings" +version = "0.25.1" +description = "Automatic documentation from sources, for MkDocs." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocstrings-0.25.1-py3-none-any.whl", hash = "sha256:da01fcc2670ad61888e8fe5b60afe9fee5781017d67431996832d63e887c2e51"}, + {file = "mkdocstrings-0.25.1.tar.gz", hash = "sha256:c3a2515f31577f311a9ee58d089e4c51fc6046dbd9e9b4c3de4c3194667fe9bf"}, +] + +[package.dependencies] +click = ">=7.0" +Jinja2 = ">=2.11.1" +Markdown = ">=3.3" +MarkupSafe = ">=1.1" +mkdocs = ">=1.4" +mkdocs-autorefs = ">=0.3.1" +mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""} +platformdirs = ">=2.2.0" +pymdown-extensions = ">=6.3" + +[package.extras] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings-python" +version = "1.10.3" +description = "A Python handler for mkdocstrings." +optional = false +python-versions = ">=3.8" +files = [ + {file = "mkdocstrings_python-1.10.3-py3-none-any.whl", hash = "sha256:11ff6d21d3818fb03af82c3ea6225b1534837e17f790aa5f09626524171f949b"}, + {file = "mkdocstrings_python-1.10.3.tar.gz", hash = "sha256:321cf9c732907ab2b1fedaafa28765eaa089d89320f35f7206d00ea266889d03"}, +] + +[package.dependencies] +griffe = ">=0.44" +mkdocstrings = ">=0.25" + [[package]] name = "mkl" version = "2021.4.0" @@ -3322,6 +3537,16 @@ files = [ {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] +[[package]] +name = "paginate" +version = "0.5.6" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +files = [ + {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, +] + [[package]] name = "pandas" version = "2.2.2" @@ -4069,6 +4294,24 @@ benchmarks = ["pytest-benchmark"] tests = ["duckdb", "ml_dtypes", "pandas (>=1.4,<2.1)", "polars[pandas,pyarrow]", "pytest", "semver", "tensorflow", "tqdm"] torch = ["torch"] +[[package]] +name = "pymdown-extensions" +version = "10.8.1" +description = "Extension pack for Python Markdown." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pymdown_extensions-10.8.1-py3-none-any.whl", hash = "sha256:f938326115884f48c6059c67377c46cf631c733ef3629b6eed1349989d1b30cb"}, + {file = "pymdown_extensions-10.8.1.tar.gz", hash = "sha256:3ab1db5c9e21728dabf75192d71471f8e50f216627e9a1fa9535ecb0231b9940"}, +] + +[package.dependencies] +markdown = ">=3.6" +pyyaml = "*" + +[package.extras] +extra = ["pygments (>=2.12)"] + [[package]] name = "pymilvus" version = "2.4.3" @@ -4352,7 +4595,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -4387,6 +4629,20 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. 
" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] + +[package.dependencies] +pyyaml = "*" + [[package]] name = "qdrant-client" version = "1.9.1" @@ -5818,6 +6074,50 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] +name = "watchdog" +version = "4.0.1" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"}, + {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"}, + {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"}, + {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"}, + {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"}, + {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"}, + {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"}, + {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"}, + {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"}, + {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"}, + {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"}, + {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"}, + {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"}, + {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + [[package]] name = "watchfiles" version = "0.22.0" @@ -6340,4 +6640,4 @@ server = ["fastapi", "uvicorn", "websockets"] [metadata] lock-version = "2.0" python-versions = "<3.13,>=3.10" -content-hash = "bfa14c084ae06f7d5ceb561406794d93f90808c20b098af13110f4ebe38c7928" +content-hash = "89d20bbd472b19e8e0b185dbaff0cbf6700442720e0e1f571cce52108e3a2975" diff --git a/pyproject.toml b/pyproject.toml index f160324997..dbbdc1c02f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,6 +65,9 @@ httpx-sse = "^0.4.0" isort = { version = "^5.13.2", optional = true } llama-index-embeddings-ollama = {version = "^0.1.2", optional = true} protobuf = "3.20.0" +mkdocstrings = {extras = ["python"], version = "^0.25.1"} +mkdocs = "^1.6.0" +mkdocs-material = "^9.5.26" [tool.poetry.extras] local = ["llama-index-embeddings-huggingface"] From 7b762259ce76f49dc0d4926a66401f631f26a66d Mon Sep 17 00:00:00 2001 From: Sarah Wooders Date: Thu, 13 Jun 2024 14:16:56 -0700 Subject: [PATCH 2/6] add more docs --- docs/docs/index.md | 18 ++---------- docs/mkdocs.yml | 3 +- memgpt/client/client.py | 62 ++++++++++++++++++++++++++++++++--------- 3 files changed, 54 insertions(+), 29 deletions(-) diff --git a/docs/docs/index.md b/docs/docs/index.md index 4ecf4769fa..2368c82cce 100644 --- a/docs/docs/index.md 
+++ b/docs/docs/index.md @@ -1,19 +1,7 @@ -# Reference -# Components -::: memgpt.data_types.AgentState +::: memgpt.client.client + options: + group_by_category: true -# Client - -## `AbstractClient` -::: memgpt.client.client.AbstractClient - -## `LocalClient` -::: memgpt.client.client.LocalClient - -## `RESTClient` -::: memgpt.client.client.RESTClient - -# Configuration diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index d610d38a47..32ca6f7dec 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -12,7 +12,8 @@ theme: - navigation.top nav: - - Home: index.md + - Data Models: data_models.md + - Client: index.md plugins: - mkdocstrings # custom_templates: templates diff --git a/memgpt/client/client.py b/memgpt/client/client.py index a2430ab662..4c2d897a02 100644 --- a/memgpt/client/client.py +++ b/memgpt/client/client.py @@ -56,22 +56,43 @@ from memgpt.server.rest_api.tools.index import CreateToolResponse from memgpt.server.server import SyncServer +""" Python clients for MemGPT. + +Classes: + AbstractClient: Description for `foo`. + RESTClient: Description for `foo`. + LocalClient: Description for `bar`. + +Functions: + create_client: Description for `baz`. +""" + def create_client(base_url: Optional[str] = None, token: Optional[str] = None): + """Create a MemGPT client. + + Args: + base_url (str): The base URL of the MemGPT server. + token (str): The user authentication token for the MemGPT server. + + Returns: + client (LocalClient | RESTClient): The MemGPT client. + + Examples: + Creating a local client: + >>> from memgpt import create_client + >>> client = create_client() + + Creating a client for a REST server: + >>> from memgpt import create_client + >>> client = create_client(base_url="memgpt.localhost", token="user_token") + """ if base_url is None: return LocalClient() else: return RESTClient(base_url, token) -# """ Python clients for MemGPT. -# -# Classes: -# Abtractclient: Description for `foo`. -# LocalClient: Description for `bar`. -# """ - - class AbstractClient(ABC): """The abstract class for MemGPT client. @@ -119,12 +140,13 @@ def create_agent( ) -> AgentState: """Create a new agent with the specified configuration. - Note: - Do not include the `self` parameter in the ``Args`` section. - Args: name (str): The first parameter. - preset (str): The second parameter. + preset (str): Name of the preset to start the agent. + persona (str): Name of the persona template to start the agent + human (str): Name of the human template to set for the agent. + embedding_config (EmbeddingConfig): Embedding configuration for the agent. + llm_config (LLMConfig): LLM configuration for the agent. Returns: AgentState: The state of the created agent. @@ -148,9 +170,23 @@ def create_preset(self, preset: Preset): raise NotImplementedError def delete_preset(self, preset_id: uuid.UUID): + """Delete a preset. + + Args: + preset_id (uuid.UUID): The ID of the preset. + + Returns: + None + """ raise NotImplementedError - def list_presets(self): + def list_presets(self) -> List[Preset]: + """List all available presets. + + Returns: + presets (List[Preset]): List of presets. 
+ + """ raise NotImplementedError # memory From 3d4873e915265ec1c4ae2a246574920cd3940d64 Mon Sep 17 00:00:00 2001 From: Sarah Wooders Date: Sat, 6 Jul 2024 10:33:44 -0700 Subject: [PATCH 3/6] overwrote a bunch of functions --- docs/docs/index.md | 8 - docs/mkdocs.yml | 39 ++- memgpt/client/client.py | 430 +++++++++++++++--------- memgpt/memory.py | 2 +- memgpt/server/rest_api/agents/config.py | 25 ++ memgpt/server/rest_api/agents/index.py | 33 +- memgpt/server/rest_api/agents/memory.py | 13 +- 7 files changed, 368 insertions(+), 182 deletions(-) delete mode 100644 docs/docs/index.md diff --git a/docs/docs/index.md b/docs/docs/index.md deleted file mode 100644 index 2368c82cce..0000000000 --- a/docs/docs/index.md +++ /dev/null @@ -1,8 +0,0 @@ -::: memgpt.client.client - options: - group_by_category: true - - - - - diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 32ca6f7dec..fab0c4195f 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -13,12 +13,35 @@ theme: nav: - Data Models: data_models.md - - Client: index.md + - Client: + #- Overview: index.md + - RESTClient: client/rest_client.md + - LocalClient: client/local_client.md plugins: -- mkdocstrings - # custom_templates: templates - # default_handler: python - # handlers: - # python: - # options: - # show_source: false + - mkdocstrings + +#site_name: "MemGPT API Reference" +# +#theme: +# name: "material" +# features: +# - navigation.instant +# - navigation.footer +# - navigation.indexes +# - navigation.sections +# - navigation.tabs +# - navigation.tabs.sticky +# - navigation.top +# +#nav: +# - Data Models: data_models.md +# - Client: index.md +#plugins: +#- mkdocstrings +# # custom_templates: templates +# # default_handler: python +# # handlers: +# # python: +# # options: +# # show_source: false +# \ No newline at end of file diff --git a/memgpt/client/client.py b/memgpt/client/client.py index 24ded7aa75..1413f950ba 100644 --- a/memgpt/client/client.py +++ b/memgpt/client/client.py @@ -7,13 +7,15 @@ import requests from memgpt.config import MemGPTConfig -from memgpt.constants import BASE_TOOLS, DEFAULT_HUMAN, DEFAULT_PERSONA, DEFAULT_PRESET +from memgpt.constants import BASE_TOOLS, DEFAULT_HUMAN, DEFAULT_PERSONA from memgpt.data_sources.connectors import DataConnector from memgpt.data_types import AgentState, EmbeddingConfig, LLMConfig, Preset, Source from memgpt.functions.functions import parse_source_code from memgpt.functions.schema_generator import generate_schema from memgpt.memory import BaseMemory, ChatMemory, get_memory_functions +from memgpt.models.chat_completion_response import Message from memgpt.models.pydantic_models import ( + AgentStateModel, HumanModel, JobModel, JobStatus, @@ -25,44 +27,25 @@ ) from memgpt.server.rest_api.agents.command import CommandResponse from memgpt.server.rest_api.agents.config import GetAgentResponse -from memgpt.server.rest_api.agents.index import CreateAgentResponse, ListAgentsResponse +from memgpt.server.rest_api.agents.index import CreateAgentResponse from memgpt.server.rest_api.agents.memory import ( ArchivalMemoryObject, GetAgentArchivalMemoryResponse, GetAgentMemoryResponse, InsertAgentArchivalMemoryResponse, - UpdateAgentMemoryResponse, ) from memgpt.server.rest_api.agents.message import ( GetAgentMessagesResponse, UserMessageResponse, ) from memgpt.server.rest_api.config.index import ConfigResponse -from memgpt.server.rest_api.humans.index import ListHumansResponse from memgpt.server.rest_api.interface import QueuingInterface from memgpt.server.rest_api.models.index import 
ListModelsResponse -from memgpt.server.rest_api.personas.index import ListPersonasResponse -from memgpt.server.rest_api.presets.index import ( - CreatePresetResponse, - CreatePresetsRequest, - ListPresetsResponse, -) from memgpt.server.rest_api.sources.index import ListSourcesResponse # import pydantic response objects from memgpt.server.rest_api from memgpt.server.rest_api.tools.index import CreateToolRequest, ListToolsResponse from memgpt.server.server import SyncServer - -""" Python clients for MemGPT. - -Classes: - AbstractClient: Description for `foo`. - RESTClient: Description for `foo`. - LocalClient: Description for `bar`. - -Functions: - create_client: Description for `baz`. -""" from memgpt.utils import get_human_text @@ -325,30 +308,52 @@ def __init__( self.base_url = base_url self.headers = {"accept": "application/json", "authorization": f"Bearer {token}"} - def list_agents(self): + def list_agents(self) -> List[AgentState]: + """Return list of available agents + + Returns: + List[AgentState]: List of agents + """ response = requests.get(f"{self.base_url}/api/agents", headers=self.headers) - return ListAgentsResponse(**response.json()) + if response.status_code != 200: + raise ValueError(f"Failed to list agents: {response.text}") + agents = [AgentStateModel(**agent) for agent in response.json()] + return [self._convert_agent_state_model(agent) for agent in agents] - def agent_exists(self, agent_id: Optional[str] = None, agent_name: Optional[str] = None) -> bool: - response = requests.get(f"{self.base_url}/api/agents/{str(agent_id)}/config", headers=self.headers) - if response.status_code == 404: - # not found error - return False - elif response.status_code == 200: - return True - else: - raise ValueError(f"Failed to check if agent exists: {response.text}") + def agent_exists(self, agent_id: Optional[str] = None, name: Optional[str] = None) -> bool: + """Check if an agent with the specified ID or name exists. + + Args: + agent_id (str): The ID of the agent. + name (str): The name of the agent. + + Returns: + bool: True if the agent exists, False otherwise. + """ + return self.get_agent(agent_id=agent_id, name=name) is not None - def get_tool(self, tool_name: str): - response = requests.get(f"{self.base_url}/api/tools/{tool_name}", headers=self.headers) + def get_tool(self, name: str) -> Optional[ToolModel]: + """Get the tool by name + + Args: + name (str): Name of the tool + + Returns: + Optional[ToolModel]: The tool model (None if the tool does not exist) + + """ + response = requests.get(f"{self.base_url}/api/tools/{name}", headers=self.headers) if response.status_code != 200: - raise ValueError(f"Failed to get tool: {response.text}") + if response.status_code == 404: + return None + else: + raise ValueError(f"Failed to get tool: {response.text}") return ToolModel(**response.json()) def create_agent( self, name: Optional[str] = None, - preset: Optional[str] = None, # TODO: this should actually be re-named preset_name + # model configs embedding_config: Optional[EmbeddingConfig] = None, llm_config: Optional[LLMConfig] = None, # memory @@ -359,12 +364,16 @@ def create_agent( metadata: Optional[Dict] = {"human:": DEFAULT_HUMAN, "persona": DEFAULT_PERSONA}, ) -> AgentState: """ - Create an agent + Create a persistent agent, with configuration for embedding, LLM, memory, and tools. 
Args: - name (str): Name of the agent - tools (List[str]): List of tools (by name) to attach to the agent - include_base_tools (bool): Whether to include base tools (default: `True`) + name (str): Name of the agent. + embedding_config (EmbeddingConfig): Embedding configuration for the agent. + llm_config (LLMConfig): LLM configuration for the agent. + memory (BaseMemory): Memory for the agent. + tools (List[str]): List of tool names to include in the agent. + include_base_tools (bool): Whether to include base tools in the agent (default: `True`) + metadata (Dict): Metadata for the agent. Returns: agent_state (AgentState): State of the the created agent. @@ -390,7 +399,6 @@ def create_agent( payload = { "config": { "name": name, - "preset": preset, "persona": memory.memory["persona"].value, "human": memory.memory["human"].value, "function_names": tool_names, @@ -401,9 +409,39 @@ def create_agent( if response.status_code != 200: raise ValueError(f"Status {response.status_code} - Failed to create agent: {response.text}") response_obj = CreateAgentResponse(**response.json()) - return self.get_agent_response_to_state(response_obj) + return self._convert_get_agent_response(response_obj) + + def _convert_agent_state_model(self, response: AgentStateModel) -> AgentState: + llm_config = LLMConfig( + model=response.llm_config.model, + model_endpoint_type=response.llm_config.model_endpoint_type, + model_endpoint=response.llm_config.model_endpoint, + model_wrapper=response.llm_config.model_wrapper, + context_window=response.llm_config.context_window, + ) + embedding_config = EmbeddingConfig( + embedding_endpoint_type=response.embedding_config.embedding_endpoint_type, + embedding_endpoint=response.embedding_config.embedding_endpoint, + embedding_model=response.embedding_config.embedding_model, + embedding_dim=response.embedding_config.embedding_dim, + embedding_chunk_size=response.embedding_config.embedding_chunk_size, + ) + agent_state = AgentState( + id=response.id, + name=response.name, + user_id=response.user_id, + llm_config=llm_config, + embedding_config=embedding_config, + state=response.state, + system=response.system, + tools=response.tools, + _metadata=response.metadata, + # load datetime from timestampe + created_at=datetime.datetime.fromtimestamp(response.created_at, tz=datetime.timezone.utc), + ) + return agent_state - def get_agent_response_to_state(self, response: Union[GetAgentResponse, CreateAgentResponse]) -> AgentState: + def _convert_get_agent_response(self, response: Union[GetAgentResponse, CreateAgentResponse]) -> AgentState: # TODO: eventually remove this conversion llm_config = LLMConfig( model=response.agent_state.llm_config.model, @@ -434,87 +472,148 @@ def get_agent_response_to_state(self, response: Union[GetAgentResponse, CreateAg ) return agent_state - def rename_agent(self, agent_id: uuid.UUID, new_name: str): + def rename_agent(self, agent_id: uuid.UUID, new_name: str) -> AgentState: + """Rename an agent + + Args: + agent_id (str): ID of the agent + new_name (str): New name for the agent + + Returns: + agent_state (AgentState): Modified agent state + """ response = requests.patch(f"{self.base_url}/api/agents/{str(agent_id)}/rename", json={"agent_name": new_name}, headers=self.headers) assert response.status_code == 200, f"Failed to rename agent: {response.text}" response_obj = GetAgentResponse(**response.json()) - return self.get_agent_response_to_state(response_obj) + return self._convert_get_agent_response(response_obj) - def delete_agent(self, agent_id: uuid.UUID): - 
response = requests.delete(f"{self.base_url}/api/agents/{str(agent_id)}", headers=self.headers) - assert response.status_code == 200, f"Failed to delete agent: {response.text}" + def delete_agent(self, agent_id: Optional[uuid.UUID], name: Optional[str] = None): + """Delete an agent by ID or name - def get_agent(self, agent_id: Optional[str] = None, agent_name: Optional[str] = None) -> AgentState: - response = requests.get(f"{self.base_url}/api/agents/{str(agent_id)}/config", headers=self.headers) - assert response.status_code == 200, f"Failed to get agent: {response.text}" - response_obj = GetAgentResponse(**response.json()) - return self.get_agent_response_to_state(response_obj) - - def get_preset(self, name: str) -> PresetModel: - # TODO: remove - response = requests.get(f"{self.base_url}/api/presets/{name}", headers=self.headers) - assert response.status_code == 200, f"Failed to get preset: {response.text}" - return PresetModel(**response.json()) + Args: + agent_id (str): ID of the agent + name (str): Name of the agent + """ + if agent_id: + response = requests.delete(f"{self.base_url}/api/agents/{str(agent_id)}", headers=self.headers) + elif name: + agent_state = self.get_agent(name=name) + reponse = requests.delete(f"{self.base_url}/api/agents/{str(agent_state.id)}", headers=self.headers) + else: + raise ValueError("Must provide either agent_id or name") + assert response.status_code == 200, f"Failed to delete agent: {response.text}" - def create_preset( - self, - name: str, - description: Optional[str] = None, - system_name: Optional[str] = None, - persona_name: Optional[str] = None, - human_name: Optional[str] = None, - tools: Optional[List[ToolModel]] = None, - default_tools: bool = True, - ) -> PresetModel: - # TODO: remove - # provided tools - schema = [] - if tools: - for tool in tools: - schema.append(tool.json_schema) - - # include default tools - default_preset = self.get_preset(name=DEFAULT_PRESET) - if default_tools: - # TODO - # from memgpt.functions.functions import load_function_set - # load_function_set() - # return - for function in default_preset.functions_schema: - schema.append(function) - - payload = CreatePresetsRequest( - name=name, - description=description, - system_name=system_name, - persona_name=persona_name, - human_name=human_name, - functions_schema=schema, - ) - response = requests.post(f"{self.base_url}/api/presets", json=payload.model_dump(), headers=self.headers) - assert response.status_code == 200, f"Failed to create preset: {response.text}" - return CreatePresetResponse(**response.json()).preset + def get_agent(self, agent_id: Optional[str] = None, name: Optional[str] = None) -> Optional[AgentState]: + """Get agent by ID or name - def delete_preset(self, preset_id: uuid.UUID): - response = requests.delete(f"{self.base_url}/api/presets/{str(preset_id)}", headers=self.headers) - assert response.status_code == 200, f"Failed to delete preset: {response.text}" + Args: + agent_id (str): ID of the agent + name (str): Name of the agent - def list_presets(self) -> List[PresetModel]: - response = requests.get(f"{self.base_url}/api/presets", headers=self.headers) - return ListPresetsResponse(**response.json()).presets + Returns: + Optional[AgentState]: The agent state (`None` if the agent does not exist) + """ + if agent_id: + response = requests.get(f"{self.base_url}/api/agents/{str(agent_id)}/config", headers=self.headers) + if response.status_code != 200: + if response.status_code == 404: + return None + else: + raise ValueError(f"Failed to get agent: 
{response.text}") + response_obj = GetAgentResponse(**response.json()) + return self._convert_get_agent_response(response_obj) + elif name: + response = requests.get(f"{self.base_url}/api/agents/{name}", headers=self.headers) + if response.status_code != 200: + if response.status_code == 404: + return None + else: + raise ValueError(f"Failed to get agent: {response.text}") + response_obj = AgentStateModel(**response.json()) + return self._convert_agent_state_model(response_obj) + else: + raise ValueError("Must provide either agent_id or agent_name") + + # def get_preset(self, name: str) -> PresetModel: + # # TODO: remove + # response = requests.get(f"{self.base_url}/api/presets/{name}", headers=self.headers) + # assert response.status_code == 200, f"Failed to get preset: {response.text}" + # return PresetModel(**response.json()) + + # def create_preset( + # self, + # name: str, + # description: Optional[str] = None, + # system_name: Optional[str] = None, + # persona_name: Optional[str] = None, + # human_name: Optional[str] = None, + # tools: Optional[List[ToolModel]] = None, + # default_tools: bool = True, + # ) -> PresetModel: + # # TODO: remove + # # provided tools + # schema = [] + # if tools: + # for tool in tools: + # schema.append(tool.json_schema) + + # # include default tools + # default_preset = self.get_preset(name=DEFAULT_PRESET) + # if default_tools: + # # TODO + # # from memgpt.functions.functions import load_function_set + # # load_function_set() + # # return + # for function in default_preset.functions_schema: + # schema.append(function) + + # payload = CreatePresetsRequest( + # name=name, + # description=description, + # system_name=system_name, + # persona_name=persona_name, + # human_name=human_name, + # functions_schema=schema, + # ) + # response = requests.post(f"{self.base_url}/api/presets", json=payload.model_dump(), headers=self.headers) + # assert response.status_code == 200, f"Failed to create preset: {response.text}" + # return CreatePresetResponse(**response.json()).preset + + # def delete_preset(self, preset_id: uuid.UUID): + # response = requests.delete(f"{self.base_url}/api/presets/{str(preset_id)}", headers=self.headers) + # assert response.status_code == 200, f"Failed to delete preset: {response.text}" + + # def list_presets(self) -> List[PresetModel]: + # response = requests.get(f"{self.base_url}/api/presets", headers=self.headers) + # return ListPresetsResponse(**response.json()).presets # memory def get_agent_memory(self, agent_id: uuid.UUID) -> GetAgentMemoryResponse: + """Get information abou the agent's memory + + Args: + agent_id (uuid.UUID): ID of the agent + + Returns: + memory (GetAgentMemoryResponse): The agent's memory + + """ + # TODO: fix server-size implementation response = requests.get(f"{self.base_url}/api/agents/{agent_id}/memory", headers=self.headers) return GetAgentMemoryResponse(**response.json()) - def update_agent_core_memory(self, agent_id: str, new_memory_contents: Dict) -> UpdateAgentMemoryResponse: - response = requests.post(f"{self.base_url}/api/agents/{agent_id}/memory", json=new_memory_contents, headers=self.headers) - return UpdateAgentMemoryResponse(**response.json()) - # agent interactions - def user_message(self, agent_id: str, message: str) -> Union[List[Dict], Tuple[List[Dict], int]]: + def user_message(self, agent_id: str, message: str) -> GetAgentMessagesResponse: + """Send a message to the agent as a user + + Args: + agent_id (str): ID of the agent + message (str): Message to send + + Returns: + + """ return 
self.send_message(agent_id, message, role="user") def run_command(self, agent_id: str, command: str) -> Union[str, None]: @@ -552,7 +651,7 @@ def delete_archival_memory(self, agent_id: uuid.UUID, memory_id: uuid.UUID): def get_messages( self, agent_id: uuid.UUID, before: Optional[uuid.UUID] = None, after: Optional[uuid.UUID] = None, limit: Optional[int] = 1000 - ) -> GetAgentMessagesResponse: + ) -> List[Message]: params = {"before": before, "after": after, "limit": limit} response = requests.get(f"{self.base_url}/api/agents/{agent_id}/messages-cursor", params=params, headers=self.headers) if response.status_code != 200: @@ -560,6 +659,24 @@ def get_messages( return GetAgentMessagesResponse(**response.json()) def send_message(self, agent_id: uuid.UUID, message: str, role: str, stream: Optional[bool] = False) -> UserMessageResponse: + """Send a message to the agent + + Args: + agent_id (uuid.UUID): ID of the agent + message (str): Message to send + role (str): Role of the message ("user", "system", "assistant") + stream (bool): Stream the message response + + Returns: + UserMessageResponse: The message response + + Examples: + Sending a user message: + >>> response = client.send_message(agent_id=agent_id, message="Hello", role="user", stream=False) + >>> print("Usage", response.usage) # view usage stats + >>> print("Messages", response.messages) # view response message + + """ data = {"message": message, "role": role, "stream": stream} response = requests.post(f"{self.base_url}/api/agents/{agent_id}/messages", json=data, headers=self.headers) if response.status_code != 200: @@ -568,43 +685,43 @@ def send_message(self, agent_id: uuid.UUID, message: str, role: str, stream: Opt # humans / personas - def list_humans(self) -> ListHumansResponse: - response = requests.get(f"{self.base_url}/api/humans", headers=self.headers) - return ListHumansResponse(**response.json()) - - def create_human(self, name: str, human: str) -> HumanModel: - data = {"name": name, "text": human} - response = requests.post(f"{self.base_url}/api/humans", json=data, headers=self.headers) - if response.status_code != 200: - raise ValueError(f"Failed to create human: {response.text}") - return HumanModel(**response.json()) - - def list_personas(self) -> ListPersonasResponse: - response = requests.get(f"{self.base_url}/api/personas", headers=self.headers) - return ListPersonasResponse(**response.json()) - - def create_persona(self, name: str, persona: str) -> PersonaModel: - data = {"name": name, "text": persona} - response = requests.post(f"{self.base_url}/api/personas", json=data, headers=self.headers) - if response.status_code != 200: - raise ValueError(f"Failed to create persona: {response.text}") - return PersonaModel(**response.json()) - - def get_persona(self, name: str) -> PersonaModel: - response = requests.get(f"{self.base_url}/api/personas/{name}", headers=self.headers) - if response.status_code == 404: - return None - elif response.status_code != 200: - raise ValueError(f"Failed to get persona: {response.text}") - return PersonaModel(**response.json()) - - def get_human(self, name: str) -> HumanModel: - response = requests.get(f"{self.base_url}/api/humans/{name}", headers=self.headers) - if response.status_code == 404: - return None - elif response.status_code != 200: - raise ValueError(f"Failed to get human: {response.text}") - return HumanModel(**response.json()) + # def list_humans(self) -> ListHumansResponse: + # response = requests.get(f"{self.base_url}/api/humans", headers=self.headers) + # return 
ListHumansResponse(**response.json()) + + # def create_human(self, name: str, human: str) -> HumanModel: + # data = {"name": name, "text": human} + # response = requests.post(f"{self.base_url}/api/humans", json=data, headers=self.headers) + # if response.status_code != 200: + # raise ValueError(f"Failed to create human: {response.text}") + # return HumanModel(**response.json()) + + # def list_personas(self) -> ListPersonasResponse: + # response = requests.get(f"{self.base_url}/api/personas", headers=self.headers) + # return ListPersonasResponse(**response.json()) + + # def create_persona(self, name: str, persona: str) -> PersonaModel: + # data = {"name": name, "text": persona} + # response = requests.post(f"{self.base_url}/api/personas", json=data, headers=self.headers) + # if response.status_code != 200: + # raise ValueError(f"Failed to create persona: {response.text}") + # return PersonaModel(**response.json()) + + # def get_persona(self, name: str) -> PersonaModel: + # response = requests.get(f"{self.base_url}/api/personas/{name}", headers=self.headers) + # if response.status_code == 404: + # return None + # elif response.status_code != 200: + # raise ValueError(f"Failed to get persona: {response.text}") + # return PersonaModel(**response.json()) + + # def get_human(self, name: str) -> HumanModel: + # response = requests.get(f"{self.base_url}/api/humans/{name}", headers=self.headers) + # if response.status_code == 404: + # return None + # elif response.status_code != 200: + # raise ValueError(f"Failed to get human: {response.text}") + # return HumanModel(**response.json()) # sources @@ -794,16 +911,17 @@ def list_agents(self): self.interface.clear() return self.server.list_agents(user_id=self.user_id) - def agent_exists(self, agent_id: Optional[str] = None, agent_name: Optional[str] = None) -> bool: - if not (agent_id or agent_name): - raise ValueError(f"Either agent_id or agent_name must be provided") - if agent_id and agent_name: - raise ValueError(f"Only one of agent_id or agent_name can be provided") - existing = self.list_agents() - if agent_id: - return agent_id in [agent["id"] for agent in existing["agents"]] - else: - return agent_name in [agent["name"] for agent in existing["agents"]] + def agent_exists(self, agent_id: Optional[str] = None, name: Optional[str] = None) -> bool: + """Check if an agent with the specified ID or name exists. + + Args: + agent_id (str): The ID of the agent. + name (str): The name of the agent. + + Returns: + bool: True if the agent exists, False otherwise. 
+ """ + return self.get_agent(agent_id=agent_id, name=name) is not None def create_agent( self, diff --git a/memgpt/memory.py b/memgpt/memory.py index f405ebd4c3..4679685dfe 100644 --- a/memgpt/memory.py +++ b/memgpt/memory.py @@ -65,7 +65,7 @@ def __str__(self) -> str: return "" -class BaseMemory: +class BaseMemory(BaseModel): def __init__(self): self.memory = {} diff --git a/memgpt/server/rest_api/agents/config.py b/memgpt/server/rest_api/agents/config.py index 12332b9356..1fce953215 100644 --- a/memgpt/server/rest_api/agents/config.py +++ b/memgpt/server/rest_api/agents/config.py @@ -91,6 +91,30 @@ def get_agent_config( sources=attached_sources, ) + @router.get("/agents/{agent_name}", tags=["agents"], response_model=AgentStateModel) + def get_agent( + agent_name: str, + user_id: uuid.UUID = Depends(get_current_user_with_server), + ): + """Get agent by name""" + interface.clear() + if not server.ms.get_agent(user_id=user_id, agent_name=agent_name): + raise HTTPException(status_code=404, detail=f"Agent agent_name={agent_name} not found.") + + agent_state = server.ms.get_agent(user_id=user_id, agent_name=agent_name) + return AgentStateModel( + id=agent_state.id, + name=agent_state.name, + user_id=agent_state.user_id, + llm_config=LLMConfigModel(**vars(agent_state.llm_config)), + embedding_config=EmbeddingConfigModel(**vars(agent_state.embedding_config)), + state=agent_state.state, + created_at=int(agent_state.created_at.timestamp()), + tools=agent_state.tools, + system=agent_state.system, + metadata=agent_state._metadata, + ) + @router.patch("/agents/{agent_id}/rename", tags=["agents"], response_model=GetAgentResponse) def update_agent_name( agent_id: uuid.UUID, @@ -129,6 +153,7 @@ def update_agent_name( created_at=int(agent_state.created_at.timestamp()), tools=agent_state.tools, system=agent_state.system, + metadata=agent_state._metadata, ), last_run_at=None, # TODO sources=attached_sources, diff --git a/memgpt/server/rest_api/agents/index.py b/memgpt/server/rest_api/agents/index.py index 73b11e89ea..7b92eae64a 100644 --- a/memgpt/server/rest_api/agents/index.py +++ b/memgpt/server/rest_api/agents/index.py @@ -41,7 +41,20 @@ class CreateAgentResponse(BaseModel): def setup_agents_index_router(server: SyncServer, interface: QueuingInterface, password: str): get_current_user_with_server = partial(partial(get_current_user, server), password) - @router.get("/agents", tags=["agents"], response_model=ListAgentsResponse) + # @router.get("/agents", tags=["agents"], response_model=ListAgentsResponse) + # def list_agents( + # user_id: uuid.UUID = Depends(get_current_user_with_server), + # ): + # """ + # List all agents associated with a given user. + + # This endpoint retrieves a list of all agents and their configurations associated with the specified user ID. + # """ + # interface.clear() + # agents_data = server.list_agents(user_id=user_id) + # return ListAgentsResponse(**agents_data) + + @router.get("/agents", tags=["agents"], response_model=List[AgentStateModel]) def list_agents( user_id: uuid.UUID = Depends(get_current_user_with_server), ): @@ -51,8 +64,22 @@ def list_agents( This endpoint retrieves a list of all agents and their configurations associated with the specified user ID. 
""" interface.clear() - agents_data = server.list_agents(user_id=user_id) - return ListAgentsResponse(**agents_data) + agent_states = server.ms.list_agents(user_id=user_id) + return [ + AgentStateModel( + id=agent_state.id, + name=agent_state.name, + user_id=agent_state.user_id, + llm_config=LLMConfigModel(**vars(agent_state.llm_config)), + embedding_config=EmbeddingConfigModel(**vars(agent_state.embedding_config)), + state=agent_state.state, + created_at=int(agent_state.created_at.timestamp()), + tools=agent_state.tools, + system=agent_state.system, + metadata=agent_state._metadata, + ) + for agent_state in agent_states + ] @router.post("/agents", tags=["agents"], response_model=CreateAgentResponse) def create_agent( diff --git a/memgpt/server/rest_api/agents/memory.py b/memgpt/server/rest_api/agents/memory.py index 6e4d471415..522bdc3838 100644 --- a/memgpt/server/rest_api/agents/memory.py +++ b/memgpt/server/rest_api/agents/memory.py @@ -6,6 +6,7 @@ from fastapi.responses import JSONResponse from pydantic import BaseModel, Field +from memgpt.memory import BaseMemory from memgpt.server.rest_api.auth_token import get_current_user from memgpt.server.rest_api.interface import QueuingInterface from memgpt.server.server import SyncServer @@ -13,13 +14,13 @@ router = APIRouter() -class CoreMemory(BaseModel): - human: str | None = Field(None, description="Human element of the core memory.") - persona: str | None = Field(None, description="Persona element of the core memory.") +# class CoreMemory(BaseModel): +# human: str | None = Field(None, description="Human element of the core memory.") +# persona: str | None = Field(None, description="Persona element of the core memory.") class GetAgentMemoryResponse(BaseModel): - core_memory: CoreMemory = Field(..., description="The state of the agent's core memory.") + core_memory: BaseMemory = Field(..., description="The state of the agent's core memory.") recall_memory: int = Field(..., description="Size of the agent's recall memory.") archival_memory: int = Field(..., description="Size of the agent's archival memory.") @@ -31,8 +32,8 @@ class UpdateAgentMemoryRequest(BaseModel): class UpdateAgentMemoryResponse(BaseModel): - old_core_memory: CoreMemory = Field(..., description="The previous state of the agent's core memory.") - new_core_memory: CoreMemory = Field(..., description="The updated state of the agent's core memory.") + old_core_memory: BaseMemory = Field(..., description="The previous state of the agent's core memory.") + new_core_memory: BaseMemory = Field(..., description="The updated state of the agent's core memory.") class ArchivalMemoryObject(BaseModel): From bcef3e1adfb3ed6e6498f29d0980fc7cbf341a01 Mon Sep 17 00:00:00 2001 From: Sarah Wooders Date: Sat, 6 Jul 2024 11:44:47 -0700 Subject: [PATCH 4/6] finish docs on restclient --- memgpt/client/client.py | 210 ++++++++++++++++++++---- memgpt/server/rest_api/agents/memory.py | 19 ++- memgpt/server/server.py | 8 +- 3 files changed, 192 insertions(+), 45 deletions(-) diff --git a/memgpt/client/client.py b/memgpt/client/client.py index 1413f950ba..49bfe6adea 100644 --- a/memgpt/client/client.py +++ b/memgpt/client/client.py @@ -5,27 +5,35 @@ from typing import Dict, List, Optional, Tuple, Union import requests +from pydantic import BaseModel from memgpt.config import MemGPTConfig from memgpt.constants import BASE_TOOLS, DEFAULT_HUMAN, DEFAULT_PERSONA from memgpt.data_sources.connectors import DataConnector -from memgpt.data_types import AgentState, EmbeddingConfig, LLMConfig, Preset, Source 
+from memgpt.data_types import ( + AgentState, + EmbeddingConfig, + LLMConfig, + Message, + Preset, + Source, +) from memgpt.functions.functions import parse_source_code from memgpt.functions.schema_generator import generate_schema from memgpt.memory import BaseMemory, ChatMemory, get_memory_functions -from memgpt.models.chat_completion_response import Message from memgpt.models.pydantic_models import ( AgentStateModel, HumanModel, JobModel, JobStatus, LLMConfigModel, + MemGPTUsageStatistics, + PassageModel, PersonaModel, PresetModel, SourceModel, ToolModel, ) -from memgpt.server.rest_api.agents.command import CommandResponse from memgpt.server.rest_api.agents.config import GetAgentResponse from memgpt.server.rest_api.agents.index import CreateAgentResponse from memgpt.server.rest_api.agents.memory import ( @@ -49,6 +57,12 @@ from memgpt.utils import get_human_text +class MessageResponse(BaseModel): + # TODO: eventually REST endpoints should also return this + messages: List[Message] + usage: MemGPTUsageStatistics + + def create_client(base_url: Optional[str] = None, token: Optional[str] = None): """Create a MemGPT client. @@ -604,7 +618,7 @@ def get_agent_memory(self, agent_id: uuid.UUID) -> GetAgentMemoryResponse: # agent interactions - def user_message(self, agent_id: str, message: str) -> GetAgentMessagesResponse: + def user_message(self, agent_id: str, message: str) -> MessageResponse: """Send a message to the agent as a user Args: @@ -612,22 +626,28 @@ def user_message(self, agent_id: str, message: str) -> GetAgentMessagesResponse: message (str): Message to send Returns: + response (MessageResponse): The message response """ return self.send_message(agent_id, message, role="user") - def run_command(self, agent_id: str, command: str) -> Union[str, None]: - response = requests.post(f"{self.base_url}/api/agents/{str(agent_id)}/command", json={"command": command}, headers=self.headers) - return CommandResponse(**response.json()) - - def save(self): - raise NotImplementedError - # archival memory def get_agent_archival_memory( self, agent_id: uuid.UUID, before: Optional[uuid.UUID] = None, after: Optional[uuid.UUID] = None, limit: Optional[int] = 1000 - ): + ) -> List[PassageModel]: + """Get archival memory for an agent + + Args: + agent_id (uuid.UUID): ID of the agent + before (uuid.UUID): Get memories before this ID + after (uuid.UUID): Get memories after this ID + limit (int): Number of memories to return + + Returns: + passages (List[PassageModel]): List of memory passages + + """ params = {"limit": limit} if before: params["before"] = str(before) @@ -635,15 +655,32 @@ def get_agent_archival_memory( params["after"] = str(after) response = requests.get(f"{self.base_url}/api/agents/{str(agent_id)}/archival", params=params, headers=self.headers) assert response.status_code == 200, f"Failed to get archival memory: {response.text}" - return GetAgentArchivalMemoryResponse(**response.json()) + return [PassageModel(**passage) for passage in response.json()] - def insert_archival_memory(self, agent_id: uuid.UUID, memory: str) -> GetAgentArchivalMemoryResponse: + def insert_archival_memory(self, agent_id: uuid.UUID, memory: str) -> List[uuid.UUID]: + """Insert archival memory record for an agent + + Args: + agent_id (uuid.UUID): ID of the agent + memory (str): Memory to insert + + Returns: + memory_ids (List[uuid.UUID]): List of memory IDs corresponding to inserted passages + + """ response = requests.post(f"{self.base_url}/api/agents/{agent_id}/archival", json={"content": memory}, 
headers=self.headers) if response.status_code != 200: raise ValueError(f"Failed to insert archival memory: {response.text}") - return InsertAgentArchivalMemoryResponse(**response.json()) + response_obj = InsertAgentArchivalMemoryResponse(**response.json()) + return [uuid.UUID(id) for id in response_obj.ids] def delete_archival_memory(self, agent_id: uuid.UUID, memory_id: uuid.UUID): + """Delete archival memory record for an agent + + Args: + agent_id (uuid.UUID): ID of the agent + memory_id (uuid.UUID + """ response = requests.delete(f"{self.base_url}/api/agents/{agent_id}/archival?id={memory_id}", headers=self.headers) assert response.status_code == 200, f"Failed to delete archival memory: {response.text}" @@ -652,13 +689,27 @@ def delete_archival_memory(self, agent_id: uuid.UUID, memory_id: uuid.UUID): def get_messages( self, agent_id: uuid.UUID, before: Optional[uuid.UUID] = None, after: Optional[uuid.UUID] = None, limit: Optional[int] = 1000 ) -> List[Message]: + """Get agent messages + + Args: + agent_id (uuid.UUID): ID of the agent + before (uuid.UUID): Get messages before this ID + after (uuid.UUID): Get messages after this ID + limit (int): Number of messages to return + + Returns: + messages (List[Message]): List of messages + """ params = {"before": before, "after": after, "limit": limit} response = requests.get(f"{self.base_url}/api/agents/{agent_id}/messages-cursor", params=params, headers=self.headers) if response.status_code != 200: raise ValueError(f"Failed to get messages: {response.text}") - return GetAgentMessagesResponse(**response.json()) + response_obj = GetAgentMessagesResponse(**response.json()) + message_dicts = response_obj.messages + messages = [Message.dict_to_message(msg) for msg in message_dicts] # convert to Message objects + return messages - def send_message(self, agent_id: uuid.UUID, message: str, role: str, stream: Optional[bool] = False) -> UserMessageResponse: + def send_message(self, agent_id: uuid.UUID, message: str, role: str, stream: Optional[bool] = False) -> MessageResponse: """Send a message to the agent Args: @@ -668,7 +719,7 @@ def send_message(self, agent_id: uuid.UUID, message: str, role: str, stream: Opt stream (bool): Stream the message response Returns: - UserMessageResponse: The message response + response (MessageResponse): The message response Examples: Sending a user message: @@ -681,7 +732,10 @@ def send_message(self, agent_id: uuid.UUID, message: str, role: str, stream: Opt response = requests.post(f"{self.base_url}/api/agents/{agent_id}/messages", json=data, headers=self.headers) if response.status_code != 200: raise ValueError(f"Failed to send message: {response.text}") - return UserMessageResponse(**response.json()) + response_obj = UserMessageResponse(**response.json()) + message_dicts = response_obj.messages + messages = [Message.dict_to_message(msg) for msg in message_dicts] # convert to Message objects + return MessageResponse(messages=messages, usage=response_obj.usage) # humans / personas @@ -725,20 +779,52 @@ def send_message(self, agent_id: uuid.UUID, message: str, role: str, stream: Opt # sources - def list_sources(self): + def list_sources(self) -> List[SourceModel]: + """List all sources + + Returns: + sources (List[SourceModel]): List of sources + + """ response = requests.get(f"{self.base_url}/api/sources", headers=self.headers) response_json = response.json() - return ListSourcesResponse(**response_json) + return ListSourcesResponse(**response_json).sources def delete_source(self, source_id: uuid.UUID): + """Delete a 
source + + Args: + source_id (uuid.UUID): ID of the source + + """ response = requests.delete(f"{self.base_url}/api/sources/{str(source_id)}", headers=self.headers) assert response.status_code == 200, f"Failed to delete source: {response.text}" - def get_job_status(self, job_id: uuid.UUID): + def get_job(self, job_id: uuid.UUID): + """Get status of a job + + Args: + job_id (uuid.UUID): ID of the job + + Returns: + job (JobModel): Job status + + """ response = requests.get(f"{self.base_url}/api/sources/status/{str(job_id)}", headers=self.headers) return JobModel(**response.json()) - def load_file_into_source(self, filename: str, source_id: uuid.UUID, blocking=True): + def load_file_into_source(self, filename: str, source_id: uuid.UUID, blocking=True) -> JobModel: + """Load a file into a source + + Args: + filename (str): Name of the file + source_id (uuid.UUID): ID of the source + blocking (bool): Wait for job to complete + + Returns: + job (JobModel): Job information to tracking upload job status + + """ files = {"file": open(filename, "rb")} # create job @@ -759,6 +845,15 @@ def load_file_into_source(self, filename: str, source_id: uuid.UUID, blocking=Tr return job def create_source(self, name: str) -> Source: + """Create a new source + + Args: + name (str): Name of the source + + Returns: + source (Source): The created source + + """ payload = {"name": name} response = requests.post(f"{self.base_url}/api/sources", json=payload, headers=self.headers) response_json = response.json() @@ -773,24 +868,55 @@ def create_source(self, name: str) -> Source: ) def attach_source_to_agent(self, source_id: uuid.UUID, agent_id: uuid.UUID): + """Attach a source to an agent + + Args: + source_id (uuid.UUID): ID of the source + agent_id (uuid.UUID): ID of the agent + + """ params = {"agent_id": agent_id} response = requests.post(f"{self.base_url}/api/sources/{source_id}/attach", params=params, headers=self.headers) assert response.status_code == 200, f"Failed to attach source to agent: {response.text}" def detach_source(self, source_id: uuid.UUID, agent_id: uuid.UUID): + """Detach a source from an agent + + Args: + source_id (uuid.UUID): ID of the source + agent_id (uuid.UUID): ID of the agent + + """ params = {"agent_id": str(agent_id)} response = requests.post(f"{self.base_url}/api/sources/{source_id}/detach", params=params, headers=self.headers) assert response.status_code == 200, f"Failed to detach source from agent: {response.text}" # server configuration commands - def list_models(self) -> ListModelsResponse: + def list_models(self) -> List[LLMConfigModel]: + """List available model configurations on the server + + Returns: + models (List[LLMConfigModel]): List of model configurations + + """ response = requests.get(f"{self.base_url}/api/models", headers=self.headers) - return ListModelsResponse(**response.json()) + return ListModelsResponse(**response.json()).models + + def get_config(self) -> MemGPTConfig: + """Get the configuration for the MemGPT server - def get_config(self) -> ConfigResponse: + Returns: + config (MemGPTConfig): The server configuration + + """ response = requests.get(f"{self.base_url}/api/config", headers=self.headers) - return ConfigResponse(**response.json()) + config = ConfigResponse(**response.json()).config + llm_config = LLMConfig(**config["default_llm_config"]) + embedding_config = EmbeddingConfig(**config["default_embedding_config"]) + del config["default_llm_config"] + del config["default_embedding_config"] + return MemGPTConfig(**config, default_llm_config=llm_config, 
default_embedding_config=embedding_config) # tools @@ -800,7 +926,7 @@ def create_tool( name: Optional[str] = None, update: Optional[bool] = True, # TODO: actually use this tags: Optional[List[str]] = None, - ): + ) -> ToolModel: """Create a tool Args: @@ -809,7 +935,7 @@ def create_tool( update (bool, optional): Update the tool if it already exists. Defaults to True. Returns: - Tool object + tool (ToolModel): Created tool object """ # TODO: check if tool already exists @@ -833,19 +959,39 @@ def create_tool( raise ValueError(f"Failed to create tool: {response.text}") return ToolModel(**response.json()) - def list_tools(self) -> ListToolsResponse: + def list_tools(self) -> List[ToolModel]: + """List available tools + + Returns: + tools (List[ToolModel]): List of tools + + """ response = requests.get(f"{self.base_url}/api/tools", headers=self.headers) if response.status_code != 200: raise ValueError(f"Failed to list tools: {response.text}") return ListToolsResponse(**response.json()).tools def delete_tool(self, name: str): + """Delete a tool + + Args: + name (str): Name of the tool + + """ response = requests.delete(f"{self.base_url}/api/tools/{name}", headers=self.headers) if response.status_code != 200: raise ValueError(f"Failed to delete tool: {response.text}") - return response.json() - def get_tool(self, name: str): + def get_tool(self, name: str) -> ToolModel: + """Get tool information + + Args: + name (str): Name of the tool + + Returns: + tool (ToolModel): Tool object + + """ response = requests.get(f"{self.base_url}/api/tools/{name}", headers=self.headers) if response.status_code == 404: return None diff --git a/memgpt/server/rest_api/agents/memory.py b/memgpt/server/rest_api/agents/memory.py index 522bdc3838..612c1e7a04 100644 --- a/memgpt/server/rest_api/agents/memory.py +++ b/memgpt/server/rest_api/agents/memory.py @@ -7,6 +7,7 @@ from pydantic import BaseModel, Field from memgpt.memory import BaseMemory +from memgpt.models.pydantic_models import PassageModel from memgpt.server.rest_api.auth_token import get_current_user from memgpt.server.rest_api.interface import QueuingInterface from memgpt.server.server import SyncServer @@ -95,7 +96,7 @@ def update_agent_memory( response = server.update_agent_core_memory(user_id=user_id, agent_id=agent_id, new_memory_contents=new_memory_contents) return UpdateAgentMemoryResponse(**response) - @router.get("/agents/{agent_id}/archival/all", tags=["agents"], response_model=GetAgentArchivalMemoryResponse) + @router.get("/agents/{agent_id}/archival/all", tags=["agents"], response_model=List[PassageModel]) def get_agent_archival_memory_all( agent_id: uuid.UUID, user_id: uuid.UUID = Depends(get_current_user_with_server), @@ -105,11 +106,12 @@ def get_agent_archival_memory_all( """ interface.clear() archival_memories = server.get_all_archival_memories(user_id=user_id, agent_id=agent_id) - print("archival_memories:", archival_memories) - archival_memory_objects = [ArchivalMemoryObject(id=passage["id"], contents=passage["contents"]) for passage in archival_memories] - return GetAgentArchivalMemoryResponse(archival_memory=archival_memory_objects) + return archival_memories + # print("archival_memories:", archival_memories) + # archival_memory_objects = [ArchivalMemoryObject(id=passage["id"], contents=passage["contents"]) for passage in archival_memories] + # return GetAgentArchivalMemoryResponse(archival_memory=archival_memory_objects) - @router.get("/agents/{agent_id}/archival", tags=["agents"], response_model=GetAgentArchivalMemoryResponse) + 
@router.get("/agents/{agent_id}/archival", tags=["agents"], response_model=List[PassageModel]) def get_agent_archival_memory( agent_id: uuid.UUID, after: Optional[int] = Query(None, description="Unique ID of the memory to start the query range at."), @@ -124,15 +126,16 @@ def get_agent_archival_memory( # TODO need to add support for non-postgres here # chroma will throw: # raise ValueError("Cannot run get_all_cursor with chroma") - _, archival_json_records = server.get_agent_archival_cursor( + _, archival_records = server.get_agent_archival_cursor( user_id=user_id, agent_id=agent_id, after=after, before=before, limit=limit, ) - archival_memory_objects = [ArchivalMemoryObject(id=passage["id"], contents=passage["text"]) for passage in archival_json_records] - return GetAgentArchivalMemoryResponse(archival_memory=archival_memory_objects) + return archival_records + # archival_memory_objects = [ArchivalMemoryObject(id=passage["id"], contents=passage["text"]) for passage in archival_json_records] + # return GetAgentArchivalMemoryResponse(archival_memory=archival_memory_objects) @router.post("/agents/{agent_id}/archival", tags=["agents"], response_model=InsertAgentArchivalMemoryResponse) def insert_agent_archival_memory( diff --git a/memgpt/server/server.py b/memgpt/server/server.py index 80be4230c9..fbfd045b4a 100644 --- a/memgpt/server/server.py +++ b/memgpt/server/server.py @@ -1066,7 +1066,7 @@ def get_agent_archival_cursor( limit: Optional[int] = 100, order_by: Optional[str] = "created_at", reverse: Optional[bool] = False, - ): + ) -> Tuple[uuid.UUID, List[PassageModel]]: if self.ms.get_user(user_id=user_id) is None: raise ValueError(f"User user_id={user_id} does not exist") if self.ms.get_agent(agent_id=agent_id, user_id=user_id) is None: @@ -1079,8 +1079,7 @@ def get_agent_archival_cursor( cursor, records = memgpt_agent.persistence_manager.archival_memory.storage.get_all_cursor( after=after, before=before, limit=limit, order_by=order_by, reverse=reverse ) - json_records = [vars(record) for record in records] - return cursor, json_records + return cursor, records def get_all_archival_memories(self, user_id: uuid.UUID, agent_id: uuid.UUID) -> list: # TODO deprecate (not safe to be returning an unbounded list) @@ -1094,8 +1093,7 @@ def get_all_archival_memories(self, user_id: uuid.UUID, agent_id: uuid.UUID) -> # Assume passages records = memgpt_agent.persistence_manager.archival_memory.storage.get_all() - - return [dict(id=str(r.id), contents=r.text) for r in records] + return records def insert_archival_memory(self, user_id: uuid.UUID, agent_id: uuid.UUID, memory_contents: str) -> uuid.UUID: if self.ms.get_user(user_id=user_id) is None: From 448934fb78901b62edcccb94edb5481a7cb8d742 Mon Sep 17 00:00:00 2001 From: Sarah Wooders Date: Sun, 7 Jul 2024 10:08:47 -0700 Subject: [PATCH 5/6] add categorization --- docs/mkdocs.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index fab0c4195f..64eee31a75 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -18,7 +18,12 @@ nav: - RESTClient: client/rest_client.md - LocalClient: client/local_client.md plugins: - - mkdocstrings +- mkdocstrings: + handlers: + python: + options: + group_by_category: true + show_category_heading: true #site_name: "MemGPT API Reference" # From 16640a71d5ba85d434decee39a3b2634e39047bf Mon Sep 17 00:00:00 2001 From: Sarah Wooders Date: Sun, 7 Jul 2024 10:09:21 -0700 Subject: [PATCH 6/6] add local/rest client files --- docs/docs/client/local_client.md | 8 ++++++++ 
docs/docs/client/rest_client.md  | 7 +++++++
 2 files changed, 15 insertions(+)
 create mode 100644 docs/docs/client/local_client.md
 create mode 100644 docs/docs/client/rest_client.md

diff --git a/docs/docs/client/local_client.md b/docs/docs/client/local_client.md
new file mode 100644
index 0000000000..39fa1e460d
--- /dev/null
+++ b/docs/docs/client/local_client.md
@@ -0,0 +1,8 @@
+::: memgpt.client.client.LocalClient
+    options:
+      group_by_category: true
+
+
+
+
+
diff --git a/docs/docs/client/rest_client.md b/docs/docs/client/rest_client.md
new file mode 100644
index 0000000000..5f4896d1c0
--- /dev/null
+++ b/docs/docs/client/rest_client.md
@@ -0,0 +1,7 @@
+::: memgpt.client.client.RESTClient
+    options:
+      group_by_category: true
+
+
+
+
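
Editor's addendum: the patches above retype a number of RESTClient methods so they return parsed objects (for example List[Message], List[PassageModel], and the new MessageResponse wrapper) instead of raw REST response models. The sketch below is not part of the patch series; it is a minimal, hypothetical usage example showing how the retyped methods compose. The server URL, token, agent ID, and file name are placeholders, and it assumes a MemGPT server is already running with an existing agent.

import uuid

from memgpt.client.client import RESTClient

# Placeholder connection details: substitute a real server URL, user token, and agent ID.
client = RESTClient(base_url="http://localhost:8283", token="user_token")
agent_id = uuid.UUID("00000000-0000-0000-0000-000000000000")  # hypothetical agent ID

# send_message() now returns a MessageResponse wrapping parsed Message objects plus usage statistics.
response = client.send_message(agent_id=agent_id, message="Hello", role="user")
print("Usage:", response.usage)
for msg in response.messages:
    print(msg)  # memgpt.data_types.Message objects rather than raw dicts

# Archival memory helpers are retyped as well: inserts return the new passage IDs,
# and reads return PassageModel objects.
memory_ids = client.insert_archival_memory(agent_id, "Remember that the demo ran today.")
passages = client.get_agent_archival_memory(agent_id, limit=10)
print(f"Inserted {len(memory_ids)} passage(s); fetched {len(passages)} recent passage(s)")

# Data source flow: create a source, load a (placeholder) file, and attach it to the agent.
source = client.create_source(name="example-docs")            # assumes the returned Source carries an .id
job = client.load_file_into_source("example.txt", source.id)  # returns a JobModel; blocks until the job completes by default
print("Load job status:", job.status)
client.attach_source_to_agent(source_id=source.id, agent_id=agent_id)

With the mkdocstrings pages added above, the generated reference for these methods can be previewed locally by running mkdocs serve from the docs/ directory, assuming mkdocs, mkdocs-material, and mkdocstrings[python] are installed.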