From 5206048168fe0d3ec1df4c2e6234f65cf26ac798 Mon Sep 17 00:00:00 2001
From: bracesproul
Date: Thu, 8 Aug 2024 15:58:20 -0700
Subject: [PATCH 1/2] openai[patch],anthropic[patch]: Improve args section of jsdoc

---
 libs/langchain-anthropic/src/chat_models.ts   | 23 +++++++++++++++----
 .../langchain-openai/src/azure/chat_models.ts | 23 +++++++++++++++----
 libs/langchain-openai/src/chat_models.ts      | 23 +++++++++++++++----
 3 files changed, 57 insertions(+), 12 deletions(-)

diff --git a/libs/langchain-anthropic/src/chat_models.ts b/libs/langchain-anthropic/src/chat_models.ts
index f6f361b828b5..6b345573fd98 100644
--- a/libs/langchain-anthropic/src/chat_models.ts
+++ b/libs/langchain-anthropic/src/chat_models.ts
@@ -186,13 +186,28 @@ function extractToken(chunk: AIMessageChunk): string | undefined {
  * export ANTHROPIC_API_KEY="your-api-key"
  * ```
  *
- * ## Key args
+ * ## [Constructor args](/classes/langchain_anthropic.ChatAnthropic.html#constructor)
  *
- * ### [Init args](/classes/langchain_anthropic.ChatAnthropic.html#constructor)
+ * ## [Runtime args](/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
  *
- * ### [Runtime args](/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
+ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
+ * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, as shown in the example below:
  *
- * > See full list of supported init args and their descriptions in the [`constructor`](/classes/langchain_anthropic.ChatAnthropic.html#constructor) section.
+ * ```typescript
+ * // When calling `.bind`, call options should be passed via the first argument
+ * const llmWithArgsBound = llm.bind({
+ *   stop: ["\n"],
+ *   tools: [...],
+ * });
+ *
+ * // When calling `.bindTools`, call options should be passed via the second argument
+ * const llmWithTools = llm.bindTools(
+ *   [...],
+ *   {
+ *     stop: ["stop on this token!"],
+ *   }
+ * );
+ * ```
  *
  * ## Examples
  *
diff --git a/libs/langchain-openai/src/azure/chat_models.ts b/libs/langchain-openai/src/azure/chat_models.ts
index bb26fbf6633e..4ef8b44a18d9 100644
--- a/libs/langchain-openai/src/azure/chat_models.ts
+++ b/libs/langchain-openai/src/azure/chat_models.ts
@@ -26,13 +26,28 @@ import {
  * export AZURE_OPENAI_BASE_PATH="your-base-path"
  * ```
  *
- * ## Key args
+ * ## [Constructor args](/classes/langchain_openai.AzureChatOpenAI.html#constructor)
  *
- * ### [Init args](/classes/langchain_openai.AzureChatOpenAI.html#constructor)
+ * ## [Runtime args](/interfaces/langchain_openai.ChatOpenAICallOptions.html)
  *
- * ### [Runtime args](/interfaces/langchain_openai.ChatOpenAICallOptions.html)
+ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
+ * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, as shown in the example below:
  *
- * > See full list of supported init args and their descriptions in the [`constructor`](/classes/langchain_openai.AzureChatOpenAI.html#constructor) section.
+ * ```typescript
+ * // When calling `.bind`, call options should be passed via the first argument
+ * const llmWithArgsBound = llm.bind({
+ *   stop: ["\n"],
+ *   tools: [...],
+ * });
+ *
+ * // When calling `.bindTools`, call options should be passed via the second argument
+ * const llmWithTools = llm.bindTools(
+ *   [...],
+ *   {
+ *     stop: ["stop on this token!"],
+ *   }
+ * );
+ * ```
  *
  * ## Examples
  *
diff --git a/libs/langchain-openai/src/chat_models.ts b/libs/langchain-openai/src/chat_models.ts
index 3457e7de3dc8..fe45fe40c6ef 100644
--- a/libs/langchain-openai/src/chat_models.ts
+++ b/libs/langchain-openai/src/chat_models.ts
@@ -377,13 +377,28 @@ export interface ChatOpenAIFields
  * export OPENAI_API_KEY="your-api-key"
  * ```
  *
- * ## Key args
+ * ## [Constructor args](/classes/langchain_openai.ChatOpenAI.html#constructor)
  *
- * ### [Init args](/classes/langchain_openai.ChatOpenAI.html#constructor)
+ * ## [Runtime args](/interfaces/langchain_openai.ChatOpenAICallOptions.html)
  *
- * ### [Runtime args](/interfaces/langchain_openai.ChatOpenAICallOptions.html)
+ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
+ * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, as shown in the example below:
  *
- * > See full list of supported init args and their descriptions in the [`constructor`](/classes/langchain_openai.ChatOpenAI.html#constructor) section.
+ * ```typescript
+ * // When calling `.bind`, call options should be passed via the first argument
+ * const llmWithArgsBound = llm.bind({
+ *   stop: ["\n"],
+ *   tools: [...],
+ * });
+ *
+ * // When calling `.bindTools`, call options should be passed via the second argument
+ * const llmWithTools = llm.bindTools(
+ *   [...],
+ *   {
+ *     stop: ["stop on this token!"],
+ *   }
+ * );
+ * ```
  *
  * ## Examples
  *

From 4a76907ed8d702ccdcf245219de70d3900492208 Mon Sep 17 00:00:00 2001
From: Brace Sproul
Date: Thu, 8 Aug 2024 16:27:15 -0700
Subject: [PATCH 2/2] Apply suggestions from code review

---
 libs/langchain-anthropic/src/chat_models.ts    | 4 ++--
 libs/langchain-openai/src/azure/chat_models.ts | 4 ++--
 libs/langchain-openai/src/chat_models.ts       | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/libs/langchain-anthropic/src/chat_models.ts b/libs/langchain-anthropic/src/chat_models.ts
index 6b345573fd98..18bb5f3ddd32 100644
--- a/libs/langchain-anthropic/src/chat_models.ts
+++ b/libs/langchain-anthropic/src/chat_models.ts
@@ -191,7 +191,7 @@ function extractToken(chunk: AIMessageChunk): string | undefined {
  * ## [Runtime args](/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
  *
  * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
- * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, as shown in the example below:
+ * They can also be passed via `.bind`, or the second arg in `.bindTools`, as shown in the examples below:
  *
  * ```typescript
  * // When calling `.bind`, call options should be passed via the first argument
@@ -204,7 +204,7 @@ function extractToken(chunk: AIMessageChunk): string | undefined {
  * const llmWithTools = llm.bindTools(
  *   [...],
  *   {
- *     stop: ["stop on this token!"],
+ *     tool_choice: "auto",
  *   }
  * );
  * ```
diff --git a/libs/langchain-openai/src/azure/chat_models.ts b/libs/langchain-openai/src/azure/chat_models.ts
index 4ef8b44a18d9..aeb54aa9b607 100644
--- a/libs/langchain-openai/src/azure/chat_models.ts
+++ b/libs/langchain-openai/src/azure/chat_models.ts
@@ -31,7 +31,7 @@ import {
  * ## [Runtime args](/interfaces/langchain_openai.ChatOpenAICallOptions.html)
  *
  * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
- * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, as shown in the example below:
+ * They can also be passed via `.bind`, or the second arg in `.bindTools`, as shown in the examples below:
  *
  * ```typescript
  * // When calling `.bind`, call options should be passed via the first argument
@@ -44,7 +44,7 @@ import {
  * const llmWithTools = llm.bindTools(
  *   [...],
  *   {
- *     stop: ["stop on this token!"],
+ *     tool_choice: "auto",
  *   }
  * );
  * ```
diff --git a/libs/langchain-openai/src/chat_models.ts b/libs/langchain-openai/src/chat_models.ts
index fe45fe40c6ef..8bed8690d640 100644
--- a/libs/langchain-openai/src/chat_models.ts
+++ b/libs/langchain-openai/src/chat_models.ts
@@ -382,7 +382,7 @@ export interface ChatOpenAIFields
  * ## [Runtime args](/interfaces/langchain_openai.ChatOpenAICallOptions.html)
  *
  * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`, `.stream`, `.batch`, etc.
- * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, as shown in the example below:
+ * They can also be passed via `.bind`, or the second arg in `.bindTools`, as shown in the examples below:
  *
  * ```typescript
  * // When calling `.bind`, call options should be passed via the first argument
@@ -395,7 +395,7 @@ export interface ChatOpenAIFields
  * const llmWithTools = llm.bindTools(
  *   [...],
  *   {
- *     stop: ["stop on this token!"],
+ *     tool_choice: "auto",
  *   }
  * );
  * ```
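The jsdoc added in these patches notes that runtime args can also be passed as the second argument to the base runnable methods themselves (`.invoke`, `.stream`, `.batch`), but the embedded snippets only show the `.bind` and `.bindTools` forms. Below is a minimal sketch of the `.invoke` form; the model name, prompt, and `stop` value are illustrative and are not part of the patch.

```typescript
import { ChatOpenAI } from "@langchain/openai";

// Assumes OPENAI_API_KEY is set in the environment; the model name is illustrative.
const llm = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 });

// Call options (runtime args) go in the second argument of `.invoke` and
// apply to this invocation only; `llm` itself is not modified.
const result = await llm.invoke("Write a haiku about the ocean.", {
  stop: ["\n\n"],
});
console.log(result.content);
```

Passing options per call like this is a one-off override; `.bind` and `.bindTools` are the way to reuse the same options across many calls.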