From d8a7cfe08033e69c53330abdb4c7382528803277 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 8 Aug 2024 15:06:12 -0700 Subject: [PATCH 1/6] aws[patch]: Update JSDoc on bedrock converse --- libs/langchain-aws/src/chat_models.ts | 428 +++++++++++++++++- .../src/tests/chat_models.int.test.ts | 4 +- 2 files changed, 424 insertions(+), 8 deletions(-) diff --git a/libs/langchain-aws/src/chat_models.ts b/libs/langchain-aws/src/chat_models.ts index 7b3a58dc29e9..4be1ff66c1f5 100644 --- a/libs/langchain-aws/src/chat_models.ts +++ b/libs/langchain-aws/src/chat_models.ts @@ -169,22 +169,438 @@ export interface ChatBedrockConverseCallOptions } /** - * Integration with AWS Bedrock Converse API. + * AWS Bedrock Converse chat model integration. + * + * Setup: + * Install `@langchain/aws` and set the following environment variables: + * + * ```bash + * npm install @langchain/aws + * export BEDROCK_AWS_REGION="your-aws-region" + * export BEDROCK_AWS_SECRET_ACCESS_KEY="your-aws-secret-access-key" + * export BEDROCK_AWS_ACCESS_KEY_ID="your-aws-access-key-id" + * ``` + * + * ### [Constructor args](/classes/langchain_aws.ChatBedrockConverse.html#constructor) + * + * ### [Runtime args](/interfaces/langchain_aws.ChatBedrockConverseCallOptions.html) + * + * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc. + * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, like shown in the example below: + * + * ```typescript + * // When calling `.bind`, call options should be passed via the first argument + * const llmWithArgsBound = llm.bind({ + * stop: ["\n"], + * tools: [...], + * }); + * + * // When calling `.bindTools`, call options should be passed via the second argument + * const llmWithTools = llm.bindTools( + * [...], + * { + * tool_choice: "auto", + * } + * ); + * ``` + * + * ## Examples + * + *
+ * Instantiate * - * @example * ```typescript - * import { ChatBedrockConverse } from "@langchain/aws"; + * import { ChatBedrockConverse } from '@langchain/aws'; * - * const model = new ChatBedrockConverse({ - * region: process.env.BEDROCK_AWS_REGION ?? "us-east-1", + * const llm = new ChatBedrockConverse({ + * model: "anthropic.claude-3-5-sonnet-20240620-v1:0", + * temperature: 0, + * maxTokens: undefined, + * timeout: undefined, + * maxRetries: 2, + * region: process.env.BEDROCK_AWS_REGION, * credentials: { * secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY!, * accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID!, * }, + * // other params... * }); + * ``` + *
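 *
 * If explicit keys are not available, the `credentials` field can usually be
 * omitted so the AWS SDK's default credential provider chain (environment
 * variables, shared config files, or an attached IAM role) is used instead.
 * This is a minimal sketch rather than part of the original example, and it
 * assumes the default-provider behavior of the underlying Bedrock runtime client:
 *
 * ```typescript
 * // Sketch: no explicit keys; the AWS SDK default credential chain is assumed.
 * const llmFromDefaultCreds = new ChatBedrockConverse({
 *   model: "anthropic.claude-3-5-sonnet-20240620-v1:0",
 *   region: process.env.BEDROCK_AWS_REGION,
 * });
 * ```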
+ * + *
+ * + *
+ * Invoking + * + * ```typescript + * const messages = [ + * { + * type: "system" as const, + * content: "You are a helpful translator. Translate the user sentence to French.", + * }, + * { + * type: "human" as const, + * content: "I love programming.", + * }, + * ]; + * const result = await llm.invoke(messages); + * console.log(result); + * ``` + * + * ```txt + * AIMessage { + * "id": "81a27f7a-550c-473d-8307-c2fbb9c74956", + * "content": "Here's the translation to French:\n\nJ'adore la programmation.", + * "response_metadata": { + * "$metadata": { + * "httpStatusCode": 200, + * "requestId": "81a27f7a-550c-473d-8307-c2fbb9c74956", + * "attempts": 1, + * "totalRetryDelay": 0 + * }, + * "metrics": { + * "latencyMs": 1109 + * }, + * "stopReason": "end_turn", + * "usage": { + * "inputTokens": 25, + * "outputTokens": 19, + * "totalTokens": 44 + * } + * }, + * "usage_metadata": { + * "input_tokens": 25, + * "output_tokens": 19, + * "total_tokens": 44 + * } + * } + * ``` + *
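 *
 * The same call can also be driven through a prompt template piped into the
 * model. This is a sketch using `ChatPromptTemplate` from `@langchain/core`,
 * not part of the original example:
 *
 * ```typescript
 * import { ChatPromptTemplate } from '@langchain/core/prompts';
 *
 * const prompt = ChatPromptTemplate.fromMessages([
 *   ["system", "You are a helpful translator. Translate the user sentence to {language}."],
 *   ["human", "{input}"],
 * ]);
 * const chain = prompt.pipe(llm);
 * const chainedResult = await chain.invoke({
 *   language: "French",
 *   input: "I love programming.",
 * });
 * console.log(chainedResult.content);
 * ```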
+ * + *
+ * + *
+ * Streaming Chunks * - * const res = await model.invoke([new HumanMessage("Print hello world")]); + * ```typescript + * for await (const chunk of await llm.stream(messages)) { + * console.log(chunk); + * } + * ``` + * + * ```txt + * AIMessageChunk { + * "content": "" + * "response_metadata": { + * "messageStart": { + * "p": "abcdefghijk", + * "role": "assistant" + * } + * } + * } + * AIMessageChunk { + * "content": "Here" + * } + * AIMessageChunk { + * "content": "'s" + * } + * AIMessageChunk { + * "content": " the translation" + * } + * AIMessageChunk { + * "content": " to" + * } + * AIMessageChunk { + * "content": " French:\n\nJ" + * } + * AIMessageChunk { + * "content": "'adore la" + * } + * AIMessageChunk { + * "content": " programmation." + * } + * AIMessageChunk { + * "content": "" + * "response_metadata": { + * "contentBlockStop": { + * "contentBlockIndex": 0, + * "p": "abcdefghijk" + * } + * } + * } + * AIMessageChunk { + * "content": "" + * "response_metadata": { + * "messageStop": { + * "stopReason": "end_turn" + * } + * } + * } + * AIMessageChunk { + * "content": "" + * "response_metadata": { + * "metadata": { + * "metrics": { + * "latencyMs": 838 + * }, + * "p": "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123", + * "usage": { + * "inputTokens": 25, + * "outputTokens": 19, + * "totalTokens": 44 + * } + * } + * } + * "usage_metadata": { + * "input_tokens": 25, + * "output_tokens": 19, + * "total_tokens": 44 + * } + * } + * ``` + *
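 *
 * When only the generated text is needed, the string `content` of each chunk
 * can be accumulated directly (a minimal sketch):
 *
 * ```typescript
 * let streamedText = "";
 * for await (const chunk of await llm.stream(messages)) {
 *   // Content may also be an array of content blocks, so guard on the type.
 *   if (typeof chunk.content === "string") {
 *     streamedText += chunk.content;
 *   }
 * }
 * console.log(streamedText);
 * ```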
+ * + *
+ * + *
+ * Aggregate Streamed Chunks + * + * ```typescript + * import { AIMessageChunk } from '@langchain/core/messages'; + * import { concat } from '@langchain/core/utils/stream'; + * + * const stream = await llm.stream(messages); + * let full: AIMessageChunk | undefined; + * for await (const chunk of stream) { + * full = !full ? chunk : concat(full, chunk); + * } + * console.log(full); + * ``` + * + * ```txt + * AIMessageChunk { + * "content": "Here's the translation to French:\n\nJ'adore la programmation.", + * "response_metadata": { + * "messageStart": { + * "p": "ab", + * "role": "assistant" + * }, + * "contentBlockStop": { + * "contentBlockIndex": 0, + * "p": "abcdefghijklmnopqrstuvwxyzABCDEFGHIJK" + * }, + * "messageStop": { + * "stopReason": "end_turn" + * }, + * "metadata": { + * "metrics": { + * "latencyMs": 838 + * }, + * "p": "abcdefghijklmnopqrstuvwxyz", + * "usage": { + * "inputTokens": 25, + * "outputTokens": 19, + * "totalTokens": 44 + * } + * } + * }, + * "usage_metadata": { + * "input_tokens": 25, + * "output_tokens": 19, + * "total_tokens": 44 + * } + * } * ``` + *
+ * + *
+ * + *
+ * Bind tools + * + * ```typescript + * import { z } from 'zod'; + * + * const GetWeather = { + * name: "GetWeather", + * description: "Get the current weather in a given location", + * schema: z.object({ + * location: z.string().describe("The city and state, e.g. San Francisco, CA") + * }), + * } + * + * const GetPopulation = { + * name: "GetPopulation", + * description: "Get the current population in a given location", + * schema: z.object({ + * location: z.string().describe("The city and state, e.g. San Francisco, CA") + * }), + * } + * + * const llmWithTools = llm.bindTools( + * [GetWeather, GetPopulation], + * { + * // strict: true // enforce tool args schema is respected + * } + * ); + * const aiMsg = await llmWithTools.invoke( + * "Which city is hotter today and which is bigger: LA or NY?" + * ); + * console.log(aiMsg.tool_calls); + * ``` + * + * ```txt + * [ + * { + * id: 'tooluse_hIaiqfweRtSiJyi6J4naJA', + * name: 'GetWeather', + * args: { location: 'Los Angeles, CA' }, + * type: 'tool_call' + * }, + * { + * id: 'tooluse_nOS8B0UlTd2FdpH4MSHw9w', + * name: 'GetWeather', + * args: { location: 'New York, NY' }, + * type: 'tool_call' + * }, + * { + * id: 'tooluse_XxMpZiETQ5aVS5opVDyIaw', + * name: 'GetPopulation', + * args: { location: 'Los Angeles, CA' }, + * type: 'tool_call' + * }, + * { + * id: 'tooluse_GpYvAfldT2aR8VQfH-p4PQ', + * name: 'GetPopulation', + * args: { location: 'New York, NY' }, + * type: 'tool_call' + * } + * ] + * ``` + *
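 *
 * A possible follow-up step, not shown in the original docs, is to run the
 * requested tools and return their outputs as `ToolMessage`s so the model can
 * compose a final natural-language answer. The `getCityWeather` and
 * `getCityPopulation` functions below are hypothetical stand-ins:
 *
 * ```typescript
 * import { ToolMessage } from '@langchain/core/messages';
 *
 * // Hypothetical local implementations of the bound tools.
 * const getCityWeather = async (location: string) => `It is 75F and sunny in ${location}.`;
 * const getCityPopulation = async (location: string) => `${location} has roughly 4 million residents.`;
 *
 * const toolMessages = await Promise.all(
 *   (aiMsg.tool_calls ?? []).map(async (toolCall) => {
 *     const output = toolCall.name === "GetWeather"
 *       ? await getCityWeather(toolCall.args.location)
 *       : await getCityPopulation(toolCall.args.location);
 *     return new ToolMessage({ content: output, tool_call_id: toolCall.id! });
 *   })
 * );
 *
 * // Send the original question, the tool-calling AI message, and the tool
 * // results back so the model can answer in natural language.
 * const finalAnswer = await llmWithTools.invoke([
 *   ["human", "Which city is hotter today and which is bigger: LA or NY?"],
 *   aiMsg,
 *   ...toolMessages,
 * ]);
 * console.log(finalAnswer.content);
 * ```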
+ * + *
+ * + *
+ * Structured Output + * + * ```typescript + * import { z } from 'zod'; + * + * const Joke = z.object({ + * setup: z.string().describe("The setup of the joke"), + * punchline: z.string().describe("The punchline to the joke"), + * rating: z.number().optional().describe("How funny the joke is, from 1 to 10") + * }).describe('Joke to tell user.'); + * + * const structuredLlm = llm.withStructuredOutput(Joke); + * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats"); + * console.log(jokeResult); + * ``` + * + * ```txt + * { + * setup: "Why don't cats play poker in the jungle?", + * punchline: 'Too many cheetahs!', + * rating: 7 + * } + * ``` + *
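 *
 * `withStructuredOutput` also accepts an options object. For example,
 * `includeRaw: true` returns both the raw `AIMessage` and the parsed value.
 * This sketch relies on the generic `withStructuredOutput` options from
 * `@langchain/core`, not on anything Bedrock-specific:
 *
 * ```typescript
 * const structuredLlmWithRaw = llm.withStructuredOutput(Joke, {
 *   name: "Joke",
 *   includeRaw: true,
 * });
 * const rawAndParsed = await structuredLlmWithRaw.invoke("Tell me a joke about cats");
 * // rawAndParsed.raw is the underlying AIMessage (tool calls, usage metadata, ...).
 * // rawAndParsed.parsed is the validated Joke object.
 * console.log(rawAndParsed.parsed);
 * ```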
+ * + *
+ * + *
+ * Multimodal + * + * ```typescript + * import { HumanMessage } from '@langchain/core/messages'; + * + * const imageUrl = "https://example.com/image.jpg"; + * const imageData = await fetch(imageUrl).then(res => res.arrayBuffer()); + * const base64Image = Buffer.from(imageData).toString('base64'); + * + * const message = new HumanMessage({ + * content: [ + * { type: "text", text: "describe the weather in this image" }, + * { + * type: "image_url", + * image_url: { url: `data:image/jpeg;base64,${base64Image}` }, + * }, + * ] + * }); + * + * const imageDescriptionAiMsg = await llm.invoke([message]); + * console.log(imageDescriptionAiMsg.content); + * ``` + * + * ```txt + * The weather in this image appears to be clear and pleasant. The sky is a vibrant blue with scattered white clouds, suggesting a sunny day with good visibility. The clouds are light and wispy, indicating fair weather conditions. There's no sign of rain, storm, or any adverse weather patterns. The lush green grass on the rolling hills looks well-watered and healthy, which could indicate recent rainfall or generally favorable weather conditions. Overall, the image depicts a beautiful, calm day with blue skies and sunshine - perfect weather for enjoying the outdoors. + * ``` + *
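 *
 * Local image files can be sent the same way by base64-encoding their bytes.
 * A small variation on the example above; the file path is illustrative only:
 *
 * ```typescript
 * import * as fs from 'node:fs';
 *
 * const localImageBase64 = fs.readFileSync("./weather-photo.jpg").toString("base64");
 * const localImageMessage = new HumanMessage({
 *   content: [
 *     { type: "text", text: "describe the weather in this image" },
 *     {
 *       type: "image_url",
 *       image_url: { url: `data:image/jpeg;base64,${localImageBase64}` },
 *     },
 *   ],
 * });
 *
 * const localImageDescription = await llm.invoke([localImageMessage]);
 * console.log(localImageDescription.content);
 * ```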
+ * + *
+ * + *
+ * Usage Metadata + * + * ```typescript + * const aiMsgForMetadata = await llm.invoke(messages); + * console.log(aiMsgForMetadata.usage_metadata); + * ``` + * + * ```txt + * { input_tokens: 25, output_tokens: 19, total_tokens: 44 } + * ``` + *
+ * + *
+ * + *
+ * Stream Usage Metadata + * + * ```typescript + * const streamForMetadata = await llm.stream(messages); + * let fullForMetadata: AIMessageChunk | undefined; + * for await (const chunk of streamForMetadata) { + * fullForMetadata = !fullForMetadata ? chunk : concat(fullForMetadata, chunk); + * } + * console.log(fullForMetadata?.usage_metadata); + * ``` + * + * ```txt + * { input_tokens: 25, output_tokens: 19, total_tokens: 44 } + * ``` + *
+ * + *
+ * + *
+ * Response Metadata + * + * ```typescript + * const aiMsgForResponseMetadata = await llm.invoke(messages); + * console.log(aiMsgForResponseMetadata.response_metadata); + * ``` + * + * ```txt + * { + * '$metadata': { + * httpStatusCode: 200, + * requestId: '5de2a2e5-d1dc-4dff-bb02-31361f4107bc', + * extendedRequestId: undefined, + * cfId: undefined, + * attempts: 1, + * totalRetryDelay: 0 + * }, + * metrics: { latencyMs: 1163 }, + * stopReason: 'end_turn', + * usage: { inputTokens: 25, outputTokens: 19, totalTokens: 44 } + * } + * ``` + *
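 *
 * The `stopReason` field is useful for detecting truncated generations. The
 * Converse API reports values such as `"end_turn"`, `"max_tokens"`, and
 * `"tool_use"`; the check below is a small sketch assuming those string values:
 *
 * ```typescript
 * const msgWithStopReason = await llm.invoke(messages);
 * if (msgWithStopReason.response_metadata.stopReason === "max_tokens") {
 *   // The generation hit the configured token limit and is likely cut off.
 *   console.warn("Response truncated; consider raising `maxTokens`.");
 * }
 * ```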
+ * + *
*/ export class ChatBedrockConverse extends BaseChatModel diff --git a/libs/langchain-aws/src/tests/chat_models.int.test.ts b/libs/langchain-aws/src/tests/chat_models.int.test.ts index cd3cc4d16f38..2e2c519a5ca5 100644 --- a/libs/langchain-aws/src/tests/chat_models.int.test.ts +++ b/libs/langchain-aws/src/tests/chat_models.int.test.ts @@ -159,8 +159,8 @@ test("populates ID field on AIMessage", async () => { expect(response.id?.length).toBeGreaterThan(1); /** - * Bedrock Converse does not include an ID in - * the response of a streaming call. + Bedrock Converse does not include an ID in + the response of a streaming call. */ // Streaming From 20f863bf5136d264704d813b2a28ae1c6dc897da Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 8 Aug 2024 15:06:48 -0700 Subject: [PATCH 2/6] cr --- libs/langchain-aws/src/tests/chat_models.int.test.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/langchain-aws/src/tests/chat_models.int.test.ts b/libs/langchain-aws/src/tests/chat_models.int.test.ts index 2e2c519a5ca5..cd3cc4d16f38 100644 --- a/libs/langchain-aws/src/tests/chat_models.int.test.ts +++ b/libs/langchain-aws/src/tests/chat_models.int.test.ts @@ -159,8 +159,8 @@ test("populates ID field on AIMessage", async () => { expect(response.id?.length).toBeGreaterThan(1); /** - Bedrock Converse does not include an ID in - the response of a streaming call. + * Bedrock Converse does not include an ID in + * the response of a streaming call. */ // Streaming From d0edfe3dd2fe3361485a3e509e722b5446df02b6 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 8 Aug 2024 15:50:15 -0700 Subject: [PATCH 3/6] format --- libs/langchain-aws/src/chat_models.ts | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/libs/langchain-aws/src/chat_models.ts b/libs/langchain-aws/src/chat_models.ts index 4be1ff66c1f5..99d63b4690ae 100644 --- a/libs/langchain-aws/src/chat_models.ts +++ b/libs/langchain-aws/src/chat_models.ts @@ -184,17 +184,17 @@ export interface ChatBedrockConverseCallOptions * ### [Constructor args](/classes/langchain_aws.ChatBedrockConverse.html#constructor) * * ### [Runtime args](/interfaces/langchain_aws.ChatBedrockConverseCallOptions.html) - * + * * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc. * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, like shown in the example below: - * + * * ```typescript * // When calling `.bind`, call options should be passed via the first argument * const llmWithArgsBound = llm.bind({ * stop: ["\n"], * tools: [...], * }); - * + * * // When calling `.bindTools`, call options should be passed via the second argument * const llmWithTools = llm.bindTools( * [...], @@ -291,13 +291,13 @@ export interface ChatBedrockConverseCallOptions * * ```txt * AIMessageChunk { - * "content": "" + * "content": "" * "response_metadata": { * "messageStart": { * "p": "abcdefghijk", * "role": "assistant" * } - * } + * } * } * AIMessageChunk { * "content": "Here" @@ -321,24 +321,24 @@ export interface ChatBedrockConverseCallOptions * "content": " programmation." 
* } * AIMessageChunk { - * "content": "" + * "content": "" * "response_metadata": { * "contentBlockStop": { * "contentBlockIndex": 0, * "p": "abcdefghijk" * } - * } + * } * } * AIMessageChunk { - * "content": "" + * "content": "" * "response_metadata": { * "messageStop": { * "stopReason": "end_turn" * } - * } + * } * } * AIMessageChunk { - * "content": "" + * "content": "" * "response_metadata": { * "metadata": { * "metrics": { @@ -351,7 +351,7 @@ export interface ChatBedrockConverseCallOptions * "totalTokens": 44 * } * } - * } + * } * "usage_metadata": { * "input_tokens": 25, * "output_tokens": 19, From de23061472162f24458a4afcfaf5ea94423b130a Mon Sep 17 00:00:00 2001 From: Brace Sproul Date: Thu, 8 Aug 2024 15:52:54 -0700 Subject: [PATCH 4/6] Update libs/langchain-aws/src/chat_models.ts --- libs/langchain-aws/src/chat_models.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/langchain-aws/src/chat_models.ts b/libs/langchain-aws/src/chat_models.ts index 99d63b4690ae..1a98a797f52c 100644 --- a/libs/langchain-aws/src/chat_models.ts +++ b/libs/langchain-aws/src/chat_models.ts @@ -181,9 +181,9 @@ export interface ChatBedrockConverseCallOptions * export BEDROCK_AWS_ACCESS_KEY_ID="your-aws-access-key-id" * ``` * - * ### [Constructor args](/classes/langchain_aws.ChatBedrockConverse.html#constructor) + * ## [Constructor args](/classes/langchain_aws.ChatBedrockConverse.html#constructor) * - * ### [Runtime args](/interfaces/langchain_aws.ChatBedrockConverseCallOptions.html) + * ## [Runtime args](/interfaces/langchain_aws.ChatBedrockConverseCallOptions.html) * * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc. * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, like shown in the example below: From 591aba2937bc3ab492a6b1b6eeeb233cad5aea2c Mon Sep 17 00:00:00 2001 From: Brace Sproul Date: Thu, 8 Aug 2024 16:28:20 -0700 Subject: [PATCH 5/6] Update libs/langchain-aws/src/chat_models.ts --- libs/langchain-aws/src/chat_models.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/langchain-aws/src/chat_models.ts b/libs/langchain-aws/src/chat_models.ts index 1a98a797f52c..9ce93efdd4e4 100644 --- a/libs/langchain-aws/src/chat_models.ts +++ b/libs/langchain-aws/src/chat_models.ts @@ -186,7 +186,7 @@ export interface ChatBedrockConverseCallOptions * ## [Runtime args](/interfaces/langchain_aws.ChatBedrockConverseCallOptions.html) * * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc. 
- * They can also be passed via the `.bind`, or the second arg in the `.bindTools` method, like shown in the example below: +* They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below: * * ```typescript * // When calling `.bind`, call options should be passed via the first argument From 8d0c2e5d5a33d46a7d2e73ca1f5be69b83634331 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Thu, 8 Aug 2024 16:31:54 -0700 Subject: [PATCH 6/6] cr --- libs/langchain-aws/src/chat_models.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/langchain-aws/src/chat_models.ts b/libs/langchain-aws/src/chat_models.ts index 9ce93efdd4e4..f39c485b28bb 100644 --- a/libs/langchain-aws/src/chat_models.ts +++ b/libs/langchain-aws/src/chat_models.ts @@ -186,7 +186,7 @@ export interface ChatBedrockConverseCallOptions * ## [Runtime args](/interfaces/langchain_aws.ChatBedrockConverseCallOptions.html) * * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc. -* They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below: + * They can also be passed via `.bind`, or the second arg in `.bindTools`, like shown in the examples below: * * ```typescript * // When calling `.bind`, call options should be passed via the first argument