diff --git a/libs/langchain-community/package.json b/libs/langchain-community/package.json
index 7b441119bdbd..a6870ab61aab 100644
--- a/libs/langchain-community/package.json
+++ b/libs/langchain-community/package.json
@@ -78,7 +78,7 @@
     "@google-cloud/storage": "^7.7.0",
     "@gradientai/nodejs-sdk": "^1.2.0",
     "@huggingface/inference": "^2.6.4",
-    "@ibm-cloud/watsonx-ai": "^1.1.0",
+    "@ibm-cloud/watsonx-ai": "^1.3.0",
     "@jest/globals": "^29.5.0",
     "@lancedb/lancedb": "^0.13.0",
     "@langchain/core": "workspace:*",
diff --git a/libs/langchain-community/src/chat_models/ibm.ts b/libs/langchain-community/src/chat_models/ibm.ts
index 8c329c70a9af..992419649fb1 100644
--- a/libs/langchain-community/src/chat_models/ibm.ts
+++ b/libs/langchain-community/src/chat_models/ibm.ts
@@ -33,6 +33,7 @@ import {
 } from "@langchain/core/outputs";
 import { AsyncCaller } from "@langchain/core/utils/async_caller";
 import {
+  RequestCallbacks,
   TextChatMessagesTextChatMessageAssistant,
   TextChatParameterTools,
   TextChatParams,
@@ -81,12 +82,14 @@ export interface WatsonxDeltaStream {
 export interface WatsonxCallParams
   extends Partial> {
   maxRetries?: number;
+  watsonxCallbacks?: RequestCallbacks;
 }
 export interface WatsonxCallOptionsChat
   extends Omit,
     WatsonxCallParams {
   promptIndex?: number;
   tool_choice?: TextChatParameterTools | string | "auto" | "any";
+  watsonxCallbacks?: RequestCallbacks;
 }
 
 type ChatWatsonxToolType = BindToolsInput | TextChatParameterTools;
@@ -420,6 +423,8 @@ export class ChatWatsonx<
 
   streaming: boolean;
 
+  watsonxCallbacks?: RequestCallbacks;
+
   constructor(fields: ChatWatsonxInput & WatsonxAuth) {
     super(fields);
     if (
@@ -450,7 +455,7 @@
     this.n = fields?.n ?? this.n;
     this.model = fields?.model ?? this.model;
     this.version = fields?.version ?? this.version;
-
+    this.watsonxCallbacks = fields?.watsonxCallbacks ?? this.watsonxCallbacks;
     const {
       watsonxAIApikey,
       watsonxAIAuthType,
@@ -502,6 +507,10 @@
     return { ...params, ...toolChoiceResult };
   }
 
+  invocationCallbacks(options: this["ParsedCallOptions"]) {
+    return options.watsonxCallbacks ?? this.watsonxCallbacks;
+  }
+
   override bindTools(
     tools: ChatWatsonxToolType[],
     kwargs?: Partial
@@ -590,15 +599,19 @@ export class ChatWatsonx<
       ...this.invocationParams(options),
       ...this.scopeId(),
     };
+    const watsonxCallbacks = this.invocationCallbacks(options);
     const watsonxMessages = _convertMessagesToWatsonxMessages(
       messages,
       this.model
     );
     const callback = () =>
-      this.service.textChat({
-        ...params,
-        messages: watsonxMessages,
-      });
+      this.service.textChat(
+        {
+          ...params,
+          messages: watsonxMessages,
+        },
+        watsonxCallbacks
+      );
     const { result } = await this.completionWithRetry(callback, options);
     const generations: ChatGeneration[] = [];
     for (const part of result.choices) {
@@ -638,12 +651,16 @@
       messages,
       this.model
     );
+    const watsonxCallbacks = this.invocationCallbacks(options);
     const callback = () =>
-      this.service.textChatStream({
-        ...params,
-        messages: watsonxMessages,
-        returnObject: true,
-      });
+      this.service.textChatStream(
+        {
+          ...params,
+          messages: watsonxMessages,
+          returnObject: true,
+        },
+        watsonxCallbacks
+      );
     const stream = await this.completionWithRetry(callback, options);
     let defaultRole;
     let usage: TextChatUsage | undefined;
diff --git a/libs/langchain-community/src/chat_models/tests/ibm.int.test.ts b/libs/langchain-community/src/chat_models/tests/ibm.int.test.ts
index be8d6615a402..ae47345a1add 100644
--- a/libs/langchain-community/src/chat_models/tests/ibm.int.test.ts
+++ b/libs/langchain-community/src/chat_models/tests/ibm.int.test.ts
@@ -517,7 +517,9 @@ describe("Tests for chat", () => {
       }
     );
     const llmWithTools = service.bindTools([calculatorTool]);
-    const res = await llmWithTools.invoke("What is 3 * 12");
+    const res = await llmWithTools.invoke(
+      "You are bad at calculations and need to use calculator at all times. What is 3 * 12"
+    );
 
     expect(res).toBeInstanceOf(AIMessage);
     expect(res.tool_calls?.[0].name).toBe("calculator");
@@ -572,7 +574,7 @@
     );
     const llmWithTools = service.bindTools([calculatorTool]);
     const res = await llmWithTools.invoke(
-      "What is 3 * 12? Also, what is 11 + 49?"
+      "You are bad at calculations and need to use calculator at all times. What is 3 * 12? Also, what is 11 + 49?"
     );
 
     expect(res).toBeInstanceOf(AIMessage);
@@ -619,7 +621,9 @@
         },
       ],
     });
-    const res = await modelWithTools.invoke("What is 32 * 122");
+    const res = await modelWithTools.invoke(
+      "You are bad at calculations and need to use calculator at all times. What is 32 * 122"
+    );
 
     expect(res).toBeInstanceOf(AIMessage);
     expect(res.tool_calls?.[0].name).toBe("calculator");
@@ -666,7 +670,7 @@
     const modelWithTools = service.bindTools(tools);
 
     const res = await modelWithTools.invoke(
-      "What is 3 * 12? Also, what is 11 + 49?"
+      "You are bad at calculations and need to use calculator at all times. What is 3 * 12? Also, what is 11 + 49?"
     );
 
     expect(res).toBeInstanceOf(AIMessage);
@@ -831,4 +835,110 @@ describe("Tests for chat", () => {
       expect(typeof result.number2).toBe("number");
     });
   });
+
+  describe("Test watsonx callbacks", () => {
+    test("Single request callback", async () => {
+      let callbackFlag = false;
+      const service = new ChatWatsonx({
+        model: "mistralai/mistral-large",
+        version: "2024-05-31",
+        serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
+        maxTokens: 10,
+        watsonxCallbacks: {
+          requestCallback(req) {
+            callbackFlag = !!req;
+          },
+        },
+      });
+      const hello = await service.stream("Print hello world");
+      const chunks = [];
+      for await (const chunk of hello) {
+        chunks.push(chunk);
+      }
+      expect(callbackFlag).toBe(true);
+    });
+    test("Single response callback", async () => {
+      let callbackFlag = false;
+      const service = new ChatWatsonx({
+        model: "mistralai/mistral-large",
+        version: "2024-05-31",
+        serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
+        maxTokens: 10,
+        watsonxCallbacks: {
+          responseCallback(res) {
+            callbackFlag = !!res;
+          },
+        },
+      });
+      const hello = await service.stream("Print hello world");
+      const chunks = [];
+      for await (const chunk of hello) {
+        chunks.push(chunk);
+      }
+      expect(callbackFlag).toBe(true);
+    });
+    test("Both callbacks", async () => {
+      let callbackFlagReq = false;
+      let callbackFlagRes = false;
+      const service = new ChatWatsonx({
+        model: "mistralai/mistral-large",
+        version: "2024-05-31",
+        serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
+        maxTokens: 10,
+        watsonxCallbacks: {
+          requestCallback(req) {
+            callbackFlagReq = !!req;
+          },
+          responseCallback(res) {
+            callbackFlagRes = !!res;
+          },
+        },
+      });
+      const hello = await service.stream("Print hello world");
+      const chunks = [];
+      for await (const chunk of hello) {
+        chunks.push(chunk);
+      }
+      expect(callbackFlagReq).toBe(true);
+      expect(callbackFlagRes).toBe(true);
+    });
+    test("Multiple callbacks", async () => {
+      let callbackFlagReq = false;
+      let callbackFlagRes = false;
+      let langchainCallback = false;
+
+      const service = new ChatWatsonx({
+        model: "mistralai/mistral-large",
+        version: "2024-05-31",
+        serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
+        maxTokens: 10,
+        callbacks: CallbackManager.fromHandlers({
+          async handleLLMEnd(output) {
+            expect(output.generations).toBeDefined();
+            langchainCallback = !!output;
+          },
+        }),
+        watsonxCallbacks: {
+          requestCallback(req) {
+            callbackFlagReq = !!req;
+          },
+          responseCallback(res) {
+            callbackFlagRes = !!res;
+          },
+        },
+      });
+      const hello = await service.stream("Print hello world");
+      const chunks = [];
+      for await (const chunk of hello) {
+        chunks.push(chunk);
+      }
+      expect(callbackFlagReq).toBe(true);
+      expect(callbackFlagRes).toBe(true);
+      expect(langchainCallback).toBe(true);
+    });
+  });
 });
diff --git a/libs/langchain-community/src/document_compressors/ibm.ts b/libs/langchain-community/src/document_compressors/ibm.ts
index 026219cc8fa8..7bfbafdc9e62 100644
--- a/libs/langchain-community/src/document_compressors/ibm.ts
+++ b/libs/langchain-community/src/document_compressors/ibm.ts
@@ -159,7 +159,7 @@ export class WatsonxRerank
         ? {
             index: document.index,
             relevanceScore: document.score,
-            input: document?.input,
+            input: document?.input.text,
           }
         : {
             index: document.index,
diff --git a/libs/langchain-community/src/llms/ibm.ts b/libs/langchain-community/src/llms/ibm.ts
index 5647e2f3ae9f..75e65fd6873d 100644
--- a/libs/langchain-community/src/llms/ibm.ts
+++ b/libs/langchain-community/src/llms/ibm.ts
@@ -4,6 +4,7 @@ import { BaseLLM, BaseLLMParams } from "@langchain/core/language_models/llms";
 import { WatsonXAI } from "@ibm-cloud/watsonx-ai";
 import {
   DeploymentTextGenProperties,
+  RequestCallbacks,
   ReturnOptionProperties,
   TextGenLengthPenalty,
   TextGenParameters,
@@ -34,6 +35,7 @@ export interface WatsonxCallOptionsLLM extends BaseLanguageModelCallOptions {
   maxRetries?: number;
   parameters?: Partial;
   idOrName?: string;
+  watsonxCallbacks?: RequestCallbacks;
 }
 
 export interface WatsonxInputLLM extends WatsonxParams, BaseLLMParams {
@@ -52,6 +54,7 @@
   truncateInpuTokens?: number;
   returnOptions?: ReturnOptionProperties;
   includeStopSequence?: boolean;
+  watsonxCallbacks?: RequestCallbacks;
 }
 
 /**
@@ -116,6 +119,8 @@ export class WatsonxLLM<
 
   maxConcurrency?: number;
 
+  watsonxCallbacks?: RequestCallbacks;
+
   private service: WatsonXAI;
 
   constructor(fields: WatsonxInputLLM & WatsonxAuth) {
@@ -140,6 +145,7 @@
     this.maxRetries = fields.maxRetries || this.maxRetries;
     this.maxConcurrency = fields.maxConcurrency;
     this.streaming = fields.streaming || this.streaming;
+    this.watsonxCallbacks = fields.watsonxCallbacks || this.watsonxCallbacks;
     if (
       (fields.projectId && fields.spaceId) ||
       (fields.idOrName && fields.projectId) ||
@@ -235,6 +241,10 @@
     };
   }
 
+  invocationCallbacks(options: this["ParsedCallOptions"]) {
+    return options.watsonxCallbacks ?? this.watsonxCallbacks;
+  }
+
   scopeId() {
     if (this.projectId)
       return { projectId: this.projectId, modelId: this.model };
@@ -285,9 +295,10 @@
     const tokenUsage = { generated_token_count: 0, input_token_count: 0 };
     const idOrName = options?.idOrName ?? this.idOrName;
     const parameters = this.invocationParams(options);
+    const watsonxCallbacks = this.invocationCallbacks(options);
     if (stream) {
       const textStream = idOrName
-        ? await this.service.deploymentGenerateTextStream({
+        ? this.service.deploymentGenerateTextStream({
            idOrName,
            ...requestOptions,
            parameters: {
@@ -298,32 +309,43 @@
             },
             returnObject: true,
           })
-        : await this.service.generateTextStream({
-            input,
-            parameters,
-            ...this.scopeId(),
-            ...requestOptions,
-            returnObject: true,
-          });
-      return textStream;
+        : this.service.generateTextStream(
+            {
+              input,
+              parameters,
+              ...this.scopeId(),
+              ...requestOptions,
+              returnObject: true,
+            },
+            watsonxCallbacks
+          );
+      return (await textStream) as AsyncIterable<
+        WatsonXAI.ObjectStreamed
+      >;
     } else {
       const textGenerationPromise = idOrName
-        ? this.service.deploymentGenerateText({
-            ...requestOptions,
-            idOrName,
-            parameters: {
-              ...parameters,
-              prompt_variables: {
-                input,
+        ? this.service.deploymentGenerateText(
+            {
+              ...requestOptions,
+              idOrName,
+              parameters: {
+                ...parameters,
+                prompt_variables: {
+                  input,
+                },
               },
             },
-          })
-        : this.service.generateText({
-            input,
-            parameters,
-            ...this.scopeId(),
-            ...requestOptions,
-          });
+            watsonxCallbacks
+          )
+        : this.service.generateText(
+            {
+              input,
+              parameters,
+              ...this.scopeId(),
+              ...requestOptions,
+            },
+            watsonxCallbacks
+          );
 
       const textGeneration = await textGenerationPromise;
       const singleGeneration: Generation[] = textGeneration.result.results.map(
@@ -403,11 +425,10 @@
         geneartionsArray[completion].stop_reason =
           chunk?.generationInfo?.stop_reason;
         geneartionsArray[completion].text += chunk.text;
-        if (chunk.text)
-          void runManager?.handleLLMNewToken(chunk.text, {
-            prompt: promptIdx,
-            completion: 0,
-          });
+        void runManager?.handleLLMNewToken(chunk.text, {
+          prompt: promptIdx,
+          completion: 0,
+        });
       }
 
       return geneartionsArray.map((item) => {
@@ -512,7 +533,7 @@
             },
           },
         });
-        if (item.generated_text)
+        if (!this.streaming)
           void runManager?.handleLLMNewToken(item.generated_text);
       }
       Object.assign(responseChunk, { id: 0, event: "", data: {} });
diff --git a/libs/langchain-community/src/llms/tests/ibm.int.test.ts b/libs/langchain-community/src/llms/tests/ibm.int.test.ts
index 369b657fb4ca..f6a6e9e1ddcc 100644
--- a/libs/langchain-community/src/llms/tests/ibm.int.test.ts
+++ b/libs/langchain-community/src/llms/tests/ibm.int.test.ts
@@ -420,4 +420,109 @@ describe("Text generation", () => {
       ).rejects.toThrowError();
     });
   });
+
+  describe("Test watsonx callbacks", () => {
+    test("Single request callback", async () => {
+      let callbackFlag = false;
+      const service = new WatsonxLLM({
+        model: "mistralai/mistral-large",
+        version: "2024-05-31",
+        serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
+        watsonxCallbacks: {
+          requestCallback(req) {
+            callbackFlag = !!req;
+          },
+        },
+      });
+      const hello = await service.stream("Print hello world");
+      const chunks = [];
+      for await (const chunk of hello) {
+        chunks.push(chunk);
+      }
+      expect(callbackFlag).toBe(true);
+    });
+    test("Single response callback", async () => {
+      let callbackFlag = false;
+      const service = new WatsonxLLM({
+        model: "mistralai/mistral-large",
+        version: "2024-05-31",
+        serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
+        maxNewTokens: 10,
+        watsonxCallbacks: {
+          responseCallback(res) {
+            callbackFlag = !!res;
+          },
+        },
+      });
+      const hello = await service.stream("Print hello world");
+      const chunks = [];
+      for await (const chunk of hello) {
+        chunks.push(chunk);
+      }
+      expect(callbackFlag).toBe(true);
+    });
+    test("Both callbacks", async () => {
+      let callbackFlagReq = false;
+      let callbackFlagRes = false;
+      const service = new WatsonxLLM({
+        model: "mistralai/mistral-large",
+        version: "2024-05-31",
+        serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
+        maxNewTokens: 10,
+        watsonxCallbacks: {
+          requestCallback(req) {
+            callbackFlagReq = !!req;
+          },
+          responseCallback(res) {
+            callbackFlagRes = !!res;
+          },
+        },
+      });
+      const hello = await service.stream("Print hello world");
+      const chunks = [];
+      for await (const chunk of hello) {
+        chunks.push(chunk);
+      }
+      expect(callbackFlagReq).toBe(true);
+      expect(callbackFlagRes).toBe(true);
+    });
+    test("Multiple callbacks", async () => {
+      let callbackFlagReq = false;
+      let callbackFlagRes = false;
+      let langchainCallback = false;
+
+      const service = new WatsonxLLM({
+        model: "mistralai/mistral-large",
+        version: "2024-05-31",
+        serviceUrl: process.env.WATSONX_AI_SERVICE_URL ?? "testString",
+        projectId: process.env.WATSONX_AI_PROJECT_ID ?? "testString",
+        maxNewTokens: 10,
+        watsonxCallbacks: {
+          requestCallback(req) {
+            callbackFlagReq = !!req;
+          },
+          responseCallback(res) {
+            callbackFlagRes = !!res;
+          },
+        },
+        callbacks: CallbackManager.fromHandlers({
+          async handleLLMEnd(output) {
+            expect(output.generations).toBeDefined();
+            langchainCallback = !!output;
+          },
+        }),
+      });
+      const hello = await service.stream("Print hello world");
+      const chunks = [];
+      for await (const chunk of hello) {
+        chunks.push(chunk);
+      }
+      expect(callbackFlagReq).toBe(true);
+      expect(callbackFlagRes).toBe(true);
+      expect(langchainCallback).toBe(true);
+    });
+  });
 });
diff --git a/yarn.lock b/yarn.lock
index 8475c45dc0d4..6fc855ba3053 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -10620,14 +10620,14 @@ __metadata:
   languageName: node
   linkType: hard
 
-"@ibm-cloud/watsonx-ai@npm:^1.1.0":
-  version: 1.1.0
-  resolution: "@ibm-cloud/watsonx-ai@npm:1.1.0"
+"@ibm-cloud/watsonx-ai@npm:^1.3.0":
+  version: 1.3.0
+  resolution: "@ibm-cloud/watsonx-ai@npm:1.3.0"
   dependencies:
-    "@types/node": ^12.0.8
+    "@types/node": ^18.0.0
     extend: 3.0.2
-    ibm-cloud-sdk-core: ^4.2.5
-  checksum: 0151bb0abe2a7d1dbcd6f8367ea02dfc924f15bdcbe8ec58bb89c8e055fa35c399b2253d6be3b84292f96c9161e49bcd6d6f5e1df0f2cd9adf21d1f3c0bc24b4
+    ibm-cloud-sdk-core: ^5.0.2
+  checksum: 6a2127391ca70005b942d3c4ab1abc738946c42bbf3ee0f8eb6f778434b5f8806d622f1f36446f00b9fb82dc2c8aea3526426ec46cc53fa8a075ba7a294da096
   languageName: node
   linkType: hard
 
@@ -11802,7 +11802,7 @@
     "@google-cloud/storage": ^7.7.0
     "@gradientai/nodejs-sdk": ^1.2.0
     "@huggingface/inference": ^2.6.4
-    "@ibm-cloud/watsonx-ai": ^1.1.0
+    "@ibm-cloud/watsonx-ai": ^1.3.0
    "@jest/globals": ^29.5.0
    "@lancedb/lancedb": ^0.13.0
    "@langchain/core": "workspace:*"
@@ -19629,13 +19629,6 @@
   languageName: node
   linkType: hard
 
-"@types/node@npm:^12.0.8":
-  version: 12.20.55
-  resolution: "@types/node@npm:12.20.55"
-  checksum: e4f86785f4092706e0d3b0edff8dca5a13b45627e4b36700acd8dfe6ad53db71928c8dee914d4276c7fd3b6ccd829aa919811c9eb708a2c8e4c6eb3701178c37
-  languageName: node
-  linkType: hard
-
 "@types/node@npm:^17.0.5":
   version: 17.0.45
   resolution: "@types/node@npm:17.0.45"
@@ -19643,6 +19636,15 @@
   languageName: node
   linkType: hard
 
+"@types/node@npm:^18.0.0":
+  version: 18.19.67
+  resolution: "@types/node@npm:18.19.67"
+  dependencies:
+    undici-types: ~5.26.4
+  checksum: 700f92c6a0b63352ce6327286392adab30bb17623c2a788811e9cf092c4dc2fb5e36ca4727247a981b3f44185fdceef20950a3b7a8ab72721e514ac037022a08
+  languageName: node
+  linkType: hard
+
 "@types/node@npm:^18.11.18":
   version: 18.16.19
   resolution: "@types/node@npm:18.16.19"
@@ -21998,17 +22000,6 @@
   languageName: node
   linkType: hard
 
-"axios@npm:^1.7.5":
-  version: 1.7.7
-  resolution: "axios@npm:1.7.7"
-  dependencies:
-    follow-redirects: ^1.15.6
-    form-data: ^4.0.0
-    proxy-from-env: ^1.1.0
-  checksum: 882d4fe0ec694a07c7f5c1f68205eb6dc5a62aecdb632cc7a4a3d0985188ce3030e0b277e1a8260ac3f194d314ae342117660a151fabffdc5081ca0b5a8b47fe
-  languageName: node
-  linkType: hard
-
 "axobject-query@npm:^3.1.1, axobject-query@npm:^3.2.1":
   version: 3.2.1
   resolution: "axobject-query@npm:3.2.1"
@@ -25654,13 +25645,6 @@
   languageName: node
   linkType: hard
 
-"diff-sequences@npm:^27.5.1":
-  version: 27.5.1
-  resolution: "diff-sequences@npm:27.5.1"
-  checksum: a00db5554c9da7da225db2d2638d85f8e41124eccbd56cbaefb3b276dcbb1c1c2ad851c32defe2055a54a4806f030656cbf6638105fd6ce97bb87b90b32a33ca
-  languageName: node
-  linkType: hard
-
 "diff-sequences@npm:^29.4.3":
   version: 29.4.3
   resolution: "diff-sequences@npm:29.4.3"
@@ -27879,18 +27863,6 @@
   languageName: node
   linkType: hard
 
-"expect@npm:^27.5.1":
-  version: 27.5.1
-  resolution: "expect@npm:27.5.1"
-  dependencies:
-    "@jest/types": ^27.5.1
-    jest-get-type: ^27.5.1
-    jest-matcher-utils: ^27.5.1
-    jest-message-util: ^27.5.1
-  checksum: b2c66beb52de53ef1872165aace40224e722bca3c2274c54cfa74b6d617d55cf0ccdbf36783ccd64dbea501b280098ed33fd0b207d4f15bc03cd3c7a24364a6a
-  languageName: node
-  linkType: hard
-
 "expect@npm:^29.0.0":
   version: 29.6.1
   resolution: "expect@npm:29.6.1"
@@ -30565,30 +30537,6 @@
   languageName: node
   linkType: hard
 
-"ibm-cloud-sdk-core@npm:^4.2.5":
-  version: 4.3.4
-  resolution: "ibm-cloud-sdk-core@npm:4.3.4"
-  dependencies:
-    "@types/debug": ^4.1.12
-    "@types/node": ~10.14.19
-    "@types/tough-cookie": ^4.0.0
-    axios: ^1.7.5
-    camelcase: ^6.3.0
-    debug: ^4.3.4
-    dotenv: ^16.4.5
-    expect: ^27.5.1
-    extend: 3.0.2
-    file-type: 16.5.4
-    form-data: 4.0.0
-    isstream: 0.1.2
-    jsonwebtoken: ^9.0.2
-    mime-types: 2.1.35
-    retry-axios: ^2.6.0
-    tough-cookie: ^4.1.3
-  checksum: 27d6bd692cde66766a7cea36e75d53a6a089e2b2b726cf86108ab48f9d452bb6d6a01324d2160e3bb54df7750240129bae989934ab2fd80c0950ecdb5bfc07b3
-  languageName: node
-  linkType: hard
-
 "ibm-cloud-sdk-core@npm:^5.0.2":
   version: 5.0.2
   resolution: "ibm-cloud-sdk-core@npm:5.0.2"
@@ -32175,18 +32123,6 @@
   languageName: node
   linkType: hard
 
-"jest-diff@npm:^27.5.1":
-  version: 27.5.1
-  resolution: "jest-diff@npm:27.5.1"
-  dependencies:
-    chalk: ^4.0.0
-    diff-sequences: ^27.5.1
-    jest-get-type: ^27.5.1
-    pretty-format: ^27.5.1
-  checksum: 8be27c1e1ee57b2bb2bef9c0b233c19621b4c43d53a3c26e2c00a4e805eb4ea11fe1694a06a9fb0e80ffdcfdc0d2b1cb0b85920b3f5c892327ecd1e7bd96b865
-  languageName: node
-  linkType: hard
-
 "jest-diff@npm:^29.5.0":
   version: 29.5.0
   resolution: "jest-diff@npm:29.5.0"
@@ -32273,13 +32209,6 @@
   languageName: node
   linkType: hard
 
-"jest-get-type@npm:^27.5.1":
-  version: 27.5.1
-  resolution: "jest-get-type@npm:27.5.1"
-  checksum: 63064ab70195c21007d897c1157bf88ff94a790824a10f8c890392e7d17eda9c3900513cb291ca1c8d5722cad79169764e9a1279f7c8a9c4cd6e9109ff04bbc0
-  languageName: node
-  linkType: hard
-
 "jest-get-type@npm:^29.4.3":
   version: 29.4.3
   resolution: "jest-get-type@npm:29.4.3"
@@ -32350,18 +32279,6 @@
   languageName: node
   linkType: hard
 
-"jest-matcher-utils@npm:^27.5.1":
-  version: 27.5.1
-  resolution: "jest-matcher-utils@npm:27.5.1"
-  dependencies:
-    chalk: ^4.0.0
-    jest-diff: ^27.5.1
-    jest-get-type: ^27.5.1
-    pretty-format: ^27.5.1
-  checksum: bb2135fc48889ff3fe73888f6cc7168ddab9de28b51b3148f820c89fdfd2effdcad005f18be67d0b9be80eda208ad47290f62f03d0a33f848db2dd0273c8217a
-  languageName: node
-  linkType: hard
-
 "jest-matcher-utils@npm:^29.5.0":
   version: 29.5.0
   resolution: "jest-matcher-utils@npm:29.5.0"
@@ -32398,23 +32315,6 @@
   languageName: node
   linkType: hard
 
-"jest-message-util@npm:^27.5.1":
-  version: 27.5.1
-  resolution: "jest-message-util@npm:27.5.1"
-  dependencies:
-    "@babel/code-frame": ^7.12.13
-    "@jest/types": ^27.5.1
-    "@types/stack-utils": ^2.0.0
-    chalk: ^4.0.0
-    graceful-fs: ^4.2.9
-    micromatch: ^4.0.4
-    pretty-format: ^27.5.1
-    slash: ^3.0.0
-    stack-utils: ^2.0.3
-  checksum: eb6d637d1411c71646de578c49826b6da8e33dd293e501967011de9d1916d53d845afbfb52a5b661ff1c495be7c13f751c48c7f30781fd94fbd64842e8195796
-  languageName: node
-  linkType: hard
-
 "jest-message-util@npm:^29.5.0":
   version: 29.5.0
   resolution: "jest-message-util@npm:29.5.0"
@@ -38033,17 +37933,6 @@
   languageName: node
   linkType: hard
 
-"pretty-format@npm:^27.5.1":
-  version: 27.5.1
-  resolution: "pretty-format@npm:27.5.1"
-  dependencies:
-    ansi-regex: ^5.0.1
-    ansi-styles: ^5.0.0
-    react-is: ^17.0.1
-  checksum: cf610cffcb793885d16f184a62162f2dd0df31642d9a18edf4ca298e909a8fe80bdbf556d5c9573992c102ce8bf948691da91bf9739bee0ffb6e79c8a8a6e088
-  languageName: node
-  linkType: hard
-
 "pretty-format@npm:^29.0.0, pretty-format@npm:^29.6.1":
   version: 29.6.1
   resolution: "pretty-format@npm:29.6.1"
@@ -38744,13 +38633,6 @@
   languageName: node
   linkType: hard
 
-"react-is@npm:^17.0.1":
-  version: 17.0.2
-  resolution: "react-is@npm:17.0.2"
-  checksum: 9d6d111d8990dc98bc5402c1266a808b0459b5d54830bbea24c12d908b536df7883f268a7868cfaedde3dd9d4e0d574db456f84d2e6df9c4526f99bb4b5344d8
-  languageName: node
-  linkType: hard
-
 "react-is@npm:^18.0.0":
   version: 18.2.0
   resolution: "react-is@npm:18.2.0"