From f2836b75ed4ca5fca1226975857abfddc8cb67a4 Mon Sep 17 00:00:00 2001
From: Hemanth HM
Date: Fri, 18 Oct 2024 22:12:21 +0000
Subject: [PATCH 1/2] fix: AILanguageModel interface

---
 .../src/experimental/llms/chrome_ai.ts | 81 ++++++++++++++++---
 1 file changed, 69 insertions(+), 12 deletions(-)

diff --git a/libs/langchain-community/src/experimental/llms/chrome_ai.ts b/libs/langchain-community/src/experimental/llms/chrome_ai.ts
index 618a8b6ecfbc..27716cdffb8f 100644
--- a/libs/langchain-community/src/experimental/llms/chrome_ai.ts
+++ b/libs/langchain-community/src/experimental/llms/chrome_ai.ts
@@ -5,25 +5,82 @@ import { GenerationChunk } from "@langchain/core/outputs";
 import { IterableReadableStream } from "@langchain/core/utils/stream";
 import { BaseLLMParams, LLM } from "@langchain/core/language_models/llms";
 
-export interface AI {
-  canCreateTextSession(): Promise<AIModelAvailability>;
-  createTextSession(options?: AITextSessionOptions): Promise<AITextSession>;
-  defaultTextSessionOptions(): Promise<AITextSessionOptions>;
+export interface AILanguageModelFactory {
+  create(options?: AILanguageModelCreateOptions): Promise<AILanguageModel>;
+  capabilities(): Promise<AILanguageModelCapabilities>;
 }
 
-export interface AITextSession {
-  prompt(input: string): Promise<string>;
-  promptStreaming(input: string): ReadableStream<string>;
+export interface AILanguageModel extends EventTarget {
+  prompt(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise<string>;
+  promptStreaming(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): ReadableStream<string>;
+
+  countPromptTokens(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise<number>;
+
+  get maxTokens(): number;
+  get tokensSoFar(): number;
+  get tokensLeft(): number;
+
+  get topK(): number;
+  get temperature(): number;
+
+  oncontextoverflow: ((event: Event) => void);
+
+  clone(options?: AILanguageModelCloneOptions): Promise<AILanguageModel>;
   destroy(): void;
-  clone(): AITextSession;
 }
 
-export interface AITextSessionOptions {
+interface AILanguageModelCapabilities {
+  readonly available: AICapabilityAvailability;
+  languageAvailable(languageTag: string): AICapabilityAvailability;
+
+  get defaultTopK(): number | undefined;
+  get maxTopK(): number | undefined;
+  get defaultTemperature(): number | undefined;
+  get maxTemperature(): number | undefined;
+}
+
+interface AILanguageModelCreateOptions {
+  signal?: AbortSignal;
+  monitor?: AICreateMonitorCallback;
+  systemPrompt?: string;
+  initialPrompts?: AILanguageModelInitialPrompt[];
   topK: number;
   temperature: number;
 }
 
-export type AIModelAvailability = "readily" | "after-download" | "no";
+export interface AILanguageModelInitialPrompt {
+  role: AILanguageModelInitialPromptRole;
+  content: string;
+}
+
+export interface AILanguageModelPrompt {
+  role: AILanguageModelPromptRole;
+  content: string;
+}
+
+export interface AILanguageModelPromptOptions {
+  signal?: AbortSignal;
+}
+
+export interface AILanguageModelCloneOptions {
+  signal?: AbortSignal;
+}
+
+export type AILanguageModelPromptInput = string | AILanguageModelPrompt | AILanguageModelPrompt[];
+
+enum AILanguageModelInitialPromptRole {
+  "system",
+  "user",
+  "assistant"
+}
+
+enum AILanguageModelPromptRole {
+  "user",
+  "assistant"
+}
+
+export type AICapabilityAvailability = "yes" | "no";
+export type AICreateMonitorCallback = () => void;
 
 export interface ChromeAIInputs extends BaseLLMParams {
   topK?: number;
@@ -93,14 +150,14 @@ export class ChromeAI extends LLM {
         `Could not initialize ChromeAI instance. Make sure you are running a version of Chrome with the proper experimental flags enabled.\n\nError message: ${e.message}`
       );
     }
-    const { available } = await aiInstance.assistant.capabilities();
+    const { available } = await aiInstance.languageModel.capabilities();
     if (available === "no") {
       throw new Error("The AI model is not available.");
    } else if (available === "after-download") {
       throw new Error("The AI model is not yet downloaded.");
     }
 
-    const session = await aiInstance.assistant.create({
+    const session = await aiInstance.languageModel.create({
       systemPrompt: this.systemPrompt,
       topK: this.topK,
       temperature: this.temperature,

From 9ec87a64cc0ff15c1d671abf21bd0873a7d46308 Mon Sep 17 00:00:00 2001
From: jacoblee93
Date: Sun, 20 Oct 2024 00:16:19 -0700
Subject: [PATCH 2/2] Format and lint

---
 .../src/experimental/llms/chrome_ai.ts | 28 +++++++++++++------
 1 file changed, 20 insertions(+), 8 deletions(-)

diff --git a/libs/langchain-community/src/experimental/llms/chrome_ai.ts b/libs/langchain-community/src/experimental/llms/chrome_ai.ts
index 27716cdffb8f..c41ba13121a6 100644
--- a/libs/langchain-community/src/experimental/llms/chrome_ai.ts
+++ b/libs/langchain-community/src/experimental/llms/chrome_ai.ts
@@ -11,10 +11,19 @@ export interface AILanguageModelFactory {
 }
 
 export interface AILanguageModel extends EventTarget {
-  prompt(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise<string>;
-  promptStreaming(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): ReadableStream<string>;
-
-  countPromptTokens(input: AILanguageModelPromptInput, options?: AILanguageModelPromptOptions): Promise<number>;
+  prompt(
+    input: AILanguageModelPromptInput,
+    options?: AILanguageModelPromptOptions
+  ): Promise<string>;
+  promptStreaming(
+    input: AILanguageModelPromptInput,
+    options?: AILanguageModelPromptOptions
+  ): ReadableStream<string>;
+
+  countPromptTokens(
+    input: AILanguageModelPromptInput,
+    options?: AILanguageModelPromptOptions
+  ): Promise<number>;
 
   get maxTokens(): number;
   get tokensSoFar(): number;
@@ -23,7 +32,7 @@ export interface AILanguageModel extends EventTarget {
   get topK(): number;
   get temperature(): number;
 
-  oncontextoverflow: ((event: Event) => void);
+  oncontextoverflow: (event: Event) => void;
 
   clone(options?: AILanguageModelCloneOptions): Promise<AILanguageModel>;
   destroy(): void;
@@ -66,17 +75,20 @@ export interface AILanguageModelCloneOptions {
   signal?: AbortSignal;
 }
 
-export type AILanguageModelPromptInput = string | AILanguageModelPrompt | AILanguageModelPrompt[];
+export type AILanguageModelPromptInput =
+  | string
+  | AILanguageModelPrompt
+  | AILanguageModelPrompt[];
 
 enum AILanguageModelInitialPromptRole {
   "system",
   "user",
-  "assistant"
+  "assistant",
 }
 
 enum AILanguageModelPromptRole {
   "user",
-  "assistant"
+  "assistant",
 }
 
 export type AICapabilityAvailability = "yes" | "no";