Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

community[minor],docs[minor]: Add ChromeAI chat model #5903

Merged
merged 16 commits into from
Jun 28, 2024
Merged
4 changes: 4 additions & 0 deletions libs/langchain-community/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -518,6 +518,10 @@ chat_models/cloudflare_workersai.cjs
chat_models/cloudflare_workersai.js
chat_models/cloudflare_workersai.d.ts
chat_models/cloudflare_workersai.d.cts
chat_models/chrome_ai.cjs
chat_models/chrome_ai.js
chat_models/chrome_ai.d.ts
chat_models/chrome_ai.d.cts
chat_models/deepinfra.cjs
chat_models/deepinfra.js
chat_models/deepinfra.d.ts
Expand Down
1 change: 1 addition & 0 deletions libs/langchain-community/langchain.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,7 @@ export const config = {
"chat_models/bedrock": "chat_models/bedrock/index",
"chat_models/bedrock/web": "chat_models/bedrock/web",
"chat_models/cloudflare_workersai": "chat_models/cloudflare_workersai",
"chat_models/chrome_ai": "chat_models/chrome_ai",
"chat_models/deepinfra": "chat_models/deepinfra",
"chat_models/fireworks": "chat_models/fireworks",
"chat_models/friendli": "chat_models/friendli",
Expand Down
13 changes: 13 additions & 0 deletions libs/langchain-community/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -1870,6 +1870,15 @@
"import": "./chat_models/cloudflare_workersai.js",
"require": "./chat_models/cloudflare_workersai.cjs"
},
"./chat_models/chrome_ai": {
"types": {
"import": "./chat_models/chrome_ai.d.ts",
"require": "./chat_models/chrome_ai.d.cts",
"default": "./chat_models/chrome_ai.d.ts"
},
"import": "./chat_models/chrome_ai.js",
"require": "./chat_models/chrome_ai.cjs"
},
"./chat_models/deepinfra": {
"types": {
"import": "./chat_models/deepinfra.d.ts",
Expand Down Expand Up @@ -3555,6 +3564,10 @@
"chat_models/cloudflare_workersai.js",
"chat_models/cloudflare_workersai.d.ts",
"chat_models/cloudflare_workersai.d.cts",
"chat_models/chrome_ai.cjs",
"chat_models/chrome_ai.js",
"chat_models/chrome_ai.d.ts",
"chat_models/chrome_ai.d.cts",
"chat_models/deepinfra.cjs",
"chat_models/deepinfra.js",
"chat_models/deepinfra.d.ts",
Expand Down
181 changes: 181 additions & 0 deletions libs/langchain-community/src/chat_models/chrome_ai.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,181 @@
import {
  SimpleChatModel,
  type BaseChatModelParams,
} from "@langchain/core/language_models/chat_models";
import type { BaseLanguageModelCallOptions } from "@langchain/core/language_models/base";
import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";
import { BaseMessage, AIMessageChunk } from "@langchain/core/messages";
import { ChatGenerationChunk } from "@langchain/core/outputs";

/**
 * Minimal typing for the experimental `window.ai` entry point exposed by
 * Chrome builds enrolled in the Built-in AI Early Preview Program.
 */
export interface AI {
  /** Reports whether a text session can be created on this device. */
  canCreateTextSession(): Promise<AIModelAvailability>;
  /** Creates a new on-device text session with optional sampling options. */
  createTextSession(options?: AITextSessionOptions): Promise<AITextSession>;
  /** Returns the browser's default sampling options for text sessions. */
  defaultTextSessionOptions(): Promise<AITextSessionOptions>;
}

/**
 * A handle to an on-device Chrome AI text session.
 */
export interface AITextSession {
  /** Runs the prompt to completion and resolves with the full response. */
  prompt(input: string): Promise<string>;
  /** Streams the response; NOTE(review): the early-preview API appears to emit cumulative text — confirm. */
  promptStreaming(input: string): ReadableStream;
  /** Frees the session; any in-flight execution is aborted. */
  destroy(): void;
  /** Returns an independent copy of this session. */
  clone(): AITextSession;
}

/**
 * Sampling options accepted when creating a Chrome AI text session.
 */
export interface AITextSessionOptions {
  // Number of highest-probability tokens considered at each sampling step.
  topK: number;
  // Sampling temperature; higher values produce more random output.
  temperature: number;
}

/**
 * Availability states reported by `AI.canCreateTextSession()`.
 */
export enum AIModelAvailability {
  /** The model is downloaded and a session can be created immediately. */
  Readily = "readily",
  /** The model must finish downloading before a session can be created. */
  AfterDownload = "after-download",
  /** The model is not available on this device. */
  No = "no",
}

/**
 * Constructor inputs for {@link ChatChromeAI}.
 */
export interface ChromeAIInputs extends BaseChatModelParams {
  // Optional top-K sampling parameter; the model defaults to 40.
  topK?: number;
  // Optional sampling temperature; the model defaults to 0.5.
  temperature?: number;
}

/** Per-call options for {@link ChatChromeAI}; no additions beyond the base. */
export interface ChromeAICallOptions extends BaseLanguageModelCallOptions {}

/**
 * Serialize a list of messages into a single prompt string, wrapping each
 * message's string content in pseudo-XML tags named after its role,
 * e.g. `<human>...</human>`, joined by newlines.
 *
 * @throws when any message carries non-string (multimodal) content.
 */
function formatPrompt(messages: BaseMessage[]): string {
  const parts: string[] = [];
  for (const message of messages) {
    const { content } = message;
    if (typeof content !== "string") {
      throw new Error(
        "ChatChromeAI does not support non-string message content."
      );
    }
    const role = message._getType();
    parts.push(`<${role}>\n${content}\n</${role}>`);
  }
  return parts.join("\n");
}

/**
* To use this model you need to have the `Built-in AI Early Preview Program`
* for Chrome. You can find more information about the program here:
* @link https://developer.chrome.com/docs/ai/built-in
*
* @example
* ```typescript
* // Initialize the ChatChromeAI model.
* const model = new ChatChromeAI({
* temperature: 0.5, // Optional. Default is 0.5.
* topK: 40, // Optional. Default is 40.
* });
*
* // Call the model with a message and await the response.
* const response = await model.invoke([
* new HumanMessage({ content: "My name is John." }),
* ]);
* ```
*/
export class ChatChromeAI extends SimpleChatModel<ChromeAICallOptions> {
session?: AITextSession;

temperature = 0.5;

topK = 40;

static lc_name() {
return "ChatChromeAI";
}

constructor(inputs: ChromeAIInputs) {
super(inputs);
this.temperature = inputs.temperature ?? this.temperature;
this.topK = inputs.topK ?? this.topK;
}

_llmType() {
return "chrome-ai";
}

/**
* Initialize the model. This method must be called before calling `.invoke()`.
*/
async initialize() {
if (typeof window === "undefined") {
throw new Error("ChatChromeAI can only be used in the browser.");
}

const ai: AI = (window as any).ai;
const canCreateTextSession = await ai.canCreateTextSession();
if (canCreateTextSession === AIModelAvailability.No) {
throw new Error("The AI model is not available.");
} else if (canCreateTextSession === AIModelAvailability.AfterDownload) {
throw new Error("The AI model is not yet downloaded.");
}

this.session = await ai.createTextSession({
topK: this.topK,
temperature: this.temperature,
});
}

/**
* Call `.destroy()` to free resources if you no longer need a session.
* When a session is destroyed, it can no longer be used, and any ongoing
* execution will be aborted. You may want to keep the session around if
* you intend to prompt the model often since creating a session can take
* some time.
*/
destroy() {
if (!this.session) {
return console.log("No session found. Returning.");
}
this.session.destroy();
}

async *_streamResponseChunks(
messages: BaseMessage[],
_options: this["ParsedCallOptions"],
runManager?: CallbackManagerForLLMRun
): AsyncGenerator<ChatGenerationChunk> {
if (!this.session) {
throw new Error("Session not found. Please call `.initialize()` first.");
}
const textPrompt = formatPrompt(messages);

const stream = this.session.promptStreaming(textPrompt);
const reader = stream.getReader();
Copy link
Collaborator

@jacoblee93 jacoblee93 Jun 26, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: I think these are iterable if we convert to an IterableReadableStream


try {
let previousLength = 0;
while (true) {
const { done, value } = await reader.read();
if (done) break;
const newContent = value.slice(previousLength);
previousLength = value.length;
yield new ChatGenerationChunk({
text: newContent,
message: new AIMessageChunk({
content: newContent,
additional_kwargs: {},
}),
});
await runManager?.handleLLMNewToken(newContent);
}
} finally {
reader.releaseLock();
}
}

async _call(
messages: BaseMessage[],
options: this["ParsedCallOptions"],
runManager?: CallbackManagerForLLMRun
): Promise<string> {
const chunks = [];
for await (const chunk of this._streamResponseChunks(
messages,
options,
runManager
)) {
chunks.push(chunk.text);
}
return chunks.join("");
}
}
34 changes: 34 additions & 0 deletions libs/langchain-community/src/chat_models/tests/chrome_ai.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
<!DOCTYPE html>
<!--
  Manual browser test for ChatChromeAI. Build the package first (so
  ../../../dist/chat_models/chrome_ai.js exists), then serve this file from a
  Chrome build with built-in AI enabled and open it in the browser.
-->
<html>
  <head>
    <title>ChatChromeAI Example</title>
  </head>
  <body>
    <h1>ChatChromeAI Example</h1>

    <form id="inputForm">
      <label for="inputField">Enter your input:</label><br />
      <input type="text" id="inputField" name="inputField" /><br />
      <button type="submit">Submit</button>
    </form>

    <script type="module">
      import { ChatChromeAI } from "../../../dist/chat_models/chrome_ai.js";

      const model = new ChatChromeAI({});
      // The on-device session must be created before the model is invoked;
      // without this, streaming throws "Session not found". Top-level await
      // is valid here because this is a module script.
      await model.initialize();

      document
        .getElementById("inputForm")
        .addEventListener("submit", async (event) => {
          event.preventDefault();

          const input = document.getElementById("inputField").value;
          const humanMessage = ["human", input];

          for await (const chunk of await model.stream(humanMessage)) {
            console.log(chunk);
          }
        });
    </script>
  </body>
</html>
1 change: 1 addition & 0 deletions libs/langchain-community/src/load/import_map.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ export * as vectorstores__vectara from "../vectorstores/vectara.js";
export * as chat_models__alibaba_tongyi from "../chat_models/alibaba_tongyi.js";
export * as chat_models__baiduwenxin from "../chat_models/baiduwenxin.js";
export * as chat_models__cloudflare_workersai from "../chat_models/cloudflare_workersai.js";
export * as chat_models__chrome_ai from "../chat_models/chrome_ai.js";
export * as chat_models__deepinfra from "../chat_models/deepinfra.js";
export * as chat_models__fireworks from "../chat_models/fireworks.js";
export * as chat_models__friendli from "../chat_models/friendli.js";
Expand Down
Loading