Skip to content

Commit

Permalink
Update raycast-ollama extension
Browse files Browse the repository at this point in the history
- Merge pull request raycast#6 from MassimilianoPasquini97/OllamaResponseApiFix
- Updated changelog.md
- TextField appears if ModelsOnRegistry is undefined
- Fixed ModelsOnRegistry.length undefined
- Deleted 'error' message on event emitter
- Deleted field no longer used by Ollama Generate Response API
  • Loading branch information
MassimilianoPasquini97 committed Nov 5, 2023
1 parent 8ce0200 commit 55dc45c
Show file tree
Hide file tree
Showing 5 changed files with 109 additions and 97 deletions.
5 changes: 5 additions & 0 deletions extensions/raycast-ollama/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
# raycast-ollama Changelog

## [BugFix] - 2023-11-5

- [BugFix] Fixed error `ModelsOnRegistry.length undefined`.
- [BugFix] Fixed error `SyntaxError: Unexpected end of JSON input` caused by Ollama Generate API response no longer providing `sample_count` and `sample_duration` fields.

## [Improvement] - 2023-10-21

- [Improvement] New Preference 'Input Source' permits choosing the input between 'Selected Text' and 'Clipboard'. Defaults to 'Selected Text'.
Expand Down
146 changes: 82 additions & 64 deletions extensions/raycast-ollama/src/api/main.tsx
Original file line number Diff line number Diff line change
@@ -1,8 +1,4 @@
import {
OllamaApiGenerateRequestBody,
OllamaApiGenerateResponseDone,
OllamaApiGenerateResponseMetadata,
} from "./types";
import { OllamaApiGenerateRequestBody, OllamaApiGenerateResponse } from "./types";
import {
ErrorOllamaCustomModel,
ErrorOllamaModelNotInstalled,
Expand Down Expand Up @@ -38,9 +34,9 @@ export function ResultView(
const [loading, setLoading]: [boolean, React.Dispatch<React.SetStateAction<boolean>>] = React.useState(false);
const [answer, setAnswer]: [string, React.Dispatch<React.SetStateAction<string>>] = React.useState("");
const [answerMetadata, setAnswerMetadata]: [
OllamaApiGenerateResponseMetadata,
React.Dispatch<React.SetStateAction<OllamaApiGenerateResponseMetadata>>
] = React.useState({} as OllamaApiGenerateResponseMetadata);
OllamaApiGenerateResponse,
React.Dispatch<React.SetStateAction<OllamaApiGenerateResponse>>
] = React.useState({} as OllamaApiGenerateResponse);
async function HandleError(err: Error) {
if (err instanceof ErrorOllamaModelNotInstalled) {
await showToast({ style: Toast.Style.Failure, title: err.message, message: err.suggest });
Expand Down Expand Up @@ -232,27 +228,42 @@ export function ResultView(
<Detail.Metadata>
<Detail.Metadata.Label title="Model" text={answerMetadata.model} />
<Detail.Metadata.Separator />
<Detail.Metadata.Label
title="Generation Speed"
text={`${(answerMetadata.eval_count / (answerMetadata.eval_duration / 1e9)).toFixed(2)} token/s`}
/>
<Detail.Metadata.Label
title="Total Inference Duration"
text={`${(answerMetadata.total_duration / 1e9).toFixed(2)}s`}
/>
<Detail.Metadata.Label title="Load Duration" text={`${(answerMetadata.load_duration / 1e9).toFixed(2)}s`} />
<Detail.Metadata.Label title="Sample Duration" text={`${answerMetadata.sample_count} sample`} />
<Detail.Metadata.Label
title="Sample Duration"
text={`${(answerMetadata.sample_duration / 1e9).toFixed(2)}s`}
/>
<Detail.Metadata.Label title="Prompt Eval Count" text={`${answerMetadata.prompt_eval_count}`} />
<Detail.Metadata.Label
title="Prompt Eval Duration"
text={`${(answerMetadata.prompt_eval_duration / 1e9).toFixed(2)}s`}
/>
<Detail.Metadata.Label title="Eval Count" text={`${answerMetadata.eval_count}`} />
<Detail.Metadata.Label title="Eval Duration" text={`${(answerMetadata.eval_duration / 1e9).toFixed(2)}s`} />
{answerMetadata.eval_count && answerMetadata.eval_duration ? (
<Detail.Metadata.Label
title="Generation Speed"
text={`${(answerMetadata.eval_count / (answerMetadata.eval_duration / 1e9)).toFixed(2)} token/s`}
/>
) : null}
{answerMetadata.total_duration ? (
<Detail.Metadata.Label
title="Total Inference Duration"
text={`${(answerMetadata.total_duration / 1e9).toFixed(2)}s`}
/>
) : null}
{answerMetadata.load_duration ? (
<Detail.Metadata.Label
title="Load Duration"
text={`${(answerMetadata.load_duration / 1e9).toFixed(2)}s`}
/>
) : null}
{answerMetadata.prompt_eval_count ? (
<Detail.Metadata.Label title="Prompt Eval Count" text={`${answerMetadata.prompt_eval_count}`} />
) : null}
{answerMetadata.prompt_eval_duration ? (
<Detail.Metadata.Label
title="Prompt Eval Duration"
text={`${(answerMetadata.prompt_eval_duration / 1e9).toFixed(2)}s`}
/>
) : null}
{answerMetadata.eval_count ? (
<Detail.Metadata.Label title="Eval Count" text={`${answerMetadata.eval_count}`} />
) : null}
{answerMetadata.eval_duration ? (
<Detail.Metadata.Label
title="Eval Duration"
text={`${(answerMetadata.eval_duration / 1e9).toFixed(2)}s`}
/>
) : null}
</Detail.Metadata>
)
}
Expand All @@ -273,8 +284,8 @@ export function ListView(): JSX.Element {
const [selectedAnswer, setSelectedAnswer]: [string, React.Dispatch<React.SetStateAction<string>>] =
React.useState("0");
const [answerListHistory, setAnswerListHistory]: [
Map<string, [string, string, OllamaApiGenerateResponseDone][] | undefined>,
React.Dispatch<React.SetStateAction<Map<string, [string, string, OllamaApiGenerateResponseDone][] | undefined>>>
Map<string, [string, string, OllamaApiGenerateResponse][] | undefined>,
React.Dispatch<React.SetStateAction<Map<string, [string, string, OllamaApiGenerateResponse][] | undefined>>>
] = React.useState(new Map());
const [clipboardConversation, setClipboardConversation]: [string, React.Dispatch<React.SetStateAction<string>>] =
React.useState("");
Expand Down Expand Up @@ -316,9 +327,9 @@ export function ListView(): JSX.Element {
setAnswerListHistory((prevState) => {
let prevData = prevState.get(chatName);
if (prevData?.length === undefined) {
prevData = [[query, "", {} as OllamaApiGenerateResponseDone]];
prevData = [[query, "", {} as OllamaApiGenerateResponse]];
} else {
prevData.push([query, "", {} as OllamaApiGenerateResponseDone]);
prevData.push([query, "", {} as OllamaApiGenerateResponse]);
}
prevState.set(chatName, prevData);
setSelectedAnswer((prevData.length - 1).toString());
Expand Down Expand Up @@ -365,9 +376,7 @@ export function ListView(): JSX.Element {
});
await LocalStorage.getItem("answerListHistory").then((data) => {
if (data) {
const dataMap: Map<string, [string, string, OllamaApiGenerateResponseDone][]> = new Map(
JSON.parse(data as string)
);
const dataMap: Map<string, [string, string, OllamaApiGenerateResponse][]> = new Map(JSON.parse(data as string));
setAnswerListHistory(dataMap);
}
});
Expand Down Expand Up @@ -409,7 +418,7 @@ export function ListView(): JSX.Element {
}
setClipboardConversation(clipboard);
}
function ActionOllama(item?: [string, string, OllamaApiGenerateResponseDone]): JSX.Element {
function ActionOllama(item?: [string, string, OllamaApiGenerateResponse]): JSX.Element {
return (
<ActionPanel>
<ActionPanel.Section title="Ollama">
Expand Down Expand Up @@ -594,33 +603,42 @@ export function ListView(): JSX.Element {
<Detail.Metadata>
<Detail.Metadata.Label title="Model" text={item[2].model} />
<Detail.Metadata.Separator />
<Detail.Metadata.Label
title="Generation Speed"
text={`${(item[2].eval_count / (item[2].eval_duration / 1e9)).toFixed(2)} token/s`}
/>
<Detail.Metadata.Label
title="Total Inference Duration"
text={`${(item[2].total_duration / 1e9).toFixed(2)}s`}
/>
<Detail.Metadata.Label
title="Load Duration"
text={`${(item[2].load_duration / 1e9).toFixed(2)}s`}
/>
<Detail.Metadata.Label title="Sample Duration" text={`${item[2].sample_count} sample`} />
<Detail.Metadata.Label
title="Sample Duration"
text={`${(item[2].sample_duration / 1e9).toFixed(2)}s`}
/>
<Detail.Metadata.Label title="Prompt Eval Count" text={`${item[2].prompt_eval_count}`} />
<Detail.Metadata.Label
title="Prompt Eval Duration"
text={`${(item[2].prompt_eval_duration / 1e9).toFixed(2)}s`}
/>
<Detail.Metadata.Label title="Eval Count" text={`${item[2].eval_count}`} />
<Detail.Metadata.Label
title="Eval Duration"
text={`${(item[2].eval_duration / 1e9).toFixed(2)}s`}
/>
{item[2].eval_count && item[2].eval_duration ? (
<Detail.Metadata.Label
title="Generation Speed"
text={`${(item[2].eval_count / (item[2].eval_duration / 1e9)).toFixed(2)} token/s`}
/>
) : null}
{item[2].total_duration ? (
<Detail.Metadata.Label
title="Total Inference Duration"
text={`${(item[2].total_duration / 1e9).toFixed(2)}s`}
/>
) : null}
{item[2].load_duration ? (
<Detail.Metadata.Label
title="Load Duration"
text={`${(item[2].load_duration / 1e9).toFixed(2)}s`}
/>
) : null}
{item[2].prompt_eval_count ? (
<Detail.Metadata.Label title="Prompt Eval Count" text={`${item[2].prompt_eval_count}`} />
) : null}
{item[2].prompt_eval_duration ? (
<Detail.Metadata.Label
title="Prompt Eval Duration"
text={`${(item[2].prompt_eval_duration / 1e9).toFixed(2)}s`}
/>
) : null}
{item[2].eval_count ? (
<Detail.Metadata.Label title="Eval Count" text={`${item[2].eval_count}`} />
) : null}
{item[2].eval_duration ? (
<Detail.Metadata.Label
title="Eval Duration"
text={`${(item[2].eval_duration / 1e9).toFixed(2)}s`}
/>
) : null}
</Detail.Metadata>
)
}
Expand Down
17 changes: 9 additions & 8 deletions extensions/raycast-ollama/src/api/ollama.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import {
OllamaApiGenerateResponseUndone,
OllamaApiGenerateResponseDone,
OllamaApiGenerateResponse,
OllamaApiGenerateRequestBody,
OllamaApiEmbeddingsResponse,
OllamaApiTagsResponse,
Expand Down Expand Up @@ -172,7 +171,7 @@ export async function OllamaApiPull(model: string): Promise<EventEmitter> {
/**
* Perform text generation with the selected model.
* @param {OllamaApiGenerateRequestBody} body - Ollama Generate Body Request.
* @returns {Promise<EventEmitter>} Response from the Ollama API with an EventEmitter with two event: `data` where all generated text is passed on `string` format and `done` when inference is finished returning a `OllamaApiGenerateResponseDone` object contains all metadata of inference.
 * @returns {Promise<EventEmitter>} Response from the Ollama API as an EventEmitter with two events: `data`, where all generated text is passed in `string` format, and `done`, emitted when inference is finished with an `OllamaApiGenerateResponse` object containing all metadata of the inference.
*/
export async function OllamaApiGenerate(body: OllamaApiGenerateRequestBody): Promise<EventEmitter> {
const host = parseOllamaHostUrl();
Expand Down Expand Up @@ -203,12 +202,14 @@ export async function OllamaApiGenerate(body: OllamaApiGenerateRequestBody): Pro
body?.on("data", (chunk) => {
if (chunk !== undefined) {
const buffer = Buffer.from(chunk);
const json: OllamaApiGenerateResponseUndone = JSON.parse(buffer.toString());
if (json.done) {
const lastJSON: OllamaApiGenerateResponseDone = JSON.parse(buffer.toString());
e.emit("done", lastJSON);
const json: OllamaApiGenerateResponse = JSON.parse(buffer.toString());
switch (json.done) {
case false:
e.emit("data", json.response);
break;
case true:
e.emit("done", json);
}
if (json.response !== undefined) e.emit("data", json.response);
}
});

Expand Down
30 changes: 8 additions & 22 deletions extensions/raycast-ollama/src/api/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -61,38 +61,24 @@ export interface OllamaApiGenerateOptionsRequestBody {
num_thread?: number;
}

export interface OllamaApiGenerateResponseUndone {
export interface OllamaApiGenerateResponse {
model: string;
created_at: string;
response: string;
done: boolean;
}

export interface OllamaApiGenerateResponseDone {
model: string;
created_at: string;
done: boolean;
context: number[];
total_duration: number;
load_duration: number;
sample_count: number;
sample_duration: number;
prompt_eval_count: number;
prompt_eval_duration: number;
eval_count: number;
eval_duration: number;
context?: number[];
total_duration?: number;
load_duration?: number;
prompt_eval_count?: number;
prompt_eval_duration?: number;
eval_count?: number;
eval_duration?: number;
}

/**
 * Response body returned by the Ollama Embeddings API.
 */
export interface OllamaApiEmbeddingsResponse {
// Embedding vector for the supplied prompt — presumably one float per model dimension; confirm against the Ollama API docs.
embedding: number[];
}

/**
 * Aggregate generate-response wrapper (metadata + answer + error flag).
 * NOTE(review): this declaration is being removed in this commit in favor of the
 * streaming response interface of the same name — confirm no remaining callers.
 */
export interface OllamaApiGenerateResponse {
// Inference metadata; see OllamaApiGenerateResponseMetadata.
metadata: OllamaApiGenerateResponseMetadata;
// Full generated answer text.
answer: string;
// Presumably true when generation failed — TODO confirm with callers.
error: boolean;
}

export interface OllamaApiGenerateResponseMetadata {
model: string;
total_duration: number;
Expand Down
8 changes: 5 additions & 3 deletions extensions/raycast-ollama/src/ollama-models.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -144,14 +144,16 @@ export default function Command(): JSX.Element {
</ActionPanel>
}
>
{ModelsOnRegistry.length === 0 && <Form.TextField id="Model" title="Model Name" placeholder="Model Name" />}
{ModelsOnRegistry.length > 0 && (
{ModelsOnRegistry.length === undefined || ModelsOnRegistry.length === 0 ? (
<Form.TextField id="Model" title="Model Name" placeholder="Model Name" />
) : null}
{ModelsOnRegistry.length && ModelsOnRegistry.length > 0 ? (
<Form.Dropdown id="Model" title="Model Name">
{ModelsOnRegistry.map((item) => {
return <Form.Dropdown.Item key={item} title={item} value={item} />;
})}
</Form.Dropdown>
)}
) : null}
</Form>
);

Expand Down

0 comments on commit 55dc45c

Please sign in to comment.