Commit aae3783

LocalAI: Model Gallery Admin panel. Fixes #411

1 parent 053aa12 · commit aae3783

4 files changed: +315 −2 lines

src/modules/llms/server/openai/localai.wiretypes.ts

+33

@@ -0,0 +1,33 @@
import { z } from 'zod';


export const wireLocalAIModelsAvailableOutputSchema = z.array(z.object({
  name: z.string(), // (e.g.) tinydream
  url: z.string(), // (e.g.) github:go-skynet/model-gallery/tinydream.yaml
  license: z.string(), // (e.g.) other
  gallery: z.object({
    url: z.string(), // (e.g.) github:go-skynet/model-gallery/index.yaml
    name: z.string(), // (e.g.) model-gallery
  }),
  urls: z.array(z.string()).optional(),
  files: z.array(z.object({
    filename: z.string(), // voice-en-us-amy-low.tar.gz
    uri: z.string(), // https://github.com/rhasspy/piper/releases/download/v0.0.2/voice-en-us-amy-low.tar.gz
    sha256: z.string().optional(), // often empty
  })).optional(),
})).nullable(); // null if galleries are not served

export const wireLocalAIModelsApplyOutputSchema = z.object({
  uuid: z.string().uuid(),
  status: z.string().url(),
});

export const wireLocalAIModelsListOutputSchema = z.object({
  file_name: z.string(),
  error: z.string().nullable(),
  processed: z.boolean(),
  message: z.string().nullable(),
  progress: z.number(),
  file_size: z.string(),
  downloaded_size: z.string(),
});
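
For reference, a minimal usage sketch of these wire schemas — not part of the commit. The sample payload and the import path are illustrative, mirroring the inline example comments above.

// Hypothetical usage: validate a /models/available response with the schema above.
// The payload below is a made-up single-entry sample; a real server returns many
// entries, or null when no galleries are configured.
import { wireLocalAIModelsAvailableOutputSchema } from './localai.wiretypes';

const samplePayload = [{
  name: 'tinydream',
  url: 'github:go-skynet/model-gallery/tinydream.yaml',
  license: 'other',
  gallery: {
    url: 'github:go-skynet/model-gallery/index.yaml',
    name: 'model-gallery',
  },
}];

const models = wireLocalAIModelsAvailableOutputSchema.parse(samplePayload);
if (models !== null)
  console.log(models.map(m => `${m.gallery.name}@${m.name}`)); // ['model-gallery@tinydream']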

src/modules/llms/server/openai/openai.router.ts

+36
@@ -13,6 +13,7 @@ import { fixupHost } from '~/common/util/urlUtils';
 import { OpenAIWire, WireOpenAICreateImageOutput, wireOpenAICreateImageOutputSchema, WireOpenAICreateImageRequest } from './openai.wiretypes';
 import { azureModelToModelDescription, lmStudioModelToModelDescription, localAIModelToModelDescription, mistralModelsSort, mistralModelToModelDescription, oobaboogaModelToModelDescription, openAIModelToModelDescription, openRouterModelFamilySortFn, openRouterModelToModelDescription, togetherAIModelsToModelDescriptions } from './models.data';
 import { llmsChatGenerateWithFunctionsOutputSchema, llmsListModelsOutputSchema, ModelDescriptionSchema } from '../llm.server.types';
+import { wireLocalAIModelsApplyOutputSchema, wireLocalAIModelsAvailableOutputSchema, wireLocalAIModelsListOutputSchema } from './localai.wiretypes';


 const openAIDialects = z.enum([
@@ -326,6 +327,41 @@ export const llmOpenAIRouter = createTRPCRouter({
     }
   }),

+
+  /// Dialect-specific procedures ///
+
+  /* [LocalAI] List all Model Galleries */
+  dialectLocalAI_galleryModelsAvailable: publicProcedure
+    .input(listModelsInputSchema)
+    .query(async ({ input: { access } }) => {
+      const wireLocalAIModelsAvailable = await openaiGET(access, '/models/available');
+      return wireLocalAIModelsAvailableOutputSchema.parse(wireLocalAIModelsAvailable);
+    }),
+
+  /* [LocalAI] Download a model from a Model Gallery */
+  dialectLocalAI_galleryModelsApply: publicProcedure
+    .input(z.object({
+      access: openAIAccessSchema,
+      galleryName: z.string(),
+      modelName: z.string(),
+    }))
+    .mutation(async ({ input: { access, galleryName, modelName } }) => {
+      const galleryModelId = `${galleryName}@${modelName}`;
+      const wireLocalAIModelApply = await openaiPOST(access, null, { id: galleryModelId }, '/models/apply');
+      return wireLocalAIModelsApplyOutputSchema.parse(wireLocalAIModelApply);
+    }),
+
+  /* [LocalAI] Poll for a Model download Job status */
+  dialectLocalAI_galleryModelsJob: publicProcedure
+    .input(z.object({
+      access: openAIAccessSchema,
+      jobId: z.string(),
+    }))
+    .query(async ({ input: { access, jobId } }) => {
+      const wireLocalAIModelsJobs = await openaiGET(access, `/models/jobs/${jobId}`);
+      return wireLocalAIModelsListOutputSchema.parse(wireLocalAIModelsJobs);
+    }),
+
 });
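
For context, a standalone sketch of the raw LocalAI HTTP flow these two procedures wrap: POST /models/apply with a gallery-qualified id, then poll GET /models/jobs/{uuid} until the job reports completion. This is not part of the commit; the base URL, the absence of an API key, and the helper name are assumptions.

// Hypothetical standalone helper (not in the commit); assumes a LocalAI server
// at http://localhost:8080 with no API key required.
const LOCALAI_BASE = 'http://localhost:8080';

async function installGalleryModel(galleryName: string, modelName: string): Promise<void> {
  // start the install job; the id format '<gallery>@<model>' matches galleryModelId above
  const applyRes = await fetch(`${LOCALAI_BASE}/models/apply`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ id: `${galleryName}@${modelName}` }),
  });
  const { uuid } = await applyRes.json() as { uuid: string, status: string };

  // poll the job status until done, at the same 1-second cadence the admin panel uses
  while (true) {
    const jobRes = await fetch(`${LOCALAI_BASE}/models/jobs/${uuid}`);
    const job = await jobRes.json() as { processed: boolean, progress: number, message: string | null };
    console.log(`progress: ${job.progress}%`, job.message ?? '');
    if (job.processed || job.progress === 100) break;
    await new Promise(resolve => setTimeout(resolve, 1000));
  }
}

// e.g. await installGalleryModel('model-gallery', 'tinydream');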

src/modules/llms/vendors/localai/LocalAIAdmin.tsx

+230

@@ -0,0 +1,230 @@
import * as React from 'react';

import { Alert, Box, Button, Card, CircularProgress, IconButton, LinearProgress, List, ListItem, Switch, Typography } from '@mui/joy';
import CloseIcon from '@mui/icons-material/Close';

import { ExpanderAccordion } from '~/common/components/ExpanderAccordion';
import { GoodModal } from '~/common/components/GoodModal';
import { InlineError } from '~/common/components/InlineError';
import { Link } from '~/common/components/Link';
import { apiQuery } from '~/common/util/trpc.client';
import { capitalizeFirstLetter } from '~/common/util/textUtils';

import type { OpenAIAccessSchema } from '../../server/openai/openai.router';


function ListItemSwitch(props: { title: string, checked: boolean, onChange: (checked: boolean) => void }) {
  return (
    <ListItem variant='soft'>
      <Box sx={{ display: 'flex', alignItems: 'center', flex: 1 }}>
        {props.title}
        <Switch
          checked={props.checked}
          onChange={event => props.onChange(event.target.checked)}
          endDecorator={props.checked ? 'Show' : 'Hide'}
          sx={{ ml: 'auto' }}
        />
      </Box>
    </ListItem>
  );
}


/**
 * Show the progress of a model install job by polling the server every 1 second until complete.
 * - uses the LocalAI /models/jobs API
 */
function ModelJobStatusChecker(props: { access: OpenAIAccessSchema, jobId: string }) {

  // local state
  const [isPolling, setIsPolling] = React.useState(true);

  // external state
  const { data, error } = apiQuery.llmOpenAI.dialectLocalAI_galleryModelsJob.useQuery({ access: props.access, jobId: props.jobId }, {
    enabled: isPolling,
    refetchInterval: 1000,
  });

  // [effect] stop polling when job is done
  const isDone = data?.processed === true || data?.progress === 100;
  React.useEffect(() => {
    if (isDone)
      setIsPolling(false);
  }, [isDone]);

  return <>

    {!!error && <InlineError error={error} />}

    {data && <Box sx={{ display: 'grid', gap: 1, my: 1 }}>
      {data.message && <Typography component='div' level='body-sm'>Message: {data.message}</Typography>}
      {data.file_name && <Typography component='div' level='body-sm'>File: {data.file_name}</Typography>}
      {data.file_size && <Typography component='div' level='body-sm'>File size: {data.file_size}</Typography>}
      {data.downloaded_size && <Typography component='div' level='body-sm'>Downloaded: {data.downloaded_size}</Typography>}
    </Box>}

    {isPolling
      ? <Alert variant='soft' color='primary'>Installation has begun. This may take a very long time.</Alert>
      : <Alert variant='soft' color={error ? 'warning' : 'success'}>
        {error ? 'Installation failed' : 'Installation complete'}
      </Alert>}

    <LinearProgress determinate color={error ? 'warning' : isDone ? 'success' : 'primary'} value={data?.progress || 0} sx={{ mt: 1 }} />

  </>;
}

/**
 * Every model being installed has a panel showing the status.
 * - uses the LocalAI /models/apply API
 */
function ModelInstallPanel(props: { access: OpenAIAccessSchema, modelName: string, galleryName: string }) {

  // state
  const [hideSelf, setHideSelf] = React.useState(false);

  // external state
  const { data, error, mutate } = apiQuery.llmOpenAI.dialectLocalAI_galleryModelsApply.useMutation();

  // [effect] auto-install
  React.useEffect(() => {
    mutate({ access: props.access, galleryName: props.galleryName, modelName: props.modelName });
  }, [mutate, props.access, props.galleryName, props.modelName]);

  if (hideSelf)
    return null;

  return (
    <Card sx={{ gap: 0, boxShadow: 'sm' }}>

      <Box sx={{ display: 'flex', alignItems: 'center' }}>
        <Typography level='title-sm'>
          Installing <strong>{props.modelName}</strong> from the <strong>{props.galleryName}</strong>
        </Typography>
        <IconButton size='sm' onClick={() => setHideSelf(true)} sx={{ ml: 'auto' }}>
          <CloseIcon />
        </IconButton>
      </Box>

      {!!error && <InlineError error={error} />}

      {!!data?.uuid && <ModelJobStatusChecker access={props.access} jobId={data.uuid} />}

    </Card>
  );
}


/**
 * Administration panel for LocalAI. Mainly to install models from the Gallery.
 */
export function LocalAIAdmin(props: { access: OpenAIAccessSchema, onClose: () => void }) {

  // state
  const [installModels, setInstallModels] = React.useState<{ galleryName: string; modelName: string; }[]>([]);
  const [showVoiceModels, setShowVoiceModels] = React.useState(false);

  // external state
  const { data, error } = apiQuery.llmOpenAI.dialectLocalAI_galleryModelsAvailable.useQuery({ access: props.access }, {
    staleTime: 1000 * 60,
    refetchOnWindowFocus: false,
  });

  // derived state
  const galleryNotConfigured = data === null;


  const handleAppendInstall = React.useCallback((galleryName: string, modelName: string) => {
    setInstallModels(prev => {
      // if already in list, do not add
      if (prev.some(p => p.galleryName === galleryName && p.modelName === modelName))
        return prev;
      return [...prev, { galleryName, modelName }];
    });
  }, []);


  return (
    <GoodModal title='LocalAI Administration' dividers open onClose={props.onClose}>
      <Box sx={{ display: 'grid', gap: 'var(--Card-padding)' }}>

        <Typography level='body-sm'>
          Install models from your LocalAI Model Gallery. We assume your LocalAI server is correctly
          configured and running.
        </Typography>

        {/* Models being Installed */}
        {installModels.length > 0 && <>

          <Typography level='title-lg'>
            Model Installation
          </Typography>

          <List sx={{ gap: 1 }}>
            {installModels.map((params, index) =>
              <ModelInstallPanel key={'install-' + index} access={props.access} {...params} />,
            )}
          </List>

        </>}


        <Typography level='title-md'>
          Available Models List
        </Typography>

        {/* Errors */}
        {!!error && <InlineError error={error} />}
        {galleryNotConfigured && <InlineError error={<>
          Model galleries do not seem to be configured (null response).
          Please refer to the <Link href='https://localai.io/models/' target='_blank'>documentation</Link> for
          how to configure model galleries.
        </>} />}

        {/* List loading */}
        {!data ? (
          <CircularProgress color='success' />
        ) : (
          <List
            variant='outlined'
            sx={{
              '--ListItem-minHeight': '2.75rem',
              borderRadius: 'md',
              p: 0,
            }}
          >
            {data
              .filter(model => showVoiceModels || !model.name.startsWith('voice-'))
              .map((model) => (
                <ListItem key={model.name}>

                  {capitalizeFirstLetter(model.name)}

                  <Button
                    color='neutral'
                    size='sm'
                    disabled={installModels.some(p => p.galleryName === model.gallery.name && p.modelName === model.name)}
                    onClick={() => handleAppendInstall(model.gallery.name, model.name)}
                    sx={{
                      ml: 'auto',
                    }}
                  >
                    Install
                  </Button>
                </ListItem>
              ))}

            <ListItemSwitch title='Show Voice Models' checked={showVoiceModels} onChange={setShowVoiceModels} />

            <ExpanderAccordion title='Debug: show JSON' startCollapsed sx={{ fontSize: 'sm' }}>
              <Box sx={{ whiteSpace: 'break-spaces' }}>
                {JSON.stringify(data, null, 2)}
              </Box>
            </ExpanderAccordion>
          </List>
        )}

      </Box>
    </GoodModal>
  );
}

src/modules/llms/vendors/localai/LocalAISourceSetup.tsx

+16 −2
@@ -1,8 +1,9 @@
 import * as React from 'react';
 import { z } from 'zod';

-import { Typography } from '@mui/joy';
+import { Button, Typography } from '@mui/joy';
 import CheckBoxOutlinedIcon from '@mui/icons-material/CheckBoxOutlined';
+
 import { ExpanderAccordion } from '~/common/components/ExpanderAccordion';
 import { FormInputKey } from '~/common/components/forms/FormInputKey';
 import { InlineError } from '~/common/components/InlineError';
@@ -13,11 +14,15 @@ import { DModelSourceId } from '../../store-llms';
 import { useLlmUpdateModels } from '../useLlmUpdateModels';
 import { useSourceSetup } from '../useSourceSetup';

+import { LocalAIAdmin } from './LocalAIAdmin';
 import { ModelVendorLocalAI } from './localai.vendor';


 export function LocalAISourceSetup(props: { sourceId: DModelSourceId }) {

+  // state
+  const [adminOpen, setAdminOpen] = React.useState(false);
+
   // external state
   const { source, access, updateSetup } =
     useSourceSetup(props.sourceId, ModelVendorLocalAI);
@@ -69,9 +74,18 @@ export function LocalAISourceSetup(props: { sourceId: DModelSourceId }) {
       value={oaiHost} onChange={value => updateSetup({ oaiHost: value })}
     />

-    <SetupFormRefetchButton refetch={refetch} disabled={!shallFetchSucceed || isFetching} loading={isFetching} error={isError} />
+    <SetupFormRefetchButton
+      refetch={refetch} disabled={!shallFetchSucceed || isFetching} loading={isFetching} error={isError}
+      leftButton={
+        <Button color='neutral' variant='solid' disabled={adminOpen} onClick={() => setAdminOpen(true)}>
+          LocalAI Admin
+        </Button>
+      }
+    />

     {isError && <InlineError error={error} />}

+    {adminOpen && <LocalAIAdmin access={access} onClose={() => setAdminOpen(false)} />}
+
   </>;
 }
