Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Flatten model configuration (i.e. extend from GenerationCommonConfig) #72

Merged
merged 2 commits into from
Jun 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 4 additions & 7 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

40 changes: 24 additions & 16 deletions plugins/anthropic/src/claude.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@

import { Message } from '@genkit-ai/ai';
import {
GenerationCommonConfigSchema,
ModelAction,
defineModel,
modelRef,
type CandidateData,
Expand All @@ -29,19 +31,21 @@ import {
import Anthropic from '@anthropic-ai/sdk';
import z from 'zod';

const AnthropicConfigSchema = z.object({
tool_choice: z.union([
z.object({
type: z.literal('auto'),
}),
z.object({
type: z.literal('any'),
}),
z.object({
type: z.literal('tool'),
name: z.string(),
}),
]),
const AnthropicConfigSchema = GenerationCommonConfigSchema.extend({
tool_choice: z
.union([
z.object({
type: z.literal('auto'),
}),
z.object({
type: z.literal('any'),
}),
z.object({
type: z.literal('tool'),
name: z.string(),
}),
])
.optional(),
metadata: z
.object({
user_id: z.string().optional(),
Expand Down Expand Up @@ -391,7 +395,7 @@ function fromAnthropicContentBlockChunk(
*/
export function toAnthropicRequestBody(
modelName: string,
request: GenerateRequest,
request: GenerateRequest<typeof AnthropicConfigSchema>,
stream?: boolean
): Anthropic.Beta.Tools.Messages.MessageCreateParams {
const model = SUPPORTED_CLAUDE_MODELS[modelName];
Expand All @@ -408,8 +412,9 @@ export function toAnthropicRequestBody(
top_p: request.config?.topP,
temperature: request.config?.temperature,
stop_sequences: request.config?.stopSequences,
metadata: request.config?.metadata,
tool_choice: request.config?.tool_choice,
stream,
...(request.config?.custom || {}),
};

if (request.output?.format && request.output.format !== 'text') {
Expand All @@ -431,7 +436,10 @@ export function toAnthropicRequestBody(
* @returns The defined Claude model.
* @throws An error if the specified model is not supported.
*/
export function claudeModel(name: string, client: Anthropic) {
export function claudeModel(
name: string,
client: Anthropic
): ModelAction<typeof AnthropicConfigSchema> {
const modelId = `anthropic/${name}`;
const model = SUPPORTED_CLAUDE_MODELS[name];
if (!model) throw new Error(`Unsupported model: ${name}`);
Expand Down
18 changes: 6 additions & 12 deletions plugins/anthropic/tests/claude_test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -310,10 +310,8 @@ describe('toAnthropicRequestBody', () => {
],
output: { format: 'text' },
config: {
custom: {
metadata: {
user_id: 'exampleUser123',
},
metadata: {
user_id: 'exampleUser123',
},
},
},
Expand Down Expand Up @@ -345,10 +343,8 @@ describe('toAnthropicRequestBody', () => {
],
output: { format: 'text' },
config: {
custom: {
metadata: {
user_id: 'exampleUser123',
},
metadata: {
user_id: 'exampleUser123',
},
},
},
Expand Down Expand Up @@ -380,10 +376,8 @@ describe('toAnthropicRequestBody', () => {
],
output: { format: 'text' },
config: {
custom: {
metadata: {
user_id: 'exampleUser123',
},
metadata: {
user_id: 'exampleUser123',
},
},
},
Expand Down
24 changes: 8 additions & 16 deletions plugins/cohere/src/command.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,9 @@ import {
CandidateData,
defineModel,
GenerateRequest,
GenerationCommonConfigSchema,
MessageData,
ModelAction,
modelRef,
Part,
Role,
Expand All @@ -31,7 +33,7 @@ import { ChatStreamEndEventFinishReason } from 'cohere-ai/api';

import z from 'zod';

export const CohereConfigSchema = z.object({
export const CohereConfigSchema = GenerationCommonConfigSchema.extend({
frequencyPenalty: z.number().min(-2).max(2).optional(),
logitBias: z.record(z.string(), z.number().min(-100).max(100)).optional(),
logProbs: z.boolean().optional(),
Expand Down Expand Up @@ -381,21 +383,9 @@ function fromCohereStreamEvent(

export function toCohereRequestBody(
modelName: string,
request: GenerateRequest
request: GenerateRequest<typeof CohereConfigSchema>
): Cohere.ChatRequest | Cohere.ChatStreamRequest {
// Note: these types are the same in the Cohere API (not on the surface, e.g. one uses ChatRequestToolResultsItem and the other uses ChatStreamRequestToolResultsItem, but when the types are unwrapped they are exactly the same)
const mapToSnakeCase = <T extends Record<string, any>>(
obj: T
): Record<string, any> => {
return Object.entries(obj).reduce((acc, [key, value]) => {
const snakeCaseKey = key.replace(
/[A-Z]/g,
(letter) => `_${letter.toLowerCase()}`
);
acc[snakeCaseKey] = value;
return acc;
}, {});
};
const model = SUPPORTED_COMMAND_MODELS[modelName];
if (!model) throw new Error(`Unsupported model: ${modelName}`);
const mappedModelName = request.config?.version || model.version || modelName;
Expand Down Expand Up @@ -423,7 +413,6 @@ export function toCohereRequestBody(
rawPrompting: request.config?.rawPrompting,
tools: request.tools?.map(toCohereTool),
// toolResults: request.messages?.map(toCohereToolResult),
...mapToSnakeCase(request.config?.custom || {}),
};

for (const key in body) {
Expand All @@ -436,7 +425,10 @@ export function toCohereRequestBody(
/**
*
*/
export function commandModel(name: string, client: CohereClient) {
export function commandModel(
name: string,
client: CohereClient
): ModelAction<typeof CohereConfigSchema> {
const modelId = `cohere/${name}`;
const model = SUPPORTED_COMMAND_MODELS[name];
if (!model) throw new Error(`Unsupported model: ${name}`);
Expand Down
9 changes: 3 additions & 6 deletions plugins/groq/src/groq_models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import {
CandidateData,
defineModel,
GenerateRequest,
GenerationCommonConfigSchema,
MessageData,
modelRef,
Part,
Expand All @@ -36,11 +37,7 @@ import {

import z from 'zod';

export const GroqConfigSchema = z.object({
temperature: z.number().min(0).max(1).optional(),
maxTokens: z.number().int().min(1).max(2048).optional(),
topP: z.number().min(0).max(1).optional(),
stop: z.string().optional(),
export const GroqConfigSchema = GenerationCommonConfigSchema.extend({
stream: z.boolean().optional(),
frequencyPenalty: z.number().optional(),
logitBias: z.record(z.number()).optional(),
Expand Down Expand Up @@ -397,7 +394,7 @@ export function toGroqRequestBody(
tools: request.tools?.map(toGroqTool),
model: request.config?.version || model.version || modelName,
temperature: request.config?.temperature,
max_tokens: request.config?.maxTokens,
max_tokens: request.config?.maxOutputTokens,
top_p: request.config?.topP,
stop: request.config?.stopSequences,
n: request.candidates,
Expand Down
2 changes: 1 addition & 1 deletion plugins/groq/tests/groq_test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ describe('toGroqRequestBody', () => {
config: {
temperature: 0.7,
stopSequences: ['\n'],
maxTokens: 100,
maxOutputTokens: 100,
topP: 0.9,
frequencyPenalty: 0.5,
logitBias: {
Expand Down
41 changes: 13 additions & 28 deletions plugins/mistral/src/mistral_llms.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,9 @@ import {
CandidateData,
defineModel,
GenerateRequest,
GenerationCommonConfigSchema,
MessageData,
ModelAction,
modelRef,
Part,
Role,
Expand All @@ -38,13 +40,6 @@ import type {

import z from 'zod';

export const MistralConfigSchema = z.object({
temperature: z.number().min(0).max(1).optional(),
maxTokens: z.number().int().optional(),
topP: z.number().min(0).max(1).optional(),
stopSequences: z.array(z.string()).optional(),
});

export const openMistral7B = modelRef({
name: 'mistral/open-mistral-7b',
info: {
Expand All @@ -58,7 +53,7 @@ export const openMistral7B = modelRef({
output: ['text', 'json'],
},
},
configSchema: MistralConfigSchema,
configSchema: GenerationCommonConfigSchema,
});

export const openMistral8x7B = modelRef({
Expand Down Expand Up @@ -104,7 +99,7 @@ export const openMistralSmall = modelRef({
output: ['text', 'json'],
},
},
configSchema: MistralConfigSchema,
configSchema: GenerationCommonConfigSchema,
});

export const openMistralMedium = modelRef({
Expand All @@ -120,7 +115,7 @@ export const openMistralMedium = modelRef({
output: ['text', 'json'],
},
},
configSchema: MistralConfigSchema,
configSchema: GenerationCommonConfigSchema,
});

export const openMistralLarge = modelRef({
Expand All @@ -136,7 +131,7 @@ export const openMistralLarge = modelRef({
output: ['text', 'json'],
},
},
configSchema: MistralConfigSchema,
configSchema: GenerationCommonConfigSchema,
});

function toMistralRole(role: Role): string {
Expand Down Expand Up @@ -254,27 +249,15 @@ function fromMistralChunkChoice(

export function toMistralRequestBody(
modelName: string,
request: GenerateRequest
request: GenerateRequest<typeof GenerationCommonConfigSchema>
) {
const mapToSnakeCase = <T extends Record<string, any>>(
obj: T
): Record<string, any> => {
return Object.entries(obj).reduce((acc, [key, value]) => {
const snakeCaseKey = key.replace(
/[A-Z]/g,
(letter) => `_${letter.toLowerCase()}`
);
acc[snakeCaseKey] = value;
return acc;
}, {});
};
const model = SUPPORTED_MISTRAL_MODELS[modelName];
if (!model) throw new Error(`Unsupported model: ${modelName}`);
const mistralMessages = toMistralMessages(request.messages);
const mappedModelName = request.config?.version || model.version || modelName;

let responseFormat;
if (request.config?.responseFormat !== 'json') {
if (request.output?.format !== 'json') {
responseFormat = { type: 'json_object' };
} else {
responseFormat = null;
Expand All @@ -283,13 +266,12 @@ export function toMistralRequestBody(
messages: mistralMessages,
tools: request.tools?.map(toMistralTool),
model: mappedModelName,
max_tokens: request.config?.maxTokens,
max_tokens: request.config?.maxOutputTokens,
temperature: request.config?.temperature,
top_p: request.config?.topP,
n: request.candidates,
stop_sequences: request.config?.stopSequences,
responseFormat: responseFormat,
...mapToSnakeCase(request.config?.custom || {}),
} as ChatRequest;

for (const key in body) {
Expand All @@ -299,7 +281,10 @@ export function toMistralRequestBody(
return body;
}

export function mistralModel(name: string, client: any) {
export function mistralModel(
name: string,
client: any
): ModelAction<typeof GenerationCommonConfigSchema> {
//Ugly any type, should be MistralClient but cannot import it here
const modelId = `mistral/${name}`;
const model = SUPPORTED_MISTRAL_MODELS[name];
Expand Down
Loading
Loading