diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 65df33cb86..0587b81402 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -29,7 +29,7 @@ repos:
files: \.(jsx?|tsx?|css|.md)$
exclude: \.*__generated__.*$
additional_dependencies:
- - prettier@3.2.4
+ - prettier@3.3.3
- repo: https://github.com/pre-commit/mirrors-eslint
rev: "8ddcbd412c0b348841f0f82c837702f432539652"
hooks:
diff --git a/app/src/pages/playground/PlaygroundChatTemplate.tsx b/app/src/pages/playground/PlaygroundChatTemplate.tsx
index 57ab17f892..f43a5aa9eb 100644
--- a/app/src/pages/playground/PlaygroundChatTemplate.tsx
+++ b/app/src/pages/playground/PlaygroundChatTemplate.tsx
@@ -36,6 +36,7 @@ import { usePlaygroundContext } from "@phoenix/contexts/PlaygroundContext";
import { useChatMessageStyles } from "@phoenix/hooks/useChatMessageStyles";
import {
ChatMessage,
+ createOpenAIResponseFormat,
generateMessageId,
PlaygroundChatTemplate as PlaygroundChatTemplateType,
PlaygroundInstance,
@@ -44,11 +45,16 @@ import { assertUnreachable } from "@phoenix/typeUtils";
import { safelyParseJSON } from "@phoenix/utils/jsonUtils";
import { ChatMessageToolCallsEditor } from "./ChatMessageToolCallsEditor";
+import {
+ RESPONSE_FORMAT_PARAM_CANONICAL_NAME,
+ RESPONSE_FORMAT_PARAM_NAME,
+} from "./constants";
import {
MessageContentRadioGroup,
MessageMode,
} from "./MessageContentRadioGroup";
import { MessageRolePicker } from "./MessageRolePicker";
+import { PlaygroundResponseFormat } from "./PlaygroundResponseFormat";
import { PlaygroundTools } from "./PlaygroundTools";
import {
createToolCallForProvider,
@@ -74,11 +80,18 @@ export function PlaygroundChatTemplate(props: PlaygroundChatTemplateProps) {
);
const instances = usePlaygroundContext((state) => state.instances);
const updateInstance = usePlaygroundContext((state) => state.updateInstance);
+ const upsertInvocationParameterInput = usePlaygroundContext(
+ (state) => state.upsertInvocationParameterInput
+ );
const playgroundInstance = instances.find((instance) => instance.id === id);
if (!playgroundInstance) {
throw new Error(`Playground instance ${id} not found`);
}
const hasTools = playgroundInstance.tools.length > 0;
+ const hasResponseFormat =
+ playgroundInstance.model.invocationParameters.find(
+ (p) => p.canonicalName === RESPONSE_FORMAT_PARAM_CANONICAL_NAME
+ ) != null;
const { template } = playgroundInstance;
if (template.__type !== "chat") {
throw new Error(`Invalid template type ${template.__type}`);
@@ -151,9 +164,28 @@ export function PlaygroundChatTemplate(props: PlaygroundChatTemplateProps) {
paddingBottom="size-100"
borderColor="dark"
borderTopWidth="thin"
- borderBottomWidth={hasTools ? "thin" : undefined}
+ borderBottomWidth={hasTools || hasResponseFormat ? "thin" : undefined}
>
+ } />}
+ disabled={hasResponseFormat}
+ onClick={() => {
+ upsertInvocationParameterInput({
+ instanceId: id,
+ invocationParameterInput: {
+ valueJson: createOpenAIResponseFormat(),
+ invocationName: RESPONSE_FORMAT_PARAM_NAME,
+ canonicalName: RESPONSE_FORMAT_PARAM_CANONICAL_NAME,
+ },
+ });
+ }}
+ >
+ Output Schema
+
+ }
+ bodyStyle={{ padding: 0 }}
+ extra={
+
+
+ } />}
+ variant="default"
+ size="compact"
+ onClick={() => {
+ deleteInvocationParameterInput({
+ instanceId: playgroundInstanceId,
+ invocationParameterInputInvocationName:
+ RESPONSE_FORMAT_PARAM_NAME,
+ });
+ }}
+ />
+
+ }
+ >
+
+
+
+
+
+
+
+ );
+}
diff --git a/app/src/pages/playground/__tests__/fixtures.ts b/app/src/pages/playground/__tests__/fixtures.ts
index 283beb8d42..7303bc7784 100644
--- a/app/src/pages/playground/__tests__/fixtures.ts
+++ b/app/src/pages/playground/__tests__/fixtures.ts
@@ -47,6 +47,12 @@ export const basePlaygroundSpan: PlaygroundSpan = {
invocationInputField: "value_int",
invocationName: "seed",
},
+ {
+ __typename: "JsonInvocationParameter",
+ canonicalName: "RESPONSE_FORMAT",
+ invocationInputField: "value_json",
+ invocationName: "response_format",
+ },
],
};
export const spanAttributesWithInputMessages = {
diff --git a/app/src/pages/playground/__tests__/playgroundUtils.test.ts b/app/src/pages/playground/__tests__/playgroundUtils.test.ts
index efeac6ed21..76dd518c87 100644
--- a/app/src/pages/playground/__tests__/playgroundUtils.test.ts
+++ b/app/src/pages/playground/__tests__/playgroundUtils.test.ts
@@ -5,6 +5,7 @@ import { LlmProviderToolCall } from "@phoenix/schemas/toolCallSchemas";
import {
_resetInstanceId,
_resetMessageId,
+ createOpenAIResponseFormat,
PlaygroundInput,
PlaygroundInstance,
} from "@phoenix/store";
@@ -13,6 +14,7 @@ import {
INPUT_MESSAGES_PARSING_ERROR,
MODEL_CONFIG_PARSING_ERROR,
MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
+ MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR,
OUTPUT_MESSAGES_PARSING_ERROR,
OUTPUT_VALUE_PARSING_ERROR,
SPAN_ATTRIBUTES_PARSING_ERROR,
@@ -144,7 +146,6 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
modelName: "gpt-4o",
},
template: defaultTemplate,
-
output: undefined,
},
parsingErrors: [
@@ -153,6 +154,7 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
OUTPUT_VALUE_PARSING_ERROR,
MODEL_CONFIG_PARSING_ERROR,
MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
+ MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR,
],
});
});
@@ -455,8 +457,7 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
...spanAttributesWithInputMessages.llm,
// only parameters defined on the span InvocationParameter[] field are parsed
// note that snake case keys are automatically converted to camel case
- invocation_parameters:
- '{"top_p": 0.5, "max_tokens": 100, "seed": 12345, "stop": ["stop", "me"]}',
+ invocation_parameters: `{"top_p": 0.5, "max_tokens": 100, "seed": 12345, "stop": ["stop", "me"], "response_format": ${JSON.stringify(createOpenAIResponseFormat())}}`,
},
}),
};
@@ -486,6 +487,11 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
invocationName: "stop",
valueStringList: ["stop", "me"],
},
+ {
+ canonicalName: "RESPONSE_FORMAT",
+ invocationName: "response_format",
+ valueJson: createOpenAIResponseFormat(),
+ },
],
},
} satisfies PlaygroundInstance,
@@ -548,7 +554,10 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
playgroundInstance: {
...expectedPlaygroundInstanceWithIO,
},
- parsingErrors: [MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR],
+ parsingErrors: [
+ MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
+ MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR,
+ ],
});
});
@@ -568,7 +577,10 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
playgroundInstance: {
...expectedPlaygroundInstanceWithIO,
},
- parsingErrors: [MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR],
+ parsingErrors: [
+ MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
+ MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR,
+ ],
});
});
@@ -600,6 +612,25 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
parsingErrors: [MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR],
});
});
+
+ it("should only return response format parsing errors if response format is defined AND malformed", () => {
+ const span = {
+ ...basePlaygroundSpan,
+ attributes: JSON.stringify({
+ ...spanAttributesWithInputMessages,
+ llm: {
+ ...spanAttributesWithInputMessages.llm,
+ invocation_parameters: `{"response_format": 1234}`,
+ },
+ }),
+ };
+ expect(transformSpanAttributesToPlaygroundInstance(span)).toEqual({
+ playgroundInstance: {
+ ...expectedPlaygroundInstanceWithIO,
+ },
+ parsingErrors: [MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR],
+ });
+ });
});
describe("getChatRole", () => {
diff --git a/app/src/pages/playground/constants.tsx b/app/src/pages/playground/constants.tsx
index 94a35ad183..ffd02164cf 100644
--- a/app/src/pages/playground/constants.tsx
+++ b/app/src/pages/playground/constants.tsx
@@ -28,9 +28,10 @@ export const MODEL_CONFIG_PARSING_ERROR =
"Unable to parse model config, expected llm.model_name to be present.";
export const MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR =
"Unable to parse model config, expected llm.invocation_parameters json string to be present.";
-// TODO(parker / apowell) - adjust this error message with anthropic support https://github.com/Arize-ai/phoenix/issues/5100
+export const MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR =
+ "Unable to parse invocation parameters response_format, expected llm.invocation_parameters.response_format to be a well formed json object or undefined.";
export const TOOLS_PARSING_ERROR =
- "Unable to parse tools, expected tools to be an array of valid OpenAI tools.";
+ "Unable to parse tools, expected tools to be an array of valid tools.";
export const modelProviderToModelPrefixMap: Record = {
AZURE_OPENAI: [],
@@ -45,9 +46,16 @@ export const TOOL_CHOICE_PARAM_CANONICAL_NAME: Extract<
export const TOOL_CHOICE_PARAM_NAME = "tool_choice";
+export const RESPONSE_FORMAT_PARAM_CANONICAL_NAME: Extract<
+ CanonicalParameterName,
+ "RESPONSE_FORMAT"
+> = "RESPONSE_FORMAT";
+
+export const RESPONSE_FORMAT_PARAM_NAME = "response_format";
+
/**
* List of parameter canonical names to ignore in the invocation parameters form
* These parameters are rendered else where on the page
*/
export const paramsToIgnoreInInvocationParametersForm: CanonicalParameterName[] =
- [TOOL_CHOICE_PARAM_CANONICAL_NAME];
+ [TOOL_CHOICE_PARAM_CANONICAL_NAME, RESPONSE_FORMAT_PARAM_CANONICAL_NAME];
diff --git a/app/src/pages/playground/playgroundUtils.ts b/app/src/pages/playground/playgroundUtils.ts
index 07c782369e..6306280a51 100644
--- a/app/src/pages/playground/playgroundUtils.ts
+++ b/app/src/pages/playground/playgroundUtils.ts
@@ -51,9 +51,12 @@ import {
INPUT_MESSAGES_PARSING_ERROR,
MODEL_CONFIG_PARSING_ERROR,
MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
+ MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR,
modelProviderToModelPrefixMap,
OUTPUT_MESSAGES_PARSING_ERROR,
OUTPUT_VALUE_PARSING_ERROR,
+ RESPONSE_FORMAT_PARAM_CANONICAL_NAME,
+ RESPONSE_FORMAT_PARAM_NAME,
SPAN_ATTRIBUTES_PARSING_ERROR,
TOOL_CHOICE_PARAM_CANONICAL_NAME,
TOOL_CHOICE_PARAM_NAME,
@@ -63,6 +66,7 @@ import { InvocationParameter } from "./InvocationParametersForm";
import {
chatMessageRolesSchema,
chatMessagesSchema,
+ JsonObjectSchema,
llmInputMessageSchema,
llmOutputMessageSchema,
LlmToolSchema,
@@ -70,6 +74,7 @@ import {
MessageSchema,
modelConfigSchema,
modelConfigWithInvocationParametersSchema,
+ modelConfigWithResponseFormatSchema,
outputSchema,
} from "./schemas";
import { PlaygroundSpan } from "./spanPlaygroundPageLoader";
@@ -343,6 +348,21 @@ export function getModelInvocationParametersFromAttributes(
};
}
+export function getResponseFormatFromAttributes(parsedAttributes: unknown) {
+ const { success, data } =
+ modelConfigWithResponseFormatSchema.safeParse(parsedAttributes);
+ if (!success) {
+ return {
+ responseFormat: undefined,
+ parsingErrors: [MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR],
+ };
+ }
+ return {
+ responseFormat: data.llm.invocation_parameters.response_format,
+ parsingErrors: [],
+ };
+}
+
/**
* Processes the tools from the span attributes into OpenAI tools to be used in the playground
* @param tools tools from the span attributes
@@ -440,13 +460,25 @@ export function transformSpanAttributesToPlaygroundInstance(
parsedAttributes,
modelSupportedInvocationParameters
);
+ // parse response format separately so that we can get distinct errors messages from the rest of
+ // the invocation parameters
+ const { parsingErrors: responseFormatParsingErrors } =
+ getResponseFormatFromAttributes(parsedAttributes);
// Merge invocation parameters into model config, if model config is present
modelConfig =
modelConfig != null
? {
...modelConfig,
- invocationParameters,
+ invocationParameters:
+ // remove response format from invocation parameters if there are parsing errors
+ responseFormatParsingErrors.length > 0
+ ? invocationParameters.filter(
+ (param) =>
+ param.invocationName !== RESPONSE_FORMAT_PARAM_NAME &&
+ param.canonicalName !== RESPONSE_FORMAT_PARAM_CANONICAL_NAME
+ )
+ : invocationParameters,
}
: null;
@@ -476,6 +508,7 @@ export function transformSpanAttributesToPlaygroundInstance(
...modelConfigParsingErrors,
...toolsParsingErrors,
...invocationParametersParsingErrors,
+ ...responseFormatParsingErrors,
],
};
}
@@ -604,10 +637,7 @@ export const toCamelCase = (str: string) =>
*/
export const transformInvocationParametersFromAttributesToInvocationParameterInputs =
(
- invocationParameters: Record<
- string,
- string | number | boolean | string[] | Record
- >,
+ invocationParameters: JsonObjectSchema,
modelSupportedInvocationParameters: InvocationParameter[]
): InvocationParameterInput[] => {
return Object.entries(invocationParameters)
@@ -824,12 +854,19 @@ export const getChatCompletionVariables = ({
targetProvider: instance.model.provider,
});
if (instance.tools.length > 0) {
- invocationParameters = invocationParameters.map((param) =>
- param.canonicalName === TOOL_CHOICE_PARAM_CANONICAL_NAME
- ? { ...param, valueJson: convertedToolChoice }
- : param
+ // ensure a single tool choice is added to the invocation parameters
+ invocationParameters = invocationParameters.filter(
+ (param) =>
+ param.invocationName !== TOOL_CHOICE_PARAM_NAME &&
+ param.canonicalName !== TOOL_CHOICE_PARAM_CANONICAL_NAME
);
+ invocationParameters.push({
+ canonicalName: TOOL_CHOICE_PARAM_CANONICAL_NAME,
+ invocationName: TOOL_CHOICE_PARAM_NAME,
+ valueJson: convertedToolChoice,
+ });
} else {
+ // remove tool choice if there are no tools
invocationParameters = invocationParameters.filter(
(param) =>
param.invocationName !== TOOL_CHOICE_PARAM_NAME &&
diff --git a/app/src/pages/playground/schemas.ts b/app/src/pages/playground/schemas.ts
index 9841d04c72..23ae9cc670 100644
--- a/app/src/pages/playground/schemas.ts
+++ b/app/src/pages/playground/schemas.ts
@@ -1,4 +1,5 @@
import { z } from "zod";
+import zodToJsonSchema from "zod-to-json-schema";
import {
LLMAttributePostfixes,
@@ -105,18 +106,37 @@ const chatMessageSchema = schemaForType()(
export const chatMessagesSchema = z.array(chatMessageSchema);
/**
- * Model generic invocation parameters schema in zod.
+ * The zod schema for JSON literal primitives
+ * @see {@link https://zod.dev/?id=json-type|Zod Documentation}
+ */
+const literalSchema = z.union([z.string(), z.number(), z.boolean(), z.null()]);
+type Literal = z.infer;
+type Json = Literal | { [key: string]: Json } | Json[];
+/**
+ * The zod schema for JSON
+ * @see {@link https://zod.dev/?id=json-type|Zod Documentation}
*/
-const invocationParameterSchema = z.record(
+export const jsonLiteralSchema: z.ZodType = z.lazy(() =>
z.union([
- z.boolean(),
- z.number(),
- z.string(),
- z.array(z.string()),
- z.record(z.unknown()),
+ literalSchema,
+ z.array(jsonLiteralSchema),
+ z.record(jsonLiteralSchema),
])
);
+export type JsonLiteralSchema = z.infer;
+
+export const jsonObjectSchema: z.ZodType<{ [key: string]: Json }> = z.lazy(() =>
+ z.record(jsonLiteralSchema)
+);
+
+export type JsonObjectSchema = z.infer;
+
+/**
+ * Model generic invocation parameters schema in zod.
+ */
+const invocationParameterSchema = jsonObjectSchema;
+
/**
* The type of the invocation parameters schema
*/
@@ -143,7 +163,15 @@ const stringToInvocationParametersSchema = z
return z.NEVER;
}
- return invocationParameterSchema.parse(json);
+ const { success, data } = invocationParameterSchema.safeParse(json);
+ if (!success) {
+ ctx.addIssue({
+ code: z.ZodIssueCode.custom,
+ message: "The invocation parameters must be a valid JSON object",
+ });
+ return z.NEVER;
+ }
+ return data;
})
.default("{}");
/**
@@ -167,6 +195,17 @@ export const modelConfigWithInvocationParametersSchema = z.object({
}),
});
+export const modelConfigWithResponseFormatSchema = z.object({
+ [SemanticAttributePrefixes.llm]: z.object({
+ [LLMAttributePostfixes.invocation_parameters]:
+ stringToInvocationParametersSchema.pipe(
+ z.object({
+ response_format: jsonObjectSchema.optional(),
+ })
+ ),
+ }),
+});
+
/**
* The zod schema for llm.tools.{i}.tool.json_schema attribute
* This will be a json string parsed into an object
@@ -225,3 +264,23 @@ export const llmToolSchema = z
.optional();
export type LlmToolSchema = z.infer;
+
+export const openAIResponseFormatSchema = z.lazy(() =>
+ z.object({
+ type: z.literal("json_schema"),
+ json_schema: z.object({
+ name: z.string().describe("The name of the schema"),
+ schema: jsonLiteralSchema,
+ strict: z.literal(true).describe("The schema must be strict"),
+ }),
+ })
+);
+
+export type OpenAIResponseFormat = z.infer;
+
+export const openAIResponseFormatJSONSchema = zodToJsonSchema(
+ openAIResponseFormatSchema,
+ {
+ removeAdditionalStrategy: "passthrough",
+ }
+);
diff --git a/app/src/schemas/toolChoiceSchemas.ts b/app/src/schemas/toolChoiceSchemas.ts
index 48e19dc0e3..52e4b5c38a 100644
--- a/app/src/schemas/toolChoiceSchemas.ts
+++ b/app/src/schemas/toolChoiceSchemas.ts
@@ -5,9 +5,9 @@ import { assertUnreachable, schemaForType } from "@phoenix/typeUtils";
/**
* OpenAI's tool choice schema
*
- * @see https://platform.openai.com/docs/api-reference/chat/create#chat-create-tool_choice
+ * @see https://platform.openai.com/docs/api-reference/chat/create#chat-create-tool_choice
*/
-export const openaiToolChoiceSchema = schemaForType()(
+export const openAIToolChoiceSchema = schemaForType()(
z.union([
z.literal("auto"),
z.literal("none"),
@@ -19,7 +19,7 @@ export const openaiToolChoiceSchema = schemaForType()(
])
);
-export type OpenaiToolChoice = z.infer;
+export type OpenaiToolChoice = z.infer;
/**
* Anthropic's tool choice schema
@@ -53,19 +53,19 @@ export const anthropicToolChoiceToOpenaiToolChoice =
}
});
-export const openaiToolChoiceToAnthropicToolChoice =
- openaiToolChoiceSchema.transform((openai): AnthropicToolChoice => {
- if (typeof openai === "string") {
+export const openAIToolChoiceToAnthropicToolChoice =
+ openAIToolChoiceSchema.transform((openAI): AnthropicToolChoice => {
+ if (typeof openAI === "string") {
return { type: "auto" };
}
return {
type: "tool",
- name: openai.function.name,
+ name: openAI.function.name,
};
});
export const llmProviderToolChoiceSchema = z.union([
- openaiToolChoiceSchema,
+ openAIToolChoiceSchema,
anthropicToolChoiceSchema,
]);
@@ -88,10 +88,10 @@ export type ToolChoiceWithProvider =
export const detectToolChoiceProvider = (
toolChoice: unknown
): ToolChoiceWithProvider => {
- const { success: openaiSuccess, data: openaiData } =
- openaiToolChoiceSchema.safeParse(toolChoice);
- if (openaiSuccess) {
- return { provider: "OPENAI", toolChoice: openaiData };
+ const { success: openAISuccess, data: openAIData } =
+ openAIToolChoiceSchema.safeParse(toolChoice);
+ if (openAISuccess) {
+ return { provider: "OPENAI", toolChoice: openAIData };
}
const { success: anthropicSuccess, data: anthropicData } =
anthropicToolChoiceSchema.safeParse(toolChoice);
@@ -147,7 +147,7 @@ export const fromOpenAIToolChoice = ({
case "OPENAI":
return toolChoice as ProviderToToolChoiceMap[T];
case "ANTHROPIC":
- return openaiToolChoiceToAnthropicToolChoice.parse(
+ return openAIToolChoiceToAnthropicToolChoice.parse(
toolChoice
) as ProviderToToolChoiceMap[T];
default:
@@ -164,10 +164,10 @@ export const safelyConvertToolChoiceToProvider = ({
}): ProviderToToolChoiceMap[T] | null => {
try {
// convert incoming tool choice to the OpenAI format
- const openaiToolChoice = toOpenAIToolChoice(toolChoice);
+ const openAIToolChoice = toOpenAIToolChoice(toolChoice);
// convert the OpenAI format to the target provider format
return fromOpenAIToolChoice({
- toolChoice: openaiToolChoice,
+ toolChoice: openAIToolChoice,
targetProvider,
});
} catch (e) {
diff --git a/app/src/store/playground/playgroundStore.tsx b/app/src/store/playground/playgroundStore.tsx
index ccf3a3cc8c..75319ae6e8 100644
--- a/app/src/store/playground/playgroundStore.tsx
+++ b/app/src/store/playground/playgroundStore.tsx
@@ -8,6 +8,7 @@ import {
DEFAULT_MODEL_NAME,
DEFAULT_MODEL_PROVIDER,
} from "@phoenix/constants/generativeConstants";
+import { OpenAIResponseFormat } from "@phoenix/pages/playground/schemas";
import {
GenAIOperationType,
@@ -114,6 +115,22 @@ export function createPlaygroundInstance(): PlaygroundInstance {
};
}
+export function createOpenAIResponseFormat(): OpenAIResponseFormat {
+ return {
+ type: "json_schema",
+ json_schema: {
+ name: "response",
+ schema: {
+ type: "object",
+ properties: {},
+ required: [],
+ additionalProperties: false,
+ },
+ strict: true,
+ },
+ };
+}
+
/**
* Gets the initial instances for the playground store
* If the initial props has instances, those will be used.
@@ -373,6 +390,88 @@ export const createPlaygroundStore = (initialProps: InitialPlaygroundState) => {
}),
});
},
+ upsertInvocationParameterInput: ({
+ instanceId,
+ invocationParameterInput,
+ }) => {
+ const instance = get().instances.find((i) => i.id === instanceId);
+ if (!instance) {
+ return;
+ }
+ const currentInvocationParameterInput =
+ instance.model.invocationParameters.find(
+ (p) => p.invocationName === invocationParameterInput.invocationName
+ );
+
+ if (currentInvocationParameterInput) {
+ set({
+ instances: get().instances.map((instance) => {
+ if (instance.id === instanceId) {
+ return {
+ ...instance,
+ model: {
+ ...instance.model,
+ invocationParameters: instance.model.invocationParameters.map(
+ (p) =>
+ p.invocationName ===
+ invocationParameterInput.invocationName
+ ? invocationParameterInput
+ : p
+ ),
+ },
+ };
+ }
+ return instance;
+ }),
+ });
+ } else {
+ set({
+ instances: get().instances.map((instance) => {
+ if (instance.id === instanceId) {
+ return {
+ ...instance,
+ model: {
+ ...instance.model,
+ invocationParameters: [
+ ...instance.model.invocationParameters,
+ invocationParameterInput,
+ ],
+ },
+ };
+ }
+ return instance;
+ }),
+ });
+ }
+ },
+ deleteInvocationParameterInput: ({
+ instanceId,
+ invocationParameterInputInvocationName,
+ }) => {
+ const instance = get().instances.find((i) => i.id === instanceId);
+ if (!instance) {
+ return;
+ }
+ set({
+ instances: get().instances.map((instance) => {
+ if (instance.id === instanceId) {
+ return {
+ ...instance,
+ model: {
+ ...instance.model,
+ invocationParameters:
+ instance.model.invocationParameters.filter(
+ (p) =>
+ p.invocationName !==
+ invocationParameterInputInvocationName
+ ),
+ },
+ };
+ }
+ return instance;
+ }),
+ });
+ },
...initialProps,
});
return create(devtools(playgroundStore));
diff --git a/app/src/store/playground/types.ts b/app/src/store/playground/types.ts
index ef98a812e2..bf68b9065e 100644
--- a/app/src/store/playground/types.ts
+++ b/app/src/store/playground/types.ts
@@ -205,6 +205,20 @@ export interface PlaygroundState extends PlaygroundProps {
instanceId: number;
invocationParameters: InvocationParameterInput[];
}) => void;
+ /**
+ * Upsert an invocation parameter input for a model
+ */
+ upsertInvocationParameterInput: (params: {
+ instanceId: number;
+ invocationParameterInput: InvocationParameterInput;
+ }) => void;
+ /**
+ * Delete an invocation parameter input for a model
+ */
+ deleteInvocationParameterInput: (params: {
+ instanceId: number;
+ invocationParameterInputInvocationName: string;
+ }) => void;
/**
* Filter the invocation parameters for a model based on the model's supported parameters
*/