feat: Support Response Format in Playground #5259

Merged: 13 commits, merged on Nov 8, 2024
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -29,7 +29,7 @@ repos:
files: \.(jsx?|tsx?|css|.md)$
exclude: \.*__generated__.*$
additional_dependencies:
-  - prettier@3.2.4
+  - prettier@3.3.3
- repo: https://github.com/pre-commit/mirrors-eslint
rev: "8ddcbd412c0b348841f0f82c837702f432539652"
hooks:
35 changes: 34 additions & 1 deletion app/src/pages/playground/PlaygroundChatTemplate.tsx
@@ -36,6 +36,7 @@ import { usePlaygroundContext } from "@phoenix/contexts/PlaygroundContext";
import { useChatMessageStyles } from "@phoenix/hooks/useChatMessageStyles";
import {
ChatMessage,
createOpenAIResponseFormat,
generateMessageId,
PlaygroundChatTemplate as PlaygroundChatTemplateType,
PlaygroundInstance,
@@ -44,11 +45,16 @@ import { assertUnreachable } from "@phoenix/typeUtils";
import { safelyParseJSON } from "@phoenix/utils/jsonUtils";

import { ChatMessageToolCallsEditor } from "./ChatMessageToolCallsEditor";
import {
RESPONSE_FORMAT_PARAM_CANONICAL_NAME,
RESPONSE_FORMAT_PARAM_NAME,
} from "./constants";
import {
MessageContentRadioGroup,
MessageMode,
} from "./MessageContentRadioGroup";
import { MessageRolePicker } from "./MessageRolePicker";
import { PlaygroundResponseFormat } from "./PlaygroundResponseFormat";
import { PlaygroundTools } from "./PlaygroundTools";
import {
createToolCallForProvider,
@@ -74,11 +80,18 @@ export function PlaygroundChatTemplate(props: PlaygroundChatTemplateProps) {
);
const instances = usePlaygroundContext((state) => state.instances);
const updateInstance = usePlaygroundContext((state) => state.updateInstance);
const upsertInvocationParameterInput = usePlaygroundContext(
(state) => state.upsertInvocationParameterInput
);
const playgroundInstance = instances.find((instance) => instance.id === id);
if (!playgroundInstance) {
throw new Error(`Playground instance ${id} not found`);
}
const hasTools = playgroundInstance.tools.length > 0;
const hasResponseFormat =
playgroundInstance.model.invocationParameters.find(
(p) => p.canonicalName === RESPONSE_FORMAT_PARAM_CANONICAL_NAME
) != null;
const { template } = playgroundInstance;
if (template.__type !== "chat") {
throw new Error(`Invalid template type ${template.__type}`);
@@ -151,9 +164,28 @@ export function PlaygroundChatTemplate(props: PlaygroundChatTemplateProps) {
paddingBottom="size-100"
borderColor="dark"
borderTopWidth="thin"
- borderBottomWidth={hasTools ? "thin" : undefined}
+ borderBottomWidth={hasTools || hasResponseFormat ? "thin" : undefined}
>
<Flex direction="row" justifyContent="end" gap="size-100">
<Button
variant="default"
size="compact"
aria-label="output schema"
icon={<Icon svg={<Icons.PlusOutline />} />}
disabled={hasResponseFormat}
onClick={() => {
upsertInvocationParameterInput({
instanceId: id,
invocationParameterInput: {
valueJson: createOpenAIResponseFormat(),
invocationName: RESPONSE_FORMAT_PARAM_NAME,
canonicalName: RESPONSE_FORMAT_PARAM_CANONICAL_NAME,
},
});
}}
>
Output Schema
</Button>
<Button
variant="default"
aria-label="add tool"
@@ -217,6 +249,7 @@ export function PlaygroundChatTemplate(props: PlaygroundChatTemplateProps) {
</Flex>
</View>
{hasTools ? <PlaygroundTools {...props} /> : null}
{hasResponseFormat ? <PlaygroundResponseFormat {...props} /> : null}
</DndContext>
);
}
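The new "Output Schema" button above seeds the instance with a default OpenAI-style `response_format` by calling `createOpenAIResponseFormat()` from `@phoenix/store`. That helper's body is not shown in this diff; a minimal sketch of what such a factory plausibly returns, assuming OpenAI's `json_schema` response-format shape:

```ts
// Hypothetical sketch only: the real createOpenAIResponseFormat() lives in
// @phoenix/store and its exact default value is not part of this PR.
export function createOpenAIResponseFormat() {
  return {
    type: "json_schema",
    json_schema: {
      name: "response", // assumed default schema name
      schema: {
        type: "object",
        properties: {},
        required: [],
        additionalProperties: false,
      },
    },
  };
}
```

Whatever the real default is, the tests later in this PR round-trip it through `JSON.stringify`, so it must be a plain JSON-serializable object.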
2 changes: 1 addition & 1 deletion app/src/pages/playground/PlaygroundOutput.tsx
@@ -351,7 +351,7 @@ export function PlaygroundOutput(props: PlaygroundOutputProps) {

return (
<Card
- title={<TitleWithAlphabeticIndex index={index} title="OutputContent" />}
+ title={<TitleWithAlphabeticIndex index={index} title="Output" />}
collapsible
variant="compact"
bodyStyle={{ padding: 0 }}
134 changes: 134 additions & 0 deletions app/src/pages/playground/PlaygroundResponseFormat.tsx
@@ -0,0 +1,134 @@
import React, { useCallback, useState } from "react";
import { JSONSchema7 } from "json-schema";

import {
Accordion,
AccordionItem,
Button,
Card,
Flex,
Icon,
Icons,
Text,
View,
} from "@arizeai/components";

import { CopyToClipboardButton } from "@phoenix/components";
import { JSONEditor } from "@phoenix/components/code";
import { LazyEditorWrapper } from "@phoenix/components/code/LazyEditorWrapper";
import { usePlaygroundContext } from "@phoenix/contexts/PlaygroundContext";
import { safelyParseJSON } from "@phoenix/utils/jsonUtils";

import {
RESPONSE_FORMAT_PARAM_CANONICAL_NAME,
RESPONSE_FORMAT_PARAM_NAME,
} from "./constants";
import { jsonObjectSchema, openAIResponseFormatJSONSchema } from "./schemas";
import { PlaygroundInstanceProps } from "./types";

/**
* The minimum height for the editor before it is initialized.
* This is to ensure that the editor is properly initialized when it is rendered outside of the viewport.
*/
const RESPONSE_FORMAT_EDITOR_PRE_INIT_HEIGHT = 400;

export function PlaygroundResponseFormat({
playgroundInstanceId,
}: PlaygroundInstanceProps) {
const deleteInvocationParameterInput = usePlaygroundContext(
(state) => state.deleteInvocationParameterInput
);
const instance = usePlaygroundContext((state) =>
state.instances.find((i) => i.id === playgroundInstanceId)
);
const upsertInvocationParameterInput = usePlaygroundContext(
(state) => state.upsertInvocationParameterInput
);

if (!instance) {
throw new Error(`Instance ${playgroundInstanceId} not found`);
}

const responseFormat = instance.model.invocationParameters.find(
(p) => p.invocationName === RESPONSE_FORMAT_PARAM_NAME
);

const [responseFormatDefinition, setResponseFormatDefinition] = useState(
JSON.stringify(responseFormat?.valueJson ?? {}, null, 2)
);

const onChange = useCallback(
(value: string) => {
setResponseFormatDefinition(value);
const { json: format } = safelyParseJSON(value);
if (format == null) {
return;
}
// Don't use the data returned by safeParse here: we want to allow extra keys,
// and zod has no "deepPassthrough" to permit extra keys at every level of the
// schema, so we use the raw parsed JSON, knowing it is a valid object that may
// contain extra keys.
const { success } = jsonObjectSchema.safeParse(format);
if (!success) {
return;
}
upsertInvocationParameterInput({
instanceId: playgroundInstanceId,
invocationParameterInput: {
invocationName: RESPONSE_FORMAT_PARAM_NAME,
valueJson: format,
canonicalName: RESPONSE_FORMAT_PARAM_CANONICAL_NAME,
},
});
},
[playgroundInstanceId, upsertInvocationParameterInput]
);

return (
<Accordion arrowPosition="start">
<AccordionItem id="response-format" title="Output Schema">
<View padding="size-200">
<Card
variant="compact"
title={
<Flex direction="row" gap="size-100">
<Text>Schema</Text>
</Flex>
}
bodyStyle={{ padding: 0 }}
extra={
<Flex direction="row" gap="size-100">
<CopyToClipboardButton text={responseFormatDefinition} />
<Button
aria-label="Delete Output Schema"
icon={<Icon svg={<Icons.TrashOutline />} />}
variant="default"
size="compact"
onClick={() => {
deleteInvocationParameterInput({
instanceId: playgroundInstanceId,
invocationParameterInputInvocationName:
RESPONSE_FORMAT_PARAM_NAME,
});
}}
/>
</Flex>
}
>
<LazyEditorWrapper
preInitializationMinHeight={
RESPONSE_FORMAT_EDITOR_PRE_INIT_HEIGHT
}
>
<JSONEditor
value={responseFormatDefinition}
onChange={onChange}
jsonSchema={openAIResponseFormatJSONSchema as JSONSchema7}
/>
</LazyEditorWrapper>
</Card>
</View>
</AccordionItem>
</Accordion>
);
}
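The onChange handler above deliberately stores the raw parsed JSON rather than the value returned by `jsonObjectSchema.safeParse`, because zod's `.passthrough()` only preserves unknown keys on the object it is applied to. A small self-contained illustration of that limitation (the actual `jsonObjectSchema` in `./schemas` may be defined differently):

```ts
import { z } from "zod";

// .passthrough() keeps unknown keys only at the level where it is applied;
// nested objects still strip unknown keys unless each one opts in as well.
const schema = z
  .object({
    json_schema: z.object({ name: z.string() }), // no .passthrough() here
  })
  .passthrough();

const result = schema.safeParse({
  type: "json_schema", // kept, because the outer object has .passthrough()
  json_schema: { name: "response", strict: true }, // "strict" would be dropped
});

if (result.success) {
  // result.data.json_schema => { name: "response" } -- the nested extra key is
  // gone, which is why the component keeps the raw safelyParseJSON() output.
  console.log(result.data);
}
```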
6 changes: 6 additions & 0 deletions app/src/pages/playground/__tests__/fixtures.ts
@@ -47,6 +47,12 @@ export const basePlaygroundSpan: PlaygroundSpan = {
invocationInputField: "value_int",
invocationName: "seed",
},
{
__typename: "JsonInvocationParameter",
canonicalName: "RESPONSE_FORMAT",
invocationInputField: "value_json",
invocationName: "response_format",
},
],
};
export const spanAttributesWithInputMessages = {
41 changes: 36 additions & 5 deletions app/src/pages/playground/__tests__/playgroundUtils.test.ts
@@ -5,6 +5,7 @@ import { LlmProviderToolCall } from "@phoenix/schemas/toolCallSchemas";
import {
_resetInstanceId,
_resetMessageId,
createOpenAIResponseFormat,
PlaygroundInput,
PlaygroundInstance,
} from "@phoenix/store";
@@ -13,6 +14,7 @@ import {
INPUT_MESSAGES_PARSING_ERROR,
MODEL_CONFIG_PARSING_ERROR,
MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR,
OUTPUT_MESSAGES_PARSING_ERROR,
OUTPUT_VALUE_PARSING_ERROR,
SPAN_ATTRIBUTES_PARSING_ERROR,
@@ -144,7 +146,6 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
modelName: "gpt-4o",
},
template: defaultTemplate,

output: undefined,
},
parsingErrors: [
@@ -153,6 +154,7 @@
OUTPUT_VALUE_PARSING_ERROR,
MODEL_CONFIG_PARSING_ERROR,
MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR,
],
});
});
@@ -455,8 +457,7 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
...spanAttributesWithInputMessages.llm,
// only parameters defined on the span InvocationParameter[] field are parsed
// note that snake case keys are automatically converted to camel case
- invocation_parameters:
- '{"top_p": 0.5, "max_tokens": 100, "seed": 12345, "stop": ["stop", "me"]}',
+ invocation_parameters: `{"top_p": 0.5, "max_tokens": 100, "seed": 12345, "stop": ["stop", "me"], "response_format": ${JSON.stringify(createOpenAIResponseFormat())}}`,
},
}),
};
@@ -486,6 +487,11 @@
invocationName: "stop",
valueStringList: ["stop", "me"],
},
{
canonicalName: "RESPONSE_FORMAT",
invocationName: "response_format",
valueJson: createOpenAIResponseFormat(),
},
],
},
} satisfies PlaygroundInstance,
@@ -548,7 +554,10 @@
playgroundInstance: {
...expectedPlaygroundInstanceWithIO,
},
- parsingErrors: [MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR],
+ parsingErrors: [
+   MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
+   MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR,
+ ],
});
});

@@ -568,7 +577,10 @@
playgroundInstance: {
...expectedPlaygroundInstanceWithIO,
},
- parsingErrors: [MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR],
+ parsingErrors: [
+   MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
+   MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR,
+ ],
});
});

@@ -600,6 +612,25 @@
parsingErrors: [MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR],
});
});

it("should only return response format parsing errors if response format is defined AND malformed", () => {
const span = {
...basePlaygroundSpan,
attributes: JSON.stringify({
...spanAttributesWithInputMessages,
llm: {
...spanAttributesWithInputMessages.llm,
invocation_parameters: `{"response_format": 1234}`,
},
}),
};
expect(transformSpanAttributesToPlaygroundInstance(span)).toEqual({
playgroundInstance: {
...expectedPlaygroundInstanceWithIO,
},
parsingErrors: [MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR],
});
});
});

describe("getChatRole", () => {
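The last response-format test above pins down the intended rule: a missing `response_format` is not an error, while a present-but-malformed one adds `MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR`. A hedged sketch of that check; the real logic lives in `playgroundUtils.ts`, which is not shown in this diff:

```ts
// Sketch only: the actual parsing in playgroundUtils.ts is not part of this diff,
// and the import path for the constant is assumed.
import { MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR } from "../constants";

function getResponseFormatParsingErrors(
  invocationParameters: Record<string, unknown>
): string[] {
  const responseFormat = invocationParameters["response_format"];
  if (responseFormat === undefined) {
    return []; // absent is fine; the parameter is optional
  }
  const isPlainObject =
    typeof responseFormat === "object" &&
    responseFormat !== null &&
    !Array.isArray(responseFormat);
  return isPlainObject ? [] : [MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR];
}
```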
14 changes: 11 additions & 3 deletions app/src/pages/playground/constants.tsx
@@ -28,9 +28,10 @@ export const MODEL_CONFIG_PARSING_ERROR =
"Unable to parse model config, expected llm.model_name to be present.";
export const MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR =
"Unable to parse model config, expected llm.invocation_parameters json string to be present.";
// TODO(parker / apowell) - adjust this error message with anthropic support https://github.com/Arize-ai/phoenix/issues/5100
export const MODEL_CONFIG_WITH_RESPONSE_FORMAT_PARSING_ERROR =
"Unable to parse invocation parameters response_format, expected llm.invocation_parameters.response_format to be a well formed json object or undefined.";
export const TOOLS_PARSING_ERROR =
"Unable to parse tools, expected tools to be an array of valid OpenAI tools.";
"Unable to parse tools, expected tools to be an array of valid tools.";

export const modelProviderToModelPrefixMap: Record<ModelProvider, string[]> = {
AZURE_OPENAI: [],
@@ -45,9 +46,16 @@ export const TOOL_CHOICE_PARAM_CANONICAL_NAME: Extract<

export const TOOL_CHOICE_PARAM_NAME = "tool_choice";

export const RESPONSE_FORMAT_PARAM_CANONICAL_NAME: Extract<
CanonicalParameterName,
"RESPONSE_FORMAT"
> = "RESPONSE_FORMAT";

export const RESPONSE_FORMAT_PARAM_NAME = "response_format";

/**
* List of parameter canonical names to ignore in the invocation parameters form
These parameters are rendered elsewhere on the page
*/
export const paramsToIgnoreInInvocationParametersForm: CanonicalParameterName[] =
- [TOOL_CHOICE_PARAM_CANONICAL_NAME];
+ [TOOL_CHOICE_PARAM_CANONICAL_NAME, RESPONSE_FORMAT_PARAM_CANONICAL_NAME];
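Adding `RESPONSE_FORMAT_PARAM_CANONICAL_NAME` to this list keeps the response format out of the generic invocation-parameters form, since it now gets the dedicated editor added above. A minimal sketch of how such an ignore list is typically consumed (the real form component is not shown in this diff):

```ts
import { paramsToIgnoreInInvocationParametersForm } from "./constants"; // path assumed

type ParamLike = { invocationName: string; canonicalName?: string | null };

// Drop parameters that already have dedicated UI (tool choice, response format)
// before rendering the rest in the generic invocation-parameters form.
function visibleFormParameters(params: ParamLike[]): ParamLike[] {
  return params.filter(
    (p) =>
      p.canonicalName == null ||
      !paramsToIgnoreInInvocationParametersForm.some(
        (ignored) => ignored === p.canonicalName
      )
  );
}
```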