Skip to content

Commit

Permalink
Safely parse invocation params schema, separately from other model config

Browse files Browse the repository at this point in the history
  • Loading branch information
cephalization committed Oct 22, 2024
1 parent 2b509e1 commit 419ff2a
Show file tree
Hide file tree
Showing 4 changed files with 117 additions and 33 deletions.
40 changes: 40 additions & 0 deletions app/src/pages/playground/__tests__/playgroundUtils.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import {
import {
INPUT_MESSAGES_PARSING_ERROR,
MODEL_CONFIG_PARSING_ERROR,
MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
OUTPUT_MESSAGES_PARSING_ERROR,
OUTPUT_VALUE_PARSING_ERROR,
SPAN_ATTRIBUTES_PARSING_ERROR,
Expand Down Expand Up @@ -333,6 +334,45 @@ describe("transformSpanAttributesToPlaygroundInstance", () => {
parsingErrors: [],
});
});

it("should still parse the model name and provider even if invocation parameters are malformed", () => {
  // Build span attributes whose llm.invocation_parameters is not valid JSON.
  const attributes = JSON.stringify({
    ...spanAttributesWithInputMessages,
    llm: {
      ...spanAttributesWithInputMessages.llm,
      invocation_parameters: "invalid json",
    },
  });
  const malformedSpan = { ...basePlaygroundSpan, attributes };

  // The rest of the model config should survive; no parsing errors surface
  // because unparseable JSON falls back to empty invocation parameters.
  const result = transformSpanAttributesToPlaygroundInstance(malformedSpan);
  expect(result).toEqual({
    playgroundInstance: { ...expectedPlaygroundInstanceWithIO },
    parsingErrors: [],
  });
});

it("should return invocation parameters parsing errors if the invocation parameters are the wrong type", () => {
  // Use null (wrong type — the schema expects a JSON string) for invocation_parameters.
  const attributes = JSON.stringify({
    ...spanAttributesWithInputMessages,
    llm: {
      ...spanAttributesWithInputMessages.llm,
      invocation_parameters: null,
    },
  });
  const spanWithBadParams = { ...basePlaygroundSpan, attributes };

  // A wrong-typed value is reported as a parsing error, while the remaining
  // model config is still extracted into the playground instance.
  const result = transformSpanAttributesToPlaygroundInstance(spanWithBadParams);
  expect(result).toEqual({
    playgroundInstance: { ...expectedPlaygroundInstanceWithIO },
    parsingErrors: [MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR],
  });
});
});

describe("getChatRole", () => {
Expand Down
4 changes: 3 additions & 1 deletion app/src/pages/playground/constants.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@ export const OUTPUT_VALUE_PARSING_ERROR =
// User-facing messages surfaced when span attributes cannot be converted
// into a playground instance.
export const SPAN_ATTRIBUTES_PARSING_ERROR =
  "Unable to parse span attributes, attributes must be valid JSON.";
// Emitted when llm.model_name is missing/invalid; invocation parameters are
// validated separately and have their own error below.
export const MODEL_CONFIG_PARSING_ERROR =
  "Unable to parse model config, expected llm.model_name to be present.";
export const MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR =
  "Unable to parse model config, expected llm.invocation_parameters json string to be present.";

export const modelProviderToModelPrefixMap: Record<ModelProvider, string[]> = {
AZURE_OPENAI: [],
Expand Down
28 changes: 20 additions & 8 deletions app/src/pages/playground/playgroundUtils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ import {
ChatRoleMap,
INPUT_MESSAGES_PARSING_ERROR,
MODEL_CONFIG_PARSING_ERROR,
MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR,
modelProviderToModelPrefixMap,
OUTPUT_MESSAGES_PARSING_ERROR,
OUTPUT_VALUE_PARSING_ERROR,
Expand All @@ -29,6 +30,7 @@ import {
llmOutputMessageSchema,
MessageSchema,
modelConfigSchema,
modelConfigWithInvocationParametersSchema,
outputSchema,
providerSchemas,
} from "./schemas";
Expand Down Expand Up @@ -156,24 +158,34 @@ export function getModelProviderFromModelName(
}

/**
* Attempts to get the llm.model_name and inferred provider from the span attributes.
* Attempts to get the llm.model_name, inferred provider, and invocation parameters from the span attributes.
* @param parsedAttributes the JSON parsed span attributes
* @returns the model config if it exists or parsing errors if it does not
*/
function getModelConfigFromAttributes(
parsedAttributes: unknown
):
| { modelConfig: ModelConfig; parsingErrors: never[] }
| { modelConfig: null; parsingErrors: string[] } {
function getModelConfigFromAttributes(parsedAttributes: unknown): {
modelConfig: ModelConfig | null;
parsingErrors: string[];
} {
const { success, data } = modelConfigSchema.safeParse(parsedAttributes);
if (success) {
// parse invocation params separately, to avoid throwing away other model config if invocation params are invalid
const {
success: invocationParametersSuccess,
data: invocationParametersData,
} = modelConfigWithInvocationParametersSchema.safeParse(parsedAttributes);
const parsingErrors: string[] = [];
if (!invocationParametersSuccess) {
parsingErrors.push(MODEL_CONFIG_WITH_INVOCATION_PARAMETERS_PARSING_ERROR);
}
return {
modelConfig: {
modelName: data.llm.model_name,
provider: getModelProviderFromModelName(data.llm.model_name),
invocationParameters: data.llm.invocation_parameters,
invocationParameters: invocationParametersSuccess
? invocationParametersData.llm.invocation_parameters
: {},
},
parsingErrors: [],
parsingErrors,
};
}
return { modelConfig: null, parsingErrors: [MODEL_CONFIG_PARSING_ERROR] };
Expand Down
78 changes: 54 additions & 24 deletions app/src/pages/playground/schemas.ts
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ const chatMessageSchema = schemaForType<ChatMessage>()(
/** Zod schema for an ordered list of chat messages; each element is validated by chatMessageSchema. */
export const chatMessagesSchema = z.array(chatMessageSchema);

/**
* Model invocation parameters schema in zod.
* Model graphql invocation parameters schema in zod.
*
* Includes all keys besides toolChoice
*/
Expand All @@ -112,40 +112,70 @@ const invocationParameterSchema = schemaForType<
})
);

/**
 * The inferred output type of invocationParameterSchema — i.e. the shape of a
 * parsed invocation-parameters object.
 *
 * NOTE(review): despite the "Schema" suffix this is a value type, not a zod
 * schema object; it is exported, so confirm callers before any rename.
 */
export type InvocationParametersSchema = z.infer<
typeof invocationParameterSchema
>;

/**
 * Transform a string to an invocation parameters object.
 *
 * Behavior visible in this block:
 * - A missing value defaults to "{}" (see .default at the bottom).
 * - A string that is not valid JSON yields an empty object.
 * - Valid JSON is validated against invocationParameterSchema with unknown
 *   keys passed through, snake_case keys are mirrored to camelCase, and the
 *   result is re-parsed so the camelCased keys are validated too.
 *
 * NOTE(review): the inner .parse calls throw when the JSON is valid but does
 * not match invocationParameterSchema; an exception raised inside a zod
 * .transform is not converted into an issue, so an outer safeParse may throw
 * rather than return { success: false } — confirm against zod's transform
 * error semantics.
 */
const stringToInvocationParametersSchema = z
.string()
.transform((s) => {
let json;
try {
// tolerate malformed JSON by falling back to an empty parameters object
json = JSON.parse(s);
} catch (e) {
return {};
}
// using the invocationParameterSchema as a base,
// apply all matching keys from the input string,
// and then map snake cased keys to camel case on top
return (
invocationParameterSchema
.passthrough()
.transform((o) => ({
...o,
// map snake cased keys to camel case, the first char after each _ is uppercase
...Object.fromEntries(
Object.entries(o).map(([k, v]) => [
k.replace(/_([a-z])/g, (_, char) => char.toUpperCase()),
v,
])
),
}))
// reparse the object to ensure the mapped keys are also validated
.transform(invocationParameterSchema.parse)
.parse(json)
);
})
.default("{}");

/**
 * The zod schema for llm model config
 * @see {@link https://github.com/Arize-ai/openinference/blob/main/spec/semantic_conventions.md|Semantic Conventions}
 *
 * Only llm.model_name is required here; llm.invocation_parameters is
 * validated separately by modelConfigWithInvocationParametersSchema so that a
 * malformed invocation-parameters string does not invalidate the whole model
 * config.
 */
export const modelConfigSchema = z.object({
  [SemanticAttributePrefixes.llm]: z.object({
    [LLMAttributePostfixes.model_name]: z.string(),
  }),
});

/**
 * The zod schema for llm.invocation_parameters attributes
 * @see {@link https://github.com/Arize-ai/openinference/blob/main/spec/semantic_conventions.md|Semantic Conventions}
 *
 * Validates only the llm.invocation_parameters string (delegating to
 * stringToInvocationParametersSchema), so it can be safe-parsed independently
 * of the rest of the model config.
 */
export const modelConfigWithInvocationParametersSchema = z.object({
[SemanticAttributePrefixes.llm]: z.object({
[LLMAttributePostfixes.invocation_parameters]:
stringToInvocationParametersSchema,
}),
});

Expand Down

0 comments on commit 419ff2a

Please sign in to comment.