Skip to content

Commit

Permalink
fix(generation): remove trailing comma in inferenceConfig resolver code (#2933)
Browse files Browse the repository at this point in the history
  • Loading branch information
atierian authored and tejas2008 committed Oct 29, 2024
1 parent 4e6bb2f commit 0b8faaf
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 3 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,12 @@ type Recipe {
}

type Query {
summarize(input: String): String @generation(aiModel: "anthropic.claude-3-haiku-20240307-v1:0", systemPrompt: "summarize the input.")
summarize(input: String): String
@generation(
aiModel: "anthropic.claude-3-haiku-20240307-v1:0"
systemPrompt: "summarize the input."
inferenceConfiguration: { temperature: 0.5 }
)

generateRecipe(description: String): Recipe
@generation(aiModel: "anthropic.claude-3-haiku-20240307-v1:0", systemPrompt: "You are a 3 star michelin chef that generates recipes.")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -519,7 +519,7 @@ export function request(ctx) {
const toolConfig = {"tools":[{"toolSpec":{"name":"responseType","description":"Generate a response type for the given field.","inputSchema":{"json":{"type":"object","properties":{"value":{"type":"string","description":"A UTF-8 character sequence."}},"required":["value"]}}}}],"toolChoice":{"tool":{"name":"responseType"}}};
const prompt = "Generate a string based on the description.";
const args = JSON.stringify(ctx.args);
const inferenceConfig = { inferenceConfig: {"maxTokens":100,"temperature":0.7,"topP":0.9} },;
const inferenceConfig = { inferenceConfig: {"maxTokens":100,"temperature":0.7,"topP":0.9} };
return {
resourcePath: '/model/anthropic.claude-3-haiku-20240307-v1:0/converse',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ const generateResolver = (fileName: string, values: Record<string, string>): str
*/
const getInferenceConfigResolverDefinition = (inferenceConfiguration?: InferenceConfiguration): string => {
return inferenceConfiguration && Object.keys(inferenceConfiguration).length > 0
? `{ inferenceConfig: ${JSON.stringify(inferenceConfiguration)} },`
? `{ inferenceConfig: ${JSON.stringify(inferenceConfiguration)} }`
: 'undefined';
};

Expand Down

0 comments on commit 0b8faaf

Please sign in to comment.