diff --git a/.stats.yml b/.stats.yml
index 19868fc..36f4b58 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,2 @@
 configured_endpoints: 68
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-b5b0e2c794b012919701c3fd43286af10fa25d33ceb8a881bec2636028f446e0.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-3904ef6b29a89c98f93a9b7da19879695f3c440564be6384db7af1b734611ede.yml
diff --git a/audiospeech.go b/audiospeech.go
index a3cb5b0..38f6f74 100644
--- a/audiospeech.go
+++ b/audiospeech.go
@@ -53,9 +53,9 @@ type AudioSpeechNewParams struct {
     // One of the available [TTS models](https://platform.openai.com/docs/models#tts):
     // `tts-1` or `tts-1-hd`
     Model param.Field[SpeechModel] `json:"model,required"`
-    // The voice to use when generating the audio. Supported voices are `alloy`,
-    // `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are
-    // available in the
+    // The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
+    // `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the
+    // voices are available in the
     // [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
     Voice param.Field[AudioSpeechNewParamsVoice] `json:"voice,required"`
     // The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`,
@@ -70,24 +70,27 @@ func (r AudioSpeechNewParams) MarshalJSON() (data []byte, err error) {
     return apijson.MarshalRoot(r)
 }
 
-// The voice to use when generating the audio. Supported voices are `alloy`,
-// `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are
-// available in the
+// The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
+// `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the
+// voices are available in the
 // [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
 type AudioSpeechNewParamsVoice string
 
 const (
     AudioSpeechNewParamsVoiceAlloy   AudioSpeechNewParamsVoice = "alloy"
+    AudioSpeechNewParamsVoiceAsh     AudioSpeechNewParamsVoice = "ash"
+    AudioSpeechNewParamsVoiceCoral   AudioSpeechNewParamsVoice = "coral"
     AudioSpeechNewParamsVoiceEcho    AudioSpeechNewParamsVoice = "echo"
     AudioSpeechNewParamsVoiceFable   AudioSpeechNewParamsVoice = "fable"
     AudioSpeechNewParamsVoiceOnyx    AudioSpeechNewParamsVoice = "onyx"
     AudioSpeechNewParamsVoiceNova    AudioSpeechNewParamsVoice = "nova"
+    AudioSpeechNewParamsVoiceSage    AudioSpeechNewParamsVoice = "sage"
     AudioSpeechNewParamsVoiceShimmer AudioSpeechNewParamsVoice = "shimmer"
 )
 
 func (r AudioSpeechNewParamsVoice) IsKnown() bool {
     switch r {
-    case AudioSpeechNewParamsVoiceAlloy, AudioSpeechNewParamsVoiceEcho, AudioSpeechNewParamsVoiceFable, AudioSpeechNewParamsVoiceOnyx, AudioSpeechNewParamsVoiceNova, AudioSpeechNewParamsVoiceShimmer:
+    case AudioSpeechNewParamsVoiceAlloy, AudioSpeechNewParamsVoiceAsh, AudioSpeechNewParamsVoiceCoral, AudioSpeechNewParamsVoiceEcho, AudioSpeechNewParamsVoiceFable, AudioSpeechNewParamsVoiceOnyx, AudioSpeechNewParamsVoiceNova, AudioSpeechNewParamsVoiceSage, AudioSpeechNewParamsVoiceShimmer:
         return true
     }
     return false
diff --git a/betathread.go b/betathread.go
index 254470e..b5a9884 100644
--- a/betathread.go
+++ b/betathread.go
@@ -209,7 +209,7 @@ func (r AssistantToolChoiceFunctionParam) MarshalJSON() (data []byte, err error)
 // `{"type": "function", "function": {"name": "my_function"}}` forces the model to
 // call that tool.
 //
-// Union satisfied by [AssistantToolChoiceOptionBehavior] or [AssistantToolChoice].
+// Union satisfied by [AssistantToolChoiceOptionAuto] or [AssistantToolChoice].
 type AssistantToolChoiceOptionUnion interface {
     implementsAssistantToolChoiceOptionUnion()
 }
@@ -220,7 +220,7 @@ func init() {
         "",
         apijson.UnionVariant{
             TypeFilter: gjson.String,
-            Type:       reflect.TypeOf(AssistantToolChoiceOptionBehavior("")),
+            Type:       reflect.TypeOf(AssistantToolChoiceOptionAuto("")),
         },
         apijson.UnionVariant{
             TypeFilter: gjson.JSON,
@@ -233,25 +233,25 @@ func init() {
 // `auto` means the model can pick between generating a message or calling one or
 // more tools. `required` means the model must call one or more tools before
 // responding to the user.
-type AssistantToolChoiceOptionBehavior string
+type AssistantToolChoiceOptionAuto string
 
 const (
-    AssistantToolChoiceOptionBehaviorNone     AssistantToolChoiceOptionBehavior = "none"
-    AssistantToolChoiceOptionBehaviorAuto     AssistantToolChoiceOptionBehavior = "auto"
-    AssistantToolChoiceOptionBehaviorRequired AssistantToolChoiceOptionBehavior = "required"
+    AssistantToolChoiceOptionAutoNone     AssistantToolChoiceOptionAuto = "none"
+    AssistantToolChoiceOptionAutoAuto     AssistantToolChoiceOptionAuto = "auto"
+    AssistantToolChoiceOptionAutoRequired AssistantToolChoiceOptionAuto = "required"
 )
 
-func (r AssistantToolChoiceOptionBehavior) IsKnown() bool {
+func (r AssistantToolChoiceOptionAuto) IsKnown() bool {
     switch r {
-    case AssistantToolChoiceOptionBehaviorNone, AssistantToolChoiceOptionBehaviorAuto, AssistantToolChoiceOptionBehaviorRequired:
+    case AssistantToolChoiceOptionAutoNone, AssistantToolChoiceOptionAutoAuto, AssistantToolChoiceOptionAutoRequired:
         return true
     }
     return false
 }
 
-func (r AssistantToolChoiceOptionBehavior) implementsAssistantToolChoiceOptionUnion() {}
+func (r AssistantToolChoiceOptionAuto) implementsAssistantToolChoiceOptionUnion() {}
 
-func (r AssistantToolChoiceOptionBehavior) implementsAssistantToolChoiceOptionUnionParam() {}
+func (r AssistantToolChoiceOptionAuto) implementsAssistantToolChoiceOptionUnionParam() {}
 
 // Controls which (if any) tool is called by the model. `none` means the model will
 // not call any tools and instead generates a message. `auto` is the default value
@@ -261,7 +261,7 @@ func (r AssistantToolChoiceOptionBehavior) implementsAssistantToolChoiceOptionUn
 // `{"type": "function", "function": {"name": "my_function"}}` forces the model to
 // call that tool.
 //
-// Satisfied by [AssistantToolChoiceOptionBehavior], [AssistantToolChoiceParam].
+// Satisfied by [AssistantToolChoiceOptionAuto], [AssistantToolChoiceParam].
 type AssistantToolChoiceOptionUnionParam interface {
     implementsAssistantToolChoiceOptionUnionParam()
 }
diff --git a/betathread_test.go b/betathread_test.go
index 849e257..b4d386f 100644
--- a/betathread_test.go
+++ b/betathread_test.go
@@ -193,7 +193,7 @@ func TestBetaThreadNewAndRunWithOptionalParams(t *testing.T) {
                 }),
             }),
         }),
-        ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionBehavior(openai.AssistantToolChoiceOptionBehaviorNone)),
+        ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoNone)),
         ToolResources: openai.F(openai.BetaThreadNewAndRunParamsToolResources{
             CodeInterpreter: openai.F(openai.BetaThreadNewAndRunParamsToolResourcesCodeInterpreter{
                 FileIDs: openai.F([]string{"string"}),
diff --git a/betathreadrun_test.go b/betathreadrun_test.go
index 8f1b323..13feaac 100644
--- a/betathreadrun_test.go
+++ b/betathreadrun_test.go
@@ -50,7 +50,7 @@ func TestBetaThreadRunNewWithOptionalParams(t *testing.T) {
         Model:             openai.F(openai.ChatModelGPT4o),
         ParallelToolCalls: openai.F(true),
         Temperature:       openai.F(1.000000),
-        ToolChoice:        openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionBehavior(openai.AssistantToolChoiceOptionBehaviorNone)),
+        ToolChoice:        openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoNone)),
         Tools: openai.F([]openai.AssistantToolUnionParam{openai.CodeInterpreterToolParam{
             Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
         }}),
diff --git a/chatcompletion.go b/chatcompletion.go
index 70e843f..939abd8 100644
--- a/chatcompletion.go
+++ b/chatcompletion.go
@@ -86,8 +86,7 @@ type ChatCompletion struct {
     Model string `json:"model,required"`
     // The object type, which is always `chat.completion`.
     Object ChatCompletionObject `json:"object,required"`
-    // The service tier used for processing the request. This field is only included if
-    // the `service_tier` parameter is specified in the request.
+    // The service tier used for processing the request.
     ServiceTier ChatCompletionServiceTier `json:"service_tier,nullable"`
     // This fingerprint represents the backend configuration that the model runs with.
     //
@@ -222,8 +221,7 @@ func (r ChatCompletionObject) IsKnown() bool {
     return false
 }
 
-// The service tier used for processing the request. This field is only included if
-// the `service_tier` parameter is specified in the request.
+// The service tier used for processing the request.
 type ChatCompletionServiceTier string
 
 const (
@@ -470,8 +468,7 @@ type ChatCompletionChunk struct {
     Model string `json:"model,required"`
     // The object type, which is always `chat.completion.chunk`.
     Object ChatCompletionChunkObject `json:"object,required"`
-    // The service tier used for processing the request. This field is only included if
-    // the `service_tier` parameter is specified in the request.
+    // The service tier used for processing the request.
     ServiceTier ChatCompletionChunkServiceTier `json:"service_tier,nullable"`
     // This fingerprint represents the backend configuration that the model runs with.
     // Can be used in conjunction with the `seed` request parameter to understand when
@@ -768,8 +765,7 @@ func (r ChatCompletionChunkObject) IsKnown() bool {
     return false
 }
 
-// The service tier used for processing the request. This field is only included if
-// the `service_tier` parameter is specified in the request.
+// The service tier used for processing the request.
 type ChatCompletionChunkServiceTier string
 
 const (
@@ -1589,7 +1585,7 @@ func (r ChatCompletionToolType) IsKnown() bool {
 // `none` is the default when no tools are present. `auto` is the default if tools
 // are present.
 //
-// Satisfied by [ChatCompletionToolChoiceOptionBehavior],
+// Satisfied by [ChatCompletionToolChoiceOptionAuto],
 // [ChatCompletionNamedToolChoiceParam].
 type ChatCompletionToolChoiceOptionUnionParam interface {
     implementsChatCompletionToolChoiceOptionUnionParam()
@@ -1598,24 +1594,23 @@ type ChatCompletionToolChoiceOptionUnionParam interface {
 // `none` means the model will not call any tool and instead generates a message.
 // `auto` means the model can pick between generating a message or calling one or
 // more tools. `required` means the model must call one or more tools.
-type ChatCompletionToolChoiceOptionBehavior string
+type ChatCompletionToolChoiceOptionAuto string
 
 const (
-    ChatCompletionToolChoiceOptionBehaviorNone     ChatCompletionToolChoiceOptionBehavior = "none"
-    ChatCompletionToolChoiceOptionBehaviorAuto     ChatCompletionToolChoiceOptionBehavior = "auto"
-    ChatCompletionToolChoiceOptionBehaviorRequired ChatCompletionToolChoiceOptionBehavior = "required"
+    ChatCompletionToolChoiceOptionAutoNone     ChatCompletionToolChoiceOptionAuto = "none"
+    ChatCompletionToolChoiceOptionAutoAuto     ChatCompletionToolChoiceOptionAuto = "auto"
+    ChatCompletionToolChoiceOptionAutoRequired ChatCompletionToolChoiceOptionAuto = "required"
 )
 
-func (r ChatCompletionToolChoiceOptionBehavior) IsKnown() bool {
+func (r ChatCompletionToolChoiceOptionAuto) IsKnown() bool {
     switch r {
-    case ChatCompletionToolChoiceOptionBehaviorNone, ChatCompletionToolChoiceOptionBehaviorAuto, ChatCompletionToolChoiceOptionBehaviorRequired:
+    case ChatCompletionToolChoiceOptionAutoNone, ChatCompletionToolChoiceOptionAutoAuto, ChatCompletionToolChoiceOptionAutoRequired:
         return true
     }
     return false
 }
 
-func (r ChatCompletionToolChoiceOptionBehavior) implementsChatCompletionToolChoiceOptionUnionParam() {
-}
+func (r ChatCompletionToolChoiceOptionAuto) implementsChatCompletionToolChoiceOptionUnionParam() {}
 
 type ChatCompletionToolMessageParam struct {
     // The contents of the tool message.
@@ -1815,9 +1810,6 @@ type ChatCompletionNewParams struct {
     // - If set to 'default', the request will be processed using the default service
     //   tier with a lower uptime SLA and no latency guarentee.
     // - When not set, the default behavior is 'auto'.
-    //
-    // When this parameter is set, the response body will include the `service_tier`
-    // utilized.
     ServiceTier param.Field[ChatCompletionNewParamsServiceTier] `json:"service_tier"`
     // Up to 4 sequences where the API will stop generating further tokens.
     Stop param.Field[ChatCompletionNewParamsStopUnion] `json:"stop"`
@@ -1881,7 +1873,7 @@ func (r ChatCompletionNewParams) MarshalJSON() (data []byte, err error) {
 // `none` is the default when no functions are present. `auto` is the default if
 // functions are present.
 //
-// Satisfied by [ChatCompletionNewParamsFunctionCallBehavior],
+// Satisfied by [ChatCompletionNewParamsFunctionCallAuto],
 // [ChatCompletionFunctionCallOptionParam].
 //
 // Deprecated: deprecated
@@ -1892,22 +1884,22 @@ type ChatCompletionNewParamsFunctionCallUnion interface {
 // `none` means the model will not call a function and instead generates a message.
 // `auto` means the model can pick between generating a message or calling a
 // function.
-type ChatCompletionNewParamsFunctionCallBehavior string
+type ChatCompletionNewParamsFunctionCallAuto string
 
 const (
-    ChatCompletionNewParamsFunctionCallBehaviorNone ChatCompletionNewParamsFunctionCallBehavior = "none"
-    ChatCompletionNewParamsFunctionCallBehaviorAuto ChatCompletionNewParamsFunctionCallBehavior = "auto"
+    ChatCompletionNewParamsFunctionCallAutoNone ChatCompletionNewParamsFunctionCallAuto = "none"
+    ChatCompletionNewParamsFunctionCallAutoAuto ChatCompletionNewParamsFunctionCallAuto = "auto"
 )
 
-func (r ChatCompletionNewParamsFunctionCallBehavior) IsKnown() bool {
+func (r ChatCompletionNewParamsFunctionCallAuto) IsKnown() bool {
     switch r {
-    case ChatCompletionNewParamsFunctionCallBehaviorNone, ChatCompletionNewParamsFunctionCallBehaviorAuto:
+    case ChatCompletionNewParamsFunctionCallAutoNone, ChatCompletionNewParamsFunctionCallAutoAuto:
         return true
     }
     return false
 }
 
-func (r ChatCompletionNewParamsFunctionCallBehavior) implementsChatCompletionNewParamsFunctionCallUnion() {
+func (r ChatCompletionNewParamsFunctionCallAuto) implementsChatCompletionNewParamsFunctionCallUnion() {
 }
 
 // Deprecated: deprecated
@@ -2015,9 +2007,6 @@ func (r ChatCompletionNewParamsResponseFormatType) IsKnown() bool {
 // - If set to 'default', the request will be processed using the default service
 //   tier with a lower uptime SLA and no latency guarentee.
 // - When not set, the default behavior is 'auto'.
-//
-// When this parameter is set, the response body will include the `service_tier`
-// utilized.
 type ChatCompletionNewParamsServiceTier string
 
 const (
diff --git a/chatcompletion_test.go b/chatcompletion_test.go
index 8a91514..5804de9 100644
--- a/chatcompletion_test.go
+++ b/chatcompletion_test.go
@@ -38,7 +38,7 @@ func TestChatCompletionNewWithOptionalParams(t *testing.T) {
             Voice: openai.F(openai.ChatCompletionAudioParamVoiceAlloy),
         }),
         FrequencyPenalty: openai.F(-2.000000),
-        FunctionCall:     openai.F[openai.ChatCompletionNewParamsFunctionCallUnion](openai.ChatCompletionNewParamsFunctionCallBehavior(openai.ChatCompletionNewParamsFunctionCallBehaviorNone)),
+        FunctionCall:     openai.F[openai.ChatCompletionNewParamsFunctionCallUnion](openai.ChatCompletionNewParamsFunctionCallAuto(openai.ChatCompletionNewParamsFunctionCallAutoNone)),
         Functions: openai.F([]openai.ChatCompletionNewParamsFunction{{
             Name:        openai.F("name"),
             Description: openai.F("description"),
@@ -67,7 +67,7 @@ func TestChatCompletionNewWithOptionalParams(t *testing.T) {
         ResponseFormat: openai.F[openai.ChatCompletionNewParamsResponseFormatUnion](shared.ResponseFormatTextParam{
             Type: openai.F(shared.ResponseFormatTextTypeText),
         }),
-        Seed:        openai.F(int64(-9007199254740991)),
+        Seed:        openai.F(int64(0)),
         ServiceTier: openai.F(openai.ChatCompletionNewParamsServiceTierAuto),
         Stop:        openai.F[openai.ChatCompletionNewParamsStopUnion](shared.UnionString("string")),
         Store:       openai.F(true),
@@ -75,7 +75,7 @@ func TestChatCompletionNewWithOptionalParams(t *testing.T) {
             IncludeUsage: openai.F(true),
         }),
         Temperature: openai.F(1.000000),
-        ToolChoice:  openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](openai.ChatCompletionToolChoiceOptionBehavior(openai.ChatCompletionToolChoiceOptionBehaviorNone)),
+        ToolChoice:  openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](openai.ChatCompletionToolChoiceOptionAuto(openai.ChatCompletionToolChoiceOptionAutoNone)),
         Tools: openai.F([]openai.ChatCompletionToolParam{{
             Function: openai.F(shared.FunctionDefinitionParam{
                 Name: openai.F("name"),
diff --git a/completion_test.go b/completion_test.go
index 45df624..b8f9124 100644
--- a/completion_test.go
+++ b/completion_test.go
@@ -39,7 +39,7 @@ func TestCompletionNewWithOptionalParams(t *testing.T) {
         MaxTokens:       openai.F(int64(16)),
         N:               openai.F(int64(1)),
         PresencePenalty: openai.F(-2.000000),
-        Seed:            openai.F(int64(-9007199254740991)),
+        Seed:            openai.F(int64(0)),
         Stop:            openai.F[openai.CompletionNewParamsStopUnion](shared.UnionString("\n")),
         StreamOptions: openai.F(openai.ChatCompletionStreamOptionsParam{
             IncludeUsage: openai.F(true),
diff --git a/embedding.go b/embedding.go
index 0560555..489fdea 100644
--- a/embedding.go
+++ b/embedding.go
@@ -171,7 +171,8 @@ type EmbeddingNewParams struct {
     // `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
     // dimensions or less.
     // [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-    // for counting tokens.
+    // for counting tokens. Some models may also impose a limit on total number of
+    // tokens summed across inputs.
     Input param.Field[EmbeddingNewParamsInputUnion] `json:"input,required"`
     // ID of the model to use. You can use the
     // [List models](https://platform.openai.com/docs/api-reference/models/list) API to
@@ -201,7 +202,8 @@ func (r EmbeddingNewParams) MarshalJSON() (data []byte, err error) {
 // `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
 // dimensions or less.
 // [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-// for counting tokens.
+// for counting tokens. Some models may also impose a limit on total number of
+// tokens summed across inputs.
 //
 // Satisfied by [shared.UnionString], [EmbeddingNewParamsInputArrayOfStrings],
 // [EmbeddingNewParamsInputArrayOfTokens],
diff --git a/finetuningjob.go b/finetuningjob.go
index 02ee479..c2f2557 100644
--- a/finetuningjob.go
+++ b/finetuningjob.go
@@ -366,7 +366,7 @@ func (r FineTuningJobHyperparametersLearningRateMultiplierAuto) ImplementsFineTu
 // The number of epochs to train the model for. An epoch refers to one full cycle
 // through the training dataset.
 //
-// Union satisfied by [FineTuningJobHyperparametersNEpochsBehavior] or
+// Union satisfied by [FineTuningJobHyperparametersNEpochsAuto] or
 // [shared.UnionInt].
 type FineTuningJobHyperparametersNEpochsUnion interface {
     ImplementsFineTuningJobHyperparametersNEpochsUnion()
 }
@@ -378,7 +378,7 @@ func init() {
         "",
         apijson.UnionVariant{
             TypeFilter: gjson.String,
-            Type:       reflect.TypeOf(FineTuningJobHyperparametersNEpochsBehavior("")),
+            Type:       reflect.TypeOf(FineTuningJobHyperparametersNEpochsAuto("")),
         },
         apijson.UnionVariant{
             TypeFilter: gjson.Number,
@@ -387,21 +387,21 @@
     )
 }
 
-type FineTuningJobHyperparametersNEpochsBehavior string
+type FineTuningJobHyperparametersNEpochsAuto string
 
 const (
-    FineTuningJobHyperparametersNEpochsBehaviorAuto FineTuningJobHyperparametersNEpochsBehavior = "auto"
+    FineTuningJobHyperparametersNEpochsAutoAuto FineTuningJobHyperparametersNEpochsAuto = "auto"
 )
 
-func (r FineTuningJobHyperparametersNEpochsBehavior) IsKnown() bool {
+func (r FineTuningJobHyperparametersNEpochsAuto) IsKnown() bool {
     switch r {
-    case FineTuningJobHyperparametersNEpochsBehaviorAuto:
+    case FineTuningJobHyperparametersNEpochsAutoAuto:
         return true
     }
     return false
 }
 
-func (r FineTuningJobHyperparametersNEpochsBehavior) ImplementsFineTuningJobHyperparametersNEpochsUnion() {
+func (r FineTuningJobHyperparametersNEpochsAuto) ImplementsFineTuningJobHyperparametersNEpochsUnion() {
 }
 
 // The object type, which is always "fine_tuning.job".
@@ -1160,80 +1160,79 @@ func (r FineTuningJobNewParamsHyperparameters) MarshalJSON() (data []byte, err e
 // Number of examples in each batch. A larger batch size means that model
 // parameters are updated less frequently, but with lower variance.
 //
-// Satisfied by [FineTuningJobNewParamsHyperparametersBatchSizeBehavior],
+// Satisfied by [FineTuningJobNewParamsHyperparametersBatchSizeAuto],
 // [shared.UnionInt].
 type FineTuningJobNewParamsHyperparametersBatchSizeUnion interface {
     ImplementsFineTuningJobNewParamsHyperparametersBatchSizeUnion()
 }
 
-type FineTuningJobNewParamsHyperparametersBatchSizeBehavior string
+type FineTuningJobNewParamsHyperparametersBatchSizeAuto string
 
 const (
-    FineTuningJobNewParamsHyperparametersBatchSizeBehaviorAuto FineTuningJobNewParamsHyperparametersBatchSizeBehavior = "auto"
+    FineTuningJobNewParamsHyperparametersBatchSizeAutoAuto FineTuningJobNewParamsHyperparametersBatchSizeAuto = "auto"
 )
 
-func (r FineTuningJobNewParamsHyperparametersBatchSizeBehavior) IsKnown() bool {
+func (r FineTuningJobNewParamsHyperparametersBatchSizeAuto) IsKnown() bool {
     switch r {
-    case FineTuningJobNewParamsHyperparametersBatchSizeBehaviorAuto:
+    case FineTuningJobNewParamsHyperparametersBatchSizeAutoAuto:
         return true
     }
     return false
 }
 
-func (r FineTuningJobNewParamsHyperparametersBatchSizeBehavior) ImplementsFineTuningJobNewParamsHyperparametersBatchSizeUnion() {
+func (r FineTuningJobNewParamsHyperparametersBatchSizeAuto) ImplementsFineTuningJobNewParamsHyperparametersBatchSizeUnion() {
 }
 
 // Scaling factor for the learning rate. A smaller learning rate may be useful to
 // avoid overfitting.
 //
-// Satisfied by
-// [FineTuningJobNewParamsHyperparametersLearningRateMultiplierBehavior],
+// Satisfied by [FineTuningJobNewParamsHyperparametersLearningRateMultiplierAuto],
 // [shared.UnionFloat].
 type FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion interface {
     ImplementsFineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion()
 }
 
-type FineTuningJobNewParamsHyperparametersLearningRateMultiplierBehavior string
+type FineTuningJobNewParamsHyperparametersLearningRateMultiplierAuto string
 
 const (
-    FineTuningJobNewParamsHyperparametersLearningRateMultiplierBehaviorAuto FineTuningJobNewParamsHyperparametersLearningRateMultiplierBehavior = "auto"
+    FineTuningJobNewParamsHyperparametersLearningRateMultiplierAutoAuto FineTuningJobNewParamsHyperparametersLearningRateMultiplierAuto = "auto"
 )
 
-func (r FineTuningJobNewParamsHyperparametersLearningRateMultiplierBehavior) IsKnown() bool {
+func (r FineTuningJobNewParamsHyperparametersLearningRateMultiplierAuto) IsKnown() bool {
     switch r {
-    case FineTuningJobNewParamsHyperparametersLearningRateMultiplierBehaviorAuto:
+    case FineTuningJobNewParamsHyperparametersLearningRateMultiplierAutoAuto:
         return true
     }
     return false
 }
 
-func (r FineTuningJobNewParamsHyperparametersLearningRateMultiplierBehavior) ImplementsFineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion() {
+func (r FineTuningJobNewParamsHyperparametersLearningRateMultiplierAuto) ImplementsFineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion() {
 }
 
 // The number of epochs to train the model for. An epoch refers to one full cycle
 // through the training dataset.
 //
-// Satisfied by [FineTuningJobNewParamsHyperparametersNEpochsBehavior],
+// Satisfied by [FineTuningJobNewParamsHyperparametersNEpochsAuto],
 // [shared.UnionInt].
 type FineTuningJobNewParamsHyperparametersNEpochsUnion interface {
     ImplementsFineTuningJobNewParamsHyperparametersNEpochsUnion()
 }
 
-type FineTuningJobNewParamsHyperparametersNEpochsBehavior string
+type FineTuningJobNewParamsHyperparametersNEpochsAuto string
 
 const (
-    FineTuningJobNewParamsHyperparametersNEpochsBehaviorAuto FineTuningJobNewParamsHyperparametersNEpochsBehavior = "auto"
+    FineTuningJobNewParamsHyperparametersNEpochsAutoAuto FineTuningJobNewParamsHyperparametersNEpochsAuto = "auto"
 )
 
-func (r FineTuningJobNewParamsHyperparametersNEpochsBehavior) IsKnown() bool {
+func (r FineTuningJobNewParamsHyperparametersNEpochsAuto) IsKnown() bool {
     switch r {
-    case FineTuningJobNewParamsHyperparametersNEpochsBehaviorAuto:
+    case FineTuningJobNewParamsHyperparametersNEpochsAutoAuto:
         return true
     }
     return false
 }
 
-func (r FineTuningJobNewParamsHyperparametersNEpochsBehavior) ImplementsFineTuningJobNewParamsHyperparametersNEpochsUnion() {
+func (r FineTuningJobNewParamsHyperparametersNEpochsAuto) ImplementsFineTuningJobNewParamsHyperparametersNEpochsUnion() {
 }
 
 type FineTuningJobNewParamsIntegration struct {
diff --git a/finetuningjob_test.go b/finetuningjob_test.go
index 09aef6b..bd42c13 100644
--- a/finetuningjob_test.go
+++ b/finetuningjob_test.go
@@ -29,9 +29,9 @@ func TestFineTuningJobNewWithOptionalParams(t *testing.T) {
         Model:        openai.F(openai.FineTuningJobNewParamsModelBabbage002),
         TrainingFile: openai.F("file-abc123"),
         Hyperparameters: openai.F(openai.FineTuningJobNewParamsHyperparameters{
-            BatchSize:              openai.F[openai.FineTuningJobNewParamsHyperparametersBatchSizeUnion](openai.FineTuningJobNewParamsHyperparametersBatchSizeBehavior(openai.FineTuningJobNewParamsHyperparametersBatchSizeBehaviorAuto)),
-            LearningRateMultiplier: openai.F[openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion](openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierBehavior(openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierBehaviorAuto)),
-            NEpochs:                openai.F[openai.FineTuningJobNewParamsHyperparametersNEpochsUnion](openai.FineTuningJobNewParamsHyperparametersNEpochsBehavior(openai.FineTuningJobNewParamsHyperparametersNEpochsBehaviorAuto)),
+            BatchSize:              openai.F[openai.FineTuningJobNewParamsHyperparametersBatchSizeUnion](openai.FineTuningJobNewParamsHyperparametersBatchSizeAuto(openai.FineTuningJobNewParamsHyperparametersBatchSizeAutoAuto)),
+            LearningRateMultiplier: openai.F[openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierUnion](openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierAuto(openai.FineTuningJobNewParamsHyperparametersLearningRateMultiplierAutoAuto)),
+            NEpochs:                openai.F[openai.FineTuningJobNewParamsHyperparametersNEpochsUnion](openai.FineTuningJobNewParamsHyperparametersNEpochsAuto(openai.FineTuningJobNewParamsHyperparametersNEpochsAutoAuto)),
         }),
         Integrations: openai.F([]openai.FineTuningJobNewParamsIntegration{{
             Type: openai.F(openai.FineTuningJobNewParamsIntegrationsTypeWandb),
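
Usage note (not part of the patch): the sketch below shows how the identifiers touched by this diff might be used from calling code. The enum constants (AudioSpeechNewParamsVoiceAsh, ChatCompletionToolChoiceOptionAuto*) come from the changes above; the client setup, the `get_weather` tool stub, the UserMessage helper, and the model/voice choices are assumptions for illustration only.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/option"
	"github.com/openai/openai-go/shared"
)

func main() {
	ctx := context.Background()
	client := openai.NewClient(option.WithAPIKey("my-api-key")) // assumed setup

	// One of the voice constants added in this patch (`ash`, `coral`, `sage`).
	speech, err := client.Audio.Speech.New(ctx, openai.AudioSpeechNewParams{
		Model: openai.F(openai.SpeechModelTTS1),
		Input: openai.F("Hello from the ash voice."),
		Voice: openai.F(openai.AudioSpeechNewParamsVoiceAsh),
	})
	if err != nil {
		log.Fatal(err)
	}
	defer speech.Body.Close() // the response body carries the audio bytes

	// Renamed enum: ChatCompletionToolChoiceOptionAuto replaces
	// ChatCompletionToolChoiceOptionBehavior; values ("none"/"auto"/"required") are unchanged.
	completion, err := client.Chat.Completions.New(ctx, openai.ChatCompletionNewParams{
		Model: openai.F(openai.ChatModelGPT4o),
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			openai.UserMessage("What is the weather like today?"),
		}),
		Tools: openai.F([]openai.ChatCompletionToolParam{{
			Type: openai.F(openai.ChatCompletionToolTypeFunction),
			Function: openai.F(shared.FunctionDefinitionParam{
				Name: openai.F("get_weather"), // hypothetical tool, for illustration
			}),
		}}),
		ToolChoice: openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](
			openai.ChatCompletionToolChoiceOptionAuto(openai.ChatCompletionToolChoiceOptionAutoAuto),
		),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(completion.Choices[0].Message.Content)
}

The same pattern applies to the other renamed unions (AssistantToolChoiceOptionAuto, ChatCompletionNewParamsFunctionCallAuto, and the FineTuningJobNewParamsHyperparameters*Auto types): construct the string enum value and pass it through openai.F with the union interface as the explicit type parameter, exactly as the updated tests do.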