feat(api): update enum values, comments, and examples #181

Merged 1 commit on Jan 22, 2025
.stats.yml: 2 changes (1 addition, 1 deletion)

@@ -1,2 +1,2 @@
 configured_endpoints: 68
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-b5b0e2c794b012919701c3fd43286af10fa25d33ceb8a881bec2636028f446e0.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai-3904ef6b29a89c98f93a9b7da19879695f3c440564be6384db7af1b734611ede.yml
audiospeech.go: 17 changes (10 additions, 7 deletions)

@@ -53,9 +53,9 @@ type AudioSpeechNewParams struct {
 // One of the available [TTS models](https://platform.openai.com/docs/models#tts):
 // `tts-1` or `tts-1-hd`
 Model param.Field[SpeechModel] `json:"model,required"`
-// The voice to use when generating the audio. Supported voices are `alloy`,
-// `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are
-// available in the
+// The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
+// `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the
+// voices are available in the
 // [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
 Voice param.Field[AudioSpeechNewParamsVoice] `json:"voice,required"`
 // The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`,
@@ -70,24 +70,27 @@ func (r AudioSpeechNewParams) MarshalJSON() (data []byte, err error) {
 return apijson.MarshalRoot(r)
 }

-// The voice to use when generating the audio. Supported voices are `alloy`,
-// `echo`, `fable`, `onyx`, `nova`, and `shimmer`. Previews of the voices are
-// available in the
+// The voice to use when generating the audio. Supported voices are `alloy`, `ash`,
+// `coral`, `echo`, `fable`, `onyx`, `nova`, `sage` and `shimmer`. Previews of the
+// voices are available in the
 // [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options).
 type AudioSpeechNewParamsVoice string

 const (
 AudioSpeechNewParamsVoiceAlloy AudioSpeechNewParamsVoice = "alloy"
+AudioSpeechNewParamsVoiceAsh AudioSpeechNewParamsVoice = "ash"
+AudioSpeechNewParamsVoiceCoral AudioSpeechNewParamsVoice = "coral"
 AudioSpeechNewParamsVoiceEcho AudioSpeechNewParamsVoice = "echo"
 AudioSpeechNewParamsVoiceFable AudioSpeechNewParamsVoice = "fable"
 AudioSpeechNewParamsVoiceOnyx AudioSpeechNewParamsVoice = "onyx"
 AudioSpeechNewParamsVoiceNova AudioSpeechNewParamsVoice = "nova"
+AudioSpeechNewParamsVoiceSage AudioSpeechNewParamsVoice = "sage"
 AudioSpeechNewParamsVoiceShimmer AudioSpeechNewParamsVoice = "shimmer"
 )

 func (r AudioSpeechNewParamsVoice) IsKnown() bool {
 switch r {
-case AudioSpeechNewParamsVoiceAlloy, AudioSpeechNewParamsVoiceEcho, AudioSpeechNewParamsVoiceFable, AudioSpeechNewParamsVoiceOnyx, AudioSpeechNewParamsVoiceNova, AudioSpeechNewParamsVoiceShimmer:
+case AudioSpeechNewParamsVoiceAlloy, AudioSpeechNewParamsVoiceAsh, AudioSpeechNewParamsVoiceCoral, AudioSpeechNewParamsVoiceEcho, AudioSpeechNewParamsVoiceFable, AudioSpeechNewParamsVoiceOnyx, AudioSpeechNewParamsVoiceNova, AudioSpeechNewParamsVoiceSage, AudioSpeechNewParamsVoiceShimmer:
 return true
 }
 return false
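The three new voices surface as new constants on `AudioSpeechNewParamsVoice`, and `IsKnown` now accepts them. Below is a minimal sketch of how a caller might exercise one of the new constants; it assumes the SDK's usual client setup and that `Audio.Speech.New` returns a raw `*http.Response` carrying the audio bytes, as this SDK does for binary endpoints.

```go
package main

import (
	"context"
	"io"
	"log"
	"os"

	"github.com/openai/openai-go"
	"github.com/openai/openai-go/option"
)

func main() {
	// The key is read from OPENAI_API_KEY by default; shown explicitly here.
	client := openai.NewClient(option.WithAPIKey(os.Getenv("OPENAI_API_KEY")))

	// Request speech with one of the newly added voices.
	res, err := client.Audio.Speech.New(context.TODO(), openai.AudioSpeechNewParams{
		Model: openai.F(openai.SpeechModelTTS1),
		Input: openai.F("The quick brown fox jumped over the lazy dog."),
		Voice: openai.F(openai.AudioSpeechNewParamsVoiceCoral),
	})
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()

	// The endpoint streams raw audio (mp3 unless ResponseFormat says otherwise).
	f, err := os.Create("speech.mp3")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	if _, err := io.Copy(f, res.Body); err != nil {
		log.Fatal(err)
	}
}
```

Because `IsKnown` enumerates the accepted values, callers that validate voices client-side pick up `ash`, `coral`, and `sage` with no further changes.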
betathread.go: 22 changes (11 additions, 11 deletions)

@@ -209,7 +209,7 @@ func (r AssistantToolChoiceFunctionParam) MarshalJSON() (data []byte, err error)
 // `{"type": "function", "function": {"name": "my_function"}}` forces the model to
 // call that tool.
 //
-// Union satisfied by [AssistantToolChoiceOptionBehavior] or [AssistantToolChoice].
+// Union satisfied by [AssistantToolChoiceOptionAuto] or [AssistantToolChoice].
 type AssistantToolChoiceOptionUnion interface {
 implementsAssistantToolChoiceOptionUnion()
 }
@@ -220,7 +220,7 @@ func init() {
 "",
 apijson.UnionVariant{
 TypeFilter: gjson.String,
-Type: reflect.TypeOf(AssistantToolChoiceOptionBehavior("")),
+Type: reflect.TypeOf(AssistantToolChoiceOptionAuto("")),
 },
 apijson.UnionVariant{
 TypeFilter: gjson.JSON,
@@ -233,25 +233,25 @@ func init() {
 // `auto` means the model can pick between generating a message or calling one or
 // more tools. `required` means the model must call one or more tools before
 // responding to the user.
-type AssistantToolChoiceOptionBehavior string
+type AssistantToolChoiceOptionAuto string

 const (
-AssistantToolChoiceOptionBehaviorNone AssistantToolChoiceOptionBehavior = "none"
-AssistantToolChoiceOptionBehaviorAuto AssistantToolChoiceOptionBehavior = "auto"
-AssistantToolChoiceOptionBehaviorRequired AssistantToolChoiceOptionBehavior = "required"
+AssistantToolChoiceOptionAutoNone AssistantToolChoiceOptionAuto = "none"
+AssistantToolChoiceOptionAutoAuto AssistantToolChoiceOptionAuto = "auto"
+AssistantToolChoiceOptionAutoRequired AssistantToolChoiceOptionAuto = "required"
 )

-func (r AssistantToolChoiceOptionBehavior) IsKnown() bool {
+func (r AssistantToolChoiceOptionAuto) IsKnown() bool {
 switch r {
-case AssistantToolChoiceOptionBehaviorNone, AssistantToolChoiceOptionBehaviorAuto, AssistantToolChoiceOptionBehaviorRequired:
+case AssistantToolChoiceOptionAutoNone, AssistantToolChoiceOptionAutoAuto, AssistantToolChoiceOptionAutoRequired:
 return true
 }
 return false
 }

-func (r AssistantToolChoiceOptionBehavior) implementsAssistantToolChoiceOptionUnion() {}
+func (r AssistantToolChoiceOptionAuto) implementsAssistantToolChoiceOptionUnion() {}

-func (r AssistantToolChoiceOptionBehavior) implementsAssistantToolChoiceOptionUnionParam() {}
+func (r AssistantToolChoiceOptionAuto) implementsAssistantToolChoiceOptionUnionParam() {}

 // Controls which (if any) tool is called by the model. `none` means the model will
 // not call any tools and instead generates a message. `auto` is the default value
@@ -261,7 +261,7 @@ func (r AssistantToolChoiceOptionBehavior) implementsAssistantToolChoiceOptionUn
 // `{"type": "function", "function": {"name": "my_function"}}` forces the model to
 // call that tool.
 //
-// Satisfied by [AssistantToolChoiceOptionBehavior], [AssistantToolChoiceParam].
+// Satisfied by [AssistantToolChoiceOptionAuto], [AssistantToolChoiceParam].
 type AssistantToolChoiceOptionUnionParam interface {
 implementsAssistantToolChoiceOptionUnionParam()
 }
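The rename from `AssistantToolChoiceOptionBehavior` to `AssistantToolChoiceOptionAuto` is a breaking change for anyone constructing the union directly. Here is a sketch of both ways to satisfy `AssistantToolChoiceOptionUnionParam` after this change; the `Type`/`Function` field names and the `AssistantToolChoiceTypeFunction` constant are assumed from the doc comment's JSON shape, not verified against the file.

```go
package main

import "github.com/openai/openai-go"

func toolChoices() (auto, forced openai.AssistantToolChoiceOptionUnionParam) {
	// String variant: the renamed enum satisfies the union directly.
	auto = openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoAuto)

	// Object variant: force one specific tool, mirroring the documented
	// {"type": "function", "function": {"name": "my_function"}} shape.
	// Field and constant names here are assumptions.
	forced = openai.AssistantToolChoiceParam{
		Type: openai.F(openai.AssistantToolChoiceTypeFunction),
		Function: openai.F(openai.AssistantToolChoiceFunctionParam{
			Name: openai.F("my_function"),
		}),
	}
	return auto, forced
}
```

When migrating, a plain search-and-replace of `Behavior` with `Auto` in these identifiers should cover both the type and its constants, since the wire values (`none`, `auto`, `required`) are unchanged.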
betathread_test.go: 2 changes (1 addition, 1 deletion)

@@ -193,7 +193,7 @@ func TestBetaThreadNewAndRunWithOptionalParams(t *testing.T) {
 }),
 }),
 }),
-ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionBehavior(openai.AssistantToolChoiceOptionBehaviorNone)),
+ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoNone)),
 ToolResources: openai.F(openai.BetaThreadNewAndRunParamsToolResources{
 CodeInterpreter: openai.F(openai.BetaThreadNewAndRunParamsToolResourcesCodeInterpreter{
 FileIDs: openai.F([]string{"string"}),
betathreadrun_test.go: 2 changes (1 addition, 1 deletion)

@@ -50,7 +50,7 @@ func TestBetaThreadRunNewWithOptionalParams(t *testing.T) {
 Model: openai.F(openai.ChatModelGPT4o),
 ParallelToolCalls: openai.F(true),
 Temperature: openai.F(1.000000),
-ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionBehavior(openai.AssistantToolChoiceOptionBehaviorNone)),
+ToolChoice: openai.F[openai.AssistantToolChoiceOptionUnionParam](openai.AssistantToolChoiceOptionAuto(openai.AssistantToolChoiceOptionAutoNone)),
 Tools: openai.F([]openai.AssistantToolUnionParam{openai.CodeInterpreterToolParam{
 Type: openai.F(openai.CodeInterpreterToolTypeCodeInterpreter),
 }}),
chatcompletion.go: 49 changes (19 additions, 30 deletions)

@@ -86,8 +86,7 @@ type ChatCompletion struct {
 Model string `json:"model,required"`
 // The object type, which is always `chat.completion`.
 Object ChatCompletionObject `json:"object,required"`
-// The service tier used for processing the request. This field is only included if
-// the `service_tier` parameter is specified in the request.
+// The service tier used for processing the request.
 ServiceTier ChatCompletionServiceTier `json:"service_tier,nullable"`
 // This fingerprint represents the backend configuration that the model runs with.
 //
@@ -222,8 +221,7 @@ func (r ChatCompletionObject) IsKnown() bool {
 return false
 }

-// The service tier used for processing the request. This field is only included if
-// the `service_tier` parameter is specified in the request.
+// The service tier used for processing the request.
 type ChatCompletionServiceTier string

 const (
@@ -470,8 +468,7 @@ type ChatCompletionChunk struct {
 Model string `json:"model,required"`
 // The object type, which is always `chat.completion.chunk`.
 Object ChatCompletionChunkObject `json:"object,required"`
-// The service tier used for processing the request. This field is only included if
-// the `service_tier` parameter is specified in the request.
+// The service tier used for processing the request.
 ServiceTier ChatCompletionChunkServiceTier `json:"service_tier,nullable"`
 // This fingerprint represents the backend configuration that the model runs with.
 // Can be used in conjunction with the `seed` request parameter to understand when
@@ -768,8 +765,7 @@ func (r ChatCompletionChunkObject) IsKnown() bool {
 return false
 }

-// The service tier used for processing the request. This field is only included if
-// the `service_tier` parameter is specified in the request.
+// The service tier used for processing the request.
 type ChatCompletionChunkServiceTier string

 const (
@@ -1589,7 +1585,7 @@ func (r ChatCompletionToolType) IsKnown() bool {
 // `none` is the default when no tools are present. `auto` is the default if tools
 // are present.
 //
-// Satisfied by [ChatCompletionToolChoiceOptionBehavior],
+// Satisfied by [ChatCompletionToolChoiceOptionAuto],
 // [ChatCompletionNamedToolChoiceParam].
 type ChatCompletionToolChoiceOptionUnionParam interface {
 implementsChatCompletionToolChoiceOptionUnionParam()
@@ -1598,24 +1594,23 @@ type ChatCompletionToolChoiceOptionUnionParam interface {
 // `none` means the model will not call any tool and instead generates a message.
 // `auto` means the model can pick between generating a message or calling one or
 // more tools. `required` means the model must call one or more tools.
-type ChatCompletionToolChoiceOptionBehavior string
+type ChatCompletionToolChoiceOptionAuto string

 const (
-ChatCompletionToolChoiceOptionBehaviorNone ChatCompletionToolChoiceOptionBehavior = "none"
-ChatCompletionToolChoiceOptionBehaviorAuto ChatCompletionToolChoiceOptionBehavior = "auto"
-ChatCompletionToolChoiceOptionBehaviorRequired ChatCompletionToolChoiceOptionBehavior = "required"
+ChatCompletionToolChoiceOptionAutoNone ChatCompletionToolChoiceOptionAuto = "none"
+ChatCompletionToolChoiceOptionAutoAuto ChatCompletionToolChoiceOptionAuto = "auto"
+ChatCompletionToolChoiceOptionAutoRequired ChatCompletionToolChoiceOptionAuto = "required"
 )

-func (r ChatCompletionToolChoiceOptionBehavior) IsKnown() bool {
+func (r ChatCompletionToolChoiceOptionAuto) IsKnown() bool {
 switch r {
-case ChatCompletionToolChoiceOptionBehaviorNone, ChatCompletionToolChoiceOptionBehaviorAuto, ChatCompletionToolChoiceOptionBehaviorRequired:
+case ChatCompletionToolChoiceOptionAutoNone, ChatCompletionToolChoiceOptionAutoAuto, ChatCompletionToolChoiceOptionAutoRequired:
 return true
 }
 return false
 }

-func (r ChatCompletionToolChoiceOptionBehavior) implementsChatCompletionToolChoiceOptionUnionParam() {
-}
+func (r ChatCompletionToolChoiceOptionAuto) implementsChatCompletionToolChoiceOptionUnionParam() {}

 type ChatCompletionToolMessageParam struct {
 // The contents of the tool message.
@@ -1815,9 +1810,6 @@ type ChatCompletionNewParams struct {
 // - If set to 'default', the request will be processed using the default service
 // tier with a lower uptime SLA and no latency guarentee.
 // - When not set, the default behavior is 'auto'.
-//
-// When this parameter is set, the response body will include the `service_tier`
-// utilized.
 ServiceTier param.Field[ChatCompletionNewParamsServiceTier] `json:"service_tier"`
 // Up to 4 sequences where the API will stop generating further tokens.
 Stop param.Field[ChatCompletionNewParamsStopUnion] `json:"stop"`
@@ -1881,7 +1873,7 @@ func (r ChatCompletionNewParams) MarshalJSON() (data []byte, err error) {
 // `none` is the default when no functions are present. `auto` is the default if
 // functions are present.
 //
-// Satisfied by [ChatCompletionNewParamsFunctionCallBehavior],
+// Satisfied by [ChatCompletionNewParamsFunctionCallAuto],
 // [ChatCompletionFunctionCallOptionParam].
 //
 // Deprecated: deprecated
@@ -1892,22 +1884,22 @@ type ChatCompletionNewParamsFunctionCallUnion interface {
 // `none` means the model will not call a function and instead generates a message.
 // `auto` means the model can pick between generating a message or calling a
 // function.
-type ChatCompletionNewParamsFunctionCallBehavior string
+type ChatCompletionNewParamsFunctionCallAuto string

 const (
-ChatCompletionNewParamsFunctionCallBehaviorNone ChatCompletionNewParamsFunctionCallBehavior = "none"
-ChatCompletionNewParamsFunctionCallBehaviorAuto ChatCompletionNewParamsFunctionCallBehavior = "auto"
+ChatCompletionNewParamsFunctionCallAutoNone ChatCompletionNewParamsFunctionCallAuto = "none"
+ChatCompletionNewParamsFunctionCallAutoAuto ChatCompletionNewParamsFunctionCallAuto = "auto"
 )

-func (r ChatCompletionNewParamsFunctionCallBehavior) IsKnown() bool {
+func (r ChatCompletionNewParamsFunctionCallAuto) IsKnown() bool {
 switch r {
-case ChatCompletionNewParamsFunctionCallBehaviorNone, ChatCompletionNewParamsFunctionCallBehaviorAuto:
+case ChatCompletionNewParamsFunctionCallAutoNone, ChatCompletionNewParamsFunctionCallAutoAuto:
 return true
 }
 return false
 }

-func (r ChatCompletionNewParamsFunctionCallBehavior) implementsChatCompletionNewParamsFunctionCallUnion() {
+func (r ChatCompletionNewParamsFunctionCallAuto) implementsChatCompletionNewParamsFunctionCallUnion() {
 }

 // Deprecated: deprecated
@@ -2015,9 +2007,6 @@ func (r ChatCompletionNewParamsResponseFormatType) IsKnown() bool {
 // - If set to 'default', the request will be processed using the default service
 // tier with a lower uptime SLA and no latency guarentee.
 // - When not set, the default behavior is 'auto'.
-//
-// When this parameter is set, the response body will include the `service_tier`
-// utilized.
 type ChatCompletionNewParamsServiceTier string

 const (
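The same `Behavior` to `Auto` rename applies to the chat-completions unions, and the `service_tier` response comment no longer claims the field is conditional on the request (it is simply nullable). A hedged sketch of a request that sets both, assuming the SDK's `UserMessage` helper and the `ChatModelGPT4o` constant seen in the tests below:

```go
package main

import "github.com/openai/openai-go"

func newParams() openai.ChatCompletionNewParams {
	return openai.ChatCompletionNewParams{
		Model: openai.F(openai.ChatModelGPT4o),
		Messages: openai.F([]openai.ChatCompletionMessageParamUnion{
			openai.UserMessage("What is the weather in Boston?"),
		}),
		// Renamed type: "required" forces at least one tool call.
		ToolChoice: openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](
			openai.ChatCompletionToolChoiceOptionAuto(openai.ChatCompletionToolChoiceOptionAutoRequired),
		),
		// Request-side tier preference; the response's ServiceTier field
		// reports what was actually used and may be null.
		ServiceTier: openai.F(openai.ChatCompletionNewParamsServiceTierAuto),
	}
}
```

On the response side, `ServiceTier.IsKnown()` still distinguishes the enumerated tiers from values introduced by a newer server, which is the safer check now that the field is documented as plain nullable.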
chatcompletion_test.go: 6 changes (3 additions, 3 deletions)

@@ -38,7 +38,7 @@ func TestChatCompletionNewWithOptionalParams(t *testing.T) {
 Voice: openai.F(openai.ChatCompletionAudioParamVoiceAlloy),
 }),
 FrequencyPenalty: openai.F(-2.000000),
-FunctionCall: openai.F[openai.ChatCompletionNewParamsFunctionCallUnion](openai.ChatCompletionNewParamsFunctionCallBehavior(openai.ChatCompletionNewParamsFunctionCallBehaviorNone)),
+FunctionCall: openai.F[openai.ChatCompletionNewParamsFunctionCallUnion](openai.ChatCompletionNewParamsFunctionCallAuto(openai.ChatCompletionNewParamsFunctionCallAutoNone)),
 Functions: openai.F([]openai.ChatCompletionNewParamsFunction{{
 Name: openai.F("name"),
 Description: openai.F("description"),
@@ -67,15 +67,15 @@ func TestChatCompletionNewWithOptionalParams(t *testing.T) {
 ResponseFormat: openai.F[openai.ChatCompletionNewParamsResponseFormatUnion](shared.ResponseFormatTextParam{
 Type: openai.F(shared.ResponseFormatTextTypeText),
 }),
-Seed: openai.F(int64(-9007199254740991)),
+Seed: openai.F(int64(0)),
 ServiceTier: openai.F(openai.ChatCompletionNewParamsServiceTierAuto),
 Stop: openai.F[openai.ChatCompletionNewParamsStopUnion](shared.UnionString("string")),
 Store: openai.F(true),
 StreamOptions: openai.F(openai.ChatCompletionStreamOptionsParam{
 IncludeUsage: openai.F(true),
 }),
 Temperature: openai.F(1.000000),
-ToolChoice: openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](openai.ChatCompletionToolChoiceOptionBehavior(openai.ChatCompletionToolChoiceOptionBehaviorNone)),
+ToolChoice: openai.F[openai.ChatCompletionToolChoiceOptionUnionParam](openai.ChatCompletionToolChoiceOptionAuto(openai.ChatCompletionToolChoiceOptionAutoNone)),
 Tools: openai.F([]openai.ChatCompletionToolParam{{
 Function: openai.F(shared.FunctionDefinitionParam{
 Name: openai.F("name"),
completion_test.go: 2 changes (1 addition, 1 deletion)

@@ -39,7 +39,7 @@ func TestCompletionNewWithOptionalParams(t *testing.T) {
 MaxTokens: openai.F(int64(16)),
 N: openai.F(int64(1)),
 PresencePenalty: openai.F(-2.000000),
-Seed: openai.F(int64(-9007199254740991)),
+Seed: openai.F(int64(0)),
 Stop: openai.F[openai.CompletionNewParamsStopUnion](shared.UnionString("\n")),
 StreamOptions: openai.F(openai.ChatCompletionStreamOptionsParam{
 IncludeUsage: openai.F(true),
embedding.go: 6 changes (4 additions, 2 deletions)

@@ -171,7 +171,8 @@ type EmbeddingNewParams struct {
 // `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
 // dimensions or less.
 // [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-// for counting tokens.
+// for counting tokens. Some models may also impose a limit on total number of
+// tokens summed across inputs.
 Input param.Field[EmbeddingNewParamsInputUnion] `json:"input,required"`
 // ID of the model to use. You can use the
 // [List models](https://platform.openai.com/docs/api-reference/models/list) API to
@@ -201,7 +202,8 @@ func (r EmbeddingNewParams) MarshalJSON() (data []byte, err error) {
 // `text-embedding-ada-002`), cannot be an empty string, and any array must be 2048
 // dimensions or less.
 // [Example Python code](https://cookbook.openai.com/examples/how_to_count_tokens_with_tiktoken)
-// for counting tokens.
+// for counting tokens. Some models may also impose a limit on total number of
+// tokens summed across inputs.
 //
 // Satisfied by [shared.UnionString], [EmbeddingNewParamsInputArrayOfStrings],
 // [EmbeddingNewParamsInputArrayOfTokens],
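The new sentence warns that some models cap the total tokens summed across all inputs in one request, not just per input. One defensive pattern is to batch large input arrays; the sketch below assumes that splitting by count approximates the token budget well enough for the caller's data (a real implementation would count tokens, e.g. with a tiktoken port), and the batch size is an illustrative knob, not an official limit.

```go
package main

import (
	"context"

	"github.com/openai/openai-go"
)

// embedInBatches submits inputs in fixed-size batches so that no single
// request concentrates the whole corpus's token count.
func embedInBatches(ctx context.Context, client *openai.Client, inputs []string, batchSize int) ([]openai.Embedding, error) {
	var out []openai.Embedding
	for start := 0; start < len(inputs); start += batchSize {
		end := min(start+batchSize, len(inputs)) // min builtin needs Go 1.21+
		resp, err := client.Embeddings.New(ctx, openai.EmbeddingNewParams{
			Model: openai.F(openai.EmbeddingModelTextEmbedding3Small),
			Input: openai.F[openai.EmbeddingNewParamsInputUnion](
				openai.EmbeddingNewParamsInputArrayOfStrings(inputs[start:end]),
			),
		})
		if err != nil {
			return nil, err
		}
		out = append(out, resp.Data...)
	}
	return out, nil
}
```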