Skip to content

Commit

Permalink
o1 model support stream (#904)
Browse files Browse the repository at this point in the history
  • Loading branch information
ldnvnbl authored Nov 20, 2024
1 parent b3ece4d commit 1687616
Show file tree
Hide file tree
Showing 2 changed files with 0 additions and 28 deletions.
21 changes: 0 additions & 21 deletions chat_stream_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,27 +36,6 @@ func TestChatCompletionsStreamWrongModel(t *testing.T) {
}
}

// TestChatCompletionsStreamWithO1BetaLimitations verifies that asking for a
// streaming chat completion with the o1-preview model is rejected client-side
// with ErrO1BetaLimitationsStreaming (streaming is a documented o1 beta
// limitation), so no HTTP request should ever be issued.
func TestChatCompletionsStreamWithO1BetaLimitations(t *testing.T) {
	cfg := openai.DefaultConfig("whatever")
	cfg.BaseURL = "http://localhost/v1/chat/completions"
	cli := openai.NewClientWithConfig(cfg)

	request := openai.ChatCompletionRequest{
		Model: openai.O1Preview,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "Hello!"},
		},
	}

	// The validation error must surface before any network I/O happens.
	if _, err := cli.CreateChatCompletionStream(context.Background(), request); !errors.Is(err, openai.ErrO1BetaLimitationsStreaming) {
		t.Fatalf("CreateChatCompletion should return ErrO1BetaLimitationsStreaming, but returned: %v", err)
	}
}

func TestCreateChatCompletionStream(t *testing.T) {
client, server, teardown := setupOpenAITestServer()
defer teardown()
Expand Down
7 changes: 0 additions & 7 deletions completion.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ var (

var (
ErrO1BetaLimitationsMessageTypes = errors.New("this model has beta-limitations, user and assistant messages only, system messages are not supported") //nolint:lll
ErrO1BetaLimitationsStreaming = errors.New("this model has beta-limitations, streaming not supported") //nolint:lll
ErrO1BetaLimitationsTools = errors.New("this model has beta-limitations, tools, function calling, and response format parameters are not supported") //nolint:lll
ErrO1BetaLimitationsLogprobs = errors.New("this model has beta-limitations, logprobs not supported") //nolint:lll
ErrO1BetaLimitationsOther = errors.New("this model has beta-limitations, temperature, top_p and n are fixed at 1, while presence_penalty and frequency_penalty are fixed at 0") //nolint:lll
Expand Down Expand Up @@ -199,12 +198,6 @@ func validateRequestForO1Models(request ChatCompletionRequest) error {
return ErrO1MaxTokensDeprecated
}

// Beta Limitations
// refs:https://platform.openai.com/docs/guides/reasoning/beta-limitations
// Streaming: not supported
if request.Stream {
return ErrO1BetaLimitationsStreaming
}
// Logprobs: not supported.
if request.LogProbs {
return ErrO1BetaLimitationsLogprobs
Expand Down

0 comments on commit 1687616

Please sign in to comment.