Skip to content

Commit

Permalink
#54: Unified response created and tested - passing
Browse files Browse the repository at this point in the history
  • Loading branch information
mkrueger12 committed Jan 1, 2024
1 parent 75008d0 commit eaf61ee
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 13 deletions.
6 changes: 3 additions & 3 deletions pkg/api/schemas/language.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,9 @@ type ProviderResponse struct {
}

type TokenCount struct {
PromptTokens int `json:"prompt_tokens"`
ResponseTokens int `json:"response_tokens"`
TotalTokens int `json:"total_tokens"`
PromptTokens float64 `json:"prompt_tokens"`
ResponseTokens float64 `json:"response_tokens"`
TotalTokens float64 `json:"total_tokens"`
}

// ChatMessage is a message in a chat request.
Expand Down
19 changes: 10 additions & 9 deletions pkg/providers/openai/chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -77,16 +77,15 @@ func NewChatMessagesFromUnifiedRequest(request *schemas.UnifiedChatRequest) []Ch
// Chat sends a chat request to the specified OpenAI model.
func (c *Client) Chat(ctx context.Context, request *schemas.UnifiedChatRequest) (*schemas.UnifiedChatResponse, error) {
// Create a new chat request

chatRequest := c.createChatRequestSchema(request)

// TODO: this is suspicious — we do zero remapping of the OpenAI response and send it back as is.
// Does it really work well across providers?

chatResponse, err := c.doChatRequest(ctx, chatRequest)
if err != nil {
return nil, err
}

if len(chatResponse.Choices) == 0 {
if len(chatResponse.ProviderResponse.Message.Content) == 0 {
return nil, ErrEmptyResponse
}

Expand Down Expand Up @@ -163,6 +162,8 @@ func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*sche
return nil, err
}



// Parse response
var response schemas.UnifiedChatResponse
var responsePayload schemas.ProviderResponse
Expand All @@ -175,9 +176,9 @@ func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*sche
}

tokenCount = schemas.TokenCount{
PromptTokens: responseJSON["usage"].(map[string]interface{})["prompt_tokens"].(int),
ResponseTokens: responseJSON["usage"].(map[string]interface{})["completion_tokens"].(int),
TotalTokens: responseJSON["usage"].(map[string]interface{})["total_tokens"].(int),
PromptTokens: responseJSON["usage"].(map[string]interface{})["prompt_tokens"].(float64),
ResponseTokens: responseJSON["usage"].(map[string]interface{})["completion_tokens"].(float64),
TotalTokens: responseJSON["usage"].(map[string]interface{})["total_tokens"].(float64),
}

responsePayload = schemas.ProviderResponse{
Expand All @@ -192,10 +193,10 @@ func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*sche
Created: float64(time.Now().Unix()),
Provider: "openai",
Router: "chat",
Model: payload.Model,
Model: responseJSON["model"].(string),
Cached: false,
ProviderResponse: responsePayload,
}

return &response, json.NewDecoder(resp.Body).Decode(&response)
return &response, nil
}
3 changes: 2 additions & 1 deletion pkg/providers/openai/client_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,9 @@ import (

"glide/pkg/api/schemas"

"github.com/stretchr/testify/require"
"glide/pkg/telemetry"

"github.com/stretchr/testify/require"
)

func TestOpenAIClient_ChatRequest(t *testing.T) {
Expand Down

0 comments on commit eaf61ee

Please sign in to comment.