Skip to content

Commit

Permalink
#61: Create OpenAI Chat Response Schema
Browse files Browse the repository at this point in the history
  • Loading branch information
mkrueger12 committed Jan 4, 2024
1 parent ff3dc22 commit 9312334
Show file tree
Hide file tree
Showing 3 changed files with 84 additions and 44 deletions.
55 changes: 34 additions & 21 deletions pkg/api/schemas/language.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,25 +9,51 @@ type UnifiedChatRequest struct {
// UnifiedChatResponse defines Glide's Chat Response Schema unified across all language models
type UnifiedChatResponse struct {
ID string `json:"id,omitempty"`
Created float64 `json:"created,omitempty"`
Created string `json:"created,omitempty"`
Provider string `json:"provider,omitempty"`
Router string `json:"router,omitempty"`
Model string `json:"model,omitempty"`
Cached bool `json:"cached,omitempty"`
ProviderResponse ProviderResponse `json:"provider_response,omitempty"`
ModelResponse ProviderResponse `json:"modelResponse,omitempty"`
}

// ProviderResponse contains data from the chosen provider
// ProviderResponse is the unified response from the provider.

type ProviderResponse struct {
ResponseID map[string]string `json:"response_id,omitempty"`
ResponseID map[string]string `json:"responseID,omitempty"`
Message ChatMessage `json:"message"`
TokenCount TokenCount `json:"token_count"`
TokenCount TokenCount `json:"tokenCount"`
}

type TokenCount struct {
PromptTokens float64 `json:"prompt_tokens"`
ResponseTokens float64 `json:"response_tokens"`
TotalTokens float64 `json:"total_tokens"`
PromptTokens float64 `json:"promptTokens"`
ResponseTokens float64 `json:"responseTokens"`
TotalTokens float64 `json:"totalTokens"`
}


// OpenAIChatCompletion maps the raw chat-completion response returned by the
// OpenAI API (POST /v1/chat/completions). JSON tags mirror OpenAI's wire
// format, which uses snake_case (e.g. "system_fingerprint").
type OpenAIChatCompletion struct {
	ID     string `json:"id"`
	Object string `json:"object"` // always "chat.completion" for this endpoint
	// NOTE(review): OpenAI returns "created" as an integer Unix timestamp;
	// this field is string-typed here — confirm against the decoding path
	// before changing the type, since callers assign a string into it.
	Created           string   `json:"created"`
	Model             string   `json:"model"`
	SystemFingerprint string   `json:"system_fingerprint"`
	Choices           []Choice `json:"choices"`
	Usage             Usage    `json:"usage"`
}

// Choice is a single completion candidate inside an OpenAI chat response.
// JSON tags mirror OpenAI's snake_case wire format ("finish_reason").
type Choice struct {
	Index   int         `json:"index"`
	Message ChatMessage `json:"message"`
	// Logprobs is null unless log probabilities were requested; kept as
	// interface{} because its shape is not modeled here.
	Logprobs     interface{} `json:"logprobs"`
	FinishReason string      `json:"finish_reason"`
}

type Usage struct {
PromptTokens float64 `json:"promptTokens"`
CompletionTokens float64 `json:"completionTokens"`
TotalTokens float64 `json:"totalTokens"`
}

// ChatMessage is a message in a chat request.
Expand All @@ -40,16 +66,3 @@ type ChatMessage struct {
// with a maximum length of 64 characters.
Name string `json:"name,omitempty"`
}

// ChatChoice is a choice in a chat response.
type ChatChoice struct {
Index int `json:"index"`
Message ChatMessage `json:"message"`
FinishReason string `json:"finish_reason"`
}

type Usage struct {
CompletionTokens float64 `json:"completion_tokens,omitempty"`
PromptTokens float64 `json:"prompt_tokens,omitempty"`
TotalTokens float64 `json:"total_tokens,omitempty"`
}
65 changes: 46 additions & 19 deletions pkg/providers/openai/chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ import (
"fmt"
"io"
"net/http"
"time"

"glide/pkg/providers/errs"

Expand Down Expand Up @@ -84,7 +83,7 @@ func (c *Client) Chat(ctx context.Context, request *schemas.UnifiedChatRequest)
return nil, err
}

if len(chatResponse.ProviderResponse.Message.Content) == 0 {
if len(chatResponse.ModelResponse.Message.Content) == 0 {
return nil, ErrEmptyResponse
}

Expand Down Expand Up @@ -165,36 +164,64 @@ func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*sche
// Parse response
var response schemas.UnifiedChatResponse

var responsePayload schemas.ProviderResponse
var responsePayload schemas.OpenAIChatCompletion

var tokenCount schemas.TokenCount
var tokenCount schemas.Usage

var choices []schemas.Choice

message := responseJSON["choices"].([]interface{})[0].(map[string]interface{})["message"].(map[string]interface{})
messageStruct := schemas.ChatMessage{
Role: message["role"].(string),
Content: message["content"].(string),
}

tokenCount = schemas.TokenCount{
PromptTokens: responseJSON["usage"].(map[string]interface{})["prompt_tokens"].(float64),
ResponseTokens: responseJSON["usage"].(map[string]interface{})["completion_tokens"].(float64),
TotalTokens: responseJSON["usage"].(map[string]interface{})["total_tokens"].(float64),
choices = append(choices, schemas.Choice{
Index: 0,
Message: messageStruct,
Logprobs: responseJSON["choices"].([]interface{})[0].(map[string]interface{})["logprobs"],
FinishReason: responseJSON["choices"].([]interface{})[0].(map[string]interface{})["finish_reason"].(string),
})

tokenCount = schemas.Usage{
PromptTokens: responseJSON["usage"].(map[string]interface{})["prompt_tokens"].(float64),
CompletionTokens: responseJSON["usage"].(map[string]interface{})["completion_tokens"].(float64),
TotalTokens: responseJSON["usage"].(map[string]interface{})["total_tokens"].(float64),
}

responsePayload = schemas.ProviderResponse{
ResponseID: map[string]string{"system_fingerprint": responseJSON["system_fingerprint"].(string)},
Message: messageStruct,
TokenCount: tokenCount,
// Map response to OpenAIChatCompletion schema
responsePayload = schemas.OpenAIChatCompletion{
ID: responseJSON["id"].(string),
Created: responseJSON["object"].(string),
Model: responseJSON["model"].(string),
SystemFingerprint: responseJSON["system_fingerprint"].(string),
Choices: choices,
Usage: tokenCount,
}

// Map response to UnifiedChatResponse schema
response = schemas.UnifiedChatResponse{
ID: responseJSON["id"].(string),
Created: float64(time.Now().Unix()),
Provider: "openai",
Router: "chat",
Model: responseJSON["model"].(string),
Cached: false,
ProviderResponse: responsePayload,
ID: responsePayload.ID,
Created: responsePayload.Created,
Provider: providerName,
Router: "chat", // TODO: this will be the router used
Model: responsePayload.Model,
Cached: false,
ModelResponse: schemas.ProviderResponse{
ResponseID: map[string]string{
"system_fingerprint": responsePayload.SystemFingerprint,
},
Message: schemas.ChatMessage{
Role: responsePayload.Choices[0].Message.Role,
Content: responsePayload.Choices[0].Message.Content,
Name: "",
},
TokenCount: schemas.TokenCount{
PromptTokens: responsePayload.Usage.PromptTokens,
ResponseTokens: responsePayload.Usage.CompletionTokens,
TotalTokens: responsePayload.Usage.TotalTokens,
},
},
}

return &response, nil
Expand Down
8 changes: 4 additions & 4 deletions pkg/providers/openai/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -43,11 +43,11 @@ func (p *Params) UnmarshalYAML(unmarshal func(interface{}) error) error {
}

type Config struct {
BaseURL string `yaml:"base_url" json:"baseUrl" validate:"required"`
ChatEndpoint string `yaml:"chat_endpoint" json:"chatEndpoint" validate:"required"`
BaseURL string `yaml:"baseUrl" json:"baseUrl" validate:"required"`
ChatEndpoint string `yaml:"chatEndpoint" json:"chatEndpoint" validate:"required"`
Model string `yaml:"model" json:"model" validate:"required"`
APIKey fields.Secret `yaml:"api_key" json:"-" validate:"required"`
DefaultParams *Params `yaml:"default_params,omitempty" json:"defaultParams"`
APIKey fields.Secret `yaml:"apiKey" json:"-" validate:"required"`
DefaultParams *Params `yaml:"defaultParams,omitempty" json:"defaultParams"`
}

// DefaultConfig for OpenAI models
Expand Down

0 comments on commit 9312334

Please sign in to comment.