Skip to content

Commit

Permalink
#42 Refined the OpenAI client structure
Browse files Browse the repository at this point in the history
  • Loading branch information
roma-glushko committed Dec 31, 2023
1 parent 7d1cfe9 commit a73e058
Show file tree
Hide file tree
Showing 10 changed files with 178 additions and 214 deletions.
4 changes: 4 additions & 0 deletions pkg/api/http/server.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,10 @@ func NewServer(config *ServerConfig, tel *telemetry.Telemetry, router *routers.R
}

func (srv *Server) Run() error {
srv.server.POST("/v1/language/{}/chat/", func(c context.Context, ctx *app.RequestContext) {
// TODO: call the lang router
})

srv.server.GET("/health", func(ctx context.Context, c *app.RequestContext) {
c.JSON(consts.StatusOK, utils.H{"healthy": true})
})
Expand Down
44 changes: 44 additions & 0 deletions pkg/api/schemas/language.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
package schemas

// ChatRequest defines Glide's Chat Request Schema unified across all language models
type ChatRequest struct {
Message []struct { // TODO: could we reuse ChatMessage?
Role string `json:"role"`
Content string `json:"content"`
} `json:"message"`
MessageHistory []string `json:"messageHistory"`
}

// ChatResponse defines Glide's Chat Response Schema unified across all language models
type ChatResponse struct {
ID string `json:"id,omitempty"`
Created float64 `json:"created,omitempty"`
Choices []*ChatChoice `json:"choices,omitempty"`
Model string `json:"model,omitempty"`
Object string `json:"object,omitempty"` // TODO: what does this mean "Object"?
Usage Usage `json:"usage,omitempty"`
}

// ChatMessage is a message in a chat request.
type ChatMessage struct {
// The role of the author of this message. One of system, user, or assistant.
Role string `json:"role"`
// The content of the message.
Content string `json:"content"`
// The name of the author of this message. May contain a-z, A-Z, 0-9, and underscores,
// with a maximum length of 64 characters.
Name string `json:"name,omitempty"`
}

// ChatChoice is a choice in a chat response.
type ChatChoice struct {
Index int `json:"index"`
Message ChatMessage `json:"message"`
FinishReason string `json:"finish_reason"`
}

type Usage struct {
CompletionTokens float64 `json:"completion_tokens,omitempty"`
PromptTokens float64 `json:"prompt_tokens,omitempty"`
TotalTokens float64 `json:"total_tokens,omitempty"`
}
2 changes: 1 addition & 1 deletion pkg/providers/secret.go → pkg/config/fields/secret.go
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package providers
package fields

import "encoding"

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package providers
package fields

import (
"testing"
Expand Down
12 changes: 12 additions & 0 deletions pkg/providers/language.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
package providers

import (
"context"

"glide/pkg/api/schemas"
)

// ChatModel defines the interface a provider should fulfill to be able to serve
// language chat requests.
type ChatModel interface {
	// Chat sends a unified chat request to the underlying provider and returns
	// the unified response. Takes context.Context by value (never a pointer),
	// matching the OpenAI client's implementation signature
	// `func (c *Client) Chat(ctx context.Context, ...)`.
	Chat(ctx context.Context, request *schemas.ChatRequest) (*schemas.ChatResponse, error)
}
191 changes: 48 additions & 143 deletions pkg/providers/openai/chat.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,35 +4,16 @@ import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"reflect"
"strconv"
"strings"

"glide/pkg/providers"

"glide/pkg/telemetry"

"glide/pkg/api/schemas"
"go.uber.org/zap"
)

const (
defaultChatModel = "gpt-3.5-turbo"
defaultEndpoint = "/chat/completions"
)

// Client is a client for the OpenAI API.
type ProviderClient struct {
BaseURL string `json:"baseURL"`
HTTPClient *http.Client `json:"httpClient"`
Telemetry *telemetry.Telemetry `json:"telemetry"`
}

// ChatRequest is a request to complete a chat completion..
type ChatRequest struct {
// ChatRequestSchema is an OpenAI-specific request schema
type ChatRequestSchema struct {
Model string `json:"model"`
Messages []map[string]string `json:"messages"`
Temperature float64 `json:"temperature,omitempty"`
Expand All @@ -49,82 +30,41 @@ type ChatRequest struct {
Tools []string `json:"tools,omitempty"`
ToolChoice interface{} `json:"tool_choice,omitempty"`
ResponseFormat interface{} `json:"response_format,omitempty"`

// StreamingFunc is a function to be called for each chunk of a streaming response.
// Return an error to stop streaming early.
StreamingFunc func(ctx context.Context, chunk []byte) error `json:"-"`
}

// ChatMessage is a message in a chat request.
type ChatMessage struct {
// The role of the author of this message. One of system, user, or assistant.
Role string `json:"role"`
// The content of the message.
Content string `json:"content"`
// The name of the author of this message. May contain a-z, A-Z, 0-9, and underscores,
// with a maximum length of 64 characters.
Name string `json:"name,omitempty"`
}

// ChatChoice is a choice in a chat response.
type ChatChoice struct {
Index int `json:"index"`
Message ChatMessage `json:"message"`
FinishReason string `json:"finish_reason"`
}

// ChatResponse is a response to a chat request.
type ChatResponse struct {
ID string `json:"id,omitempty"`
Created float64 `json:"created,omitempty"`
Choices []*ChatChoice `json:"choices,omitempty"`
Model string `json:"model,omitempty"`
Object string `json:"object,omitempty"`
Usage struct {
CompletionTokens float64 `json:"completion_tokens,omitempty"`
PromptTokens float64 `json:"prompt_tokens,omitempty"`
TotalTokens float64 `json:"total_tokens,omitempty"`
} `json:"usage,omitempty"`
}

// Chat sends a chat request to the specified OpenAI model.
//
// Parameters:
// - payload: The user payload for the chat request.
// Returns:
// - *ChatResponse: a pointer to a ChatResponse
// - error: An error if the request failed.
func (c *ProviderClient) Chat(u *providers.UnifiedAPIData) (*ChatResponse, error) {
func (c *Client) Chat(ctx context.Context, request *schemas.ChatRequest) (*schemas.ChatResponse, error) {
// Create a new chat request
c.Telemetry.Logger.Info("creating new chat request")
chatRequest := c.createChatRequestSchema(request)

chatRequest := c.CreateChatRequest(u)

c.Telemetry.Logger.Info("chat request created")

// Send the chat request
// TODO: this is suspicious we do zero remapping of OpenAI response and send it back as is.
// Does it really work well across providers?
chatResponse, err := c.doChatRequest(ctx, chatRequest)
if err != nil {
return nil, err
}

resp, err := c.CreateChatResponse(context.Background(), chatRequest, u)
if len(chatResponse.Choices) == 0 {
return nil, ErrEmptyResponse
}

return resp, err
return chatResponse, nil
}

func (c *ProviderClient) CreateChatRequest(u *providers.UnifiedAPIData) *ChatRequest {
c.Telemetry.Logger.Info("creating chatRequest from payload")

func (c *Client) createChatRequestSchema(request *schemas.ChatRequest) *ChatRequestSchema {
var messages []map[string]string

// Add items from messageHistory first
messages = append(messages, u.MessageHistory...)
messages = append(messages, request.MessageHistory...)

Check failure on line 58 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Vulnerability Check

cannot use request.MessageHistory (variable of type []string) as []map[string]string value in argument to append

Check failure on line 58 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Lint

cannot use request.MessageHistory (variable of type []string) as []map[string]string value in argument to append

Check failure on line 58 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Build

cannot use request.MessageHistory (variable of type []string) as []map[string]string value in argument to append

Check failure on line 58 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Tests

cannot use request.MessageHistory (variable of type []string) as []map[string]string value in argument to append

// Add msg variable last
messages = append(messages, u.Message)
messages = append(messages, request.Message)

Check failure on line 61 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Vulnerability Check

cannot use request.Message (variable of type []struct{Role string "json:\"role\""; Content string "json:\"content\""}) as map[string]string value in argument to append

Check failure on line 61 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Lint

cannot use request.Message (variable of type []struct{Role string "json:\"role\""; Content string "json:\"content\""}) as map[string]string value in argument to append

Check failure on line 61 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Build

cannot use request.Message (variable of type []struct{Role string "json:\"role\""; Content string "json:\"content\""}) as map[string]string value in argument to append

Check failure on line 61 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Tests

cannot use request.Message (variable of type []struct{Role string "json:\"role\""; Content string "json:\"content\""}) as map[string]string value in argument to append

// Iterate through unifiedData.Params and add them to the request, otherwise leave the default value
defaultParams := u.Params

Check failure on line 64 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Vulnerability Check

undefined: u

Check failure on line 64 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Vulnerability Check

defaultParams declared and not used

Check failure on line 64 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Lint

defaultParams declared and not used

Check failure on line 64 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Lint

undefined: u

Check failure on line 64 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Build

defaultParams declared and not used

Check failure on line 64 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Build

undefined: u

Check failure on line 64 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Tests

defaultParams declared and not used

Check failure on line 64 in pkg/providers/openai/chat.go

View workflow job for this annotation

GitHub Actions / Tests

undefined: u

chatRequest := &ChatRequest{
Model: u.Model,
chatRequest := &ChatRequestSchema{
Model: c.config.Model,
Messages: messages,
Temperature: 0.8,
TopP: 1,
Expand All @@ -142,94 +82,59 @@ func (c *ProviderClient) CreateChatRequest(u *providers.UnifiedAPIData) *ChatReq
ResponseFormat: nil,
}

chatRequestValue := reflect.ValueOf(chatRequest).Elem()
chatRequestType := chatRequestValue.Type()

for i := 0; i < chatRequestValue.NumField(); i++ {
jsonTags := strings.Split(chatRequestType.Field(i).Tag.Get("json"), ",")
jsonTag := jsonTags[0]

if value, ok := defaultParams[jsonTag]; ok {
fieldValue := chatRequestValue.Field(i)
fieldValue.Set(reflect.ValueOf(value))
}
}

// c.Telemetry.Logger.Info("chatRequest created", zap.Any("chatRequest body", chatRequest))
// TODO: set params

return chatRequest
}

// CreateChatResponse creates chat Response.
func (c *ProviderClient) CreateChatResponse(ctx context.Context, r *ChatRequest, u *providers.UnifiedAPIData) (*ChatResponse, error) {
_ = ctx // keep this for future use

resp, err := c.createChatHTTP(r, u)
if err != nil {
return nil, err
}

if len(resp.Choices) == 0 {
return nil, ErrEmptyResponse
}

return resp, nil
}

func (c *ProviderClient) createChatHTTP(payload *ChatRequest, u *providers.UnifiedAPIData) (*ChatResponse, error) {
c.Telemetry.Logger.Info("running createChatHttp")

if payload.StreamingFunc != nil {
payload.Stream = true
}
// doChatRequest serializes the OpenAI-specific payload, performs the HTTP call,
// and decodes the provider response into the unified schema.
//
// Returns an error on marshaling/transport failures and on any non-200 status
// (previously non-200 responses were only logged and fell through to decoding).
func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequestSchema) (*schemas.ChatResponse, error) {
	rawPayload, err := json.Marshal(payload)
	if err != nil {
		return nil, fmt.Errorf("unable to marshal openai chat request payload: %w", err)
	}

	// Bind the request to the caller's context so cancellations and timeouts
	// propagate to the provider call (the ctx parameter was previously unused).
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.chatURL, bytes.NewBuffer(rawPayload))
	if err != nil {
		return nil, fmt.Errorf("unable to create openai chat request: %w", err)
	}

	req.Header.Set("Authorization", "Bearer "+string(c.config.APIKey))
	req.Header.Set("Content-Type", "application/json")

	// TODO: this could leak information from messages which may not be a desired thing to have
	c.telemetry.Logger.Debug(
		"openai chat request",
		zap.String("chat_url", c.chatURL),
		zap.Any("payload", payload),
	)

	resp, err := c.httpClient.Do(req)
	if err != nil {
		return nil, fmt.Errorf("failed to send openai chat request: %w", err)
	}

	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// Best-effort read of the error body for diagnostics; readErr does not
		// shadow the function-level err.
		bodyBytes, readErr := io.ReadAll(resp.Body)
		if readErr != nil {
			c.telemetry.Logger.Error("failed to read openai chat response", zap.Error(readErr))
		}

		c.telemetry.Logger.Error(
			"openai chat request failed",
			zap.Int("status_code", resp.StatusCode),
			zap.String("response", string(bodyBytes)),
		)

		return nil, fmt.Errorf("openai chat request failed with status %d", resp.StatusCode)
	}

	// Parse response
	var response schemas.ChatResponse

	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
		return nil, fmt.Errorf("unable to decode openai chat response: %w", err)
	}

	return &response, nil
}

func buildURL(suffix string) string {
// open ai implement:
return fmt.Sprintf("%s%s", defaultBaseURL, suffix)
}
Loading

0 comments on commit a73e058

Please sign in to comment.