fix(go): fixed googleai/vertexai system prompt handling (#732)
pavelgj authored Aug 1, 2024
commit a9af53c · 1 parent cb36ca9
Showing 2 changed files with 52 additions and 6 deletions.
go/plugins/googleai/googleai.go (26 additions, 3 deletions)
@@ -232,7 +232,10 @@ func generate(
 	input *ai.GenerateRequest,
 	cb func(context.Context, *ai.GenerateResponseChunk) error,
 ) (*ai.GenerateResponse, error) {
-	gm := newModel(client, model, input)
+	gm, err := newModel(client, model, input)
+	if err != nil {
+		return nil, err
+	}
 	cs, err := startChat(gm, input)
 	if err != nil {
 		return nil, err
@@ -300,7 +303,7 @@ func generate(
 	return r, nil
 }
 
-func newModel(client *genai.Client, model string, input *ai.GenerateRequest) *genai.GenerativeModel {
+func newModel(client *genai.Client, model string, input *ai.GenerateRequest) (*genai.GenerativeModel, error) {
 	gm := client.GenerativeModel(model)
 	gm.SetCandidateCount(int32(input.Candidates))
 	if c, ok := input.Config.(*ai.GenerationCommonConfig); ok && c != nil {
@@ -320,7 +323,21 @@ func newModel(client *genai.Client, model string, input *ai.GenerateRequest) *genai.GenerativeModel {
 			gm.SetTopP(float32(c.TopP))
 		}
 	}
-	return gm
+	for _, m := range input.Messages {
+		systemParts, err := convertParts(m.Content)
+		if err != nil {
+			return nil, err
+
+		}
+		// system prompts go into GenerativeModel.SystemInstruction field.
+		if m.Role == ai.RoleSystem {
+			gm.SystemInstruction = &genai.Content{
+				Parts: systemParts,
+				Role:  string(m.Role),
+			}
+		}
+	}
+	return gm, nil
 }
 
 // startChat starts a chat session and configures it with the input messages.
@@ -332,6 +349,12 @@ func startChat(gm *genai.GenerativeModel, input *ai.GenerateRequest) (*genai.ChatSession, error) {
 	for len(messages) > 1 {
 		m := messages[0]
 		messages = messages[1:]
+
+		// skip system prompt message, it's handled separately.
+		if m.Role == ai.RoleSystem {
+			continue
+		}
+
 		parts, err := convertParts(m.Content)
 		if err != nil {
 			return nil, err
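For reference, here is a minimal standalone sketch of the behavior this change routes through the googleai plugin's underlying SDK (github.com/google/generative-ai-go/genai): the system prompt is attached as GenerativeModel.SystemInstruction instead of being sent as a chat turn. The model name, API-key environment variable, and prompt text below are illustrative assumptions, not part of this commit.

// Sketch only: shows where the plugin now puts a system prompt when talking
// to the Gemini API directly.
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/google/generative-ai-go/genai"
	"google.golang.org/api/option"
)

func main() {
	ctx := context.Background()
	// Assumption: an API key is available in GEMINI_API_KEY.
	client, err := genai.NewClient(ctx, option.WithAPIKey(os.Getenv("GEMINI_API_KEY")))
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	gm := client.GenerativeModel("gemini-1.5-flash") // illustrative model name

	// What newModel now does with an ai.RoleSystem message: the system prompt
	// becomes SystemInstruction on the model...
	gm.SystemInstruction = &genai.Content{
		Parts: []genai.Part{genai.Text("You are a terse assistant.")},
		Role:  "system",
	}

	// ...while startChat keeps it out of the history, so only user/model
	// turns are sent as chat messages.
	cs := gm.StartChat()
	resp, err := cs.SendMessage(ctx, genai.Text("Hello"))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Candidates[0].Content.Parts[0])
}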
go/plugins/vertexai/vertexai.go (26 additions, 3 deletions)
@@ -239,7 +239,10 @@ func generate(
 	input *ai.GenerateRequest,
 	cb func(context.Context, *ai.GenerateResponseChunk) error,
 ) (*ai.GenerateResponse, error) {
-	gm := newModel(client, model, input)
+	gm, err := newModel(client, model, input)
+	if err != nil {
+		return nil, err
+	}
 	cs, err := startChat(gm, input)
 	if err != nil {
 		return nil, err
@@ -307,7 +310,7 @@ func generate(
 	return r, nil
 }
 
-func newModel(client *genai.Client, model string, input *ai.GenerateRequest) *genai.GenerativeModel {
+func newModel(client *genai.Client, model string, input *ai.GenerateRequest) (*genai.GenerativeModel, error) {
 	gm := client.GenerativeModel(model)
 	gm.SetCandidateCount(int32(input.Candidates))
 	if c, ok := input.Config.(*ai.GenerationCommonConfig); ok && c != nil {
@@ -327,7 +330,21 @@ func newModel(client *genai.Client, model string, input *ai.GenerateRequest) *genai.GenerativeModel {
 			gm.SetTopP(float32(c.TopP))
 		}
 	}
-	return gm
+	for _, m := range input.Messages {
+		systemParts, err := convertParts(m.Content)
+		if err != nil {
+			return nil, err
+
+		}
+		// system prompts go into GenerativeModel.SystemInstruction field.
+		if m.Role == ai.RoleSystem {
+			gm.SystemInstruction = &genai.Content{
+				Parts: systemParts,
+				Role:  string(m.Role),
+			}
+		}
+	}
+	return gm, nil
 }
 
 // startChat starts a chat session and configures it with the input messages.
@@ -339,6 +356,12 @@ func startChat(gm *genai.GenerativeModel, input *ai.GenerateRequest) (*genai.ChatSession, error) {
 	for len(messages) > 1 {
 		m := messages[0]
 		messages = messages[1:]
+
+		// skip system prompt message, it's handled separately.
+		if m.Role == ai.RoleSystem {
+			continue
+		}
+
 		parts, err := convertParts(m.Content)
 		if err != nil {
 			return nil, err
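The vertexai change mirrors the googleai one line for line; on the SDK side (cloud.google.com/go/vertexai/genai) only the client construction differs. A hedged sketch, with project ID, region, and model name as placeholders:

// Sketch only: the same SystemInstruction routing via the Vertex AI client.
package main

import (
	"context"
	"fmt"
	"log"

	"cloud.google.com/go/vertexai/genai"
)

func main() {
	ctx := context.Background()
	// Placeholders: substitute your own project ID and region.
	client, err := genai.NewClient(ctx, "my-project", "us-central1")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	gm := client.GenerativeModel("gemini-1.5-flash") // illustrative model name
	gm.SystemInstruction = &genai.Content{
		Parts: []genai.Part{genai.Text("You are a terse assistant.")},
		Role:  "system",
	}

	resp, err := gm.StartChat().SendMessage(ctx, genai.Text("Hello"))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Candidates[0].Content.Parts[0])
}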
