Upgrade model to gpt-3.5-turbo and set proxy #5

Open · wants to merge 2 commits into main
1 change: 1 addition & 0 deletions config.dev.json
@@ -1,4 +1,5 @@
{
"api_key": "your api key",
"proxy": "your proxy",
"auto_pass": true
}
6 changes: 6 additions & 0 deletions config/config.go
@@ -13,6 +13,8 @@ type Configuration struct {
ApiKey string `json:"api_key"`
// Automatically accept friend requests
AutoPass bool `json:"auto_pass"`
// Proxy address
Proxy string `json:"proxy"`
}

var config *Configuration
@@ -39,12 +41,16 @@ func LoadConfig() *Configuration {
// If environment variables are set, read configuration from them
ApiKey := os.Getenv("ApiKey")
AutoPass := os.Getenv("AutoPass")
Proxy := os.Getenv("Proxy")
if ApiKey != "" {
config.ApiKey = ApiKey
}
if AutoPass == "true" {
config.AutoPass = true
}
if Proxy != "" {
config.Proxy = Proxy
}
})
return config
}
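
For reference, a minimal sketch of how the new proxy setting can be supplied through the environment rather than config.dev.json. The key and proxy values below are placeholders, and the sketch assumes LoadConfig can still locate its JSON config file as usual:

package main

import (
	"log"
	"os"

	"github.com/869413421/wechatbot/config"
)

func main() {
	// Placeholder values for illustration only.
	os.Setenv("ApiKey", "sk-your-key")
	os.Setenv("Proxy", "http://127.0.0.1:7890")

	cfg := config.LoadConfig()
	log.Printf("proxy in use: %q, auto_pass: %v", cfg.Proxy, cfg.AutoPass)
}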
73 changes: 50 additions & 23 deletions gtp/gtp.go
@@ -3,47 +3,62 @@ package gtp
import (
"bytes"
"encoding/json"
"github.com/869413421/wechatbot/config"
"io/ioutil"
"log"
"net/http"
"net/url"

"github.com/869413421/wechatbot/config"
)

const BASEURL = "https://api.openai.com/v1/"

// ChatGPTResponseBody response body
type ChatGPTResponseBody struct {
ID string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
Model string `json:"model"`
Choices []map[string]interface{} `json:"choices"`
Usage map[string]interface{} `json:"usage"`
ID string `json:"id"`
Object string `json:"object"`
Created int `json:"created"`
Model string `json:"model"`
Choices []ChoiceItem `json:"choices"`
Usage map[string]interface{} `json:"usage"`
}

type ChoiceItem struct {
Index int `json:"index"`
FinishReason string `json:"finish_reason"`
Message ChatGPTMessage `json:"message"`
}
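
A small illustration of what the typed ChoiceItem buys over the old map[string]interface{} choices: with gpt-3.5-turbo the reply text lives under choices[i].message.content, so it can be read from a struct field instead of a type assertion. The JSON here is a hand-written sample in the chat/completions shape, not a captured API response:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/869413421/wechatbot/gtp"
)

func main() {
	// Hand-written sample payload; field names follow the chat/completions format.
	sample := []byte(`{
		"id": "chatcmpl-123",
		"object": "chat.completion",
		"model": "gpt-3.5-turbo",
		"choices": [
			{"index": 0, "finish_reason": "stop",
			 "message": {"role": "assistant", "content": "Here is a good song."}}
		]
	}`)

	var body gtp.ChatGPTResponseBody
	if err := json.Unmarshal(sample, &body); err != nil {
		log.Fatal(err)
	}
	fmt.Println(body.Choices[0].Message.Content) // prints the assistant reply
}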

// ChatGPTRequestBody request body
type ChatGPTRequestBody struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
MaxTokens int `json:"max_tokens"`
Temperature float32 `json:"temperature"`
TopP int `json:"top_p"`
FrequencyPenalty int `json:"frequency_penalty"`
PresencePenalty int `json:"presence_penalty"`
Model string `json:"model"`
Messages []ChatGPTMessage `json:"messages"`
MaxTokens int `json:"max_tokens"`
Temperature float32 `json:"temperature"`
TopP int `json:"top_p"`
FrequencyPenalty int `json:"frequency_penalty"`
PresencePenalty int `json:"presence_penalty"`
Stop []string `json:"stop"`
User string `json:"user"`
}

type ChatGPTMessage struct {
Role string `json:"role"`
Content string `json:"content"`
}
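
For comparison with the old prompt-based payload, a sketch of the JSON this request struct now produces; the values simply mirror the ones set in Completions below, and nothing is sent to the API:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/869413421/wechatbot/gtp"
)

func main() {
	body := gtp.ChatGPTRequestBody{
		Model: "gpt-3.5-turbo",
		Messages: []gtp.ChatGPTMessage{
			{Role: "system", Content: "You are a helpful assistant."},
			{Role: "user", Content: "give me good song"},
		},
		MaxTokens:   2048,
		Temperature: 0.7,
		TopP:        1,
	}
	data, err := json.MarshalIndent(body, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(data)) // the messages array replaces the old single prompt string
}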

// Completions gets a reply from the gpt chat model
//curl https://api.openai.com/v1/completions
//-H "Content-Type: application/json"
//-H "Authorization: Bearer your chatGPT key"
//-d '{"model": "text-davinci-003", "prompt": "give me good song", "temperature": 0, "max_tokens": 7}'
// curl https://api.openai.com/v1/chat/completions
// -H "Content-Type: application/json"
// -H "Authorization: Bearer your chatGPT key"
// -d '{"model": "gpt-3.5-turbo", "messages": [{"role":"system", "content":"You are an assistant"}, {"role":"user", "content": "give me good song"}], "temperature": 0, "max_tokens": 7}'
func Completions(msg string) (string, error) {
requestBody := ChatGPTRequestBody{
Model: "text-davinci-003",
Prompt: msg,
Model: "gpt-3.5-turbo",
Messages: []ChatGPTMessage{
{Role: "system", Content: "You are a helpful assistant."},
{Role: "user", Content: msg},
},
MaxTokens: 2048,
Temperature: 0.7,
TopP: 1,
@@ -56,15 +71,27 @@ func Completions(msg string) (string, error) {
return "", err
}
log.Printf("request gtp json string : %v", string(requestData))
req, err := http.NewRequest("POST", BASEURL+"completions", bytes.NewBuffer(requestData))
req, err := http.NewRequest("POST", BASEURL+"chat/completions", bytes.NewBuffer(requestData))
if err != nil {
return "", err
}

apiKey := config.LoadConfig().ApiKey
proxy := config.LoadConfig().Proxy
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", "Bearer "+apiKey)
client := &http.Client{}
var client *http.Client
if len(proxy) == 0 {
client = &http.Client{}
} else {
proxyAddr, _ := url.Parse(proxy)
client = &http.Client{
Transport: &http.Transport{
Proxy: http.ProxyURL(proxyAddr),
},
}
}

response, err := client.Do(req)
if err != nil {
return "", err
@@ -85,7 +112,7 @@ func Completions(msg string) (string, error) {
var reply string
if len(gptResponseBody.Choices) > 0 {
for _, v := range gptResponseBody.Choices {
reply = v["text"].(string)
reply = v.Message.Content
break
}
}
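
Callers keep the same entry point after this change. A minimal usage sketch, assuming api_key (and optionally proxy) have already been configured via config.dev.json or the ApiKey/Proxy environment variables:

package main

import (
	"log"

	"github.com/869413421/wechatbot/gtp"
)

func main() {
	reply, err := gtp.Completions("give me good song")
	if err != nil {
		log.Fatalf("chat completion failed: %v", err)
	}
	log.Printf("gpt-3.5-turbo reply: %s", reply)
}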