# flaskapi_gpt.py
import time
from datetime import datetime
import calendar
import os
import csv
import json
import requests
from openai import OpenAI
from google.api_core import retry
import cohere
'''
Story writer using multiple LLMs and a Flask REST API backend for shared memory.

Requires two LLMs: this example uses OpenAI and Cohere, but two OpenAI models
or two Llama models may be used instead.

TODO: implement better summarizing for different models depending on
context length and "rope" possibilities.
'''
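# The queue backend targeted below (localhost:4231, /post and /get) lives in a
# separate process and is not part of this file. A minimal sketch of what it
# might look like, assuming Flask and a plain in-memory list (all names here
# are hypothetical, inferred from how this script calls the API):
#
#     from flask import Flask, request, jsonify
#
#     app = Flask(__name__)
#     queue = []
#
#     @app.route('/post', methods=['POST'])
#     def post_message():
#         # store the posted JSON body, e.g. {"message": "..."}
#         queue.append(request.get_json())
#         return jsonify({"status": "ok"}), 200
#
#     @app.route('/get', methods=['GET'])
#     def get_messages():
#         # return everything queued so far as a JSON array
#         return jsonify(queue), 200
#
#     if __name__ == '__main__':
#         app.run(port=4231)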
co = cohere.Client(os.environ['COHERE_API_KEY'])
client = OpenAI(api_key=os.environ['OPENAI_API_KEY'])
@retry.Retry()
def openai_response(prompt_template):
    """Ask the OpenAI chat model to continue the story; returns None on failure."""
    try:
        background = "You are a sentient AI that is in charge of developing stories. Please respond to input as if you were in charge of a scriptwriting department. Do not end responses with a question, but make sure the story ends in a complete thought."
        chat_completion = client.chat.completions.create(
            model="gpt-3.5-turbo-1106",
            temperature=0.75,
            max_tokens=4096,
            messages=[
                {"role": "system", "content": background},
                {"role": "user", "content": prompt_template}
            ]
        )
        response = chat_completion.choices[0].message.content
        return response
    except Exception as e:
        # back off briefly; the caller treats a missing response as a skipped turn
        print(e)
        print("timeout error on openai, next iter...")
        time.sleep(15)
        return None
@retry.Retry()
def cohere_response(prompt_template):
    """Ask the Cohere model to continue the story; returns None on failure."""
    try:
        # truncate the running story so the prompt fits the model's context window
        prompt_template = prompt_template[:1512]
        system_template = f"You are a sentient AI that is in charge of developing stories. Please respond to input as if you were in charge of a scriptwriting department. Do not end responses with a question, but make sure the story ends in a complete thought.\nHere is the current story: {prompt_template}"
        completion = co.generate(
            model="command-nightly",
            prompt=system_template,
            max_tokens=2048,
            temperature=0.75,
        )
        res = completion.generations[0].text
        return res
    except Exception as e:
        # back off briefly; the caller treats a missing response as a skipped turn
        print(e)
        print("timeout error on cohere, next iter...")
        time.sleep(15)
        return None
def write_response(response, prompt, model):
    """Append the model response to plain-text, CSV, and JSONL logs."""
    # date = datetime.utcnow()
    # timestamp = calendar.timegm(date.utctimetuple())
    timestamp = datetime.utcnow().isoformat()
    text_file = "flaskapi_prompts.txt"
    csv_file = "flaskapi_prompts.csv"
    json_file = "flaskapi_prompts.jsonl"
    if response is None:
        return
    with open(text_file, "a") as f:
        f.write('"' + str(response) + '"')
    with open(csv_file, "a", newline="") as f:
        # csv.writer already quotes non-numeric fields, so no manual quoting is needed
        writer = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
        writer.writerow([timestamp, model, prompt, str(response)])
    with open(json_file, "a") as f:
        response_line = {
            "model": model,
            "timestamp": timestamp,
            "prompt": prompt,
            "text": response
        }
        json.dump(response_line, f, default=str)
        f.write('\n')
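# Example of a line appended to flaskapi_prompts.jsonl (values illustrative):
#   {"model": "openai", "timestamp": "2024-01-01T12:00:00", "prompt": "...", "text": "..."}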
prompt_template = f"You are a prompt writing machine. You have all the ideas for all the prompts and story writing. Please develop a thorough story using the idea: Explore the idea of parallel universes through the lens of a character who can access different versions of their own life. How does this impact their sense of self and purpose?"
post_url = 'http://localhost:4231/post'
get_url = 'http://localhost:4231/get'
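# Request/response shapes this script assumes from the backend:
#   POST /post  with JSON body {"message": "<text>"}
#   GET  /get   returning either a JSON array of {"message": "<text>"} objects
#               or a single such object (both cases are handled below)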
# insert the seed prompt into the flask queue
data = {"message": prompt_template}
initial_post = requests.post(post_url, json=data)
if initial_post.status_code == 200:
    print("successful post: " + str(initial_post.status_code))
else:
    print("post did not complete...")
result_string = None
for i in range(25):
    # pull the current story state from the flask queue
    response = requests.get(get_url)
    if response.status_code == 200:
        messages = response.json()
        if isinstance(messages, list):
            # extract message strings from a JSON array
            message_strings = [f"{message['message']}" for message in messages]
        else:
            # extract message from a single JSON object
            message_strings = [f"{messages['message']}"]
        # join the message strings into a single string with commas and newlines
        result_string = ',\n'.join(message_strings)
        print(result_string)
    else:
        print(f"Error: {response.status_code}")
    if result_string is not None:
        prompt_template = f"You are a story writing machine. You have all the ideas for all the stories. Please develop a thorough story using these continuing ideas: {result_string}"
        openai_res = openai_response(prompt_template)
        if openai_res:
            try:
                # flask app expects JSON data in the request body
                post_response = requests.post(post_url, json={"message": openai_res})
            except Exception as e:
                print(e)
            print("openai response: " + str(openai_res))
            write_response(openai_res, prompt=prompt_template, model="openai")
        else:
            print("no openai response")
    else:
        print("no result_string returned")
    # pull the queue again so the cohere turn sees the openai continuation
    response = requests.get(get_url)
    if response.status_code == 200:
        messages = response.json()
        if isinstance(messages, list):
            # extract message strings from a JSON array
            message_strings = [f"{message['message']}" for message in messages]
        else:
            # extract message from a single JSON object
            message_strings = [f"{messages['message']}"]
        # join the message strings into a single string with commas and newlines
        result_string = ',\n'.join(message_strings)
        print(result_string)
    else:
        print(f"Error: {response.status_code}")
    if result_string:
        prompt_template = f'You are a story writing machine. You have all the ideas for all the stories. Please assist in developing a story using these continuing ideas: {result_string}'
        cohere_res = cohere_response(prompt_template)
        if cohere_res:
            try:
                # flask app expects JSON data in the request body
                post_response = requests.post(post_url, json={'message': cohere_res})
            except Exception as e:
                print(e)
            print("cohere response: " + str(cohere_res))
            write_response(cohere_res, prompt=prompt_template, model="cohere")
        else:
            print("no cohere response")
    else:
        print("no result_string returned")