You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
GPT-2 custom chat-bot, single interaction. I am not using any threads or multiprocessing in this implementation, but it still raises an error mentioning 'multiprocessing_chunksize'. Here we build a chatbot using GPT-2 and interact with our custom model, which was trained on a custom persona-style dataset.
#from bot_make.utils import helper
import shutil
from simpletransformers.conv_ai import ConvAIModel
import os
import pickle
from termcolor import colored
import requests
#my_model.interact()
# Work from single_interact/ from here on; all later relative paths
# (cache files, history) resolve against this directory.
os.chdir('single_interact/')
# Running conversation history, seeded with an initial greeting.
# single_response() appends to it and the REPL loop deduplicates it.
a = ['hello']
def single_response(user_input):
    """Send one user message to the model and return its reply.

    Side effect: extends the module-level history list ``a`` with the
    updated history returned by the model.
    """
    bot_reply, updated_history = my_model.interact_single(
        message=user_input, history=a
    )
    a.extend(updated_history)
    return bot_reply
print("\n===============================================")
print("================= Conv AI.V ===================")
print("===============================================\n")
while 1:
talk = input(colored("You: ",'green'))
if talk=='exit':break
response = single_response(talk)
myset = set(a)
a = list(myset)
#print('Bot : ',response)
print(colored("Bot:",'red'),response)
#print('History of re : ',a)`
The text was updated successfully, but these errors were encountered:
GPT-2 custom chat-bot, single interaction. I am not using any threads or multiprocessing in this implementation, but it still raises an error mentioning 'multiprocessing_chunksize'. Here we build a chatbot using GPT-2 and interact with our custom model, which was trained on a custom persona-style dataset.
#from bot_make.utils import helper
import shutil
from simpletransformers.conv_ai import ConvAIModel
import os
import pickle
from termcolor import colored
import requests
def cache_select(cache_name):
    """Unpack a pre-packaged cache zip from base_model/ into
    ./single_interact/cache_dir and report completion."""
    archive_path = 'base_model/' + cache_name
    shutil.unpack_archive(archive_path, './single_interact/cache_dir', 'zip')
    print('done')
def cache_down(url):
    """Download a zipped cache directory from *url*, unpack it into
    ./single_interact/cache_dir, and remove the temporary zip.

    Returns True on success, False on any failure — callers check the
    boolean instead of handling exceptions (best-effort download).
    """
    zip_path = 'single_interact/cache_dir.zip'
    try:
        r = requests.get(url, allow_redirects=True)
        # Surface HTTP errors (404/403) directly instead of writing the
        # error page to disk and failing later inside unpack_archive.
        r.raise_for_status()
        # FIX: context manager closes the handle before unpacking; the
        # original open(...).write(...) leaked the file object.
        with open(zip_path, 'wb') as f:
            f.write(r.content)
        shutil.unpack_archive(zip_path, './single_interact/cache_dir', 'zip')
        os.remove(zip_path)
        return True
    except Exception:
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        return False
def model_down(url):
    """Download the serialized model from *url* to
    single_interact/for_load.tar.gz.

    Returns True on success, False on any failure — callers check the
    boolean instead of handling exceptions (best-effort download).
    """
    try:
        r = requests.get(url, allow_redirects=True)
        # Surface HTTP errors instead of silently saving an error page.
        r.raise_for_status()
        # FIX: context manager closes the handle; the original
        # open(...).write(...) leaked the file object.
        with open('single_interact/for_load.tar.gz', 'wb') as f:
            f.write(r.content)
        return True
    except Exception:
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt and
        # SystemExit are no longer swallowed.
        return False
# Fetch the model cache directory and the serialized model from S3 at
# startup.
print('Model and Cache down')
cache_ck = cache_down('https://bucket.s3.amazonaws.com/generated_data_1_model_2_cache_dir.zip')
model_ck = model_down('https://bucket.s3.amazonaws.com/generated_data_1_model_2.tar.gz')
# NOTE(review): cache_ck / model_ck are never checked afterwards, so a
# failed download only surfaces later when model_load() opens the file.
print('Start BOT')
def model_load(model_name):
    """Unpickle and return the object stored at single_interact/<model_name>.

    SECURITY: pickle.load() executes arbitrary code from the file —
    only load artifacts from a trusted source (here, our own bucket).
    NOTE(review): despite the .tar.gz name used by the caller, the file
    is read as a plain pickle, not extracted as a tar archive — confirm
    the uploaded artifact really is a pickle.
    """
    # FIX: context manager closes the file handle; the original
    # pickle.load(open(...)) leaked the file object.
    with open('single_interact/' + model_name, 'rb') as f:
        return pickle.load(f)
#cache_select('base_model_small_cache_dir.zip')
# Load the model downloaded by model_down() above; used by the REPL below.
my_model = model_load('for_load.tar.gz')
#my_model.interact()
# NOTE(review): presumably the chdir makes the model's relative cache
# paths resolve — confirm against the library's cache handling.
os.chdir('single_interact/')
# Running conversation history, seeded with an initial greeting;
# single_response() extends it on every turn.
a = ['hello']
def single_response(user_input):
    """Return the bot's reply to one user message.

    Side effect: appends the history entries returned by the model to
    the module-level history list ``a``.
    """
    answer, new_history = my_model.interact_single(
        message=user_input,
        history=a,
    )
    for entry in new_history:
        a.append(entry)
    return answer
print("\n===============================================")
print("================= Conv AI.V ===================")
print("===============================================\n")
while 1:
talk = input(colored("You: ",'green'))
if talk=='exit':break
response = single_response(talk)
The text was updated successfully, but these errors were encountered: