forked from llmware-ai/llmware
-
Notifications
You must be signed in to change notification settings - Fork 0
/
text2sql-end-to-end-2.py
113 lines (73 loc) · 3.83 KB
/
text2sql-end-to-end-2.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
""" This example shows an end-to-end recipe for querying SQL database using only natural language.
The example shows the following steps:
1. Loading "slim-sql-tool" and running initial tests to confirm installation.
2. Generating a SQL table from a sample CSV file included with the slim-sql-tool install.
3. Asking basic natural language questions:
A. Looks up the table schema
B. Packages the table schema with query
C. Runs inference to convert text into SQL
D. Queries the database with the generated SQL
E. Returns result
4. All work performed on an integrated 'llmware-sqlite-experimental.db' that can be deleted safely anytime
as part of experimentation lifecycle.
UPDATE: please see also the related example in Use_Cases/agent_with_custom_tables.py, which illustrates a more
generalized version of this script running on Postgres.
"""
import os
from llmware.agents import SQLTables, LLMfx
from llmware.models import ModelCatalog
from llmware.configs import LLMWareConfig
def sql_e2e_test_script(table_name="customers1",create_new_table=False):

    """ End-to-end execution script: optionally build a sample table from the
    csv shipped with the slim-sql-tool package, then run a set of natural
    language queries against it through the LLMfx sql tool.

    Parameters:
        table_name (str): name of the table in the experimental SQLite db to
            create and/or query.
        create_new_table (bool): if True, (re)create the table from the sample
            'customer_table.csv' found in the slim-sql-tool model package.

    Returns 0 on completion.
    """

    # create table if needed to set up
    if create_new_table:

        # looks to pull sample csv 'customer_table.csv' from slim-sql-tool model package files
        sql_tool_repo_path = os.path.join(LLMWareConfig().get_model_repo_path(), "slim-sql-tool")

        # fetch the model package locally if it has not been downloaded yet
        if not os.path.exists(sql_tool_repo_path):
            ModelCatalog().load_model("llmware/slim-sql-tool")

        csv_file = "customer_table.csv"

        # guard against an incomplete package missing the sample csv
        if csv_file in os.listdir(sql_tool_repo_path):

            # to create a testing table from a csv
            sql_db = SQLTables(experimental=True)
            sql_db.create_new_table_from_csv(sql_tool_repo_path, csv_file, table_name=table_name)
            # end - creating table
            print("update: successfully created new db table")
        else:
            print("something has gone wrong - could not find customer_table.csv inside the slim-sql-tool file package")

    # query starts here
    agent = LLMfx()
    agent.load_tool("sql", sample=False, get_logits=True, temperature=0.0)

    # Pass direct queries to the DB
    query_list = ["Which customers have vip customer status of yes?",
                  "What is the highest annual spend of any customer?",
                  "Which customer has account number 1234953",
                  "Which customer has the lowest annual spend?",
                  "Is Susan Soinsin a vip customer?"]

    for query in query_list:

        # query_db method is doing all of the work
        # -- looks up the table schema in the db using the table_name
        # -- packages the text-2-sql query prompt
        # -- executes sql method to convert the prompt into a sql query
        # -- attempts to execute the sql query on the db
        # -- returns the db results as 'research' output, accumulated on
        #    agent.research_list (return value not needed here)
        agent.query_db(query, table=table_name)

    # print the accumulated research results from all queries
    for x, research in enumerate(agent.research_list):
        print("research: ", x, research)

    return 0
def delete_table(table_name):

    """ Drop *table_name* from the experimental local SQLite DB so that
    testing can start over from a clean state. Always returns True. """

    experimental_db = SQLTables(experimental=True)
    experimental_db.delete_table(table_name, confirm_delete=True)

    return True
def delete_db():

    """ Remove the entire experimental SQLite DB so that testing can start
    over from scratch. Always returns True. """

    experimental_db = SQLTables(experimental=True)
    experimental_db.delete_experimental_db(confirm_delete=True)

    return True
if __name__ == "__main__":

    # first - ensure the sql tool is downloaded locally
    ModelCatalog().get_llm_toolkit(tool_list=["sql"])

    # second - run an end-to-end test, building a fresh 'customer1' table
    sql_e2e_test_script(table_name="customer1", create_new_table=True)

    # third - delete and start fresh for further testing
    delete_table("customer1")