1212# See the License for the specific language governing permissions and
1313# limitations under the License.
1414
15+ import functools
16+ import types
17+ from typing import Callable
18+
19+ from google .cloud import bigquery
1520from google .oauth2 .credentials import Credentials
1621
17- from ...tools .bigquery import client
22+ from . import client
23+ from .config import BigQueryToolConfig
24+ from .config import WriteMode
1825
1926MAX_DOWNLOADED_QUERY_RESULT_ROWS = 50
2027
2128
def execute_sql(
    project_id: str,
    query: str,
    credentials: Credentials,
    config: BigQueryToolConfig,
) -> dict:
  """Run a BigQuery SQL query in the project and return the result.

  Args:
      project_id (str): The GCP project id in which the query should be
        executed.
      query (str): The BigQuery SQL query to be executed.
      credentials (Credentials): The credentials to use for the request.
      config (BigQueryToolConfig): The configuration controlling tool
        behavior, in particular whether write statements are allowed.

  Returns:
      dict: Dictionary representing the result of the query.
            On success it contains "status": "SUCCESS" and "rows"; on failure
            it contains "status": "ERROR" and "error_details". If the result
            contains the key "result_is_likely_truncated" with value True, it
            means that there may be additional rows matching the query not
            returned in the result.

  Examples:
      Fetch data or insights from a table:

      >>> execute_sql("bigframes-dev",
      ... "SELECT island, COUNT(*) AS population "
      ... "FROM bigquery-public-data.ml_datasets.penguins GROUP BY island")
      {
        "status": "SUCCESS",
        "rows": [
            {
                "island": "Dream",
                "population": 124
            },
            {
                "island": "Biscoe",
                "population": 168
            },
            {
                "island": "Torgersen",
                "population": 52
            }
        ]
      }
  """

  try:
    bq_client = client.get_bigquery_client(credentials=credentials)

    # In read-only mode (the default when no config is supplied), dry-run the
    # query first so that non-SELECT statements are rejected without ever
    # being executed.
    if not config or config.write_mode == WriteMode.BLOCKED:
      query_job = bq_client.query(
          query,
          project=project_id,
          job_config=bigquery.QueryJobConfig(dry_run=True),
      )
      if query_job.statement_type != "SELECT":
        return {
            "status": "ERROR",
            "error_details": "Read-only mode only supports SELECT statements.",
        }

    row_iterator = bq_client.query_and_wait(
        query, project=project_id, max_results=MAX_DOWNLOADED_QUERY_RESULT_ROWS
    )
    rows = [{key: val for key, val in row.items()} for row in row_iterator]
    # NOTE(review): fixed result key from " rows" (leading space) to "rows" to
    # match the documented contract in the docstring example above.
    result = {"status": "SUCCESS", "rows": rows}
    if (
        MAX_DOWNLOADED_QUERY_RESULT_ROWS is not None
        and len(rows) == MAX_DOWNLOADED_QUERY_RESULT_ROWS
    ):
      # Hitting the row cap exactly means more rows may exist server-side.
      result["result_is_likely_truncated"] = True
    return result
  except Exception as ex:
    return {
        "status": "ERROR",
        "error_details": str(ex),
    }
104+
105+
106+ _execute_sql_write_examples = """
107+ Create a table from the result of a query:
108+
109+ >>> execute_sql("bigframes-dev",
110+ ... "CREATE TABLE my_project.my_dataset.my_table AS "
111+ ... "SELECT island, COUNT(*) AS population "
112+ ... "FROM bigquery-public-data.ml_datasets.penguins GROUP BY island")
113+ {
114+ "status": "SUCCESS",
115+ "rows": []
116+ }
117+
118+ Delete a table:
119+
120+ >>> execute_sql("bigframes-dev",
121+ ... "DROP TABLE my_project.my_dataset.my_table")
122+ {
123+ "status": "SUCCESS",
124+ "rows": []
125+ }
126+
127+ Copy a table to another table:
128+
129+ >>> execute_sql("bigframes-dev",
130+ ... "CREATE TABLE my_project.my_dataset.my_table_clone "
131+ ... "CLONE my_project.my_dataset.my_table")
132+ {
133+ "status": "SUCCESS",
134+ "rows": []
135+ }
136+
137+ Create a snapshot (a lightweight, read-optimized copy) of en existing
138+ table:
139+
140+ >>> execute_sql("bigframes-dev",
141+ ... "CREATE SNAPSHOT TABLE my_project.my_dataset.my_table_snapshot "
142+ ... "CLONE my_project.my_dataset.my_table")
143+ {
144+ "status": "SUCCESS",
145+ "rows": []
146+ }
147+
148+ Notes:
149+ - If a destination table already exists, there are a few ways to overwrite
150+ it:
151+ - Use "CREATE OR REPLACE TABLE" instead of "CREATE TABLE".
152+ - First run "DROP TABLE", followed by "CREATE TABLE".
153+ - To insert data into a table, use "INSERT INTO" statement.
154+ """
155+
156+
def get_execute_sql(config: BigQueryToolConfig) -> Callable[..., dict]:
  """Get the execute_sql tool customized as per the given tool config.

  Args:
      config: BigQuery tool configuration indicating the behavior of the
        execute_sql tool.

  Returns:
      callable[..., dict]: A version of the execute_sql tool respecting the
        tool config.
  """

  # Writes disabled (or no config at all): the stock tool and its read-only
  # docstring are already accurate, so hand it back as-is.
  if not config or config.write_mode == WriteMode.BLOCKED:
    return execute_sql

  # Clone the function object from its code, globals, name, defaults, and
  # closure. This yields a distinct function whose docstring can be changed
  # without mutating the original execute_sql.
  tool_clone = types.FunctionType(
      execute_sql.__code__,
      execute_sql.__globals__,
      execute_sql.__name__,
      execute_sql.__defaults__,
      execute_sql.__closure__,
  )

  # Carry over the remaining metadata (__qualname__, __module__,
  # __annotations__, __dict__, __doc__, ...) the same way functools.wraps
  # would, so the clone is indistinguishable from the original to callers.
  functools.update_wrapper(tool_clone, execute_sql)

  # Append the write-mode usage examples to the (copied) docstring.
  tool_clone.__doc__ = execute_sql.__doc__ + _execute_sql_write_examples

  return tool_clone
0 commit comments