diff --git a/client/app/pages/queries/query.html b/client/app/pages/queries/query.html
index 97148729b9..bab5e22c8e 100644
--- a/client/app/pages/queries/query.html
+++ b/client/app/pages/queries/query.html
@@ -283,6 +283,9 @@
Data Scanned
{{ queryResult.query_result.data.metadata.data_scanned | prettySize}}
+ Query cost in USD
+ {{ queryResult.query_result.data.metadata.query_cost }}
+
diff --git a/redash/query_runner/big_query.py b/redash/query_runner/big_query.py
index 03defe1fcd..56c62653c8 100644
--- a/redash/query_runner/big_query.py
+++ b/redash/query_runner/big_query.py
@@ -1,5 +1,6 @@
import datetime
import logging
+import os
import sys
import time
from base64 import b64decode
@@ -12,6 +13,7 @@
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
+EXPOSE_COST = settings.parse_boolean(os.environ.get('GBQ_EXPOSE_COST', 'false'))
try:
import apiclient.errors
@@ -92,7 +94,7 @@ def enabled(cls):
     @classmethod
     def configuration_schema(cls):
-        return {
+        schema = {
             'type': 'object',
             'properties': {
                 'projectId': {
@@ -132,7 +144,19 @@ def configuration_schema(cls):
             'required': ['jsonKeyFile', 'projectId'],
             "order": ['projectId', 'jsonKeyFile', 'loadSchema', 'useStandardSql', 'location', 'totalMBytesProcessedLimit', 'maximumBillingTier', 'userDefinedFunctionResourceUri'],
             'secret': ['jsonKeyFile']
-        }
+        }
+
+        # Extend only after the base schema literal is built: appending to
+        # schema['order'] before the dict exists raises NameError, and the
+        # literal's 'order'/'properties' values would clobber the additions.
+        if EXPOSE_COST:
+            schema['order'].append('cost_per_tb')
+            schema['properties']['cost_per_tb'] = {
+                'type': 'number',
+                'title': 'Google Big Query cost per Tb scanned',
+                'default': 1.1
+            }
+
+        return schema
def _get_bigquery_service(self):
scope = [
@@ -146,7 +160,7 @@ def _get_bigquery_service(self):
http = httplib2.Http(timeout=settings.BIGQUERY_HTTP_TIMEOUT)
http = creds.authorize(http)
- return build("bigquery", "v2", http=http)
+ return build("bigquery", "v2", http=http, cache_discovery=False)
def _get_project_id(self):
return self.configuration["projectId"]
@@ -207,7 +221,7 @@ def _get_query_result(self, jobs, query):
rows = []
- while ("rows" in query_reply) and current_row < query_reply['totalRows']:
+ while ("rows" in query_reply) and int(current_row) < int(query_reply['totalRows']):
for row in query_reply["rows"]:
rows.append(transform_row(row, query_reply["schema"]["fields"]))
@@ -231,12 +245,20 @@ def _get_query_result(self, jobs, query):
else types_map.get(f['type'], "string")
} for f in query_reply["schema"]["fields"]]
+ qbytes = int(query_reply['totalBytesProcessed'])
+
data = {
"columns": columns,
"rows": rows,
- 'metadata': {'data_scanned': int(query_reply['totalBytesProcessed'])}
+ 'metadata': {
+ 'data_scanned': qbytes
+ }
}
+ if EXPOSE_COST:
+ price = self.configuration.get('cost_per_tb', 1.1)
+                data['metadata'].update({'query_cost': '${0:.2f}'.format(price * qbytes * 1e-12)})
+
return data
def _get_columns_schema(self, table_data):