From a1f84aeb7d738aa5b0e20eb58956f6c0a9f80ac1 Mon Sep 17 00:00:00 2001
From: Arik Fraimovich
Date: Mon, 28 Jan 2019 09:22:29 +0200
Subject: [PATCH 1/2] Move BigQueryGCE to its own file

---
 redash/query_runner/big_query.py     | 56 ----------------------
 redash/query_runner/big_query_gce.py | 72 ++++++++++++++++++++++++++++
 2 files changed, 72 insertions(+), 56 deletions(-)
 create mode 100644 redash/query_runner/big_query_gce.py

diff --git a/redash/query_runner/big_query.py b/redash/query_runner/big_query.py
index 594d79d203..4e90b8e988 100644
--- a/redash/query_runner/big_query.py
+++ b/redash/query_runner/big_query.py
@@ -18,7 +18,6 @@
     from apiclient.discovery import build
     from apiclient.errors import HttpError
     from oauth2client.service_account import ServiceAccountCredentials
-    from oauth2client.contrib import gce
 
     enabled = True
 except ImportError:
@@ -312,61 +311,6 @@ def run_query(self, query, user):
 
         return json_data, error
 
-class BigQueryGCE(BigQuery):
-    @classmethod
-    def type(cls):
-        return "bigquery_gce"
-
-    @classmethod
-    def enabled(cls):
-        try:
-            # check if we're on a GCE instance
-            requests.get('http://metadata.google.internal')
-        except requests.exceptions.ConnectionError:
-            return False
-
-        return True
-
-    @classmethod
-    def configuration_schema(cls):
-        return {
-            'type': 'object',
-            'properties': {
-                'totalMBytesProcessedLimit': {
-                    "type": "number",
-                    'title': 'Total MByte Processed Limit'
-                },
-                'userDefinedFunctionResourceUri': {
-                    "type": "string",
-                    'title': 'UDF Source URIs (i.e. gs://bucket/date_utils.js, gs://bucket/string_utils.js )'
-                },
-                'useStandardSql': {
-                    "type": "boolean",
-                    'title': "Use Standard SQL",
-                    "default": True,
-                },
-                'location': {
-                    "type": "string",
-                    "title": "Processing Location",
-                    "default": "US",
-                },
-                'loadSchema': {
-                    "type": "boolean",
-                    "title": "Load Schema"
-                }
-            }
-        }
-
-    def _get_project_id(self):
-        return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content
-
-    def _get_bigquery_service(self):
-        credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
-        http = httplib2.Http()
-        http = credentials.authorize(http)
-
-        return build("bigquery", "v2", http=http)
 
 
 register(BigQuery)
-register(BigQueryGCE)
diff --git a/redash/query_runner/big_query_gce.py b/redash/query_runner/big_query_gce.py
new file mode 100644
index 0000000000..c57d9b30e4
--- /dev/null
+++ b/redash/query_runner/big_query_gce.py
@@ -0,0 +1,72 @@
+import requests
+import httplib2
+
+try:
+    from apiclient.discovery import build
+    from oauth2client.contrib import gce
+
+    enabled = True
+except ImportError:
+    enabled = False
+
+from .big_query import BigQuery
+
+class BigQueryGCE(BigQuery):
+    @classmethod
+    def type(cls):
+        return "bigquery_gce"
+
+    @classmethod
+    def enabled(cls):
+        if not enabled:
+            return False
+
+        try:
+            # check if we're on a GCE instance
+            requests.get('http://metadata.google.internal')
+        except requests.exceptions.ConnectionError:
+            return False
+
+        return True
+
+    @classmethod
+    def configuration_schema(cls):
+        return {
+            'type': 'object',
+            'properties': {
+                'totalMBytesProcessedLimit': {
+                    "type": "number",
+                    'title': 'Total MByte Processed Limit'
+                },
+                'userDefinedFunctionResourceUri': {
+                    "type": "string",
+                    'title': 'UDF Source URIs (i.e. gs://bucket/date_utils.js, gs://bucket/string_utils.js )'
+                },
+                'useStandardSql': {
+                    "type": "boolean",
+                    'title': "Use Standard SQL",
+                    "default": True,
+                },
+                'location': {
+                    "type": "string",
+                    "title": "Processing Location",
+                    "default": "US",
+                },
+                'loadSchema': {
+                    "type": "boolean",
+                    "title": "Load Schema"
+                }
+            }
+        }
+
+    def _get_project_id(self):
+        return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content
+
+    def _get_bigquery_service(self):
+        credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
+        http = httplib2.Http()
+        http = credentials.authorize(http)
+
+        return build("bigquery", "v2", http=http)
+
+register(BigQueryGCE)
\ No newline at end of file

From 03f046381547facd8f3954d4fd0eaefd58a656d8 Mon Sep 17 00:00:00 2001
From: Arik Fraimovich
Date: Mon, 28 Jan 2019 09:27:23 +0200
Subject: [PATCH 2/2] Add missing import

---
 redash/query_runner/big_query.py     | 2 --
 redash/query_runner/big_query_gce.py | 5 ++++-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/redash/query_runner/big_query.py b/redash/query_runner/big_query.py
index 4e90b8e988..274f63da8a 100644
--- a/redash/query_runner/big_query.py
+++ b/redash/query_runner/big_query.py
@@ -311,6 +311,4 @@ def run_query(self, query, user):
 
         return json_data, error
 
-
-
 register(BigQuery)
diff --git a/redash/query_runner/big_query_gce.py b/redash/query_runner/big_query_gce.py
index c57d9b30e4..2fb7d9db05 100644
--- a/redash/query_runner/big_query_gce.py
+++ b/redash/query_runner/big_query_gce.py
@@ -9,8 +9,10 @@ except ImportError:
     enabled = False
 
+from redash.query_runner import register
 from .big_query import BigQuery
 
+
 class BigQueryGCE(BigQuery):
     @classmethod
     def type(cls):
         return "bigquery_gce"
@@ -69,4 +71,5 @@ def _get_bigquery_service(self):
 
         return build("bigquery", "v2", http=http)
 
-register(BigQueryGCE)
\ No newline at end of file
+
+register(BigQueryGCE)