Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Move BigQueryGCE to its own file #3356

Merged
merged 2 commits into from
Jan 28, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
58 changes: 0 additions & 58 deletions redash/query_runner/big_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.service_account import ServiceAccountCredentials
from oauth2client.contrib import gce

enabled = True
except ImportError:
Expand Down Expand Up @@ -312,61 +311,4 @@ def run_query(self, query, user):
return json_data, error


class BigQueryGCE(BigQuery):
@classmethod
def type(cls):
return "bigquery_gce"

@classmethod
def enabled(cls):
try:
# check if we're on a GCE instance
requests.get('http://metadata.google.internal')
except requests.exceptions.ConnectionError:
return False

return True

@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'totalMBytesProcessedLimit': {
"type": "number",
'title': 'Total MByte Processed Limit'
},
'userDefinedFunctionResourceUri': {
"type": "string",
'title': 'UDF Source URIs (i.e. gs://bucket/date_utils.js, gs://bucket/string_utils.js )'
},
'useStandardSql': {
"type": "boolean",
'title': "Use Standard SQL",
"default": True,
},
'location': {
"type": "string",
"title": "Processing Location",
"default": "US",
},
'loadSchema': {
"type": "boolean",
"title": "Load Schema"
}
}
}

def _get_project_id(self):
return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content

def _get_bigquery_service(self):
credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)

return build("bigquery", "v2", http=http)


register(BigQuery)
register(BigQueryGCE)
75 changes: 75 additions & 0 deletions redash/query_runner/big_query_gce.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
import requests
import httplib2

try:
    # Optional Google API dependencies: only required when this query
    # runner is actually used on a GCE instance.
    from apiclient.discovery import build
    from oauth2client.contrib import gce

    enabled = True
except ImportError:
    # Missing packages simply disable the runner rather than crashing
    # Redash at import time (checked again in BigQueryGCE.enabled()).
    enabled = False

from redash.query_runner import register
from .big_query import BigQuery


class BigQueryGCE(BigQuery):
    """BigQuery query runner that authenticates with the Google Compute
    Engine instance's default service account instead of an uploaded
    service-account JSON key.

    Only usable when Redash itself runs on a GCE instance, which is
    detected by probing the GCE metadata server.
    """

    # Short timeout for metadata-server probes; the metadata endpoint is
    # link-local and answers near-instantly on GCE, so anything slower
    # means we are not on GCE (or the network is broken).
    _METADATA_TIMEOUT = 2

    @classmethod
    def type(cls):
        """Return the unique query-runner type identifier."""
        return "bigquery_gce"

    @classmethod
    def enabled(cls):
        """Return True only when the Google libraries imported and we
        appear to be running on a GCE instance."""
        if not enabled:
            return False

        try:
            # Check if we're on a GCE instance by probing the metadata
            # server. Without a timeout this call could hang Redash
            # startup forever on networks where the hostname resolves
            # but never answers, so bound it and treat any request
            # failure (connection error or timeout) as "not on GCE".
            requests.get('http://metadata.google.internal',
                         timeout=cls._METADATA_TIMEOUT)
        except requests.exceptions.RequestException:
            return False

        return True

    @classmethod
    def configuration_schema(cls):
        """Return the JSON schema for this runner's configuration.

        Same options as BigQuery, minus the credentials fields: project
        id and credentials come from the instance metadata instead.
        """
        return {
            'type': 'object',
            'properties': {
                'totalMBytesProcessedLimit': {
                    "type": "number",
                    'title': 'Total MByte Processed Limit'
                },
                'userDefinedFunctionResourceUri': {
                    "type": "string",
                    'title': 'UDF Source URIs (i.e. gs://bucket/date_utils.js, gs://bucket/string_utils.js )'
                },
                'useStandardSql': {
                    "type": "boolean",
                    'title': "Use Standard SQL",
                    "default": True,
                },
                'location': {
                    "type": "string",
                    "title": "Processing Location",
                    "default": "US",
                },
                'loadSchema': {
                    "type": "boolean",
                    "title": "Load Schema"
                }
            }
        }

    def _get_project_id(self):
        """Fetch the GCP project id from the instance metadata server.

        NOTE(review): returns the raw response body (bytes via
        ``.content``) — callers appear to use it as a string; confirm
        this is intended before changing to ``.text``.
        """
        response = requests.get(
            'http://metadata/computeMetadata/v1/project/project-id',
            headers={'Metadata-Flavor': 'Google'},
            # Bound the call so a misbehaving metadata server cannot
            # block a query indefinitely.
            timeout=self._METADATA_TIMEOUT)
        return response.content

    def _get_bigquery_service(self):
        """Build a BigQuery API client authorized with the instance's
        default service-account credentials."""
        credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
        http = httplib2.Http()
        http = credentials.authorize(http)

        return build("bigquery", "v2", http=http)


# Make this runner discoverable by Redash's query-runner registry.
register(BigQueryGCE)