Skip to content

Commit

Permalink
add partition key marker to Athena and Presto columns (re getredash#185)
Browse files Browse the repository at this point in the history
  • Loading branch information
Allen Short authored and jezdez committed Aug 16, 2018
1 parent ff55413 commit 8134845
Show file tree
Hide file tree
Showing 2 changed files with 26 additions and 4 deletions.
14 changes: 12 additions & 2 deletions redash/query_runner/athena.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,9 +156,10 @@ def get_schema(self, get_stats=False):

schema = {}
query = """
-SELECT table_schema, table_name, column_name, data_type as column_type
+SELECT table_schema, table_name, column_name, data_type as column_type, comment as extra_info
FROM information_schema.columns
WHERE table_schema NOT IN ('information_schema')
+ORDER BY 1, 5 DESC
"""

results, error = self.run_query(query, None)
Expand All @@ -170,7 +171,16 @@ def get_schema(self, get_stats=False):
table_name = '{0}.{1}'.format(row['table_schema'], row['table_name'])
if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}
-schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')')
+
+if row['extra_info'] == 'Partition Key':
+    schema[table_name]['columns'].append('[P] ' + row['column_name'] + ' (' + row['column_type'] + ')')
+elif row['column_type'] == 'integer' or row['column_type'] == 'varchar' or row['column_type'] == 'timestamp' or row['column_type'] == 'boolean' or row['column_type'] == 'bigint':
+    schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')')
+elif row['column_type'][0:2] == 'row' or row['column_type'][0:2] == 'map' or row['column_type'][0:2] == 'arr':
+    schema[table_name]['columns'].append(row['column_name'] + ' (row or map or array)')
+else:
+    schema[table_name]['columns'].append(row['column_name'])


return schema.values()

Expand Down
16 changes: 14 additions & 2 deletions redash/query_runner/presto.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import json
+from markupsafe import Markup, escape

from redash.utils import JSONEncoder
from redash.query_runner import *
Expand Down Expand Up @@ -84,9 +85,10 @@ def __init__(self, configuration):
def get_schema(self, get_stats=False):
schema = {}
query = """
-SELECT table_schema, table_name, column_name, data_type as column_type
+SELECT table_schema, table_name, column_name, data_type as column_type, extra_info
FROM information_schema.columns
WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
+ORDER BY 1, 5 DESC
"""

results, error = self.run_query(query, None)
Expand All @@ -102,7 +104,14 @@ def get_schema(self, get_stats=False):
if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}

-schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')')
+if row['extra_info'] == 'partition key':
+    schema[table_name]['columns'].append('[P] ' + row['column_name'] + ' (' + row['column_type'] + ')')
+elif row['column_type'] == 'integer' or row['column_type'] == 'varchar' or row['column_type'] == 'timestamp' or row['column_type'] == 'boolean' or row['column_type'] == 'bigint':
+    schema[table_name]['columns'].append(row['column_name'] + ' (' + row['column_type'] + ')')
+elif row['column_type'][0:2] == 'row' or row['column_type'][0:2] == 'map' or row['column_type'][0:2] == 'arr':
+    schema[table_name]['columns'].append(row['column_name'] + ' (row or map or array)')
+else:
+    schema[table_name]['columns'].append(row['column_name'])

return schema.values()

Expand All @@ -122,6 +131,9 @@ def run_query(self, query, user):
column_tuples = [(i[0], PRESTO_TYPES_MAPPING.get(i[1], None)) for i in cursor.description]
columns = self.fetch_columns(column_tuples)
rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())]
+for row in rows:
+    for field in row:
+        field = escape(field)
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
Expand Down

0 comments on commit 8134845

Please sign in to comment.