Skip to content

Commit

Permalink
fix: add timegrains to data payload (apache#20938)
Browse files Browse the repository at this point in the history
* add timegrains to data payload

* fix

* oops

* save

* integrate type casting for engines

* add perm object

* change how we raise_for_access

* fix orderby on column types

* linting
  • Loading branch information
hughhhh authored Aug 4, 2022
1 parent eb5369f commit 6e5036d
Show file tree
Hide file tree
Showing 3 changed files with 103 additions and 2 deletions.
7 changes: 6 additions & 1 deletion superset/common/query_context_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
from superset.utils import csv
from superset.utils.cache import generate_cache_key, set_and_log_cache
from superset.utils.core import (
DatasourceType,
DTTM_ALIAS,
error_msg_from_exception,
get_column_names_from_columns,
Expand Down Expand Up @@ -512,4 +513,8 @@ def raise_for_access(self) -> None:
"""
for query in self._query_context.queries:
query.validate()
security_manager.raise_for_access(query_context=self._query_context)

if self._qc_datasource.type == DatasourceType.QUERY:
security_manager.raise_for_access(query=self._qc_datasource)
else:
security_manager.raise_for_access(query_context=self._query_context)
80 changes: 79 additions & 1 deletion superset/models/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -1201,6 +1201,47 @@ def _get_top_groups(

return or_(*groups)

def dttm_sql_literal(self, dttm: sa.DateTime, col_type: Optional[str]) -> str:
"""Convert datetime object to a SQL expression string"""

sql = (
self.db_engine_spec.convert_dttm(col_type, dttm, db_extra=None)
if col_type
else None
)

if sql:
return sql

return f'{dttm.strftime("%Y-%m-%d %H:%M:%S.%f")}'

def get_time_filter(
    self,
    time_col: Dict[str, Any],
    start_dttm: sa.DateTime,
    end_dttm: sa.DateTime,
) -> ColumnElement:
    """Build a SQLA clause restricting *time_col* to a time range.

    The lower bound is inclusive (``>=``) and the upper bound exclusive
    (``<``); either bound may be falsy to leave that side open.

    :param time_col: column spec dict with ``column_name`` and ``type`` keys
    :param start_dttm: inclusive start of the range, or a falsy value
    :param end_dttm: exclusive end of the range, or a falsy value
    :returns: the conjunction of the applicable bound clauses
    """
    label = "__time"
    col = time_col.get("column_name")
    sqla_col = literal_column(col)
    my_col = self.make_sqla_column_compatible(sqla_col, label)
    # Renamed from `l`: single-letter `l` is ambiguous (PEP 8 / E741).
    filters = []
    if start_dttm:
        filters.append(
            my_col
            >= self.db_engine_spec.get_text_clause(
                self.dttm_sql_literal(start_dttm, time_col.get("type"))
            )
        )
    if end_dttm:
        filters.append(
            my_col
            < self.db_engine_spec.get_text_clause(
                self.dttm_sql_literal(end_dttm, time_col.get("type"))
            )
        )
    return and_(*filters)

def values_for_column(self, column_name: str, limit: int = 10000) -> List[Any]:
"""Runs query against sqla to retrieve some
sample values for the given column.
Expand Down Expand Up @@ -1257,6 +1298,12 @@ def get_timestamp_expression(
time_expr = self.db_engine_spec.get_timestamp_expr(col, None, time_grain)
return self.make_sqla_column_compatible(time_expr, label)

def get_sqla_col(self, col: Dict[str, Any]) -> Column:
    """Build a SQLA column object from a column-spec dict.

    :param col: spec dict carrying ``column_name`` and optionally ``type``
    :returns: a label-compatible SQLA column for use in generated queries
    """
    label = col.get("column_name")
    col_type = col.get("type")
    # Use a fresh name instead of rebinding the `col` parameter, which
    # shadowed the incoming spec dict mid-function.
    sqla_column = sa.column(label, type_=col_type)
    return self.make_sqla_column_compatible(sqla_column, label)

def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-many-branches,too-many-statements
self,
apply_fetch_values_predicate: bool = False,
Expand Down Expand Up @@ -1393,7 +1440,11 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma
col = metrics_exprs_by_expr.get(str(col), col)
need_groupby = True
elif col in columns_by_name:
col = columns_by_name[col].get_sqla_col()
gb_column_obj = columns_by_name[col]
if isinstance(gb_column_obj, dict):
col = self.get_sqla_col(gb_column_obj)
else:
col = gb_column_obj.get_sqla_col()
elif col in metrics_exprs_by_label:
col = metrics_exprs_by_label[col]
need_groupby = True
Expand Down Expand Up @@ -1490,6 +1541,33 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma
select_exprs.insert(0, timestamp)
groupby_all_columns[timestamp.name] = timestamp

# Use main dttm column to support index with secondary dttm columns.
if (
db_engine_spec.time_secondary_columns
and self.main_dttm_col in self.dttm_cols
and self.main_dttm_col != dttm_col.column_name
):
if isinstance(self.main_dttm_col, dict):
time_filters.append(
self.get_time_filter(
self.main_dttm_col,
from_dttm,
to_dttm,
)
)
else:
time_filters.append(
columns_by_name[self.main_dttm_col].get_time_filter(
from_dttm,
to_dttm,
)
)

if isinstance(dttm_col, dict):
time_filters.append(self.get_time_filter(dttm_col, from_dttm, to_dttm))
else:
time_filters.append(dttm_col.get_time_filter(from_dttm, to_dttm))

# Always remove duplicates by column name, as sometimes `metrics_exprs`
# can have the same name as a groupby column (e.g. when users use
# raw columns as custom SQL adhoc metric).
Expand Down
18 changes: 18 additions & 0 deletions superset/models/sql_lab.py
Original file line number Diff line number Diff line change
Expand Up @@ -218,7 +218,20 @@ def columns(self) -> List[ResultSetColumnType]:

@property
def data(self) -> Dict[str, Any]:
order_by_choices = []
for col in self.columns:
column_name = str(col.get("column_name") or "")
order_by_choices.append(
(json.dumps([column_name, True]), column_name + " [asc]")
)
order_by_choices.append(
(json.dumps([column_name, False]), column_name + " [desc]")
)

return {
"time_grain_sqla": [
(g.duration, g.name) for g in self.database.grains() or []
],
"filter_select": True,
"name": self.tab_name,
"columns": self.columns,
Expand All @@ -228,6 +241,7 @@ def data(self) -> Dict[str, Any]:
"sql": self.sql,
"owners": self.owners_data,
"database": {"id": self.database_id, "backend": self.database.backend},
"order_by_choices": order_by_choices,
}

def raise_for_access(self) -> None:
Expand Down Expand Up @@ -282,6 +296,10 @@ def dttm_cols(self) -> List[Any]:
def schema_perm(self) -> str:
    """Return the ``database.schema`` permission name for this query."""
    database_name = self.database.database_name
    return f"{database_name}.{self.schema}"

@property
def perm(self) -> str:
    """Return the datasource permission string for this saved query."""
    database_name = self.database.database_name
    return f"[{database_name}].[{self.tab_name}](id:{self.id})"

@property
def default_endpoint(self) -> str:
    """Saved queries expose no default endpoint, so this is always empty."""
    return ""
Expand Down

0 comments on commit 6e5036d

Please sign in to comment.