Feature/cldn 1565 #192

Merged · 3 commits · Aug 9, 2022
2 changes: 1 addition & 1 deletion cccs-build/superset/Dockerfile
@@ -1,7 +1,7 @@
# Vault CA container import
ARG VAULT_CA_CONTAINER=uchimera.azurecr.io/cccs/hogwarts/vault-ca:master_2921_22315d60
FROM $VAULT_CA_CONTAINER AS vault_ca
FROM uchimera.azurecr.io/cccs/superset-base:cccs-2.0_20220728115745_b4434
FROM uchimera.azurecr.io/cccs/superset-base:cccs-2.0_20220808144709_b4476

USER root

@@ -18,7 +18,6 @@
*/
import {
t,
tn,
FeatureFlag,
isFeatureEnabled,
DEFAULT_METRICS,
@@ -101,27 +100,7 @@ const validateAggControlValues = (
: [];
};

const validateColumnValues = (
controls: ControlStateMapping,
values: any[],
state: ControlPanelState,
) => {
const invalidColumns = values.filter(
(val: any) =>
val !== undefined &&
!state.datasource?.columns.some(col => col.name === val),
);
return invalidColumns.length !== 0
? [
tn(
'Invalid column: %s',
'Invalid columns: %s',
invalidColumns.length,
invalidColumns.join(', '),
),
]
: [];
};


const validateAggColumnValues = (
controls: ControlStateMapping,
@@ -130,7 +109,7 @@ const validateAggColumnValues = (
) => {
const result = validateAggControlValues(controls, values);
if (result.length === 0 && isAggMode({ controls })) {
return validateColumnValues(controls, values[1], state);
return [];
}
return result;
};
@@ -373,12 +352,6 @@ const config: ControlPanelConfig = {
isRawMode({ controls }) &&
ensureIsArray(controlState.value).length === 0
? [t('must have a value')]
: isRawMode({ controls })
? validateColumnValues(
controls,
ensureIsArray(controlState.value),
state,
)
: [];
return newState;
},
3 changes: 2 additions & 1 deletion superset/common/query_context.py
@@ -50,7 +50,7 @@ class QueryContext:
form_data: Optional[Dict[str, Any]]
result_type: ChartDataResultType
result_format: ChartDataResultFormat
viz_type: str
viz_type: Optional[str]
force: bool
custom_cache_timeout: Optional[int]

@@ -71,6 +71,7 @@ def __init__(
force: bool = False,
custom_cache_timeout: Optional[int] = None,
cache_values: Dict[str, Any],
viz_type: Optional[str]
) -> None:
self.datasource = datasource
self.result_type = result_type
2 changes: 1 addition & 1 deletion superset/config.py
@@ -403,7 +403,7 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:
# Enables Alerts and reports new implementation
"ALERT_REPORTS": False,
"DASHBOARD_RBAC": False,
"ENABLE_EXPLORE_DRAG_AND_DROP": True,
"ENABLE_EXPLORE_DRAG_AND_DROP": False,
"ENABLE_FILTER_BOX_MIGRATION": False,
"ENABLE_ADVANCED_DATA_TYPES": False,
"ENABLE_DND_WITH_CLICK_UX": True,
1 change: 0 additions & 1 deletion superset/connectors/base/models.py
@@ -598,7 +598,6 @@ class BaseColumn(AuditMixinNullable, ImportExportMixin):
column_name = Column(String(255), nullable=False)
verbose_name = Column(String(1024))
is_active = Column(Boolean, default=True)
business_type = Column(String(255))
type = Column(Text)
advanced_data_type = Column(String(255))
groupby = Column(Boolean, default=True)
140 changes: 1 addition & 139 deletions superset/connectors/sqla/models.py
@@ -419,7 +419,6 @@ def data(self) -> Dict[str, Any]:
"groupby",
"is_dttm",
"type",
"business_type",
"type_generic",
"advanced_data_type",
"python_date_format",
@@ -2483,144 +2482,7 @@ def write_shadow_dataset(
)
session.add(new_dataset)

@staticmethod
def write_shadow_dataset( # pylint: disable=too-many-locals
dataset: "SqlaTable", database: Database, session: Session
) -> None:
"""
Shadow write the dataset to new models.

The ``SqlaTable`` model is currently being migrated to two new models, ``Table``
and ``Dataset``. In the first phase of the migration the new models are populated
whenever ``SqlaTable`` is modified (created, updated, or deleted).

In the second phase of the migration reads will be done from the new models.
Finally, in the third phase of the migration the old models will be removed.

For more context: https://github.com/apache/superset/issues/14909
"""

engine = database.get_sqla_engine(schema=dataset.schema)
conditional_quote = engine.dialect.identifier_preparer.quote

# create columns
columns = []
for column in dataset.columns:
# ``is_active`` might be ``None`` at this point, but it defaults to ``True``.
if column.is_active is False:
continue

try:
extra_json = json.loads(column.extra or "{}")
except json.decoder.JSONDecodeError:
extra_json = {}
for attr in {"groupby", "filterable", "verbose_name", "python_date_format"}:
value = getattr(column, attr)
if value:
extra_json[attr] = value

columns.append(
NewColumn(
name=column.column_name,
type=column.type or "Unknown",
expression=column.expression
or conditional_quote(column.column_name),
description=column.description,
is_temporal=column.is_dttm,
is_aggregation=False,
is_physical=column.expression is None,
is_spatial=False,
is_partition=False,
is_increase_desired=True,
extra_json=json.dumps(extra_json) if extra_json else None,
is_managed_externally=dataset.is_managed_externally,
external_url=dataset.external_url,
),
)

# create metrics
for metric in dataset.metrics:
try:
extra_json = json.loads(metric.extra or "{}")
except json.decoder.JSONDecodeError:
extra_json = {}
for attr in {"verbose_name", "metric_type", "d3format"}:
value = getattr(metric, attr)
if value:
extra_json[attr] = value

is_additive = (
metric.metric_type
and metric.metric_type.lower() in ADDITIVE_METRIC_TYPES
)

columns.append(
NewColumn(
name=metric.metric_name,
type="Unknown", # figuring this out would require a type inferrer
expression=metric.expression,
warning_text=metric.warning_text,
description=metric.description,
is_aggregation=True,
is_additive=is_additive,
is_physical=False,
is_spatial=False,
is_partition=False,
is_increase_desired=True,
extra_json=json.dumps(extra_json) if extra_json else None,
is_managed_externally=dataset.is_managed_externally,
external_url=dataset.external_url,
),
)

# physical dataset
if not dataset.sql:
physical_columns = [column for column in columns if column.is_physical]

# create table
table = NewTable(
name=dataset.table_name,
schema=dataset.schema,
catalog=None, # currently not supported
database_id=dataset.database_id,
columns=physical_columns,
is_managed_externally=dataset.is_managed_externally,
external_url=dataset.external_url,
)
tables = [table]

# virtual dataset
else:
# mark all columns as virtual (not physical)
for column in columns:
column.is_physical = False

# find referenced tables
parsed = ParsedQuery(dataset.sql)
referenced_tables = parsed.tables
tables = load_or_create_tables(
session,
dataset.database_id,
dataset.schema,
referenced_tables,
conditional_quote,
engine,
)

# create the new dataset
new_dataset = NewDataset(
sqlatable_id=dataset.id,
name=dataset.table_name,
expression=dataset.sql or conditional_quote(dataset.table_name),
tables=tables,
columns=columns,
is_physical=not dataset.sql,
is_managed_externally=dataset.is_managed_externally,
external_url=dataset.external_url,
)
session.add(new_dataset)



sa.event.listen(SqlaTable, "before_update", SqlaTable.before_update)
sa.event.listen(SqlaTable, "after_insert", SqlaTable.after_insert)
sa.event.listen(SqlaTable, "after_delete", SqlaTable.after_delete)
1 change: 0 additions & 1 deletion superset/datasets/schemas.py
@@ -47,7 +47,6 @@ def validate_python_date_format(value: str) -> None:
class DatasetColumnsPutSchema(Schema):
id = fields.Integer()
column_name = fields.String(required=True, validate=Length(1, 255))
business_type = fields.String(allow_none=True, Length=(1, 255))
type = fields.String(allow_none=True)
advanced_data_type = fields.String(allow_none=True, validate=Length(1, 255))
verbose_name = fields.String(allow_none=True, Length=(1, 1024))
1 change: 0 additions & 1 deletion tests/integration_tests/datasets/api_tests.py
@@ -779,7 +779,6 @@ def test_update_dataset_create_column_and_metric(self):
assert columns[2].advanced_data_type == new_column_data["advanced_data_type"]
assert columns[2].extra == new_column_data["extra"]
assert columns[2].verbose_name == new_column_data["verbose_name"]
assert columns[2].business_type == new_column_data["business_type"]
assert str(columns[2].uuid) == new_column_data["uuid"]

metrics = (
Expand Down