remove config related references to DruidDatasource
eschutho committed May 4, 2022
1 parent 2d2d55e commit 79c65f1
Showing 11 changed files with 7 additions and 114 deletions.
@@ -20,9 +20,6 @@ import React from 'react';
import { t } from '@superset-ui/core';
import { sections } from '@superset-ui/chart-controls';

const appContainer = document.getElementById('app');
const bootstrapData = JSON.parse(appContainer.getAttribute('data-bootstrap'));

export default {
controlPanelSections: [
sections.legacyTimeseriesTime,
1 change: 0 additions & 1 deletion superset/cli/update.py
@@ -18,7 +18,6 @@
import logging
import os
import sys
from datetime import datetime
from typing import Optional

import click
13 changes: 2 additions & 11 deletions superset/config.py
@@ -636,12 +636,6 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:

VIZ_TYPE_DENYLIST: List[str] = []

# ---------------------------------------------------
# List of data sources not to be refreshed in druid cluster
# ---------------------------------------------------

DRUID_DATA_SOURCE_DENYLIST: List[str] = []

# --------------------------------------------------
# Modules, datasources and middleware to be registered
# --------------------------------------------------
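
For context, the removed setting was a plain list of Druid datasource names that the cluster-refresh job would skip. A deployment that relied on it would have carried something like the following in its superset_config.py (the names are invented), and can simply delete it after this change:

    # No longer read by Superset once this commit lands -- illustrative only
    DRUID_DATA_SOURCE_DENYLIST = ["events_raw", "sessions_staging"]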
@@ -975,8 +969,7 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC( # pylint: disable=invalid-name
# into a proxied one


def TRACKING_URL_TRANSFORMER(x):
return x
TRACKING_URL_TRANSFORMER = lambda x: x


# Interval between consecutive polls when using Hive Engine
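
TRACKING_URL_TRANSFORMER, touched in the hunk above, is a config hook that receives the tracking URL reported by the query engine and returns the URL Superset should display. A minimal sketch of an override in superset_config.py, assuming a deployment that exposes the engine UI through an internal proxy (the hostnames are invented):

    # superset_config.py -- illustrative override, not part of this commit
    def TRACKING_URL_TRANSFORMER(url: str) -> str:
        # Rewrite the engine's internal tracking URL so users reach it via the proxy.
        return url.replace(
            "http://resourcemanager:8088", "https://hadoop-proxy.internal.example"
        )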
@@ -1197,9 +1190,7 @@ def SQL_QUERY_MUTATOR( # pylint: disable=invalid-name,unused-argument
# This can be used to set any properties of the object based on naming
# conventions and such. You can find examples in the tests.


def SQLA_TABLE_MUTATOR(table):
return table
SQLA_TABLE_MUTATOR = lambda table: table


# Global async query config options.
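
SQLA_TABLE_MUTATOR, likewise, is called with each SQLAlchemy table object so deployments can adjust properties based on naming conventions, as the comment above notes. A small sketch of an override, assuming the table_name and description attributes on the SqlaTable model (worth double-checking against the running version):

    # superset_config.py -- hypothetical naming-convention mutator
    def SQLA_TABLE_MUTATOR(table):
        # Label staging tables so they are clearly marked in the UI.
        if table.table_name.startswith("tmp_"):
            table.description = "Staging table - not intended for dashboards"
        return table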
19 changes: 1 addition & 18 deletions superset/dashboards/commands/importers/v0.py
@@ -269,20 +269,11 @@ def alter_native_filters(dashboard: Dashboard) -> None:
return dashboard_to_import.id # type: ignore


def decode_dashboards( # pylint: disable=too-many-return-statements
o: Dict[str, Any]
) -> Any:
def decode_dashboards(o: Dict[str, Any]) -> Any:
"""
Function to be passed into json.loads obj_hook parameter
Recreates the dashboard object from a json representation.
"""
# pylint: disable=import-outside-toplevel
from superset.connectors.druid.models import (
DruidCluster,
DruidColumn,
DruidDatasource,
DruidMetric,
)

if "__Dashboard__" in o:
return Dashboard(**o["__Dashboard__"])
@@ -294,14 +285,6 @@ def decode_dashboards( # pylint: disable=too-many-return-statements
return SqlaTable(**o["__SqlaTable__"])
if "__SqlMetric__" in o:
return SqlMetric(**o["__SqlMetric__"])
if "__DruidCluster__" in o:
return DruidCluster(**o["__DruidCluster__"])
if "__DruidColumn__" in o:
return DruidColumn(**o["__DruidColumn__"])
if "__DruidDatasource__" in o:
return DruidDatasource(**o["__DruidDatasource__"])
if "__DruidMetric__" in o:
return DruidMetric(**o["__DruidMetric__"])
if "__datetime__" in o:
return datetime.strptime(o["__datetime__"], "%Y-%m-%dT%H:%M:%S")
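
As the docstring says, decode_dashboards is meant to be handed to json.loads (via its object_hook parameter) so every decoded JSON object is rebuilt as the matching model instance. A usage sketch with a hypothetical export file:

    import json

    # Sketch only; in the importer the payload comes from the uploaded export file.
    with open("dashboard_export.json") as fp:
        objects = json.loads(fp.read(), object_hook=decode_dashboards)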

2 changes: 1 addition & 1 deletion superset/datasets/commands/importers/v0.py
@@ -278,7 +278,7 @@ def validate(self) -> None:
# CLI export
if isinstance(config, dict):
# TODO (betodealmeida): validate with Marshmallow
if DATABASES_KEY not in config and DRUID_CLUSTERS_KEY not in config:
if DATABASES_KEY not in config:
raise IncorrectVersionError(f"{file_name} has no valid keys")

# UI export
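
For reference, after this change the check above only requires the top-level DATABASES_KEY ("databases") to be present in a CLI export. A rough sketch of an accepted shape (nested field names are illustrative, not exhaustive):

    config = {
        "databases": [  # DATABASES_KEY -- the only key this check still requires
            {
                "database_name": "examples",
                "tables": [{"table_name": "my_table", "columns": [], "metrics": []}],
            }
        ]
    }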
4 changes: 2 additions & 2 deletions superset/initialization/__init__.py
@@ -67,7 +67,7 @@ def __init__(self, app: SupersetApp) -> None:
self.config = app.config
self.manifest: Dict[Any, Any] = {}

@deprecated(details="use self.superset_app instead of self.flask_app") # type: ignore
@deprecated(details="use self.superset_app instead of self.flask_app") # type: ignore
@property
def flask_app(self) -> SupersetApp:
return self.superset_app
@@ -145,7 +145,7 @@ def init_views(self) -> None:
from superset.reports.logs.api import ReportExecutionLogRestApi
from superset.security.api import SecurityRestApi
from superset.views.access_requests import AccessRequestsModelView
from superset.views.alerts import AlertView, ReportView
from superset.views.alerts import AlertView
from superset.views.annotations import (
AnnotationLayerModelView,
AnnotationModelView,
2 changes: 1 addition & 1 deletion superset/views/base.py
@@ -19,7 +19,7 @@
import logging
import traceback
from datetime import datetime
from typing import Any, Callable, cast, Dict, List, Optional, TYPE_CHECKING, Union
from typing import Any, Callable, cast, Dict, List, Optional, Union

import simplejson as json
import yaml
2 changes: 0 additions & 2 deletions tests/integration_tests/access_tests.py
@@ -113,8 +113,6 @@ class TestRequestAccess(SupersetTestCase):
@classmethod
def setUpClass(cls):
with app.app_context():
cls.create_druid_test_objects()

security_manager.add_role("override_me")
security_manager.add_role(TEST_ROLE_1)
security_manager.add_role(TEST_ROLE_2)
1 change: 0 additions & 1 deletion tests/integration_tests/core_tests.py
@@ -294,7 +294,6 @@ def test_admin_only_permissions(self):
def assert_admin_permission_in(role_name, assert_func):
role = security_manager.find_role(role_name)
permissions = [p.permission.name for p in role.permissions]
assert_func("can_sync_druid_source", permissions)
assert_func("can_approve", permissions)

assert_admin_permission_in("Admin", self.assertIn)
73 changes: 0 additions & 73 deletions tests/integration_tests/import_export_tests.py
@@ -72,7 +72,6 @@ def delete_imports(cls):
@classmethod
def setUpClass(cls):
cls.delete_imports()
cls.create_druid_test_objects()

@classmethod
def tearDownClass(cls):
@@ -674,78 +673,6 @@ def test_import_table_override_identical(self):
self.assertEqual(imported_id, imported_id_copy)
self.assert_table_equals(copy_table, self.get_table_by_id(imported_id))

def test_import_druid_no_metadata(self):
datasource = self.create_druid_datasource("pure_druid", id=10001)
imported_id = import_dataset(datasource, import_time=1989)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)

def test_import_druid_1_col_1_met(self):
datasource = self.create_druid_datasource(
"druid_1_col_1_met", id=10002, cols_names=["col1"], metric_names=["metric1"]
)
imported_id = import_dataset(datasource, import_time=1990)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)
self.assertEqual(
{"remote_id": 10002, "import_time": 1990, "database_name": "druid_test"},
json.loads(imported.params),
)

def test_import_druid_2_col_2_met(self):
datasource = self.create_druid_datasource(
"druid_2_col_2_met",
id=10003,
cols_names=["c1", "c2"],
metric_names=["m1", "m2"],
)
imported_id = import_dataset(datasource, import_time=1991)
imported = self.get_datasource(imported_id)
self.assert_datasource_equals(datasource, imported)

def test_import_druid_override(self):
datasource = self.create_druid_datasource(
"druid_override", id=10004, cols_names=["col1"], metric_names=["m1"]
)
imported_id = import_dataset(datasource, import_time=1991)
table_over = self.create_druid_datasource(
"druid_override",
id=10004,
cols_names=["new_col1", "col2", "col3"],
metric_names=["new_metric1"],
)
imported_over_id = import_dataset(table_over, import_time=1992)

imported_over = self.get_datasource(imported_over_id)
self.assertEqual(imported_id, imported_over.id)
expected_datasource = self.create_druid_datasource(
"druid_override",
id=10004,
metric_names=["new_metric1", "m1"],
cols_names=["col1", "new_col1", "col2", "col3"],
)
self.assert_datasource_equals(expected_datasource, imported_over)

def test_import_druid_override_identical(self):
datasource = self.create_druid_datasource(
"copy_cat",
id=10005,
cols_names=["new_col1", "col2", "col3"],
metric_names=["new_metric1"],
)
imported_id = import_dataset(datasource, import_time=1993)

copy_datasource = self.create_druid_datasource(
"copy_cat",
id=10005,
cols_names=["new_col1", "col2", "col3"],
metric_names=["new_metric1"],
)
imported_id_copy = import_dataset(copy_datasource, import_time=1994)

self.assertEqual(imported_id, imported_id_copy)
self.assert_datasource_equals(copy_datasource, self.get_datasource(imported_id))


if __name__ == "__main__":
unittest.main()
1 change: 0 additions & 1 deletion tests/integration_tests/security_tests.py
@@ -596,7 +596,6 @@ def assert_can_admin(self, perm_set):

self.assertIn(("all_database_access", "all_database_access"), perm_set)
self.assertIn(("can_override_role_permissions", "Superset"), perm_set)
self.assertIn(("can_sync_druid_source", "Superset"), perm_set)
self.assertIn(("can_override_role_permissions", "Superset"), perm_set)
self.assertIn(("can_approve", "Superset"), perm_set)

