From 51c565cfbe280d17bb3dee634e0d688d51008abc Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Tue, 16 Apr 2024 13:58:35 -0400 Subject: [PATCH 1/6] feat(sip-95): new endpoint for extra table metadata --- .../src/SqlLab/actions/sqlLab.test.js | 2 +- .../SqlEditorLeftBar.test.tsx | 2 +- .../TableElement/TableElement.test.tsx | 2 +- .../DndFilterSelect.tsx | 2 +- .../AdhocFilterControl/index.jsx | 2 +- .../src/hooks/apiResources/tables.ts | 6 +- superset/constants.py | 1 + superset/databases/api.py | 113 +++++++++++- superset/databases/schemas.py | 24 +++ superset/db_engine_specs/README.md | 2 +- superset/db_engine_specs/base.py | 21 ++- superset/db_engine_specs/bigquery.py | 8 +- superset/db_engine_specs/gsheets.py | 9 +- superset/db_engine_specs/lib.py | 10 +- superset/db_engine_specs/presto.py | 22 ++- superset/db_engine_specs/trino.py | 21 ++- superset/errors.py | 1 + superset/exceptions.py | 26 +++ .../integration_tests/databases/api_tests.py | 12 +- .../databases/commands/csv_upload_test.py | 3 + .../db_engine_specs/bigquery_tests.py | 12 +- .../db_engine_specs/presto_tests.py | 9 +- tests/unit_tests/conftest.py | 1 + tests/unit_tests/databases/api_test.py | 174 +++++++++++++++++- .../unit_tests/db_engine_specs/test_trino.py | 8 +- tests/unit_tests/fixtures/common.py | 3 + 26 files changed, 429 insertions(+), 67 deletions(-) diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.test.js b/superset-frontend/src/SqlLab/actions/sqlLab.test.js index 20fd53ad389c9..ecf2c4d7e299c 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.test.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.test.js @@ -511,7 +511,7 @@ describe('async actions', () => { const getTableMetadataEndpoint = 'glob:**/api/v1/database/*/table/*/*/'; fetchMock.get(getTableMetadataEndpoint, {}); const getExtraTableMetadataEndpoint = - 'glob:**/api/v1/database/*/table_extra/*/*/'; + 'glob:**/api/v1/database/*/table_metadata/extra/'; fetchMock.get(getExtraTableMetadataEndpoint, {}); let isFeatureEnabledMock; diff --git a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/SqlEditorLeftBar.test.tsx b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/SqlEditorLeftBar.test.tsx index f89c842b15518..f8c94468bf7f3 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/SqlEditorLeftBar.test.tsx +++ b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/SqlEditorLeftBar.test.tsx @@ -67,7 +67,7 @@ beforeEach(() => { columns: table.columns, }, }); - fetchMock.get('glob:*/api/v1/database/*/table_extra/*/*', { + fetchMock.get('glob:*/api/v1/database/*/table_metadata/extra/', { status: 200, body: {}, }); diff --git a/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx b/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx index d9ac1d1e6f90a..74fd58b798be7 100644 --- a/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx +++ b/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx @@ -49,7 +49,7 @@ jest.mock( ); const getTableMetadataEndpoint = 'glob:**/api/v1/database/*/table/*/*/'; const getExtraTableMetadataEndpoint = - 'glob:**/api/v1/database/*/table_extra/*/*/'; + 'glob:**/api/v1/database/*/table_metadata/extra/'; const updateTableSchemaEndpoint = 'glob:*/tableschemaview/*/expanded'; beforeEach(() => { diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.tsx index 
b479ce4c87595..b2fd3e35cf535 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.tsx @@ -181,7 +181,7 @@ const DndFilterSelect = (props: DndFilterSelectProps) => { if (!isSqllabView && dbId && name && schema) { SupersetClient.get({ - endpoint: `/api/v1/database/${dbId}/table_extra/${name}/${schema}/`, + endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?table=${name}&schema=${schema}`, }) .then(({ json }: { json: Record }) => { if (json?.partitions) { diff --git a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/index.jsx b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/index.jsx index 7b6a3938fd782..2bcb668f45cb9 100644 --- a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/index.jsx +++ b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/index.jsx @@ -143,7 +143,7 @@ class AdhocFilterControl extends React.Component { if (!isSqllabView && dbId && name && schema) { SupersetClient.get({ - endpoint: `/api/v1/database/${dbId}/table_extra/${name}/${schema}/`, + endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?table=${name}&schema=${schema}`, }) .then(({ json }) => { if (json && json.partitions) { diff --git a/superset-frontend/src/hooks/apiResources/tables.ts b/superset-frontend/src/hooks/apiResources/tables.ts index 66eedc00b5e06..57c2a64527733 100644 --- a/superset-frontend/src/hooks/apiResources/tables.ts +++ b/superset-frontend/src/hooks/apiResources/tables.ts @@ -125,9 +125,9 @@ const tableApi = api.injectEndpoints({ FetchTableMetadataQueryParams >({ query: ({ dbId, schema, table }) => ({ - endpoint: `/api/v1/database/${dbId}/table_extra/${encodeURIComponent( - table, - )}/${encodeURIComponent(schema)}/`, + endpoint: schema + ? `/api/v1/database/${dbId}/table_metadata/extra/?table=${table}&schema=${schema}` + : `/api/v1/database/${dbId}/table_metadata/extra/?table=${table}`, transformResponse: ({ json }: JsonResponse) => json, }), }), diff --git a/superset/constants.py b/superset/constants.py index bf4e7717d5a24..e4d467bdd834e 100644 --- a/superset/constants.py +++ b/superset/constants.py @@ -135,6 +135,7 @@ class RouteMethod: # pylint: disable=too-few-public-methods "select_star": "read", "table_metadata": "read", "table_extra_metadata": "read", + "table_extra_metadata_deprecated": "read", "test_connection": "write", "validate_parameters": "write", "favorite_status": "read", diff --git a/superset/databases/api.py b/superset/databases/api.py index d778027f69faa..6ea9e936cebd5 100644 --- a/superset/databases/api.py +++ b/superset/databases/api.py @@ -15,11 +15,14 @@ # specific language governing permissions and limitations # under the License. 
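All of the frontend call sites above move from the path-style `table_extra` route to the new query-parameter form of `table_metadata/extra/`. For reference, a sketch of the two URL shapes; the database id, table, and schema values are illustrative, and the `table` parameter is renamed to `name` in the last commit of this series:

```python
# Old (deprecated) path-style endpoint vs. the new query-parameter endpoint that the
# frontend changes above now call. Values are illustrative only.
from urllib.parse import quote, urlencode

db_id, table, schema = 1, "my table", "main"

old_url = f"/api/v1/database/{db_id}/table_extra/{quote(table)}/{quote(schema)}/"
new_url = f"/api/v1/database/{db_id}/table_metadata/extra/?" + urlencode(
    {"table": table, "schema": schema}
)
print(old_url)  # /api/v1/database/1/table_extra/my%20table/main/
print(new_url)  # /api/v1/database/1/table_metadata/extra/?table=my+table&schema=main
```

Moving the identifiers into the query string avoids the escaping problems that table names containing slashes had with the path-style route, which is what the `foo/bar` case in the new unit tests exercises.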
# pylint: disable=too-many-lines + +from __future__ import annotations + import json import logging from datetime import datetime, timedelta from io import BytesIO -from typing import Any, cast, Optional +from typing import Any, cast from zipfile import is_zipfile, ZipFile from deprecation import deprecated @@ -82,6 +85,7 @@ get_export_ids_schema, OAuth2ProviderResponseSchema, openapi_spec_methods_override, + QualifiedTableSchema, SchemasResponseSchema, SelectStarResponseSchema, TableExtraMetadataResponseSchema, @@ -92,9 +96,18 @@ from superset.databases.utils import get_table_metadata from superset.db_engine_specs import get_available_engine_specs from superset.errors import ErrorLevel, SupersetError, SupersetErrorType -from superset.exceptions import OAuth2Error, SupersetErrorsException, SupersetException +from superset.exceptions import ( + DatabaseNotFoundException, + InvalidPayloadSchemaError, + OAuth2Error, + SupersetErrorsException, + SupersetException, + SupersetSecurityException, + TableNotFoundException, +) from superset.extensions import security_manager from superset.models.core import Database +from superset.sql_parse import Table from superset.superset_typing import FlaskResponse from superset.utils.core import error_msg_from_exception, parse_js_uri_path_item from superset.utils.oauth2 import decode_oauth2_state @@ -121,6 +134,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi): "tables", "table_metadata", "table_extra_metadata", + "table_extra_metadata_deprecated", "select_star", "schemas", "test_connection", @@ -764,15 +778,20 @@ def table_metadata( @check_table_access @safe @statsd_metrics + @deprecated(deprecated_in="4.0", removed_in="5.0") @event_logger.log_this_with_context( action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" - f".table_extra_metadata", + f".table_extra_metadata_deprecated", log_to_statsd=False, ) - def table_extra_metadata( + def table_extra_metadata_deprecated( self, database: Database, table_name: str, schema_name: str ) -> FlaskResponse: """Get table extra metadata. + + A newer API was introduced between 4.0 and 5.0, with support for catalogs for + SIP-95. This method was kept to prevent breaking API integrations, but will be + removed in 5.0. --- get: summary: Get table extra metadata @@ -816,9 +835,87 @@ def table_extra_metadata( parsed_schema = parse_js_uri_path_item(schema_name, eval_undefined=True) table_name = cast(str, parse_js_uri_path_item(table_name)) - payload = database.db_engine_spec.extra_table_metadata( - database, table_name, parsed_schema - ) + table = Table(table_name, parsed_schema) + payload = database.db_engine_spec.get_extra_table_metadata(database, table) + return self.response(200, **payload) + + @expose("//table_metadata/extra/", methods=("GET",)) + @protect() + @statsd_metrics + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" + f".table_extra_metadata", + log_to_statsd=False, + ) + def table_extra_metadata(self, pk: int) -> FlaskResponse: + """ + Get extra metadata for a given table. + + Optionally, a schema and a catalog can be passed, if different from the default + ones. + --- + get: + summary: Get table extra metadata + description: >- + Extra metadata associated with the table (partitions, description, etc.) 
+ parameters: + - in: path + schema: + type: integer + name: pk + description: The database id + - in: query + schema: + type: string + name: table + required: true + description: Table name + - in: query + schema: + type: string + name: schema + description: >- + Optional table schema, if not passed default schema will be used + - in: query + schema: + type: string + name: catalog + description: >- + Optional table catalog, if not passed default catalog will be used + responses: + 200: + description: Table extra metadata information + content: + application/json: + schema: + $ref: "#/components/schemas/TableExtraMetadataResponseSchema" + 401: + $ref: '#/components/responses/401' + 404: + $ref: '#/components/responses/404' + 500: + $ref: '#/components/responses/500' + """ + self.incr_stats("init", self.table_metadata.__name__) + + database = DatabaseDAO.find_by_id(pk) + if database is None: + raise DatabaseNotFoundException("No such database") + + try: + parameters = QualifiedTableSchema().load(request.args) + except ValidationError as ex: + raise InvalidPayloadSchemaError(ex) from ex + + table = Table(**parameters) + try: + security_manager.raise_for_access(database=database, table=table) + except SupersetSecurityException as ex: + # instead of raising 403, raise 404 to hide table existence + raise TableNotFoundException("No such table") from ex + + payload = database.db_engine_spec.get_extra_table_metadata(database, table) + return self.response(200, **payload) @expose("//select_star//", methods=("GET",)) @@ -832,7 +929,7 @@ def table_extra_metadata( log_to_statsd=False, ) def select_star( - self, database: Database, table_name: str, schema_name: Optional[str] = None + self, database: Database, table_name: str, schema_name: str | None = None ) -> FlaskResponse: """Get database select star for table. --- diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py index 45fe8a2200eb1..10c065a1c088f 100644 --- a/superset/databases/schemas.py +++ b/superset/databases/schemas.py @@ -1195,3 +1195,27 @@ class OAuth2ProviderResponseSchema(Schema): class Meta: # pylint: disable=too-few-public-methods # Ignore unknown fields that might be sent by the OAuth2 provider unknown = EXCLUDE + + +class QualifiedTableSchema(Schema): + """ + Schema for a qualified table reference. + + Catalog and schema can be ommited, to fallback to default values. Table name must be + present. + """ + + table = fields.String( + required=True, + metadata={"description": "The table name"}, + ) + schema = fields.String( + required=False, + load_default=None, + metadata={"description": "The table schema"}, + ) + catalog = fields.String( + required=False, + load_default=None, + metadata={"description": "The table catalog"}, + ) diff --git a/superset/db_engine_specs/README.md b/superset/db_engine_specs/README.md index f158a3a41bb4d..11f0f90ab7d81 100644 --- a/superset/db_engine_specs/README.md +++ b/superset/db_engine_specs/README.md @@ -603,7 +603,7 @@ For some databases the `df_to_sql` classmethod needs to be implemented. For exam ### Extra table metadata -DB engine specs can return additional metadata associated with a table. This is done via the `extra_table_metadata` class method. Trino uses this to return information about the latest partition, for example, and Bigquery returns clustering information. This information is then surfaced in the SQL Lab UI, when browsing tables in the metadata explorer (on the left panel). +DB engine specs can return additional metadata associated with a table. 
This is done via the `get_extra_table_metadata` class method. Trino uses this to return information about the latest partition, for example, and Bigquery returns clustering information. This information is then surfaced in the SQL Lab UI, when browsing tables in the metadata explorer (on the left panel). ### DB API exception mapping diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index ec1cc741d5dda..bde268821396d 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -1034,7 +1034,23 @@ def normalize_indexes(cls, indexes: list[dict[str, Any]]) -> list[dict[str, Any] return indexes @classmethod - def extra_table_metadata( # pylint: disable=unused-argument + def get_extra_table_metadata( # pylint: disable=unused-argument + cls, + database: Database, + table: Table, + ) -> dict[str, Any]: + """ + Returns engine-specific table metadata + + :param database: Database instance + :param table: A Table instance + :return: Engine-specific table metadata + """ + return cls.extra_table_metadata(database, table.table, table.schema) + + @deprecated(deprecated_in="4.0", removed_in="5.0") + @classmethod + def extra_table_metadata( cls, database: Database, table_name: str, @@ -1043,12 +1059,13 @@ def extra_table_metadata( # pylint: disable=unused-argument """ Returns engine-specific table metadata + Deprecated, since it doesn't support catalogs. + :param database: Database instance :param table_name: Table name :param schema_name: Schema name :return: Engine-specific table metadata """ - # TODO: Fix circular import caused by importing Database return {} @classmethod diff --git a/superset/db_engine_specs/bigquery.py b/superset/db_engine_specs/bigquery.py index 63860e8aa89a0..20eae4f9336ce 100644 --- a/superset/db_engine_specs/bigquery.py +++ b/superset/db_engine_specs/bigquery.py @@ -320,10 +320,12 @@ def get_indexes( return cls.normalize_indexes(inspector.get_indexes(table_name, schema)) @classmethod - def extra_table_metadata( - cls, database: "Database", table_name: str, schema_name: Optional[str] + def get_extra_table_metadata( + cls, + database: "Database", + table: Table, ) -> dict[str, Any]: - indexes = database.get_indexes(table_name, schema_name) + indexes = database.get_indexes(table.table, table.schema) if not indexes: return {} partitions_columns = [ diff --git a/superset/db_engine_specs/gsheets.py b/superset/db_engine_specs/gsheets.py index 3e33aa8e32dd8..4aed2693f4676 100644 --- a/superset/db_engine_specs/gsheets.py +++ b/superset/db_engine_specs/gsheets.py @@ -137,15 +137,14 @@ def get_url_for_impersonation( return url @classmethod - def extra_table_metadata( + def get_extra_table_metadata( cls, database: Database, - table_name: str, - schema_name: str | None, + table: Table, ) -> dict[str, Any]: - with database.get_raw_connection(schema=schema_name) as conn: + with database.get_raw_connection(schema=table.schema) as conn: cursor = conn.cursor() - cursor.execute(f'SELECT GET_METADATA("{table_name}")') + cursor.execute(f'SELECT GET_METADATA("{table.table}")') results = cursor.fetchone()[0] try: metadata = json.loads(results) diff --git a/superset/db_engine_specs/lib.py b/superset/db_engine_specs/lib.py index 4e9937c537304..398d5af64741c 100644 --- a/superset/db_engine_specs/lib.py +++ b/superset/db_engine_specs/lib.py @@ -63,7 +63,7 @@ NICE_TO_HAVE_FEATURES = { "user_impersonation": "Supports user impersonation", "file_upload": "Support file upload", - "extra_table_metadata": "Returns extra table metadata", + 
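The README section above describes the hook in prose; below is a minimal sketch, not part of this patch, of a third-party DB engine spec implementing the new `get_extra_table_metadata` signature. `MyEngineSpec`, its engine names, and the returned keys are illustrative assumptions; only the classmethod signature (a `Database` plus a `Table(table, schema, catalog)` value) comes from the patch itself.

```python
# Hypothetical engine spec shown for illustration only; it follows the
# TYPE_CHECKING import pattern used by the Presto/Trino specs in this patch.
from __future__ import annotations

from typing import Any, TYPE_CHECKING

from superset.db_engine_specs.base import BaseEngineSpec
from superset.sql_parse import Table

if TYPE_CHECKING:
    from superset.models.core import Database


class MyEngineSpec(BaseEngineSpec):
    engine = "myengine"
    engine_name = "My Engine"

    @classmethod
    def get_extra_table_metadata(
        cls,
        database: Database,
        table: Table,
    ) -> dict[str, Any]:
        # `table` carries the table name, schema, and an optional catalog, so
        # catalog-aware engines can fully qualify the lookup. The key returned
        # here is made up for the example; return {} when there is nothing extra.
        return {
            "description": f"Illustrative metadata for {table.table}",
        }
```

Returning an empty dict keeps the SQL Lab metadata explorer unchanged for tables that have nothing extra to surface, which is also the base-class default.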
"get_extra_table_metadata": "Returns extra table metadata", "dbapi_exception_mapping": "Maps driver exceptions to Superset exceptions", "custom_errors": "Parses error messages and returns Superset errors", "dynamic_schema": "Supports changing the schema per-query", @@ -142,7 +142,9 @@ def diagnose(spec: type[BaseEngineSpec]) -> dict[str, Any]: or has_custom_method(spec, "get_url_for_impersonation") ), "file_upload": spec.supports_file_upload, - "extra_table_metadata": has_custom_method(spec, "extra_table_metadata"), + "get_extra_table_metadata": has_custom_method( + spec, "get_extra_table_metadata" + ), "dbapi_exception_mapping": has_custom_method( spec, "get_dbapi_exception_mapping" ), @@ -177,7 +179,7 @@ def diagnose(spec: type[BaseEngineSpec]) -> dict[str, Any]: nice_to_have = [ "user_impersonation", "file_upload", - "extra_table_metadata", + "get_extra_table_metadata", "dbapi_exception_mapping", "custom_errors", "dynamic_schema", @@ -264,7 +266,7 @@ def generate_table() -> list[list[Any]]: keys = [ "user_impersonation", "file_upload", - "extra_table_metadata", + "get_extra_table_metadata", "dbapi_exception_mapping", "custom_errors", "dynamic_schema", diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py index d749d2cb185be..a8143c87edbb6 100644 --- a/superset/db_engine_specs/presto.py +++ b/superset/db_engine_specs/presto.py @@ -66,6 +66,7 @@ if TYPE_CHECKING: # prevent circular imports from superset.models.core import Database + from superset.sql_parse import Table with contextlib.suppress(ImportError): # pyhive may not be installed from pyhive.presto import Cursor @@ -1224,14 +1225,20 @@ def expand_data( # pylint: disable=too-many-locals return all_columns, data, expanded_columns @classmethod - def extra_table_metadata( - cls, database: Database, table_name: str, schema_name: str | None + def get_extra_table_metadata( + cls, + database: Database, + table: Table, ) -> dict[str, Any]: metadata = {} - if indexes := database.get_indexes(table_name, schema_name): + if indexes := database.get_indexes(table.table, table.schema): col_names, latest_parts = cls.latest_partition( - table_name, schema_name, database, show_first=True, indexes=indexes + table.table, + table.schema, + database, + show_first=True, + indexes=indexes, ) if not latest_parts: @@ -1241,8 +1248,8 @@ def extra_table_metadata( "cols": sorted(indexes[0].get("column_names", [])), "latest": dict(zip(col_names, latest_parts)), "partitionQuery": cls._partition_query( - table_name=table_name, - schema=schema_name, + table_name=table.table, + schema=table.schema, indexes=indexes, database=database, ), @@ -1250,7 +1257,8 @@ def extra_table_metadata( # flake8 is not matching `Optional[str]` to `Any` for some reason... 
metadata["view"] = cast( - Any, cls.get_create_view(database, schema_name, table_name) + Any, + cls.get_create_view(database, table.schema, table.table), ) return metadata diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py index 2185de8c867ae..32c119ec1f645 100644 --- a/superset/db_engine_specs/trino.py +++ b/superset/db_engine_specs/trino.py @@ -45,6 +45,7 @@ if TYPE_CHECKING: from superset.models.core import Database + from superset.sql_parse import Table with contextlib.suppress(ImportError): # trino may not be installed from trino.dbapi import Cursor @@ -58,18 +59,17 @@ class TrinoEngineSpec(PrestoBaseEngineSpec): allows_alias_to_source_column = False @classmethod - def extra_table_metadata( + def get_extra_table_metadata( cls, database: Database, - table_name: str, - schema_name: str | None, + table: Table, ) -> dict[str, Any]: metadata = {} - if indexes := database.get_indexes(table_name, schema_name): + if indexes := database.get_indexes(table.table, table.schema): col_names, latest_parts = cls.latest_partition( - table_name, - schema_name, + table.table, + table.schema, database, show_first=True, indexes=indexes, @@ -91,17 +91,18 @@ def extra_table_metadata( ), "latest": dict(zip(col_names, latest_parts)), "partitionQuery": cls._partition_query( - table_name=table_name, - schema=schema_name, + table_name=table.table, + schema=table.schema, indexes=indexes, database=database, ), } - if database.has_view_by_name(table_name, schema_name): + if database.has_view_by_name(table.table, table.schema): with database.get_inspector_with_context() as inspector: metadata["view"] = inspector.get_view_definition( - table_name, schema_name + table.table, + table.schema, ) return metadata diff --git a/superset/errors.py b/superset/errors.py index 612977ad65334..9f0f5bed05354 100644 --- a/superset/errors.py +++ b/superset/errors.py @@ -73,6 +73,7 @@ class SupersetErrorType(StrEnum): # Other errors BACKEND_TIMEOUT_ERROR = "BACKEND_TIMEOUT_ERROR" DATABASE_NOT_FOUND_ERROR = "DATABASE_NOT_FOUND_ERROR" + TABLE_NOT_FOUND_ERROR = "TABLE_NOT_FOUND_ERROR" # Sql Lab errors MISSING_TEMPLATE_PARAMS_ERROR = "MISSING_TEMPLATE_PARAMS_ERROR" diff --git a/superset/exceptions.py b/superset/exceptions.py index 45cbdf3c87963..0315ee30f4f44 100644 --- a/superset/exceptions.py +++ b/superset/exceptions.py @@ -362,3 +362,29 @@ class CreateKeyValueDistributedLockFailedException(Exception): """ Exception to signalize failure to acquire lock. 
""" + + +class DatabaseNotFoundException(SupersetErrorException): + status = 404 + + def __init__(self, message: str): + super().__init__( + SupersetError( + message=message, + error_type=SupersetErrorType.DATABASE_NOT_FOUND_ERROR, + level=ErrorLevel.ERROR, + ) + ) + + +class TableNotFoundException(SupersetErrorException): + status = 404 + + def __init__(self, message: str): + super().__init__( + SupersetError( + message=message, + error_type=SupersetErrorType.TABLE_NOT_FOUND_ERROR, + level=ErrorLevel.ERROR, + ) + ) diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py index 33ac9ef77d92e..5734f1a91d106 100644 --- a/tests/integration_tests/databases/api_tests.py +++ b/tests/integration_tests/databases/api_tests.py @@ -1507,9 +1507,9 @@ def test_get_table_metadata_no_db_permission(self): self.assertEqual(rv.status_code, 404) @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_get_table_extra_metadata(self): + def test_get_table_extra_metadata_deprecated(self): """ - Database API: Test get table extra metadata info + Database API: Test deprecated get table extra metadata info """ example_db = get_example_database() self.login(ADMIN_USERNAME) @@ -1519,9 +1519,9 @@ def test_get_table_extra_metadata(self): response = json.loads(rv.data.decode("utf-8")) self.assertEqual(response, {}) - def test_get_invalid_database_table_extra_metadata(self): + def test_get_invalid_database_table_extra_metadata_deprecated(self): """ - Database API: Test get invalid database from table extra metadata + Database API: Test get invalid database from deprecated table extra metadata """ database_id = 1000 self.login(ADMIN_USERNAME) @@ -1533,9 +1533,9 @@ def test_get_invalid_database_table_extra_metadata(self): rv = self.client.get(uri) self.assertEqual(rv.status_code, 404) - def test_get_invalid_table_table_extra_metadata(self): + def test_get_invalid_table_table_extra_metadata_deprecated(self): """ - Database API: Test get invalid table from table extra metadata + Database API: Test get invalid table from deprecated table extra metadata """ example_db = get_example_database() uri = f"api/v1/database/{example_db.id}/table_extra/wrong_table/null/" diff --git a/tests/integration_tests/databases/commands/csv_upload_test.py b/tests/integration_tests/databases/commands/csv_upload_test.py index c9ad002a0ffb3..18cc6f4a8da78 100644 --- a/tests/integration_tests/databases/commands/csv_upload_test.py +++ b/tests/integration_tests/databases/commands/csv_upload_test.py @@ -14,6 +14,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ +from __future__ import annotations + import json from datetime import datetime diff --git a/tests/integration_tests/db_engine_specs/bigquery_tests.py b/tests/integration_tests/db_engine_specs/bigquery_tests.py index 3eaaa3d1c7ddc..c012376b3999e 100644 --- a/tests/integration_tests/db_engine_specs/bigquery_tests.py +++ b/tests/integration_tests/db_engine_specs/bigquery_tests.py @@ -111,15 +111,16 @@ def values(self): result = BigQueryEngineSpec.fetch_data(None, 0) self.assertEqual(result, [1, 2]) - def test_extra_table_metadata(self): + def test_get_extra_table_metadata(self): """ DB Eng Specs (bigquery): Test extra table metadata """ database = mock.Mock() # Test no indexes database.get_indexes = mock.MagicMock(return_value=None) - result = BigQueryEngineSpec.extra_table_metadata( - database, "some_table", "some_schema" + result = BigQueryEngineSpec.get_extra_table_metadata( + database, + Table("some_table", "some_schema"), ) self.assertEqual(result, {}) @@ -138,8 +139,9 @@ def test_extra_table_metadata(self): "clustering": {"cols": [["c_col1", "c_col2", "c_col3"]]}, } database.get_indexes = mock.MagicMock(return_value=index_metadata) - result = BigQueryEngineSpec.extra_table_metadata( - database, "some_table", "some_schema" + result = BigQueryEngineSpec.get_extra_table_metadata( + database, + Table("some_table", "some_schema"), ) self.assertEqual(result, expected_result) diff --git a/tests/integration_tests/db_engine_specs/presto_tests.py b/tests/integration_tests/db_engine_specs/presto_tests.py index c28e78afe6692..81a9e611054d3 100644 --- a/tests/integration_tests/db_engine_specs/presto_tests.py +++ b/tests/integration_tests/db_engine_specs/presto_tests.py @@ -25,7 +25,7 @@ from superset.db_engine_specs.presto import PrestoEngineSpec from superset.errors import ErrorLevel, SupersetError, SupersetErrorType -from superset.sql_parse import ParsedQuery +from superset.sql_parse import ParsedQuery, Table from superset.utils.database import get_example_database from tests.integration_tests.db_engine_specs.base_tests import TestDbEngineSpec @@ -549,7 +549,7 @@ def test_presto_expand_data_with_complex_array_columns(self): self.assertEqual(actual_data, expected_data) self.assertEqual(actual_expanded_cols, expected_expanded_cols) - def test_presto_extra_table_metadata(self): + def test_presto_get_extra_table_metadata(self): database = mock.Mock() database.get_indexes = mock.Mock( return_value=[{"column_names": ["ds", "hour"]}] @@ -558,8 +558,9 @@ def test_presto_extra_table_metadata(self): df = pd.DataFrame({"ds": ["01-01-19"], "hour": [1]}) database.get_df = mock.Mock(return_value=df) PrestoEngineSpec.get_create_view = mock.Mock(return_value=None) - result = PrestoEngineSpec.extra_table_metadata( - database, "test_table", "test_schema" + result = PrestoEngineSpec.get_extra_table_metadata( + database, + Table("test_table", "test_schema"), ) assert result["partitions"]["cols"] == ["ds", "hour"] assert result["partitions"]["latest"] == {"ds": "01-01-19", "hour": 1} diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py index 3824d7b7b7c28..02873fcf404e1 100644 --- a/tests/unit_tests/conftest.py +++ b/tests/unit_tests/conftest.py @@ -146,6 +146,7 @@ def full_api_access(mocker: MockFixture) -> Iterator[None]: "flask_appbuilder.security.decorators.verify_jwt_in_request", return_value=True, ) + mocker.patch.object(security_manager, "is_item_public", return_value=True) mocker.patch.object(security_manager, "has_access", return_value=True) mocker.patch.object(security_manager, 
"can_access_all_databases", return_value=True) diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py index 4df406acfd287..b29a54613c94b 100644 --- a/tests/unit_tests/databases/api_test.py +++ b/tests/unit_tests/databases/api_test.py @@ -14,6 +14,11 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + +# pylint: disable=unused-argument, import-outside-toplevel, line-too-long, invalid-name + +from __future__ import annotations + import json from datetime import datetime from io import BytesIO @@ -30,10 +35,11 @@ from superset import db from superset.commands.database.csv_import import CSVImportCommand from superset.db_engine_specs.sqlite import SqliteEngineSpec +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.exceptions import SupersetSecurityException +from superset.sql_parse import Table from tests.unit_tests.fixtures.common import create_csv_file -# pylint: disable=unused-argument, import-outside-toplevel, line-too-long - def test_filter_by_uuid( session: Session, @@ -1167,3 +1173,167 @@ def test_csv_upload_file_extension_valid( content_type="multipart/form-data", ) assert response.status_code == 200 + + +def test_table_extra_metadata_happy_path( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test the `table_extra_metadata` endpoint. + """ + database = mocker.MagicMock() + database.db_engine_spec.get_extra_table_metadata.return_value = {"hello": "world"} + mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database) + mocker.patch("superset.databases.api.security_manager.raise_for_access") + + response = client.get("/api/v1/database/1/table_metadata/extra/?table=t") + assert response.json == {"hello": "world"} + database.db_engine_spec.get_extra_table_metadata.assert_called_with( + database, + Table("t"), + ) + + response = client.get("/api/v1/database/1/table_metadata/extra/?table=t&schema=s") + database.db_engine_spec.get_extra_table_metadata.assert_called_with( + database, + Table("t", "s"), + ) + + response = client.get("/api/v1/database/1/table_metadata/extra/?table=t&catalog=c") + database.db_engine_spec.get_extra_table_metadata.assert_called_with( + database, + Table("t", None, "c"), + ) + + response = client.get( + "/api/v1/database/1/table_metadata/extra/?table=t&schema=s&catalog=c" + ) + database.db_engine_spec.get_extra_table_metadata.assert_called_with( + database, + Table("t", "s", "c"), + ) + + +def test_table_extra_metadata_no_table( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test the `table_extra_metadata` endpoint when no table name is passed. 
+ """ + database = mocker.MagicMock() + mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database) + + response = client.get("/api/v1/database/1/table_metadata/extra/?schema=s&catalog=c") + assert response.status_code == 422 + assert response.json == { + "errors": [ + { + "message": "An error happened when validating the request", + "error_type": "INVALID_PAYLOAD_SCHEMA_ERROR", + "level": "error", + "extra": { + "messages": {"table": ["Missing data for required field."]}, + "issue_codes": [ + { + "code": 1020, + "message": "Issue 1020 - The submitted payload has the incorrect schema.", + } + ], + }, + } + ] + } + + +def test_table_extra_metadata_slashes( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test the `table_extra_metadata` endpoint with names that have slashes. + """ + database = mocker.MagicMock() + database.db_engine_spec.get_extra_table_metadata.return_value = {"hello": "world"} + mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database) + mocker.patch("superset.databases.api.security_manager.raise_for_access") + + client.get("/api/v1/database/1/table_metadata/extra/?table=foo/bar") + database.db_engine_spec.get_extra_table_metadata.assert_called_with( + database, + Table("foo/bar"), + ) + + +def test_table_extra_metadata_invalid_database( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test the `table_extra_metadata` endpoint when the database is invalid. + """ + mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=None) + + response = client.get("/api/v1/database/1/table_metadata/extra/?table=t") + assert response.status_code == 404 + assert response.json == { + "errors": [ + { + "message": "No such database", + "error_type": "DATABASE_NOT_FOUND_ERROR", + "level": "error", + "extra": { + "issue_codes": [ + { + "code": 1011, + "message": "Issue 1011 - Superset encountered an unexpected error.", + }, + { + "code": 1036, + "message": "Issue 1036 - The database was deleted.", + }, + ] + }, + } + ] + } + + +def test_table_extra_metadata_unauthorized( + mocker: MockFixture, + client: Any, + full_api_access: None, +) -> None: + """ + Test the `table_extra_metadata` endpoint when the user is unauthorized. 
+ """ + database = mocker.MagicMock() + mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database) + mocker.patch( + "superset.databases.api.security_manager.raise_for_access", + side_effect=SupersetSecurityException( + SupersetError( + error_type=SupersetErrorType.TABLE_SECURITY_ACCESS_ERROR, + message="You don't have access to the table", + level=ErrorLevel.ERROR, + ) + ), + ) + + response = client.get("/api/v1/database/1/table_metadata/extra/?table=t") + assert response.status_code == 404 + assert response.json == { + "errors": [ + { + "message": "No such table", + "error_type": "TABLE_NOT_FOUND_ERROR", + "level": "error", + "extra": None, + } + ] + } diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py index 308902c9016cd..7ce9199c5bd1d 100644 --- a/tests/unit_tests/db_engine_specs/test_trino.py +++ b/tests/unit_tests/db_engine_specs/test_trino.py @@ -37,6 +37,7 @@ SupersetDBAPIOperationalError, SupersetDBAPIProgrammingError, ) +from superset.sql_parse import Table from superset.superset_typing import ResultSetColumnType, SQLAColumnType from superset.utils.core import GenericDataType from tests.unit_tests.db_engine_specs.utils import ( @@ -310,7 +311,7 @@ def test_convert_dttm( assert_convert_dttm(TrinoEngineSpec, target_type, expected_result, dttm) -def test_extra_table_metadata() -> None: +def test_get_extra_table_metadata() -> None: from superset.db_engine_specs.trino import TrinoEngineSpec db_mock = Mock() @@ -320,7 +321,10 @@ def test_extra_table_metadata() -> None: db_mock.get_extra = Mock(return_value={}) db_mock.has_view_by_name = Mock(return_value=None) db_mock.get_df = Mock(return_value=pd.DataFrame({"ds": ["01-01-19"], "hour": [1]})) - result = TrinoEngineSpec.extra_table_metadata(db_mock, "test_table", "test_schema") + result = TrinoEngineSpec.get_extra_table_metadata( + db_mock, + Table("test_table", "test_schema"), + ) assert result["partitions"]["cols"] == ["ds", "hour"] assert result["partitions"]["latest"] == {"ds": "01-01-19", "hour": 1} diff --git a/tests/unit_tests/fixtures/common.py b/tests/unit_tests/fixtures/common.py index d4dca126721cf..412c3d34d54d4 100644 --- a/tests/unit_tests/fixtures/common.py +++ b/tests/unit_tests/fixtures/common.py @@ -14,6 +14,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
+ +from __future__ import annotations + import csv from datetime import datetime from io import BytesIO, StringIO From 5da9c0501a5d4223a884dcf643c626521dff54f6 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Tue, 16 Apr 2024 15:47:49 -0400 Subject: [PATCH 2/6] Improve extra_table_metadata deprecation --- superset/db_engine_specs/base.py | 30 ++++++------- tests/unit_tests/db_engine_specs/test_base.py | 44 ++++++++++++++++++- 2 files changed, 55 insertions(+), 19 deletions(-) diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index bde268821396d..451e96927d9fc 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -21,6 +21,7 @@ import json import logging import re +import warnings from datetime import datetime from re import Match, Pattern from typing import ( @@ -1046,26 +1047,21 @@ def get_extra_table_metadata( # pylint: disable=unused-argument :param table: A Table instance :return: Engine-specific table metadata """ - return cls.extra_table_metadata(database, table.table, table.schema) + # old method that doesn't work with catalogs + if hasattr(cls, "extra_table_metadata"): + warnings.warn( + "The `extra_table_metadata` method is deprecated, please implement " + "the `get_extra_table_metadata` method in the DB engine spec.", + DeprecationWarning, + ) - @deprecated(deprecated_in="4.0", removed_in="5.0") - @classmethod - def extra_table_metadata( - cls, - database: Database, - table_name: str, - schema_name: str | None, - ) -> dict[str, Any]: - """ - Returns engine-specific table metadata + # If a catalog is passed, return nothing, since we don't know the exact + # table that is being requested. + if table.catalog: + return {} - Deprecated, since it doesn't support catalogs. + return cls.extra_table_metadata(database, table.table, table.schema) - :param database: Database instance - :param table_name: Table name - :param schema_name: Schema name - :return: Engine-specific table metadata - """ return {} @classmethod diff --git a/tests/unit_tests/db_engine_specs/test_base.py b/tests/unit_tests/db_engine_specs/test_base.py index 4e2d3f4b44a89..e17e0d2833db8 100644 --- a/tests/unit_tests/db_engine_specs/test_base.py +++ b/tests/unit_tests/db_engine_specs/test_base.py @@ -14,10 +14,13 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + # pylint: disable=import-outside-toplevel, protected-access +from __future__ import annotations + from textwrap import dedent -from typing import Any, Optional +from typing import Any import pytest from pytest_mock import MockFixture @@ -26,6 +29,7 @@ from sqlalchemy.engine.url import URL from sqlalchemy.sql import sqltypes +from superset.sql_parse import Table from superset.superset_typing import ResultSetColumnType, SQLAColumnType from superset.utils.core import GenericDataType from tests.unit_tests.db_engine_specs.utils import assert_column_spec @@ -155,7 +159,7 @@ def test_cte_query_parsing(original: types.TypeEngine, expected: str) -> None: def test_get_column_spec( native_type: str, sqla_type: type[types.TypeEngine], - attrs: Optional[dict[str, Any]], + attrs: dict[str, Any] | None, generic_type: GenericDataType, is_dttm: bool, ) -> None: @@ -263,3 +267,39 @@ class NoLimitDBEngineSpec(BaseEngineSpec): a FROM my_table""" ) + + +def test_extra_table_metadata(mocker: MockFixture) -> None: + """ + Test the deprecated `extra_table_metadata` method. 
+ """ + from superset.db_engine_specs.base import BaseEngineSpec + from superset.models.core import Database + + class ThirdPartyDBEngineSpec(BaseEngineSpec): + @classmethod + def extra_table_metadata( + cls, + database: Database, + table_name: str, + schema_name: str | None, + ) -> dict[str, Any]: + return {"table": table_name, "schema": schema_name} + + database = mocker.MagicMock() + warnings = mocker.patch("superset.db_engine_specs.base.warnings") + + assert ThirdPartyDBEngineSpec.get_extra_table_metadata( + database, + Table("table", "schema"), + ) == {"table": "table", "schema": "schema"} + + assert ( + ThirdPartyDBEngineSpec.get_extra_table_metadata( + database, + Table("table", "schema", "catalog"), + ) + == {} + ) + + warnings.warn.assert_called() From e3c1b649a79fbd883f79592d2a7bead6295075db Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Wed, 17 Apr 2024 10:26:39 -0400 Subject: [PATCH 3/6] Fix test --- tests/unit_tests/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py index 02873fcf404e1..71b3bff338e58 100644 --- a/tests/unit_tests/conftest.py +++ b/tests/unit_tests/conftest.py @@ -87,6 +87,7 @@ def app(request: SubRequest) -> Iterator[SupersetApp]: app.config["WTF_CSRF_ENABLED"] = False app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False app.config["TESTING"] = True + app.config["RATELIMIT_ENABLED"] = False # loop over extra configs passed in by tests # and update the app config From 48da875b5b28d207050769186905f06edd1418fb Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Wed, 17 Apr 2024 10:40:45 -0400 Subject: [PATCH 4/6] Fix stats --- superset/databases/api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/superset/databases/api.py b/superset/databases/api.py index 6ea9e936cebd5..5dade5085e533 100644 --- a/superset/databases/api.py +++ b/superset/databases/api.py @@ -831,7 +831,7 @@ def table_extra_metadata_deprecated( 500: $ref: '#/components/responses/500' """ - self.incr_stats("init", self.table_metadata.__name__) + self.incr_stats("init", self.table_extra_metadata_deprecated.__name__) parsed_schema = parse_js_uri_path_item(schema_name, eval_undefined=True) table_name = cast(str, parse_js_uri_path_item(table_name)) @@ -896,7 +896,7 @@ def table_extra_metadata(self, pk: int) -> FlaskResponse: 500: $ref: '#/components/responses/500' """ - self.incr_stats("init", self.table_metadata.__name__) + self.incr_stats("init", self.table_extra_metadata.__name__) database = DatabaseDAO.find_by_id(pk) if database is None: From 18bf8d6dac7524fbd7a99d47c8bad609c856cc09 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Wed, 17 Apr 2024 14:48:24 -0400 Subject: [PATCH 5/6] Fix glob pattern --- .../src/SqlLab/components/TableElement/TableElement.test.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx b/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx index 74fd58b798be7..a2fe88020aa45 100644 --- a/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx +++ b/superset-frontend/src/SqlLab/components/TableElement/TableElement.test.tsx @@ -49,7 +49,7 @@ jest.mock( ); const getTableMetadataEndpoint = 'glob:**/api/v1/database/*/table/*/*/'; const getExtraTableMetadataEndpoint = - 'glob:**/api/v1/database/*/table_metadata/extra/'; + 'glob:**/api/v1/database/*/table_metadata/extra/*'; const updateTableSchemaEndpoint = 'glob:*/tableschemaview/*/expanded'; 
beforeEach(() => { From 0a622a603aca0f084b0b5557ad35395ec18f04f4 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Wed, 17 Apr 2024 19:43:20 -0400 Subject: [PATCH 6/6] s/table/name/g --- .../DndColumnSelectControl/DndFilterSelect.tsx | 2 +- .../FilterControl/AdhocFilterControl/index.jsx | 2 +- .../src/hooks/apiResources/tables.ts | 4 ++-- superset/databases/api.py | 13 +++++++------ superset/databases/schemas.py | 2 +- tests/unit_tests/databases/api_test.py | 16 ++++++++-------- 6 files changed, 20 insertions(+), 19 deletions(-) diff --git a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.tsx b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.tsx index b2fd3e35cf535..696861b28913d 100644 --- a/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.tsx +++ b/superset-frontend/src/explore/components/controls/DndColumnSelectControl/DndFilterSelect.tsx @@ -181,7 +181,7 @@ const DndFilterSelect = (props: DndFilterSelectProps) => { if (!isSqllabView && dbId && name && schema) { SupersetClient.get({ - endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?table=${name}&schema=${schema}`, + endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?name=${name}&schema=${schema}`, }) .then(({ json }: { json: Record }) => { if (json?.partitions) { diff --git a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/index.jsx b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/index.jsx index 2bcb668f45cb9..c68a97b4d728f 100644 --- a/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/index.jsx +++ b/superset-frontend/src/explore/components/controls/FilterControl/AdhocFilterControl/index.jsx @@ -143,7 +143,7 @@ class AdhocFilterControl extends React.Component { if (!isSqllabView && dbId && name && schema) { SupersetClient.get({ - endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?table=${name}&schema=${schema}`, + endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?name=${name}&schema=${schema}`, }) .then(({ json }) => { if (json && json.partitions) { diff --git a/superset-frontend/src/hooks/apiResources/tables.ts b/superset-frontend/src/hooks/apiResources/tables.ts index 57c2a64527733..164fe0f0ab19c 100644 --- a/superset-frontend/src/hooks/apiResources/tables.ts +++ b/superset-frontend/src/hooks/apiResources/tables.ts @@ -126,8 +126,8 @@ const tableApi = api.injectEndpoints({ >({ query: ({ dbId, schema, table }) => ({ endpoint: schema - ? `/api/v1/database/${dbId}/table_metadata/extra/?table=${table}&schema=${schema}` - : `/api/v1/database/${dbId}/table_metadata/extra/?table=${table}`, + ? 
`/api/v1/database/${dbId}/table_metadata/extra/?name=${table}&schema=${schema}` + : `/api/v1/database/${dbId}/table_metadata/extra/?name=${table}`, transformResponse: ({ json }: JsonResponse) => json, }), }), diff --git a/superset/databases/api.py b/superset/databases/api.py index 5dade5085e533..4537fc8bc512f 100644 --- a/superset/databases/api.py +++ b/superset/databases/api.py @@ -867,7 +867,7 @@ def table_extra_metadata(self, pk: int) -> FlaskResponse: - in: query schema: type: string - name: table + name: name required: true description: Table name - in: query @@ -875,13 +875,15 @@ def table_extra_metadata(self, pk: int) -> FlaskResponse: type: string name: schema description: >- - Optional table schema, if not passed default schema will be used + Optional table schema, if not passed the schema configured in the database + will be used - in: query schema: type: string name: catalog description: >- - Optional table catalog, if not passed default catalog will be used + Optional table catalog, if not passed the catalog configured in the + database will be used responses: 200: description: Table extra metadata information @@ -898,8 +900,7 @@ def table_extra_metadata(self, pk: int) -> FlaskResponse: """ self.incr_stats("init", self.table_extra_metadata.__name__) - database = DatabaseDAO.find_by_id(pk) - if database is None: + if not (database := DatabaseDAO.find_by_id(pk)): raise DatabaseNotFoundException("No such database") try: @@ -907,7 +908,7 @@ def table_extra_metadata(self, pk: int) -> FlaskResponse: except ValidationError as ex: raise InvalidPayloadSchemaError(ex) from ex - table = Table(**parameters) + table = Table(parameters["name"], parameters["schema"], parameters["catalog"]) try: security_manager.raise_for_access(database=database, table=table) except SupersetSecurityException as ex: diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py index 10c065a1c088f..dccab980bcd18 100644 --- a/superset/databases/schemas.py +++ b/superset/databases/schemas.py @@ -1205,7 +1205,7 @@ class QualifiedTableSchema(Schema): present. 
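With the rename in this final commit the request contract settles on `name`, `schema`, and `catalog` query parameters. A sketch of a client call against the finished endpoint; the base URL, bearer token, database id, and table names are placeholders, while the parameter names and the 404/422 behaviour come from the handler and the unit tests updated below:

```python
# Illustrative client call; none of the concrete values are real.
import requests

BASE_URL = "http://localhost:8088"                    # assumed local Superset instance
HEADERS = {"Authorization": "Bearer <access-token>"}  # placeholder token

response = requests.get(
    f"{BASE_URL}/api/v1/database/1/table_metadata/extra/",
    params={"name": "my_table", "schema": "my_schema"},  # catalog is also accepted
    headers=HEADERS,
)
print(response.status_code)  # 200 with the engine-specific payload; 404 when the
                             # database is missing or the table is not accessible;
                             # 422 when `name` is omitted
print(response.json())
```

Access failures are deliberately reported as 404 rather than 403 so that the response does not reveal whether the table exists.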
""" - table = fields.String( + name = fields.String( required=True, metadata={"description": "The table name"}, ) diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py index b29a54613c94b..5e004992de28c 100644 --- a/tests/unit_tests/databases/api_test.py +++ b/tests/unit_tests/databases/api_test.py @@ -1188,27 +1188,27 @@ def test_table_extra_metadata_happy_path( mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database) mocker.patch("superset.databases.api.security_manager.raise_for_access") - response = client.get("/api/v1/database/1/table_metadata/extra/?table=t") + response = client.get("/api/v1/database/1/table_metadata/extra/?name=t") assert response.json == {"hello": "world"} database.db_engine_spec.get_extra_table_metadata.assert_called_with( database, Table("t"), ) - response = client.get("/api/v1/database/1/table_metadata/extra/?table=t&schema=s") + response = client.get("/api/v1/database/1/table_metadata/extra/?name=t&schema=s") database.db_engine_spec.get_extra_table_metadata.assert_called_with( database, Table("t", "s"), ) - response = client.get("/api/v1/database/1/table_metadata/extra/?table=t&catalog=c") + response = client.get("/api/v1/database/1/table_metadata/extra/?name=t&catalog=c") database.db_engine_spec.get_extra_table_metadata.assert_called_with( database, Table("t", None, "c"), ) response = client.get( - "/api/v1/database/1/table_metadata/extra/?table=t&schema=s&catalog=c" + "/api/v1/database/1/table_metadata/extra/?name=t&schema=s&catalog=c" ) database.db_engine_spec.get_extra_table_metadata.assert_called_with( database, @@ -1236,7 +1236,7 @@ def test_table_extra_metadata_no_table( "error_type": "INVALID_PAYLOAD_SCHEMA_ERROR", "level": "error", "extra": { - "messages": {"table": ["Missing data for required field."]}, + "messages": {"name": ["Missing data for required field."]}, "issue_codes": [ { "code": 1020, @@ -1262,7 +1262,7 @@ def test_table_extra_metadata_slashes( mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=database) mocker.patch("superset.databases.api.security_manager.raise_for_access") - client.get("/api/v1/database/1/table_metadata/extra/?table=foo/bar") + client.get("/api/v1/database/1/table_metadata/extra/?name=foo/bar") database.db_engine_spec.get_extra_table_metadata.assert_called_with( database, Table("foo/bar"), @@ -1279,7 +1279,7 @@ def test_table_extra_metadata_invalid_database( """ mocker.patch("superset.databases.api.DatabaseDAO.find_by_id", return_value=None) - response = client.get("/api/v1/database/1/table_metadata/extra/?table=t") + response = client.get("/api/v1/database/1/table_metadata/extra/?name=t") assert response.status_code == 404 assert response.json == { "errors": [ @@ -1325,7 +1325,7 @@ def test_table_extra_metadata_unauthorized( ), ) - response = client.get("/api/v1/database/1/table_metadata/extra/?table=t") + response = client.get("/api/v1/database/1/table_metadata/extra/?name=t") assert response.status_code == 404 assert response.json == { "errors": [