diff --git a/.github/workflows/chromatic-master.yml b/.github/workflows/chromatic-master.yml new file mode 100644 index 0000000000000..6cdf10506f00f --- /dev/null +++ b/.github/workflows/chromatic-master.yml @@ -0,0 +1,55 @@ +# .github/workflows/chromatic-master.yml +# see https://www.chromatic.com/docs/github-actions +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +# Workflow name +name: 'Chromatic Storybook Master' + +# Event for the workflow +# Only run if changes were made in superset-frontend folder of repo on merge to Master +on: + # This will trigger when a branch merges to master when the PR has changes in the frontend folder updating the chromatic baseline + push: + branches: + - master + paths: + - "superset-frontend/**" + +# List of jobs +jobs: + chromatic-deployment: + # Operating System + runs-on: ubuntu-latest + # Job steps + steps: + - uses: actions/checkout@v1 + - name: Install dependencies + run: npm ci + working-directory: superset-frontend + # 👇 Build and publish Storybook to Chromatic + - name: Build and publish Storybook to Chromatic + id: chromatic-master + uses: chromaui/action@v1 + # Required options for the Chromatic GitHub Action + with: + # 👇 Location of package.json from root of mono-repo + workingDir: superset-frontend + # 👇 Chromatic projectToken, refer to the manage page to obtain it. + projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }} + exitZeroOnChanges: true # 👈 Option to prevent the workflow from failing + autoAcceptChanges: true # 👈 Option to accept all changes when merging to master diff --git a/.github/workflows/chromatic.yml b/.github/workflows/chromatic-pr.yml similarity index 55% rename from .github/workflows/chromatic.yml rename to .github/workflows/chromatic-pr.yml index 66afc24290206..09d57083f1255 100644 --- a/.github/workflows/chromatic.yml +++ b/.github/workflows/chromatic-pr.yml @@ -18,28 +18,22 @@ # # Workflow name -name: 'Chromatic' +name: 'Chromatic Pull Request' -# Event for the workflow -# Only run if changes were made in superset-frontend folder of repo +# This will allow us to see how the Storybook builds when a PR has been created that has changes in the frontend folder +# NOTE: pull_request_target actions will always run the version of this action on master branch +# you will not see changes in the action behavior until this action's changes have been merged to 
master. +# This is a security precaution so that PR coming from a fork cannot access a Github secret without a repo +# committer aproving and merging the PR declaring the action safe. on: - # This will trigger when a branch merges to master when the PR has changes in the frontend folder updating the chromatic baseline - push: - branches: - - master - paths: - - "superset-frontend/**" - # this will allow us to see how the Storybook builds when a PR has been created that has changes in the frontend folder - # NOTE: pull_request_target actions will always run the version of this action on master branch - # you will not see changes in the action behavior until this action's changes have been merged to master. - # This is a security precaution so that PR coming from a fork cannot access a Github secret without a repo - # committer aproving and merging the PR declaring the action safe. pull_request_target: + # We want this action to run under specific pull request changes # opened: pull request is created # synchronize: commit(s) pushed to the pull request # reopened: closed pull request is reopened types: [opened, synchronize, reopened] + # This action will only run if one or more files have changes under the superset-frontend folder paths: - "superset-frontend/**" @@ -48,28 +42,20 @@ jobs: chromatic-deployment: # Operating System runs-on: ubuntu-latest + # define outputs that can be used in the storybook-link-comment job + outputs: + storybookUrl: ${{ steps.chromatic-deploy.outputs.storybookUrl }} + buildUrl: ${{ steps.chromatic-deploy.outputs.buildUrl }} # Job steps steps: + - id: chromatic-deploy - uses: actions/checkout@v1 - name: Install dependencies run: npm ci working-directory: superset-frontend - # 👇 Checks if the branch is not master and runs Chromatic - - name: Publish to Chromatic - id: chromatic-pr - if: github.ref != 'refs/heads/master' - uses: chromaui/action@v1 - # Required options for the Chromatic GitHub Action - with: - # 👇 Location of package.json 
from root of mono-repo - workingDir: superset-frontend - # 👇 Chromatic projectToken, refer to the Chromatic manage page to obtain it. - projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }} - exitZeroOnChanges: true # 👈 Option to prevent the workflow from failing - # 👇 Checks if the branch is master and accepts all changes in Chromatic - - name: Publish to Chromatic and auto accept changes - id: chromatic-master - if: github.ref == 'refs/heads/master' + # 👇 Build and publish Storybook to Chromatic + - name: Build and publish Storybook to Chromatic + id: chromatic-deploy uses: chromaui/action@v1 # Required options for the Chromatic GitHub Action with: @@ -77,10 +63,16 @@ jobs: workingDir: superset-frontend # 👇 Chromatic projectToken, refer to the manage page to obtain it. projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }} - autoAcceptChanges: true # 👈 Option to accept all changes when merging to master - exitZeroOnChanges: true # 👈 Option to prevent the workflow from failing + exitZeroOnChanges: true # 👈 Option to prevent the workflow from failing when changes are detected + + # Put a comment on the PR with link to latest build of the Storybook for this branch + storybook-link-comment: + # Operating System + runs-on: ubuntu-latest + # job steps + needs: chromatic-deployment + steps: - name: Echo Storybook build link - if: ${{ success() }} uses: actions/github-script@v6 with: github-token: ${{secrets.GITHUB_TOKEN}} @@ -89,5 +81,5 @@ jobs: issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, - body: 'Storybook has completed and can be viewed at ${{ steps.chromatic-pr.outputs.storybookUrl }}' + body: 'Storybook has completed and can be viewed at ${{ needs.chromatic-deployment.outputs.storybookUrl }} Chromatic visual test results can be viewed at ${{ needs.chromatic-deployment.outputs.buildUrl }}' }) diff --git a/README.md b/README.md index 2ad9a66166375..b547030669480 100644 --- a/README.md +++ b/README.md @@ -117,8 +117,8 @@ Here 
are some of the major database solutions that are supported: teradata yugabyte

-**A more comprehensive list of supported databases** along with the configuration instructions can be found -[here](https://superset.apache.org/docs/databases/installing-database-drivers). + +**A more comprehensive list of supported databases** along with the configuration instructions can be found [here](https://superset.apache.org/docs/databases/installing-database-drivers). Want to add support for your datastore or data engine? Read more [here](https://superset.apache.org/docs/frequently-asked-questions#does-superset-work-with-insert-database-engine-here) about the technical requirements. diff --git a/docs/static/resources/openapi.json b/docs/static/resources/openapi.json index 9020e4ba7d289..9082e94dc5aa2 100644 --- a/docs/static/resources/openapi.json +++ b/docs/static/resources/openapi.json @@ -93,6 +93,31 @@ } }, "schemas": { + "AdvancedDataTypeSchema": { + "properties": { + "display_value": { + "description": "The string representation of the parsed values", + "type": "string" + }, + "error_message": { + "type": "string" + }, + "valid_filter_operators": { + "items": { + "type": "string" + }, + "type": "array" + }, + "values": { + "items": { + "description": "parsed value (can be any value)", + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, "AnnotationLayer": { "properties": { "annotationType": { @@ -232,7 +257,7 @@ "AnnotationLayerRestApi.get_list": { "properties": { "changed_by": { - "$ref": "#/components/schemas/AnnotationLayerRestApi.get_list.User" + "$ref": "#/components/schemas/AnnotationLayerRestApi.get_list.User1" }, "changed_on": { "format": "date-time", @@ -243,7 +268,7 @@ "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/AnnotationLayerRestApi.get_list.User1" + "$ref": "#/components/schemas/AnnotationLayerRestApi.get_list.User" }, "created_on": { "format": "date-time", @@ -389,13 +414,13 @@ "AnnotationRestApi.get_list": { "properties": { "changed_by": { - "$ref": 
"#/components/schemas/AnnotationRestApi.get_list.User" + "$ref": "#/components/schemas/AnnotationRestApi.get_list.User1" }, "changed_on_delta_humanized": { "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/AnnotationRestApi.get_list.User1" + "$ref": "#/components/schemas/AnnotationRestApi.get_list.User" }, "end_dttm": { "format": "date-time", @@ -780,8 +805,12 @@ "type": { "description": "Datasource type", "enum": [ - "druid", - "table" + "sl_table", + "table", + "dataset", + "query", + "saved_query", + "view" ], "type": "string" } @@ -1031,22 +1060,24 @@ "operation": { "description": "Post processing operation type", "enum": [ + "_flatten_column_after_pivot", "aggregate", "boxplot", + "compare", "contribution", "cum", + "diff", + "flatten", "geodetic_parse", "geohash_decode", "geohash_encode", "pivot", "prophet", + "rename", + "resample", "rolling", "select", - "sort", - "diff", - "compare", - "resample", - "flatten" + "sort" ], "example": "aggregate", "type": "string" @@ -1533,6 +1564,9 @@ "nullable": true, "type": "string" }, + "is_managed_externally": { + "type": "boolean" + }, "owners": { "$ref": "#/components/schemas/ChartDataRestApi.get.User" }, @@ -1617,7 +1651,7 @@ "type": "string" }, "changed_by": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User1" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.User" }, "changed_by_name": { "readOnly": true @@ -1632,7 +1666,7 @@ "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User2" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.User1" }, "datasource_id": { "format": "int32", @@ -1664,13 +1698,16 @@ "format": "int32", "type": "integer" }, + "is_managed_externally": { + "type": "boolean" + }, "last_saved_at": { "format": "date-time", "nullable": true, "type": "string" }, "last_saved_by": { - "$ref": "#/components/schemas/ChartDataRestApi.get_list.User" + "$ref": "#/components/schemas/ChartDataRestApi.get_list.User2" }, 
"owners": { "$ref": "#/components/schemas/ChartDataRestApi.get_list.User3" @@ -1723,10 +1760,6 @@ "maxLength": 64, "type": "string" }, - "id": { - "format": "int32", - "type": "integer" - }, "last_name": { "maxLength": 64, "type": "string" @@ -1744,6 +1777,10 @@ "maxLength": 64, "type": "string" }, + "id": { + "format": "int32", + "type": "integer" + }, "last_name": { "maxLength": 64, "type": "string" @@ -1841,8 +1878,11 @@ "datasource_type": { "description": "The type of dataset/datasource identified on `datasource_id`.", "enum": [ - "druid", + "sl_table", "table", + "dataset", + "query", + "saved_query", "view" ], "type": "string" @@ -1944,8 +1984,11 @@ "datasource_type": { "description": "The type of dataset/datasource identified on `datasource_id`.", "enum": [ - "druid", + "sl_table", "table", + "dataset", + "query", + "saved_query", "view" ], "nullable": true, @@ -2188,9 +2231,6 @@ "description": "Form data from the Explore controls used to form the chart's data query.", "type": "object" }, - "modified": { - "type": "string" - }, "slice_id": { "format": "int32", "type": "integer" @@ -2282,6 +2322,9 @@ "nullable": true, "type": "string" }, + "is_managed_externally": { + "type": "boolean" + }, "owners": { "$ref": "#/components/schemas/ChartRestApi.get.User" }, @@ -2366,7 +2409,7 @@ "type": "string" }, "changed_by": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User1" + "$ref": "#/components/schemas/ChartRestApi.get_list.User" }, "changed_by_name": { "readOnly": true @@ -2381,7 +2424,7 @@ "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/ChartRestApi.get_list.User2" + "$ref": "#/components/schemas/ChartRestApi.get_list.User1" }, "datasource_id": { "format": "int32", @@ -2413,13 +2456,16 @@ "format": "int32", "type": "integer" }, + "is_managed_externally": { + "type": "boolean" + }, "last_saved_at": { "format": "date-time", "nullable": true, "type": "string" }, "last_saved_by": { - "$ref": 
"#/components/schemas/ChartRestApi.get_list.User" + "$ref": "#/components/schemas/ChartRestApi.get_list.User2" }, "owners": { "$ref": "#/components/schemas/ChartRestApi.get_list.User3" @@ -2472,10 +2518,6 @@ "maxLength": 64, "type": "string" }, - "id": { - "format": "int32", - "type": "integer" - }, "last_name": { "maxLength": 64, "type": "string" @@ -2493,6 +2535,10 @@ "maxLength": 64, "type": "string" }, + "id": { + "format": "int32", + "type": "integer" + }, "last_name": { "maxLength": 64, "type": "string" @@ -2590,8 +2636,11 @@ "datasource_type": { "description": "The type of dataset/datasource identified on `datasource_id`.", "enum": [ - "druid", + "sl_table", "table", + "dataset", + "query", + "saved_query", "view" ], "type": "string" @@ -2693,8 +2742,11 @@ "datasource_type": { "description": "The type of dataset/datasource identified on `datasource_id`.", "enum": [ - "druid", + "sl_table", "table", + "dataset", + "query", + "saved_query", "view" ], "nullable": true, @@ -2804,13 +2856,13 @@ "CssTemplateRestApi.get_list": { "properties": { "changed_by": { - "$ref": "#/components/schemas/CssTemplateRestApi.get_list.User" + "$ref": "#/components/schemas/CssTemplateRestApi.get_list.User1" }, "changed_on_delta_humanized": { "readOnly": true }, "created_by": { - "$ref": "#/components/schemas/CssTemplateRestApi.get_list.User1" + "$ref": "#/components/schemas/CssTemplateRestApi.get_list.User" }, "created_on": { "format": "date-time", @@ -3087,6 +3139,10 @@ "format": "int32", "type": "integer" }, + "is_managed_externally": { + "nullable": true, + "type": "boolean" + }, "json_metadata": { "description": "This JSON object is generated dynamically when clicking the save or overwrite button in the dashboard view. 
It is exposed here for reference and for power users who may want to alter specific parameters.", "type": "string" @@ -3126,6 +3182,7 @@ "properties": { "filterState": { "description": "Native filter state", + "nullable": true, "type": "object" }, "hash": { @@ -3143,9 +3200,6 @@ "type": "array" } }, - "required": [ - "filterState" - ], "type": "object" }, "DashboardRestApi.get": { @@ -3185,6 +3239,9 @@ "created_by": { "$ref": "#/components/schemas/DashboardRestApi.get_list.User1" }, + "created_on_delta_humanized": { + "readOnly": true + }, "css": { "nullable": true, "type": "string" @@ -3198,6 +3255,9 @@ "format": "int32", "type": "integer" }, + "is_managed_externally": { + "type": "boolean" + }, "json_metadata": { "nullable": true, "type": "string" @@ -3205,12 +3265,6 @@ "owners": { "$ref": "#/components/schemas/DashboardRestApi.get_list.User2" }, - "advanced_data_type": { - "maxLength": 255, - "minLength": 1, - "nullable": true, - "type": "string" - }, "position_json": { "nullable": true, "type": "string" @@ -3510,6 +3564,14 @@ }, "type": "object" }, + "Database1": { + "properties": { + "database_name": { + "type": "string" + } + }, + "type": "object" + }, "DatabaseFunctionNamesResponse": { "properties": { "function_names": { @@ -3667,6 +3729,9 @@ "nullable": true, "type": "boolean" }, + "is_managed_externally": { + "type": "boolean" + }, "parameters": { "readOnly": true }, @@ -4087,6 +4152,12 @@ }, "DatasetColumnsPut": { "properties": { + "advanced_data_type": { + "maxLength": 255, + "minLength": 1, + "nullable": true, + "type": "string" + }, "column_name": { "maxLength": 255, "minLength": 1, @@ -4181,6 +4252,24 @@ }, "type": "object" }, + "DatasetDuplicateSchema": { + "properties": { + "base_model_id": { + "format": "int32", + "type": "integer" + }, + "table_name": { + "maxLength": 250, + "minLength": 1, + "type": "string" + } + }, + "required": [ + "base_model_id", + "table_name" + ], + "type": "object" + }, "DatasetMetricRestApi.get": { "properties": { "id": 
{ @@ -4251,12 +4340,6 @@ "nullable": true, "type": "string" }, - "advanced_data_type": { - "maxLength": 255, - "minLength": 1, - "nullable": true, - "type": "string" - }, "uuid": { "format": "uuid", "nullable": true, @@ -4395,6 +4478,9 @@ "format": "int32", "type": "integer" }, + "is_managed_externally": { + "type": "boolean" + }, "is_sqllab_view": { "nullable": true, "type": "boolean" @@ -4530,6 +4616,11 @@ }, "DatasetRestApi.get.TableColumn": { "properties": { + "advanced_data_type": { + "maxLength": 255, + "nullable": true, + "type": "string" + }, "changed_on": { "format": "date-time", "nullable": true, @@ -4633,7 +4724,7 @@ "DatasetRestApi.get_list": { "properties": { "changed_by": { - "$ref": "#/components/schemas/DatasetRestApi.get_list.User" + "$ref": "#/components/schemas/DatasetRestApi.get_list.User1" }, "changed_by_name": { "readOnly": true @@ -4676,7 +4767,7 @@ "readOnly": true }, "owners": { - "$ref": "#/components/schemas/DatasetRestApi.get_list.User1" + "$ref": "#/components/schemas/DatasetRestApi.get_list.User" }, "schema": { "maxLength": 255, @@ -4720,6 +4811,14 @@ "maxLength": 64, "type": "string" }, + "id": { + "format": "int32", + "type": "integer" + }, + "last_name": { + "maxLength": 64, + "type": "string" + }, "username": { "maxLength": 64, "type": "string" @@ -4727,6 +4826,7 @@ }, "required": [ "first_name", + "last_name", "username" ], "type": "object" @@ -4737,14 +4837,6 @@ "maxLength": 64, "type": "string" }, - "id": { - "format": "int32", - "type": "integer" - }, - "last_name": { - "maxLength": 64, - "type": "string" - }, "username": { "maxLength": 64, "type": "string" @@ -4752,7 +4844,6 @@ }, "required": [ "first_name", - "last_name", "username" ], "type": "object" @@ -4904,8 +4995,11 @@ "datasource_type": { "description": "The type of dataset/datasource identified on `datasource_id`.", "enum": [ - "druid", + "sl_table", "table", + "dataset", + "query", + "saved_query", "view" ], "type": "string" @@ -4945,6 +5039,80 @@ }, "type": "object" 
}, + "EmbeddedDashboardConfig": { + "properties": { + "allowed_domains": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "required": [ + "allowed_domains" + ], + "type": "object" + }, + "EmbeddedDashboardResponseSchema": { + "properties": { + "allowed_domains": { + "items": { + "type": "string" + }, + "type": "array" + }, + "changed_by": { + "$ref": "#/components/schemas/User" + }, + "changed_on": { + "format": "date-time", + "type": "string" + }, + "dashboard_id": { + "type": "string" + }, + "uuid": { + "type": "string" + } + }, + "type": "object" + }, + "EmbeddedDashboardRestApi.get": { + "properties": { + "uuid": { + "format": "uuid", + "type": "string" + } + }, + "type": "object" + }, + "EmbeddedDashboardRestApi.get_list": { + "properties": { + "uuid": { + "format": "uuid", + "type": "string" + } + }, + "type": "object" + }, + "EmbeddedDashboardRestApi.post": { + "properties": { + "uuid": { + "format": "uuid", + "type": "string" + } + }, + "type": "object" + }, + "EmbeddedDashboardRestApi.put": { + "properties": { + "uuid": { + "format": "uuid", + "type": "string" + } + }, + "type": "object" + }, "ExplorePermalinkPostSchema": { "properties": { "formData": { @@ -5128,18 +5296,31 @@ "format": "int32", "type": "integer" }, - "dataset_id": { - "description": "The dataset ID", + "datasource_id": { + "description": "The datasource ID", "format": "int32", "type": "integer" }, + "datasource_type": { + "description": "The datasource type", + "enum": [ + "sl_table", + "table", + "dataset", + "query", + "saved_query", + "view" + ], + "type": "string" + }, "form_data": { "description": "Any type of JSON supported text.", "type": "string" } }, "required": [ - "dataset_id", + "datasource_id", + "datasource_type", "form_data" ], "type": "object" @@ -5151,18 +5332,31 @@ "format": "int32", "type": "integer" }, - "dataset_id": { - "description": "The dataset ID", + "datasource_id": { + "description": "The datasource ID", "format": "int32", "type": "integer" 
}, + "datasource_type": { + "description": "The datasource type", + "enum": [ + "sl_table", + "table", + "dataset", + "query", + "saved_query", + "view" + ], + "type": "string" + }, "form_data": { "description": "Any type of JSON supported text.", "type": "string" } }, "required": [ - "dataset_id", + "datasource_id", + "datasource_type", "form_data" ], "type": "object" @@ -5484,18 +5678,16 @@ "properties": { "changed_on": { "format": "date-time", - "nullable": true, "type": "string" }, "database": { - "$ref": "#/components/schemas/QueryRestApi.get_list.Database" + "$ref": "#/components/schemas/Database1" }, "end_time": { - "nullable": true, + "format": "float", "type": "number" }, "executed_sql": { - "nullable": true, "type": "string" }, "id": { @@ -5504,89 +5696,37 @@ }, "rows": { "format": "int32", - "nullable": true, "type": "integer" }, "schema": { - "maxLength": 256, - "nullable": true, "type": "string" }, "sql": { - "nullable": true, "type": "string" }, "sql_tables": { "readOnly": true }, "start_time": { - "nullable": true, + "format": "float", "type": "number" }, "status": { - "maxLength": 16, - "nullable": true, "type": "string" }, "tab_name": { - "maxLength": 256, - "nullable": true, "type": "string" }, "tmp_table_name": { - "maxLength": 256, - "nullable": true, "type": "string" }, "tracking_url": { - "nullable": true, "type": "string" }, "user": { - "$ref": "#/components/schemas/QueryRestApi.get_list.User" - } - }, - "required": [ - "database" - ], - "type": "object" - }, - "QueryRestApi.get_list.Database": { - "properties": { - "database_name": { - "maxLength": 250, - "type": "string" - } - }, - "required": [ - "database_name" - ], - "type": "object" - }, - "QueryRestApi.get_list.User": { - "properties": { - "first_name": { - "maxLength": 64, - "type": "string" - }, - "id": { - "format": "int32", - "type": "integer" - }, - "last_name": { - "maxLength": 64, - "type": "string" - }, - "username": { - "maxLength": 64, - "type": "string" + "$ref": 
"#/components/schemas/User" } }, - "required": [ - "first_name", - "last_name", - "username" - ], "type": "object" }, "QueryRestApi.post": { @@ -6006,6 +6146,11 @@ "changed_on_delta_humanized": { "readOnly": true }, + "chart_id": { + "format": "int32", + "nullable": true, + "type": "integer" + }, "created_by": { "$ref": "#/components/schemas/ReportScheduleRestApi.get_list.User1" }, @@ -6026,6 +6171,11 @@ "crontab_humanized": { "readOnly": true }, + "dashboard_id": { + "format": "int32", + "nullable": true, + "type": "integer" + }, "description": { "nullable": true, "type": "string" @@ -6790,6 +6940,7 @@ "Pacific/Guam", "Pacific/Honolulu", "Pacific/Johnston", + "Pacific/Kanton", "Pacific/Kiritimati", "Pacific/Kosrae", "Pacific/Kwajalein", @@ -7514,6 +7665,7 @@ "Pacific/Guam", "Pacific/Honolulu", "Pacific/Johnston", + "Pacific/Kanton", "Pacific/Kiritimati", "Pacific/Kosrae", "Pacific/Kwajalein", @@ -7885,6 +8037,20 @@ }, "type": "object" }, + "TableExtraMetadataResponseSchema": { + "properties": { + "clustering": { + "type": "object" + }, + "metadata": { + "type": "object" + }, + "partitions": { + "type": "object" + } + }, + "type": "object" + }, "TableMetadataColumnsResponse": { "properties": { "duplicates_constraint": { @@ -8110,6 +8276,46 @@ }, "type": "object" }, + "ValidateSQLRequest": { + "properties": { + "schema": { + "nullable": true, + "type": "string" + }, + "sql": { + "description": "SQL statement to validate", + "type": "string" + }, + "template_params": { + "nullable": true, + "type": "object" + } + }, + "required": [ + "sql" + ], + "type": "object" + }, + "ValidateSQLResponse": { + "properties": { + "end_column": { + "format": "int32", + "type": "integer" + }, + "line_number": { + "format": "int32", + "type": "integer" + }, + "message": { + "type": "string" + }, + "start_column": { + "format": "int32", + "type": "integer" + } + }, + "type": "object" + }, "ValidatorConfigJSON": { "properties": { "op": { @@ -8131,6 +8337,26 @@ }, "type": "object" }, + 
"advanced_data_type_convert_schema": { + "properties": { + "type": { + "default": "port", + "type": "string" + }, + "values": { + "items": { + "default": "http" + }, + "minItems": 1, + "type": "array" + } + }, + "required": [ + "type", + "values" + ], + "type": "object" + }, "database_schemas_query_schema": { "properties": { "force": { @@ -8374,6 +8600,98 @@ }, "openapi": "3.0.2", "paths": { + "/api/v1/advanced_data_type/convert": { + "get": { + "parameters": [ + { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/advanced_data_type_convert_schema" + } + } + }, + "in": "query", + "name": "q" + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AdvancedDataTypeSchema" + } + } + }, + "description": "AdvancedDataTypeResponse object has been returned." + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "summary": "Returns a AdvancedDataTypeResponse object populated with the passed in args.", + "tags": [ + "Advanced Data Type" + ] + } + }, + "/api/v1/advanced_data_type/types": { + "get": { + "description": "Returns a list of available advanced data types.", + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "items": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + } + } + }, + "description": "a successful return of the available advanced data types has taken place." 
+ }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Advanced Data Type" + ] + } + }, "/api/v1/annotation_layer/": { "delete": { "description": "Deletes multiple annotation layers in a bulk operation.", @@ -9380,9 +9698,6 @@ }, "description": "ZIP file" }, - "400": { - "$ref": "#/components/responses/400" - }, "401": { "$ref": "#/components/responses/401" }, @@ -9440,7 +9755,7 @@ } } }, - "description": "Dashboard import result" + "description": "Assets import result" }, "400": { "$ref": "#/components/responses/400" @@ -10460,7 +10775,7 @@ } ], "responses": { - "200": { + "202": { "content": { "application/json": { "schema": { @@ -10470,9 +10785,6 @@ }, "description": "Chart async result" }, - "302": { - "description": "Redirects to the current digest" - }, "400": { "$ref": "#/components/responses/400" }, @@ -10600,9 +10912,6 @@ }, "description": "Chart thumbnail image" }, - "302": { - "description": "Redirects to the current digest" - }, "400": { "$ref": "#/components/responses/400" }, @@ -11453,9 +11762,6 @@ }, "description": "Dashboard added" }, - "302": { - "description": "Redirects to the current digest" - }, "400": { "$ref": "#/components/responses/400" }, @@ -12163,9 +12469,6 @@ }, "description": "Dashboard" }, - "302": { - "description": "Redirects to the current digest" - }, "400": { "$ref": "#/components/responses/400" }, @@ -12221,9 +12524,6 @@ }, "description": "Dashboard chart definitions" }, - "302": { - "description": "Redirects to the current digest" - }, "400": { "$ref": "#/components/responses/400" }, @@ -12280,9 +12580,6 @@ }, "description": "Dashboard dataset definitions" }, - "302": { - "description": "Redirects to the current digest" - }, "400": { "$ref": "#/components/responses/400" }, @@ -12306,16 +12603,17 @@ ] } }, - "/api/v1/dashboard/{pk}": { + 
"/api/v1/dashboard/{id_or_slug}/embedded": { "delete": { - "description": "Deletes a Dashboard.", + "description": "Removes a dashboard's embedded configuration.", "parameters": [ { + "description": "The dashboard id or slug", "in": "path", - "name": "pk", + "name": "id_or_slug", "required": true, "schema": { - "type": "integer" + "type": "string" } } ], @@ -12333,20 +12631,11 @@ } } }, - "description": "Dashboard deleted" + "description": "Successfully removed the configuration" }, "401": { "$ref": "#/components/responses/401" }, - "403": { - "$ref": "#/components/responses/403" - }, - "404": { - "$ref": "#/components/responses/404" - }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -12360,25 +12649,237 @@ "Dashboards" ] }, - "put": { - "description": "Changes a Dashboard.", + "get": { + "description": "Returns the dashboard's embedded configuration", "parameters": [ { + "description": "The dashboard id or slug", "in": "path", - "name": "pk", + "name": "id_or_slug", "required": true, "schema": { - "type": "integer" + "type": "string" } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DashboardRestApi.put" - } - } + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" + } + }, + "type": "object" + } + } + }, + "description": "Result contains the embedded dashboard config" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Dashboards" + ] + }, + "post": { + "description": "Sets a dashboard's embedded configuration.", + "parameters": [ + { + "description": "The dashboard id or slug", + "in": "path", + "name": "id_or_slug", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { 
+ "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EmbeddedDashboardConfig" + } + } + }, + "description": "The embedded configuration to set", + "required": true + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" + } + }, + "type": "object" + } + } + }, + "description": "Successfully set the configuration" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Dashboards" + ] + }, + "put": { + "description": "Sets a dashboard's embedded configuration.", + "parameters": [ + { + "description": "The dashboard id or slug", + "in": "path", + "name": "id_or_slug", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/EmbeddedDashboardConfig" + } + } + }, + "description": "The embedded configuration to set", + "required": true + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" + } + }, + "type": "object" + } + } + }, + "description": "Successfully set the configuration" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Dashboards" + ] + } + }, + "/api/v1/dashboard/{pk}": { + "delete": { + "description": "Deletes a Dashboard.", + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + } + } + }, + 
"description": "Dashboard deleted" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Dashboards" + ] + }, + "put": { + "description": "Changes a Dashboard.", + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DashboardRestApi.put" + } + } }, "description": "Dashboard schema", "required": true @@ -12837,6 +13338,9 @@ }, "description": "Thumbnail does not exist on cache, fired async to compute" }, + "302": { + "description": "Redirects to the current digest" + }, "401": { "$ref": "#/components/responses/401" }, @@ -13000,9 +13504,6 @@ }, "description": "Database added" }, - "302": { - "description": "Redirects to the current digest" - }, "400": { "$ref": "#/components/responses/400" }, @@ -13767,7 +14268,145 @@ }, "/api/v1/database/{pk}/select_star/{table_name}/": { "get": { - "description": "Get database select star for table", + "description": "Get database select star for table", + "parameters": [ + { + "description": "The database id", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "description": "Table name", + "in": "path", + "name": "table_name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "Table schema", + "in": "path", + "name": "schema_name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SelectStarResponseSchema" + } + } + }, + "description": "SQL statement for a select star for 
table" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Database" + ] + } + }, + "/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/": { + "get": { + "description": "Get database select star for table", + "parameters": [ + { + "description": "The database id", + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "description": "Table name", + "in": "path", + "name": "table_name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "description": "Table schema", + "in": "path", + "name": "schema_name", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/SelectStarResponseSchema" + } + } + }, + "description": "SQL statement for a select star for table" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Database" + ] + } + }, + "/api/v1/database/{pk}/table/{table_name}/{schema_name}/": { + "get": { + "description": "Get database table metadata", "parameters": [ { "description": "The database id", @@ -13802,11 +14441,11 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SelectStarResponseSchema" + "$ref": "#/components/schemas/TableMetadataResponseSchema" } } }, - "description": "SQL statement for a select star for table" + "description": "Table metadata information" }, 
"400": { "$ref": "#/components/responses/400" @@ -13834,9 +14473,9 @@ ] } }, - "/api/v1/database/{pk}/select_star/{table_name}/{schema_name}/": { + "/api/v1/database/{pk}/table_extra/{table_name}/{schema_name}/": { "get": { - "description": "Get database select star for table", + "description": "Response depends on each DB engine spec normally focused on partitions", "parameters": [ { "description": "The database id", @@ -13871,11 +14510,11 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SelectStarResponseSchema" + "$ref": "#/components/schemas/TableExtraMetadataResponseSchema" } } }, - "description": "SQL statement for a select star for table" + "description": "Table extra metadata information" }, "400": { "$ref": "#/components/responses/400" @@ -13898,53 +14537,55 @@ "jwt": [] } ], + "summary": "Get table extra metadata", "tags": [ "Database" ] } }, - "/api/v1/database/{pk}/table/{table_name}/{schema_name}/": { - "get": { - "description": "Get database table metadata", + "/api/v1/database/{pk}/validate_sql": { + "post": { + "description": "Validates arbitrary SQL.", "parameters": [ { - "description": "The database id", "in": "path", "name": "pk", "required": true, "schema": { "type": "integer" } - }, - { - "description": "Table name", - "in": "path", - "name": "table_name", - "required": true, - "schema": { - "type": "string" - } - }, - { - "description": "Table schema", - "in": "path", - "name": "schema_name", - "required": true, - "schema": { - "type": "string" - } } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ValidateSQLRequest" + } + } + }, + "description": "Validate SQL request", + "required": true + }, "responses": { "200": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TableMetadataResponseSchema" + "properties": { + "result": { + "description": "A List of SQL errors found on the statement", + "items": { + "$ref": 
"#/components/schemas/ValidateSQLResponse" + }, + "type": "array" + } + }, + "type": "object" } } }, - "description": "Table metadata information" + "description": "Validation result" }, "400": { "$ref": "#/components/responses/400" @@ -13955,9 +14596,6 @@ "404": { "$ref": "#/components/responses/404" }, - "422": { - "$ref": "#/components/responses/422" - }, "500": { "$ref": "#/components/responses/500" } @@ -13967,6 +14605,7 @@ "jwt": [] } ], + "summary": "Validates that arbitrary sql is acceptable for the given database", "tags": [ "Database" ] @@ -14337,6 +14976,75 @@ ] } }, + "/api/v1/dataset/duplicate": { + "post": { + "description": "Duplicates a Dataset", + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/DatasetDuplicateSchema" + } + } + }, + "description": "Dataset schema", + "required": true + }, + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "message": { + "type": "string" + } + }, + "type": "object" + } + } + }, + "description": "Dataset duplicate" + }, + "400": { + "$ref": "#/components/responses/400" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Datasets" + ] + } + }, "/api/v1/dataset/export/": { "get": { "description": "Exports multiple datasets and downloads them as YAML files", @@ -14971,6 +15679,118 @@ ] } }, + "/api/v1/dataset/{pk}/samples": { + "get": { + "description": "get samples from a Dataset", + "parameters": [ + { + "in": "path", + "name": "pk", + "required": true, + "schema": { + "type": "integer" + } + }, + { + "in": 
"query", + "name": "force", + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "$ref": "#/components/schemas/ChartDataResponseResult" + } + }, + "type": "object" + } + } + }, + "description": "Dataset samples" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "403": { + "$ref": "#/components/responses/403" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "422": { + "$ref": "#/components/responses/422" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Datasets" + ] + } + }, + "/api/v1/embedded_dashboard/{uuid}": { + "get": { + "description": "Get a report schedule log", + "parameters": [ + { + "description": "The embedded configuration uuid", + "in": "path", + "name": "uuid", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "$ref": "#/components/schemas/EmbeddedDashboardResponseSchema" + } + }, + "type": "object" + } + } + }, + "description": "Result contains the embedded dashboard configuration" + }, + "401": { + "$ref": "#/components/responses/401" + }, + "404": { + "$ref": "#/components/responses/404" + }, + "500": { + "$ref": "#/components/responses/500" + } + }, + "security": [ + { + "jwt": [] + } + ], + "tags": [ + "Embedded Dashboard" + ] + } + }, "/api/v1/explore/form_data": { "post": { "description": "Stores a new form_data.", @@ -15620,6 +16440,34 @@ ] } }, + "/api/v1/me/roles/": { + "get": { + "description": "Returns the user roles corresponding to the agent making the request, or returns a 401 error if the user is unauthenticated.", + "responses": { + "200": { + "content": { + "application/json": { + "schema": { + "properties": { + "result": { + "$ref": "#/components/schemas/UserResponseSchema" + } + }, + "type": "object" 
+ } + } + }, + "description": "The current user" + }, + "401": { + "$ref": "#/components/responses/401" + } + }, + "tags": [ + "Current User" + ] + } + }, "/api/v1/menu/": { "get": { "description": "Get the menu data structure. Returns a forest like structure with the menu the user has access to", @@ -16211,6 +17059,9 @@ "404": { "$ref": "#/components/responses/404" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -16575,6 +17426,9 @@ "404": { "$ref": "#/components/responses/404" }, + "422": { + "$ref": "#/components/responses/422" + }, "500": { "$ref": "#/components/responses/500" } @@ -17560,6 +18414,9 @@ }, "description": "Result contains the guest token" }, + "400": { + "$ref": "#/components/responses/400" + }, "401": { "$ref": "#/components/responses/401" }, diff --git a/superset-frontend/cypress-base/cypress/integration/dashboard/edit_properties.test.ts b/superset-frontend/cypress-base/cypress/integration/dashboard/edit_properties.test.ts index 0bb83fa414a84..57839ebc2cf76 100644 --- a/superset-frontend/cypress-base/cypress/integration/dashboard/edit_properties.test.ts +++ b/superset-frontend/cypress-base/cypress/integration/dashboard/edit_properties.test.ts @@ -190,7 +190,7 @@ describe('Dashboard edit action', () => { }); }); }); - describe('the color scheme affects the chart colors', () => { + describe.skip('the color scheme affects the chart colors', () => { it('should change the chart colors', () => { openAdvancedProperties().then(() => { clear('#json_metadata'); diff --git a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js index 195255687ee4c..0be1e6b49ffb1 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js +++ b/superset-frontend/plugins/legacy-plugin-chart-world-map/src/WorldMap.js @@ -108,14 +108,13 @@ function WorldMap(element, props) { const handleContextMenu = 
source => { const pointerEvent = d3.event; pointerEvent.preventDefault(); - const val = source.id || source.country; - const formattedVal = mapData[val].name; + const val = mapData[source.id || source.country].name; const filters = [ { col: entity, op: '==', val, - formattedVal, + formattedVal: val, }, ]; onContextMenu(filters, pointerEvent.clientX, pointerEvent.clientY); diff --git a/superset-frontend/plugins/plugin-chart-pivot-table/src/PivotTableChart.tsx b/superset-frontend/plugins/plugin-chart-pivot-table/src/PivotTableChart.tsx index 924470559fb8d..4d740148f7238 100644 --- a/superset-frontend/plugins/plugin-chart-pivot-table/src/PivotTableChart.tsx +++ b/superset-frontend/plugins/plugin-chart-pivot-table/src/PivotTableChart.tsx @@ -365,8 +365,8 @@ export default function PivotTableChart(props: PivotTableProps) { const handleContextMenu = useCallback( ( e: MouseEvent, - colKey: DataRecordValue[] | undefined, - rowKey: DataRecordValue[] | undefined, + colKey: (string | number | boolean)[] | undefined, + rowKey: (string | number | boolean)[] | undefined, ) => { if (onContextMenu) { e.preventDefault(); diff --git a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx index 85580e7b63a3d..d107f2e5e16d9 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/DataTable/DataTable.tsx @@ -23,6 +23,7 @@ import React, { HTMLProps, MutableRefObject, CSSProperties, + MouseEvent, } from 'react'; import { useTable, @@ -66,6 +67,7 @@ export interface DataTableProps extends TableOptions { rowCount: number; wrapperRef?: MutableRefObject; onColumnOrderChange: () => void; + onContextMenu?: (value: D, clientX: number, clientY: number) => void; } export interface RenderHTMLCellProps extends HTMLProps { @@ -98,6 +100,7 @@ export default typedMemo(function DataTable({ serverPagination, wrapperRef: 
userWrapperRef, onColumnOrderChange, + onContextMenu, ...moreUseTableOptions }: DataTableProps): JSX.Element { const tableHooks: PluginHook[] = [ @@ -270,7 +273,20 @@ export default typedMemo(function DataTable({ prepareRow(row); const { key: rowKey, ...rowProps } = row.getRowProps(); return ( - + { + if (onContextMenu) { + e.preventDefault(); + onContextMenu( + row.original, + e.nativeEvent.clientX, + e.nativeEvent.clientY, + ); + } + }} + > {row.cells.map(cell => cell.render('Cell', { key: cell.column.id }), )} diff --git a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx index 8acc06199f66e..98c19f20ba639 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx +++ b/superset-frontend/plugins/plugin-chart-table/src/TableChart.tsx @@ -39,6 +39,7 @@ import { ensureIsArray, GenericDataType, getTimeFormatterForGranularity, + QueryObjectFilterClause, styled, t, tn, @@ -205,6 +206,7 @@ export default function TableChart( sticky = true, // whether to use sticky header columnColorFormatters, allowRearrangeColumns = false, + onContextMenu, } = props; const timestampFormatter = useCallback( value => getTimeFormatterForGranularity(timeGrain)(value), @@ -576,6 +578,24 @@ export default function TableChart( const { width: widthFromState, height: heightFromState } = tableSize; + const handleContextMenu = + onContextMenu && !isRawRecords + ? 
(value: D, clientX: number, clientY: number) => { + const filters: QueryObjectFilterClause[] = []; + columnsMeta.forEach(col => { + if (!col.isMetric) { + filters.push({ + col: col.key, + op: '==', + val: value[col.key] as string | number | boolean, + formattedVal: String(value[col.key]), + }); + } + }); + onContextMenu(filters, clientX, clientY); + } + : undefined; + return ( @@ -598,6 +618,7 @@ export default function TableChart( selectPageSize={pageSize !== null && SelectPageSize} // not in use in Superset, but needed for unit tests sticky={sticky} + onContextMenu={handleContextMenu} /> ); diff --git a/superset-frontend/plugins/plugin-chart-table/src/transformProps.ts b/superset-frontend/plugins/plugin-chart-table/src/transformProps.ts index 5cf4fd1e83c43..bca48e63403cc 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/transformProps.ts +++ b/superset-frontend/plugins/plugin-chart-table/src/transformProps.ts @@ -204,7 +204,11 @@ const transformProps = ( queriesData = [], filterState, ownState: serverPaginationData, - hooks: { onAddFilter: onChangeFilter, setDataMask = () => {} }, + hooks: { + onAddFilter: onChangeFilter, + setDataMask = () => {}, + onContextMenu, + }, } = chartProps; const { @@ -274,6 +278,7 @@ const transformProps = ( columnColorFormatters, timeGrain, allowRearrangeColumns, + onContextMenu, }; }; diff --git a/superset-frontend/plugins/plugin-chart-table/src/types.ts b/superset-frontend/plugins/plugin-chart-table/src/types.ts index f5b83fa8bfd7e..6a5cb88f44865 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/types.ts +++ b/superset-frontend/plugins/plugin-chart-table/src/types.ts @@ -30,6 +30,7 @@ import { ChartDataResponseResult, QueryFormData, SetDataMaskHook, + QueryObjectFilterClause, } from '@superset-ui/core'; import { ColorFormatters, ColumnConfig } from '@superset-ui/chart-controls'; @@ -111,6 +112,11 @@ export interface TableChartTransformedProps { onChangeFilter?: ChartProps['hooks']['onAddFilter']; 
columnColorFormatters?: ColorFormatters; allowRearrangeColumns?: boolean; + onContextMenu?: ( + filters: QueryObjectFilterClause[], + clientX: number, + clientY: number, + ) => void; } export default {}; diff --git a/superset-frontend/src/explore/controlUtils/getControlValuesCompatibleWithDatasource.ts b/superset-frontend/src/explore/controlUtils/getControlValuesCompatibleWithDatasource.ts index e070e82464d52..346768557c9dd 100644 --- a/superset-frontend/src/explore/controlUtils/getControlValuesCompatibleWithDatasource.ts +++ b/superset-frontend/src/explore/controlUtils/getControlValuesCompatibleWithDatasource.ts @@ -23,6 +23,7 @@ import { isAdhocMetricSimple, isSavedMetric, isSimpleAdhocFilter, + isFreeFormAdhocFilter, JsonValue, SimpleAdhocFilter, } from '@superset-ui/core'; @@ -70,6 +71,7 @@ const isControlValueCompatibleWithDatasource = ( column.column_name === (value as SimpleAdhocFilter).subject, ); } + if (isFreeFormAdhocFilter(value)) return true; return false; }; diff --git a/superset-frontend/src/utils/common.js b/superset-frontend/src/utils/common.js index 603ec7c54992d..400a7d05e0109 100644 --- a/superset-frontend/src/utils/common.js +++ b/superset-frontend/src/utils/common.js @@ -97,7 +97,7 @@ export function prepareCopyToClipboardTabularData(data, columns) { // JavaScript does not maintain the order of a mixed set of keys (i.e integers and strings) // the below function orders the keys based on the column names. 
const key = columns[j].name || columns[j]; - if (data[i][key]) { + if (key in data[i]) { row[j] = data[i][key]; } else { row[j] = data[i][parseFloat(key)]; diff --git a/superset-frontend/src/utils/common.test.jsx b/superset-frontend/src/utils/common.test.jsx index 6c73b1011cd92..571e493addbff 100644 --- a/superset-frontend/src/utils/common.test.jsx +++ b/superset-frontend/src/utils/common.test.jsx @@ -59,6 +59,16 @@ describe('utils/common', () => { 'lorem\tipsum\t\ndolor\tsit\tamet\n', ); }); + it('includes 0 values', () => { + const array = [ + { column1: 0, column2: 0 }, + { column1: 1, column2: -1, 0: 0 }, + ]; + const column = ['column1', 'column2', '0']; + expect(prepareCopyToClipboardTabularData(array, column)).toEqual( + '0\t0\t\n1\t-1\t0\n', + ); + }); }); describe('applyFormattingToTabularData', () => { it('does not mutate empty array', () => { diff --git a/superset-frontend/src/views/CRUD/data/dataset/DatasetList.test.jsx b/superset-frontend/src/views/CRUD/data/dataset/DatasetList.test.jsx index 2f23f45573311..a2c2ab6954778 100644 --- a/superset-frontend/src/views/CRUD/data/dataset/DatasetList.test.jsx +++ b/superset-frontend/src/views/CRUD/data/dataset/DatasetList.test.jsx @@ -41,6 +41,7 @@ const store = mockStore({}); const datasetsInfoEndpoint = 'glob:*/api/v1/dataset/_info*'; const datasetsOwnersEndpoint = 'glob:*/api/v1/dataset/related/owners*'; const datasetsSchemaEndpoint = 'glob:*/api/v1/dataset/distinct/schema*'; +const datasetsDuplicateEndpoint = 'glob:*/api/v1/dataset/duplicate*'; const databaseEndpoint = 'glob:*/api/v1/dataset/related/database*'; const datasetsEndpoint = 'glob:*/api/v1/dataset/?*'; @@ -63,7 +64,7 @@ const mockUser = { }; fetchMock.get(datasetsInfoEndpoint, { - permissions: ['can_read', 'can_write'], + permissions: ['can_read', 'can_write', 'can_duplicate'], }); fetchMock.get(datasetsOwnersEndpoint, { result: [], @@ -71,6 +72,9 @@ fetchMock.get(datasetsOwnersEndpoint, { fetchMock.get(datasetsSchemaEndpoint, { result: [], }); 
+fetchMock.post(datasetsDuplicateEndpoint, { + result: [], +}); fetchMock.get(datasetsEndpoint, { result: mockdatasets, dataset_count: 3, @@ -181,6 +185,44 @@ describe('DatasetList', () => { wrapper.find('[data-test="bulk-select-copy"]').text(), ).toMatchInlineSnapshot(`"3 Selected (2 Physical, 1 Virtual)"`); }); + + it('shows duplicate modal when duplicate action is clicked', async () => { + await waitForComponentToPaint(wrapper); + expect( + wrapper.find('[data-test="duplicate-modal-input"]').exists(), + ).toBeFalsy(); + act(() => { + wrapper + .find('#duplicate-action-tooltop') + .at(0) + .find('.action-button') + .props() + .onClick(); + }); + await waitForComponentToPaint(wrapper); + expect( + wrapper.find('[data-test="duplicate-modal-input"]').exists(), + ).toBeTruthy(); + }); + + it('calls the duplicate endpoint', async () => { + await waitForComponentToPaint(wrapper); + await act(async () => { + wrapper + .find('#duplicate-action-tooltop') + .at(0) + .find('.action-button') + .props() + .onClick(); + await waitForComponentToPaint(wrapper); + wrapper + .find('[data-test="duplicate-modal-input"]') + .at(0) + .props() + .onPressEnter(); + }); + expect(fetchMock.calls(/dataset\/duplicate/)).toHaveLength(1); + }); }); jest.mock('react-router-dom', () => ({ diff --git a/superset-frontend/src/views/CRUD/data/dataset/DatasetList.tsx b/superset-frontend/src/views/CRUD/data/dataset/DatasetList.tsx index 107b072f25781..265a692bb6ed1 100644 --- a/superset-frontend/src/views/CRUD/data/dataset/DatasetList.tsx +++ b/superset-frontend/src/views/CRUD/data/dataset/DatasetList.tsx @@ -69,6 +69,7 @@ import { PASSWORDS_NEEDED_MESSAGE, CONFIRM_OVERWRITE_MESSAGE, } from './constants'; +import DuplicateDatasetModal from './DuplicateDatasetModal'; const FlexRowContainer = styled.div` align-items: center; @@ -119,6 +120,11 @@ type Dataset = { table_name: string; }; +interface VirtualDataset extends Dataset { + extra: Record; + sql: string; +} + interface DatasetListProps { 
addDangerToast: (msg: string) => void; addSuccessToast: (msg: string) => void; @@ -157,6 +163,9 @@ const DatasetList: FunctionComponent = ({ const [datasetCurrentlyEditing, setDatasetCurrentlyEditing] = useState(null); + const [datasetCurrentlyDuplicating, setDatasetCurrentlyDuplicating] = + useState(null); + const [importingDataset, showImportModal] = useState(false); const [passwordFields, setPasswordFields] = useState([]); const [preparingExport, setPreparingExport] = useState(false); @@ -178,6 +187,7 @@ const DatasetList: FunctionComponent = ({ const canEdit = hasPerm('can_write'); const canDelete = hasPerm('can_write'); const canCreate = hasPerm('can_write'); + const canDuplicate = hasPerm('can_duplicate'); const canExport = hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT); @@ -241,6 +251,10 @@ const DatasetList: FunctionComponent = ({ ), ); + const openDatasetDuplicateModal = (dataset: VirtualDataset) => { + setDatasetCurrentlyDuplicating(dataset); + }; + const handleBulkDatasetExport = (datasetsToExport: Dataset[]) => { const ids = datasetsToExport.map(({ id }) => id); handleResourceExport('dataset', ids, () => { @@ -397,7 +411,8 @@ const DatasetList: FunctionComponent = ({ const handleEdit = () => openDatasetEditModal(original); const handleDelete = () => openDatasetDeleteModal(original); const handleExport = () => handleBulkDatasetExport([original]); - if (!canEdit && !canDelete && !canExport) { + const handleDuplicate = () => openDatasetDuplicateModal(original); + if (!canEdit && !canDelete && !canExport && !canDuplicate) { return null; } return ( @@ -456,16 +471,32 @@ const DatasetList: FunctionComponent = ({ )} + {canDuplicate && original.kind === 'virtual' && ( + + + + + + )} ); }, Header: t('Actions'), id: 'actions', - hidden: !canEdit && !canDelete, + hidden: !canEdit && !canDelete && !canDuplicate, disableSortBy: true, }, ], - [canEdit, canDelete, canExport, openDatasetEditModal], + [canEdit, canDelete, canExport, 
openDatasetEditModal, canDuplicate], ); const filterTypes: Filters = useMemo( @@ -625,6 +656,10 @@ const DatasetList: FunctionComponent = ({ setDatasetCurrentlyEditing(null); }; + const closeDatasetDuplicateModal = () => { + setDatasetCurrentlyDuplicating(null); + }; + const handleDatasetDelete = ({ id, table_name: tableName }: Dataset) => { SupersetClient.delete({ endpoint: `/api/v1/dataset/${id}`, @@ -660,6 +695,30 @@ const DatasetList: FunctionComponent = ({ ); }; + const handleDatasetDuplicate = (newDatasetName: string) => { + if (datasetCurrentlyDuplicating === null) { + addDangerToast(t('There was an issue duplicating the dataset.')); + } + + SupersetClient.post({ + endpoint: `/api/v1/dataset/duplicate`, + postPayload: { + base_model_id: datasetCurrentlyDuplicating?.id, + table_name: newDatasetName, + }, + }).then( + () => { + setDatasetCurrentlyDuplicating(null); + refreshData(); + }, + createErrorHandler(errMsg => + addDangerToast( + t('There was an issue duplicating the selected datasets: %s', errMsg), + ), + ), + ); + }; + return ( <> @@ -694,6 +753,11 @@ const DatasetList: FunctionComponent = ({ show /> )} + void; + onDuplicate: (newDatasetName: string) => void; +} + +const DuplicateDatasetModal: FunctionComponent = ({ + dataset, + onHide, + onDuplicate, +}) => { + const [show, setShow] = useState(false); + const [disableSave, setDisableSave] = useState(false); + const [newDuplicateDatasetName, setNewDuplicateDatasetName] = + useState(''); + + const onChange = (event: React.ChangeEvent) => { + const targetValue = event.target.value ?? 
''; + setNewDuplicateDatasetName(targetValue); + setDisableSave(targetValue === ''); + }; + + const duplicateDataset = () => { + onDuplicate(newDuplicateDatasetName); + }; + + useEffect(() => { + setNewDuplicateDatasetName(''); + setShow(dataset !== null); + }, [dataset]); + + return ( + + {t('New dataset name')} + + + ); +}; + +export default DuplicateDatasetModal; diff --git a/superset-frontend/src/views/components/Menu.test.tsx b/superset-frontend/src/views/components/Menu.test.tsx index 5d785f91d48fc..0c32d24a184ea 100644 --- a/superset-frontend/src/views/components/Menu.test.tsx +++ b/superset-frontend/src/views/components/Menu.test.tsx @@ -178,6 +178,10 @@ const mockedProps = { tooltip: '', text: '', }, + environment_tag: { + text: 'Production', + color: '#000', + }, navbar_right: { show_watermark: false, bug_report_url: '/report/', @@ -284,6 +288,15 @@ test('should render the brand', () => { expect(image).toHaveAttribute('src', icon); }); +test('should render the environment tag', () => { + useSelectorMock.mockReturnValue({ roles: user.roles }); + const { + data: { environment_tag }, + } = mockedProps; + render(, { useRedux: true }); + expect(screen.getByText(environment_tag.text)).toBeInTheDocument(); +}); + test('should render all the top navbar menu items', () => { useSelectorMock.mockReturnValue({ roles: user.roles }); const { diff --git a/superset-frontend/src/views/components/Menu.tsx b/superset-frontend/src/views/components/Menu.tsx index 59d243879d8e4..320eea4e16d9f 100644 --- a/superset-frontend/src/views/components/Menu.tsx +++ b/superset-frontend/src/views/components/Menu.tsx @@ -63,6 +63,10 @@ export interface MenuProps { brand: BrandProps; navbar_right: NavBarProps; settings: MenuObjectProps[]; + environment_tag: { + text: string; + color: string; + }; }; isFrontendRoute?: (path?: string) => boolean; } @@ -200,7 +204,13 @@ const { SubMenu } = DropdownMenu; const { useBreakpoint } = Grid; export function Menu({ - data: { menu, brand, 
navbar_right: navbarRight, settings }, + data: { + menu, + brand, + navbar_right: navbarRight, + settings, + environment_tag: environmentTag, + }, isFrontendRoute = () => false, }: MenuProps) { const [showMenu, setMenu] = useState('horizontal'); @@ -330,6 +340,7 @@ export function Menu({ settings={settings} navbarRight={navbarRight} isFrontendRoute={isFrontendRoute} + environmentTag={environmentTag} /> diff --git a/superset-frontend/src/views/components/RightMenu.tsx b/superset-frontend/src/views/components/RightMenu.tsx index 516d920739c94..fabf071eb60b9 100644 --- a/superset-frontend/src/views/components/RightMenu.tsx +++ b/superset-frontend/src/views/components/RightMenu.tsx @@ -29,10 +29,12 @@ import { SupersetTheme, SupersetClient, getExtensionsRegistry, + useTheme, } from '@superset-ui/core'; import { MainNav as Menu } from 'src/components/Menu'; import { Tooltip } from 'src/components/Tooltip'; import Icons from 'src/components/Icons'; +import Label from 'src/components/Label'; import { findPermission } from 'src/utils/findPermission'; import { isUserAdmin } from 'src/dashboard/util/permissionUtils'; import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes'; @@ -85,6 +87,10 @@ const StyledAnchor = styled.a` padding-left: ${({ theme }) => theme.gridUnit}px; `; +const tagStyles = (theme: SupersetTheme) => css` + color: ${theme.colors.grayscale.light5}; +`; + const { SubMenu } = Menu; const RightMenu = ({ @@ -92,6 +98,7 @@ const RightMenu = ({ settings, navbarRight, isFrontendRoute, + environmentTag, setQuery, }: RightMenuProps & { setQuery: ({ databaseAdded }: { databaseAdded: boolean }) => void; @@ -262,6 +269,8 @@ const RightMenu = ({ const handleDatabaseAdd = () => setQuery({ databaseAdded: true }); + const theme = useTheme(); + return ( {canDatabase && ( @@ -272,6 +281,20 @@ const RightMenu = ({ onDatabaseAdd={handleDatabaseAdd} /> )} + {environmentTag.text && ( + + )} boolean; + environmentTag: { + text: string; + color: string; + }; } export 
enum GlobalMenuDataOptions { diff --git a/superset/config.py b/superset/config.py index 3b63a48f929b5..647dc81ea78d6 100644 --- a/superset/config.py +++ b/superset/config.py @@ -211,7 +211,7 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]: # # e.g.: # -# class AesGcmEncryptedAdapter( # pylint: disable=too-few-public-methods +# class AesGcmEncryptedAdapter( # AbstractEncryptedFieldAdapter # ): # def create( @@ -1342,6 +1342,21 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument "port": internet_port, } +# Configuration for environment tag shown on the navbar. Setting 'text' to '' will hide the tag. +# 'color' can either be a hex color code, or a dot-indexed theme color (e.g. error.base) +ENVIRONMENT_TAG_CONFIG = { + "variable": "FLASK_ENV", + "values": { + "development": { + "color": "error.base", + "text": "Development", + }, + "production": { + "color": "", + "text": "", + }, + }, +} # ------------------------------------------------------------------- # * WARNING: STOP EDITING HERE * diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index 7ac5675e6ecd2..b9ffb4792cfab 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -1410,7 +1410,9 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma col=selected, template_processor=template_processor ) groupby_all_columns[outer.name] = outer - if not series_column_names or outer.name in series_column_names: + if ( + is_timeseries and not series_column_names + ) or outer.name in series_column_names: groupby_series_columns[outer.name] = outer select_exprs.append(outer) elif columns: diff --git a/superset/dao/exceptions.py b/superset/dao/exceptions.py index 93cb25d3fc70e..a11db63a4c14d 100644 --- a/superset/dao/exceptions.py +++ b/superset/dao/exceptions.py @@ -65,4 +65,5 @@ class DatasourceTypeNotSupportedError(DAOException): class DatasourceNotFound(DAOException): + status = 404 
message = "Datasource does not exist" diff --git a/superset/datasets/api.py b/superset/datasets/api.py index e25e8252f9443..bcc7d7a43ac76 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -22,7 +22,7 @@ from zipfile import is_zipfile, ZipFile import yaml -from flask import request, Response, send_file +from flask import g, request, Response, send_file from flask_appbuilder.api import expose, protect, rison, safe from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_babel import ngettext @@ -37,6 +37,7 @@ from superset.datasets.commands.bulk_delete import BulkDeleteDatasetCommand from superset.datasets.commands.create import CreateDatasetCommand from superset.datasets.commands.delete import DeleteDatasetCommand +from superset.datasets.commands.duplicate import DuplicateDatasetCommand from superset.datasets.commands.exceptions import ( DatasetBulkDeleteFailedError, DatasetCreateFailedError, @@ -54,6 +55,7 @@ from superset.datasets.dao import DatasetDAO from superset.datasets.filters import DatasetCertifiedFilter, DatasetIsNullOrEmptyFilter from superset.datasets.schemas import ( + DatasetDuplicateSchema, DatasetPostSchema, DatasetPutSchema, DatasetRelatedObjectsResponse, @@ -90,6 +92,7 @@ class DatasetRestApi(BaseSupersetModelRestApi): "bulk_delete", "refresh", "related_objects", + "duplicate", } list_columns = [ "id", @@ -184,6 +187,7 @@ class DatasetRestApi(BaseSupersetModelRestApi): ] add_model_schema = DatasetPostSchema() edit_model_schema = DatasetPutSchema() + duplicate_model_schema = DatasetDuplicateSchema() add_columns = ["database", "schema", "table_name", "owners"] edit_columns = [ "table_name", @@ -220,7 +224,10 @@ class DatasetRestApi(BaseSupersetModelRestApi): apispec_parameter_schemas = { "get_export_ids_schema": get_export_ids_schema, } - openapi_spec_component_schemas = (DatasetRelatedObjectsResponse,) + openapi_spec_component_schemas = ( + DatasetRelatedObjectsResponse, + DatasetDuplicateSchema, + ) 
@expose("/", methods=["POST"]) @protect() @@ -512,6 +519,77 @@ def export(self, **kwargs: Any) -> Response: mimetype="application/text", ) + @expose("/duplicate", methods=["POST"]) + @protect() + @safe + @statsd_metrics + @event_logger.log_this_with_context( + action=lambda self, *args, **kwargs: f"{self.__class__.__name__}" f".duplicate", + log_to_statsd=False, + ) + @requires_json + def duplicate(self) -> Response: + """Duplicates a Dataset + --- + post: + description: >- + Duplicates a Dataset + requestBody: + description: Dataset schema + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/DatasetDuplicateSchema' + responses: + 201: + description: Dataset duplicated + content: + application/json: + schema: + type: object + properties: + id: + type: number + result: + $ref: '#/components/schemas/DatasetDuplicateSchema' + 400: + $ref: '#/components/responses/400' + 401: + $ref: '#/components/responses/401' + 403: + $ref: '#/components/responses/403' + 404: + $ref: '#/components/responses/404' + 422: + $ref: '#/components/responses/422' + 500: + $ref: '#/components/responses/500' + """ + try: + item = self.duplicate_model_schema.load(request.json) + # This validates custom Schema with custom validations + except ValidationError as error: + return self.response_400(message=error.messages) + + try: + new_model = DuplicateDatasetCommand([g.user.id], item).run() + return self.response(201, id=new_model.id, result=item) + except DatasetInvalidError as ex: + return self.response_422( + message=ex.normalized_messages() + if isinstance(ex, ValidationError) + else str(ex) + ) + except DatasetCreateFailedError as ex: + logger.error( + "Error creating model %s: %s", + self.__class__.__name__, + str(ex), + exc_info=True, + ) + return self.response_422(message=str(ex)) + @expose("//refresh", methods=["PUT"]) @protect() @safe diff --git a/superset/datasets/commands/duplicate.py b/superset/datasets/commands/duplicate.py new file mode 100644 
index 0000000000000..3ee538230b68c --- /dev/null +++ b/superset/datasets/commands/duplicate.py @@ -0,0 +1,133 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import logging +from typing import Any, Dict, List + +from flask_appbuilder.models.sqla import Model +from flask_appbuilder.security.sqla.models import User +from flask_babel import gettext as __ +from marshmallow import ValidationError +from sqlalchemy.exc import SQLAlchemyError + +from superset.commands.base import BaseCommand, CreateMixin +from superset.commands.exceptions import DatasourceTypeInvalidError +from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn +from superset.dao.exceptions import DAOCreateFailedError +from superset.datasets.commands.exceptions import ( + DatasetDuplicateFailedError, + DatasetExistsValidationError, + DatasetInvalidError, + DatasetNotFoundError, +) +from superset.datasets.dao import DatasetDAO +from superset.errors import ErrorLevel, SupersetError, SupersetErrorType +from superset.exceptions import SupersetErrorException +from superset.extensions import db +from superset.models.core import Database +from superset.sql_parse import ParsedQuery + +logger = logging.getLogger(__name__) + + +class 
DuplicateDatasetCommand(CreateMixin, BaseCommand): + def __init__(self, user: User, data: Dict[str, Any]): + self._actor = user + self._base_model: SqlaTable = SqlaTable() + self._properties = data.copy() + + def run(self) -> Model: + self.validate() + try: + database_id = self._base_model.database_id + table_name = self._properties["table_name"] + owners = self._properties["owners"] + database = db.session.query(Database).get(database_id) + if not database: + raise SupersetErrorException( + SupersetError( + message=__("The database was not found."), + error_type=SupersetErrorType.DATABASE_NOT_FOUND_ERROR, + level=ErrorLevel.ERROR, + ), + status=404, + ) + table = SqlaTable(table_name=table_name, owners=owners) + table.database = database + table.schema = self._base_model.schema + table.template_params = self._base_model.template_params + table.is_sqllab_view = True + table.sql = ParsedQuery(self._base_model.sql).stripped() + db.session.add(table) + cols = [] + for config_ in self._base_model.columns: + column_name = config_.column_name + col = TableColumn( + column_name=column_name, + verbose_name=config_.verbose_name, + filterable=True, + groupby=True, + is_dttm=config_.is_dttm, + type=config_.type, + ) + cols.append(col) + table.columns = cols + mets = [] + for config_ in self._base_model.metrics: + metric_name = config_.metric_name + met = SqlMetric( + metric_name=metric_name, + verbose_name=config_.verbose_name, + expression=config_.expression, + metric_type=config_.metric_type, + description=config_.description, + ) + mets.append(met) + table.metrics = mets + db.session.commit() + except (SQLAlchemyError, DAOCreateFailedError) as ex: + logger.warning(ex, exc_info=True) + db.session.rollback() + raise DatasetDuplicateFailedError() from ex + return table + + def validate(self) -> None: + exceptions: List[ValidationError] = [] + base_model_id = self._properties["base_model_id"] + duplicate_name = self._properties["table_name"] + + base_model = 
DatasetDAO.find_by_id(base_model_id) + if not base_model: + exceptions.append(DatasetNotFoundError()) + else: + self._base_model = base_model + + if self._base_model and self._base_model.kind != "virtual": + exceptions.append(DatasourceTypeInvalidError()) + + if DatasetDAO.find_one_or_none(table_name=duplicate_name): + exceptions.append(DatasetExistsValidationError(table_name=duplicate_name)) + + try: + owners = self.populate_owners(self._actor) + self._properties["owners"] = owners + except ValidationError as ex: + exceptions.append(ex) + + if exceptions: + exception = DatasetInvalidError() + exception.add_list(exceptions) + raise exception diff --git a/superset/datasets/commands/exceptions.py b/superset/datasets/commands/exceptions.py index b743a4355ea06..c76b7b3ad53dc 100644 --- a/superset/datasets/commands/exceptions.py +++ b/superset/datasets/commands/exceptions.py @@ -187,3 +187,7 @@ class DatasetImportError(ImportFailedError): class DatasetAccessDeniedError(ForbiddenError): message = _("You don't have access to this dataset.") + + +class DatasetDuplicateFailedError(CreateFailedError): + message = _("Dataset could not be duplicated.") diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py index 38646471d03c3..9d2b474894b02 100644 --- a/superset/datasets/schemas.py +++ b/superset/datasets/schemas.py @@ -107,6 +107,11 @@ class DatasetPutSchema(Schema): external_url = fields.String(allow_none=True) +class DatasetDuplicateSchema(Schema): + base_model_id = fields.Integer(required=True) + table_name = fields.String(required=True, allow_none=False, validate=Length(1, 250)) + + class DatasetRelatedChart(Schema): id = fields.Integer() slice_name = fields.String() diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py index 2a23d1c969593..9aa89ce34a06f 100644 --- a/superset/db_engine_specs/trino.py +++ b/superset/db_engine_specs/trino.py @@ -35,7 +35,7 @@ from superset.models.core import Database try: - from trino.dbapi 
import Cursor # pylint: disable=unused-import + from trino.dbapi import Cursor except ImportError: pass diff --git a/superset/explore/commands/get.py b/superset/explore/commands/get.py index 1b9ec433dabac..3a656ea2b9f8a 100644 --- a/superset/explore/commands/get.py +++ b/superset/explore/commands/get.py @@ -27,7 +27,7 @@ from superset.commands.base import BaseCommand from superset.connectors.base.models import BaseDatasource from superset.connectors.sqla.models import SqlaTable -from superset.datasets.commands.exceptions import DatasetNotFoundError +from superset.dao.exceptions import DatasourceNotFound from superset.datasource.dao import DatasourceDAO from superset.exceptions import SupersetException from superset.explore.commands.parameters import CommandParameters @@ -114,7 +114,7 @@ def run(self) -> Optional[Dict[str, Any]]: dataset = DatasourceDAO.get_datasource( db.session, cast(str, self._dataset_type), self._dataset_id ) - except DatasetNotFoundError: + except DatasourceNotFound: pass dataset_name = dataset.name if dataset else _("[Missing Dataset]") diff --git a/superset/models/helpers.py b/superset/models/helpers.py index 1ce534b593d01..bc58cee8c6c59 100644 --- a/superset/models/helpers.py +++ b/superset/models/helpers.py @@ -750,6 +750,9 @@ def get_fetch_values_predicate(self) -> List[Any]: def get_extra_cache_keys(query_obj: Dict[str, Any]) -> List[str]: raise NotImplementedError() + def get_template_processor(self, **kwargs: Any) -> BaseTemplateProcessor: + raise NotImplementedError() + def _process_sql_expression( # pylint: disable=no-self-use self, expression: Optional[str], @@ -1291,9 +1294,7 @@ def get_timestamp_expression( column: Dict[str, Any], time_grain: Optional[str], label: Optional[str] = None, - template_processor: Optional[ # pylint: disable=unused-argument - BaseTemplateProcessor - ] = None, + template_processor: Optional[BaseTemplateProcessor] = None, ) -> Union[TimestampExpression, Label]: """ Return a SQLAlchemy Core element 
representation of self to be used in a query. @@ -1307,6 +1308,11 @@ def get_timestamp_expression( column_spec = self.db_engine_spec.get_column_spec(column.get("type")) type_ = column_spec.sqla_type if column_spec else sa.DateTime col = sa.column(column.get("column_name"), type_=type_) + + if template_processor: + expression = template_processor.process_template(column["column_name"]) + col = sa.literal_column(expression, type_=type_) + time_expr = self.db_engine_spec.get_timestamp_expr(col, None, time_grain) return self.make_sqla_column_compatible(time_expr, label) @@ -1377,7 +1383,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma applied_template_filters: List[str] = [] template_kwargs["removed_filters"] = removed_filters template_kwargs["applied_filters"] = applied_template_filters - template_processor = None # self.get_template_processor(**template_kwargs) + template_processor = self.get_template_processor(**template_kwargs) db_engine_spec = self.db_engine_spec prequeries: List[str] = [] orderby = orderby or [] @@ -1487,7 +1493,10 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma table_col = columns_by_name[selected] if isinstance(table_col, dict): outer = self.get_timestamp_expression( - table_col, time_grain, selected, template_processor + column=table_col, + time_grain=time_grain, + label=selected, + template_processor=template_processor, ) else: outer = table_col.get_timestamp_expression( @@ -1515,7 +1524,9 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma col=selected, template_processor=template_processor ) groupby_all_columns[outer.name] = outer - if not series_column_names or outer.name in series_column_names: + if ( + is_timeseries and not series_column_names + ) or outer.name in series_column_names: groupby_series_columns[outer.name] = outer select_exprs.append(outer) elif columns: @@ -1548,7 +1559,7 @@ def get_sqla_query( # pylint: 
disable=too-many-arguments,too-many-locals,too-ma if is_timeseries: if isinstance(dttm_col, dict): timestamp = self.get_timestamp_expression( - dttm_col, time_grain, template_processor + dttm_col, time_grain, template_processor=template_processor ) else: timestamp = dttm_col.get_timestamp_expression( @@ -1637,7 +1648,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma elif col_obj and filter_grain: if isinstance(col_obj, dict): sqla_col = self.get_timestamp_expression( - col_obj, time_grain, template_processor + col_obj, time_grain, template_processor=template_processor ) else: sqla_col = col_obj.get_timestamp_expression( @@ -1768,9 +1779,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma where = extras.get("where") if where: try: - where = template_processor.process_template( # type: ignore - f"({where})" - ) + where = template_processor.process_template(f"{where}") except TemplateError as ex: raise QueryObjectValidationError( _( @@ -1782,9 +1791,7 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma having = extras.get("having") if having: try: - having = template_processor.process_template( # type: ignore - f"({having})" - ) + having = template_processor.process_template(f"{having}") except TemplateError as ex: raise QueryObjectValidationError( _( diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py index d0e0470d4b1f4..d12af490818d5 100644 --- a/superset/models/sql_lab.py +++ b/superset/models/sql_lab.py @@ -42,6 +42,7 @@ from sqlalchemy.orm import backref, relationship from superset import security_manager +from superset.jinja_context import BaseTemplateProcessor, get_template_processor from superset.models.helpers import ( AuditMixinNullable, ExploreMixin, @@ -126,6 +127,9 @@ class Query( __table_args__ = (sqla.Index("ti_user_id_changed_on", user_id, changed_on),) + def get_template_processor(self, **kwargs: Any) -> BaseTemplateProcessor: + return 
get_template_processor(query=self, database=self.database, **kwargs) + def to_dict(self) -> Dict[str, Any]: return { "changedOn": self.changed_on, diff --git a/superset/security/manager.py b/superset/security/manager.py index 369c73fade4a9..3a92feba4f1c8 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -1826,6 +1826,9 @@ def get_permissions( if user.is_anonymous: public_role = current_app.config.get("AUTH_ROLE_PUBLIC") query = query.filter(Role.name == public_role) + elif self.is_guest_user(user): + guest_role = current_app.config.get("GUEST_ROLE_NAME") + query = query.filter(Role.name == guest_role) else: query = query.filter(assoc_user_role.c.user_id == user.id) diff --git a/superset/views/base.py b/superset/views/base.py index 73b97d9f41419..4bc491fdeb311 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -17,6 +17,7 @@ import dataclasses import functools import logging +import os import traceback from datetime import datetime from typing import Any, Callable, cast, Dict, List, Optional, Union @@ -307,6 +308,18 @@ def menu_data() -> Dict[str, Any]: if callable(brand_text): brand_text = brand_text() build_number = appbuilder.app.config["BUILD_NUMBER"] + try: + environment_tag = ( + appbuilder.app.config["ENVIRONMENT_TAG_CONFIG"]["values"][ + os.environ.get( + appbuilder.app.config["ENVIRONMENT_TAG_CONFIG"]["variable"] + ) + ] + or {} + ) + except KeyError: + environment_tag = {} + return { "menu": menu, "brand": { @@ -316,6 +329,7 @@ def menu_data() -> Dict[str, Any]: "tooltip": appbuilder.app.config["LOGO_TOOLTIP"], "text": brand_text, }, + "environment_tag": environment_tag, "navbar_right": { # show the watermark if the default app icon has been overriden "show_watermark": ("superset-logo-horiz" not in appbuilder.app_icon), diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py index 950756d816162..b667180de9253 100644 --- 
a/tests/integration_tests/datasets/api_tests.py +++ b/tests/integration_tests/datasets/api_tests.py @@ -99,6 +99,13 @@ def get_fixture_datasets(self) -> List[SqlaTable]: .all() ) + def get_fixture_virtual_datasets(self) -> List[SqlaTable]: + return ( + db.session.query(SqlaTable) + .filter(SqlaTable.table_name.in_(self.fixture_virtual_table_names)) + .all() + ) + @pytest.fixture() def create_virtual_datasets(self): with self.create_app().app_context(): @@ -443,7 +450,12 @@ def test_info_security_dataset(self): rv = self.get_assert_metric(uri, "info") data = json.loads(rv.data.decode("utf-8")) assert rv.status_code == 200 - assert set(data["permissions"]) == {"can_read", "can_write", "can_export"} + assert set(data["permissions"]) == { + "can_read", + "can_write", + "can_export", + "can_duplicate", + } def test_create_dataset_item(self): """ @@ -2134,3 +2146,78 @@ def test_get_datasets_is_certified_filter(self): db.session.delete(table_w_certification) db.session.commit() + + @pytest.mark.usefixtures("create_virtual_datasets") + def test_duplicate_virtual_dataset(self): + """ + Dataset API: Test duplicate virtual dataset + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_virtual_datasets()[0] + + self.login(username="admin") + uri = f"api/v1/dataset/duplicate" + table_data = {"base_model_id": dataset.id, "table_name": "Dupe1"} + rv = self.post_assert_metric(uri, table_data, "duplicate") + assert rv.status_code == 201 + rv_data = json.loads(rv.data) + new_dataset: SqlaTable = ( + db.session.query(SqlaTable).filter_by(id=rv_data["id"]).one_or_none() + ) + assert new_dataset is not None + assert new_dataset.id != dataset.id + assert new_dataset.table_name == "Dupe1" + assert len(new_dataset.columns) == 2 + assert new_dataset.columns[0].column_name == "id" + assert new_dataset.columns[1].column_name == "name" + + @pytest.mark.usefixtures("create_datasets") + def test_duplicate_physical_dataset(self): + """ + Dataset API: Test duplicate physical 
dataset + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_datasets()[0] + + self.login(username="admin") + uri = f"api/v1/dataset/duplicate" + table_data = {"base_model_id": dataset.id, "table_name": "Dupe2"} + rv = self.post_assert_metric(uri, table_data, "duplicate") + assert rv.status_code == 422 + + @pytest.mark.usefixtures("create_virtual_datasets") + def test_duplicate_existing_dataset(self): + """ + Dataset API: Test duplicate dataset with existing name + """ + if backend() == "sqlite": + return + + dataset = self.get_fixture_virtual_datasets()[0] + + self.login(username="admin") + uri = f"api/v1/dataset/duplicate" + table_data = { + "base_model_id": dataset.id, + "table_name": "sql_virtual_dataset_2", + } + rv = self.post_assert_metric(uri, table_data, "duplicate") + assert rv.status_code == 422 + + def test_duplicate_invalid_dataset(self): + """ + Dataset API: Test duplicate invalid dataset + """ + + self.login(username="admin") + uri = f"api/v1/dataset/duplicate" + table_data = { + "base_model_id": -1, + "table_name": "Dupe3", + } + rv = self.post_assert_metric(uri, table_data, "duplicate") + assert rv.status_code == 422