diff --git a/.github/workflows/bashlib.sh b/.github/workflows/bashlib.sh
index 0f9a8fd10e01b..32e89be43174d 100644
--- a/.github/workflows/bashlib.sh
+++ b/.github/workflows/bashlib.sh
@@ -38,10 +38,10 @@ default-setup-command() {
}
apt-get-install() {
- say "::group::apt-get install dependencies"
- sudo apt-get update && sudo apt-get install --yes \
- libsasl2-dev
- say "::endgroup::"
+ say "::group::apt-get install dependencies"
+ sudo apt-get update && sudo apt-get install --yes \
+ libsasl2-dev
+ say "::endgroup::"
}
pip-upgrade() {
@@ -161,7 +161,7 @@ cypress-run() {
if [[ -z $CYPRESS_KEY ]]; then
$cypress --spec "cypress/integration/$page" --browser "$browser"
else
- export CYPRESS_RECORD_KEY=`echo $CYPRESS_KEY | base64 --decode`
+ export CYPRESS_RECORD_KEY=$(echo $CYPRESS_KEY | base64 --decode)
# additional flags for Cypress dashboard recording
$cypress --spec "cypress/integration/$page" --browser "$browser" \
--record --group "$group" --tag "${GITHUB_REPOSITORY},${GITHUB_EVENT_NAME}" \
@@ -190,8 +190,8 @@ cypress-run-all() {
cat "$flasklog"
say "::endgroup::"
- # Rerun SQL Lab tests with backend persist enabled
- export SUPERSET_CONFIG=tests.integration_tests.superset_test_config_sqllab_backend_persist
+ # Rerun SQL Lab tests with backend persist disabled
+ export SUPERSET_CONFIG=tests.integration_tests.superset_test_config_sqllab_backend_persist_off
# Restart Flask with new configs
kill $flaskProcessId
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8731b8aa3d1bb..1f29891dfddc9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -20,7 +20,7 @@ repos:
hooks:
- id: isort
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v0.910
+ rev: v0.941
hooks:
- id: mypy
additional_dependencies: [types-all]
diff --git a/RELEASING/changelog.py b/RELEASING/changelog.py
index 0cf600280b799..441e3092d047e 100644
--- a/RELEASING/changelog.py
+++ b/RELEASING/changelog.py
@@ -381,12 +381,12 @@ def change_log(
with open(csv, "w") as csv_file:
log_items = list(logs)
field_names = log_items[0].keys()
- writer = lib_csv.DictWriter( # type: ignore
+ writer = lib_csv.DictWriter(
csv_file,
delimiter=",",
quotechar='"',
quoting=lib_csv.QUOTE_ALL,
- fieldnames=field_names, # type: ignore
+ fieldnames=field_names,
)
writer.writeheader()
for log in logs:
diff --git a/UPDATING.md b/UPDATING.md
index ea9f02094a52c..07193a462d3be 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -24,6 +24,7 @@ assists people when migrating to a new version.
## Next
+- [19046](https://github.com/apache/superset/pull/19046): Enables the drag and drop interface in Explore control panel by default. Flips `ENABLE_EXPLORE_DRAG_AND_DROP` and `ENABLE_DND_WITH_CLICK_UX` feature flags to `True`.
- [18936](https://github.com/apache/superset/pull/18936): Removes legacy SIP-15 interm logic/flags—specifically the `SIP_15_ENABLED`, `SIP_15_GRACE_PERIOD_END`, `SIP_15_DEFAULT_TIME_RANGE_ENDPOINTS`, and `SIP_15_TOAST_MESSAGE` flags. Time range endpoints are no longer configurable and strictly adhere to the `[start, end)` paradigm, i.e., inclusive of the start and exclusive of the end. Additionally this change removes the now obsolete `time_range_endpoints` from the form-data and resulting in the cache being busted.
### Breaking Changes
@@ -34,9 +35,10 @@ assists people when migrating to a new version.
- [17984](https://github.com/apache/superset/pull/17984): Default Flask SECRET_KEY has changed for security reasons. You should always override with your own secret. Set `PREVIOUS_SECRET_KEY` (ex: PREVIOUS_SECRET_KEY = "\2\1thisismyscretkey\1\2\\e\\y\\y\\h") with your previous key and use `superset re-encrypt-secrets` to rotate you current secrets
- [15254](https://github.com/apache/superset/pull/15254): Previously `QUERY_COST_FORMATTERS_BY_ENGINE`, `SQL_VALIDATORS_BY_ENGINE` and `SCHEDULED_QUERIES` were expected to be defined in the feature flag dictionary in the `config.py` file. These should now be defined as a top-level config, with the feature flag dictionary being reserved for boolean only values.
- [17539](https://github.com/apache/superset/pull/17539): all Superset CLI commands (init, load_examples and etc) require setting the FLASK_APP environment variable (which is set by default when `.flaskenv` is loaded)
-- [18970](https://github.com/apache/superset/pull/18970): Changes feature
-flag for the legacy datasource editor (DISABLE_LEGACY_DATASOURCE_EDITOR) in config.py to True, thus disabling the feature from being shown in the client.
+- [18970](https://github.com/apache/superset/pull/18970): Changes feature flag for the legacy datasource editor (DISABLE_LEGACY_DATASOURCE_EDITOR) in config.py to True, thus disabling the feature from being shown in the client.
- [19017](https://github.com/apache/superset/pull/19017): Removes Python 3.7 support.
+- [19142](https://github.com/apache/superset/pull/19142): Changes feature flag for versioned export (VERSIONED_EXPORT) to be true.
+- [19107](https://github.com/apache/superset/pull/19107): Feature flag `SQLLAB_BACKEND_PERSISTENCE` is now on by default, which enables persisting SQL Lab tabs in the backend instead of the browser's `localStorage`.
### Potential Downtime
@@ -49,11 +51,12 @@ flag for the legacy datasource editor (DISABLE_LEGACY_DATASOURCE_EDITOR) in conf
### Deprecations
+- [19078](https://github.com/apache/superset/pull/19078): Creation of old shorturl links has been deprecated in favor of a new permalink feature that solves the long url problem (old shorturls will still work, though!). By default, new permalinks use UUID4 as the key. However, to use serial ids similar to the old shorturls, add the following to your `superset_config.py`: `PERMALINK_KEY_TYPE = "id"`.
- [18960](https://github.com/apache/superset/pull/18960): Persisting URL params in chart metadata is no longer supported. To set a default value for URL params in Jinja code, use the optional second argument: `url_param("my-param", "my-default-value")`.
### Other
-- [17589](https://github.com/apache/incubator-superset/pull/17589): It is now possible to limit access to users' recent activity data by setting the `ENABLE_BROAD_ACTIVITY_ACCESS` config flag to false, or customizing the `raise_for_user_activity_access` method in the security manager.
+- [17589](https://github.com/apache/superset/pull/17589): It is now possible to limit access to users' recent activity data by setting the `ENABLE_BROAD_ACTIVITY_ACCESS` config flag to false, or customizing the `raise_for_user_activity_access` method in the security manager.
- [17536](https://github.com/apache/superset/pull/17536): introduced a key-value endpoint to store dashboard filter state. This endpoint is backed by Flask-Caching and the default configuration assumes that the values will be stored in the file system. If you are already using another cache backend like Redis or Memchached, you'll probably want to change this setting in `superset_config.py`. The key is `FILTER_STATE_CACHE_CONFIG` and the available settings can be found in Flask-Caching [docs](https://flask-caching.readthedocs.io/en/latest/).
- [17882](https://github.com/apache/superset/pull/17882): introduced a key-value endpoint to store Explore form data. This endpoint is backed by Flask-Caching and the default configuration assumes that the values will be stored in the file system. If you are already using another cache backend like Redis or Memchached, you'll probably want to change this setting in `superset_config.py`. The key is `EXPLORE_FORM_DATA_CACHE_CONFIG` and the available settings can be found in Flask-Caching [docs](https://flask-caching.readthedocs.io/en/latest/).
diff --git a/docs/static/resources/openapi.json b/docs/static/resources/openapi.json
index 86d07ad8264c5..1e8c6129f1d44 100644
--- a/docs/static/resources/openapi.json
+++ b/docs/static/resources/openapi.json
@@ -2248,6 +2248,9 @@
"allows_virtual_table_explore": {
"type": "boolean"
},
+ "disable_data_preview": {
+ "type": "boolean"
+ },
"backend": {
"type": "string"
},
@@ -2472,6 +2475,9 @@
"allows_virtual_table_explore": {
"readOnly": true
},
+ "disable_data_preview": {
+ "readOnly": true
+ },
"backend": {
"readOnly": true
},
@@ -2571,7 +2577,7 @@
"type": "boolean"
},
"extra": {
- "description": "
JSON string containing extra configuration elements. 1. The engine_params
object gets unpacked into the sqlalchemy.create_engine call, while the metadata_params
gets unpacked into the sqlalchemy.MetaData call. 2. The metadata_cache_timeout
is a cache timeout setting in seconds for metadata fetch of this database. Specify it as \"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600} . If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires. 3. The schemas_allowed_for_csv_upload
is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as \"schemas_allowed_for_csv_upload\": [\"public\", \"csv_upload\"] . If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty 4. the version
field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct 5. The allows_virtual_table_explore
field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.
",
+ "description": "JSON string containing extra configuration elements. 1. The engine_params
object gets unpacked into the sqlalchemy.create_engine call, while the metadata_params
gets unpacked into the sqlalchemy.MetaData call. 2. The metadata_cache_timeout
is a cache timeout setting in seconds for metadata fetch of this database. Specify it as \"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600} . If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires. 3. The schemas_allowed_for_csv_upload
is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as \"schemas_allowed_for_csv_upload\": [\"public\", \"csv_upload\"] . If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty 4. the version
field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct 5. The allows_virtual_table_explore
field is a boolean specifying whether or not the Explore button in SQL Lab results is shown 6. The disable_data_preview
field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.
",
"type": "string"
},
"force_ctas_schema": {
@@ -2663,7 +2669,7 @@
"type": "boolean"
},
"extra": {
- "description": "JSON string containing extra configuration elements. 1. The engine_params
object gets unpacked into the sqlalchemy.create_engine call, while the metadata_params
gets unpacked into the sqlalchemy.MetaData call. 2. The metadata_cache_timeout
is a cache timeout setting in seconds for metadata fetch of this database. Specify it as \"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600} . If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires. 3. The schemas_allowed_for_csv_upload
is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as \"schemas_allowed_for_csv_upload\": [\"public\", \"csv_upload\"] . If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty 4. the version
field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct 5. The allows_virtual_table_explore
field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.
",
+ "description": "JSON string containing extra configuration elements. 1. The engine_params
object gets unpacked into the sqlalchemy.create_engine call, while the metadata_params
gets unpacked into the sqlalchemy.MetaData call. 2. The metadata_cache_timeout
is a cache timeout setting in seconds for metadata fetch of this database. Specify it as \"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600} . If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires. 3. The schemas_allowed_for_csv_upload
is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as \"schemas_allowed_for_csv_upload\": [\"public\", \"csv_upload\"] . If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty 4. the version
field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct 5. The allows_virtual_table_explore
field is a boolean specifying whether or not the Explore button in SQL Lab results is shown 6. The disable_data_preview
field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.
",
"type": "string"
},
"force_ctas_schema": {
@@ -2720,7 +2726,7 @@
"type": "string"
},
"extra": {
- "description": "JSON string containing extra configuration elements. 1. The engine_params
object gets unpacked into the sqlalchemy.create_engine call, while the metadata_params
gets unpacked into the sqlalchemy.MetaData call. 2. The metadata_cache_timeout
is a cache timeout setting in seconds for metadata fetch of this database. Specify it as \"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600} . If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires. 3. The schemas_allowed_for_csv_upload
is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as \"schemas_allowed_for_csv_upload\": [\"public\", \"csv_upload\"] . If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty 4. the version
field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct 5. The allows_virtual_table_explore
field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.
",
+ "description": "JSON string containing extra configuration elements. 1. The engine_params
object gets unpacked into the sqlalchemy.create_engine call, while the metadata_params
gets unpacked into the sqlalchemy.MetaData call. 2. The metadata_cache_timeout
is a cache timeout setting in seconds for metadata fetch of this database. Specify it as \"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600} . If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires. 3. The schemas_allowed_for_csv_upload
is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as \"schemas_allowed_for_csv_upload\": [\"public\", \"csv_upload\"] . If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty 4. the version
field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct 5. The allows_virtual_table_explore
field is a boolean specifying whether or not the Explore button in SQL Lab results is shown 6. The disable_data_preview
field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.
",
"type": "string"
},
"impersonate_user": {
@@ -2768,7 +2774,7 @@
"type": "string"
},
"extra": {
- "description": "JSON string containing extra configuration elements. 1. The engine_params
object gets unpacked into the sqlalchemy.create_engine call, while the metadata_params
gets unpacked into the sqlalchemy.MetaData call. 2. The metadata_cache_timeout
is a cache timeout setting in seconds for metadata fetch of this database. Specify it as \"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600} . If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires. 3. The schemas_allowed_for_csv_upload
is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as \"schemas_allowed_for_csv_upload\": [\"public\", \"csv_upload\"] . If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty 4. the version
field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct 5. The allows_virtual_table_explore
field is a boolean specifying whether or not the Explore button in SQL Lab results is shown.
",
+ "description": "JSON string containing extra configuration elements. 1. The engine_params
object gets unpacked into the sqlalchemy.create_engine call, while the metadata_params
gets unpacked into the sqlalchemy.MetaData call. 2. The metadata_cache_timeout
is a cache timeout setting in seconds for metadata fetch of this database. Specify it as \"metadata_cache_timeout\": {\"schema_cache_timeout\": 600, \"table_cache_timeout\": 600} . If unset, cache will not be enabled for the functionality. A timeout of 0 indicates that the cache never expires. 3. The schemas_allowed_for_csv_upload
is a comma separated list of schemas that CSVs are allowed to upload to. Specify it as \"schemas_allowed_for_csv_upload\": [\"public\", \"csv_upload\"] . If database flavor does not support schema or any schema is allowed to be accessed, just leave the list empty 4. the version
field is a string specifying the this db's version. This should be used with Presto DBs so that the syntax is correct 5. The allows_virtual_table_explore
field is a boolean specifying whether or not the Explore button in SQL Lab results is shown 6. The disable_data_preview
field is a boolean specifying whether or not data preview queries will be run when fetching table metadata in SQL Lab.
",
"type": "string"
},
"impersonate_user": {
diff --git a/superset-frontend/cypress-base/cypress.json b/superset-frontend/cypress-base/cypress.json
index 8e023d8a1a24b..f9729be1c3c91 100644
--- a/superset-frontend/cypress-base/cypress.json
+++ b/superset-frontend/cypress-base/cypress.json
@@ -1,7 +1,7 @@
{
"baseUrl": "http://localhost:8088",
"chromeWebSecurity": false,
- "defaultCommandTimeout": 5000,
+ "defaultCommandTimeout": 8000,
"numTestsKeptInMemory": 0,
"experimentalFetchPolyfill": true,
"requestTimeout": 10000,
diff --git a/superset-frontend/cypress-base/cypress/integration/dashboard/key_value.test.ts b/superset-frontend/cypress-base/cypress/integration/dashboard/key_value.test.ts
index ba27bf30163a2..24b6ff0aa7a62 100644
--- a/superset-frontend/cypress-base/cypress/integration/dashboard/key_value.test.ts
+++ b/superset-frontend/cypress-base/cypress/integration/dashboard/key_value.test.ts
@@ -27,16 +27,19 @@ interface QueryString {
native_filters_key: string;
}
-describe('nativefiler url param key', () => {
+xdescribe('nativefiler url param key', () => {
// const urlParams = { param1: '123', param2: 'abc' };
before(() => {
cy.login();
- cy.visit(WORLD_HEALTH_DASHBOARD);
- WORLD_HEALTH_CHARTS.forEach(waitForChartLoad);
- cy.wait(1000); // wait for key to be published (debounced)
});
+
let initialFilterKey: string;
it('should have cachekey in nativefilter param', () => {
+ // things in `before` will not retry and the `waitForChartLoad` check is
+ // especially flaky and may need more retries
+ cy.visit(WORLD_HEALTH_DASHBOARD);
+ WORLD_HEALTH_CHARTS.forEach(waitForChartLoad);
+ cy.wait(1000); // wait for key to be published (debounced)
cy.location().then(loc => {
const queryParams = qs.parse(loc.search) as QueryString;
expect(typeof queryParams.native_filters_key).eq('string');
@@ -44,6 +47,9 @@ describe('nativefiler url param key', () => {
});
it('should have different key when page reloads', () => {
+ cy.visit(WORLD_HEALTH_DASHBOARD);
+ WORLD_HEALTH_CHARTS.forEach(waitForChartLoad);
+ cy.wait(1000); // wait for key to be published (debounced)
cy.location().then(loc => {
const queryParams = qs.parse(loc.search) as QueryString;
expect(queryParams.native_filters_key).not.equal(initialFilterKey);
diff --git a/superset-frontend/cypress-base/cypress/integration/sqllab/tabs.test.js b/superset-frontend/cypress-base/cypress/integration/sqllab/tabs.test.js
deleted file mode 100644
index 24dd074992b02..0000000000000
--- a/superset-frontend/cypress-base/cypress/integration/sqllab/tabs.test.js
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-describe('SqlLab query tabs', () => {
- beforeEach(() => {
- cy.login();
- cy.visit('/superset/sqllab');
- });
-
- it('allows you to create a tab', () => {
- cy.get('[data-test="sql-editor-tabs"]').then(tabList => {
- const initialTabCount = tabList.length;
- // add tab
- cy.get('[data-test="add-tab-icon"]').first().click();
- // wait until we find the new tab
- cy.get('[data-test="sql-editor-tabs"]')
- .children()
- .eq(0)
- .contains(`Untitled Query ${initialTabCount}`);
- cy.get('[data-test="sql-editor-tabs"]')
- .children()
- .eq(0)
- .contains(`Untitled Query ${initialTabCount + 1}`);
- });
- });
-
- it('allows you to close a tab', () => {
- cy.get('[data-test="sql-editor-tabs"]')
- .children()
- .then(tabListA => {
- const initialTabCount = tabListA.length;
-
- // open the tab dropdown to remove
- cy.get('[data-test="dropdown-toggle-button"]')
- .children()
- .first()
- .click({
- force: true,
- });
-
- // first item is close
- cy.get('[data-test="close-tab-menu-option"]').click();
-
- cy.get('[data-test="sql-editor-tabs"]').should(
- 'have.length',
- initialTabCount - 1,
- );
- });
- });
-});
diff --git a/superset-frontend/cypress-base/cypress/integration/sqllab/tabs.test.ts b/superset-frontend/cypress-base/cypress/integration/sqllab/tabs.test.ts
new file mode 100644
index 0000000000000..0e85664cb785a
--- /dev/null
+++ b/superset-frontend/cypress-base/cypress/integration/sqllab/tabs.test.ts
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+describe('SqlLab query tabs', () => {
+ beforeEach(() => {
+ cy.login();
+ cy.visit('/superset/sqllab');
+ });
+
+ it('allows you to create and close a tab', () => {
+ const tablistSelector = '[data-test="sql-editor-tabs"] > [role="tablist"]';
+ const tabSelector = `${tablistSelector} [role="tab"]`;
+ cy.get(tabSelector).then(tabs => {
+ const initialTabCount = tabs.length;
+ const initialUntitledCount = Math.max(
+ 0,
+ ...tabs
+ .map((i, tabItem) =>
+ Number(tabItem.textContent?.match(/Untitled Query (\d+)/)?.[1]),
+ )
+ .toArray(),
+ );
+
+ // add two new tabs
+ cy.get('[data-test="add-tab-icon"]:visible:last').click();
+ cy.contains('[role="tab"]', `Untitled Query ${initialUntitledCount + 1}`);
+ cy.get(tabSelector).should('have.length', initialTabCount + 1);
+
+ cy.get('[data-test="add-tab-icon"]:visible:last').click();
+ cy.contains('[role="tab"]', `Untitled Query ${initialUntitledCount + 2}`);
+ cy.get(tabSelector).should('have.length', initialTabCount + 2);
+
+ // close the tabs
+ cy.get(`${tabSelector}:last [data-test="dropdown-trigger"]`).click({
+ force: true,
+ });
+ cy.get('[data-test="close-tab-menu-option"]').click();
+ cy.get(tabSelector).should('have.length', initialTabCount + 1);
+ cy.contains('[role="tab"]', `Untitled Query ${initialUntitledCount + 1}`);
+
+ cy.get(`${tablistSelector} [aria-label="remove"]:last`).click();
+ cy.get(tabSelector).should('have.length', initialTabCount);
+ });
+ });
+});
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx
index 4021fa2c38597..0419c55ed3184 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx
@@ -95,6 +95,11 @@ type Control = {
default?: unknown;
};
+type SelectDefaultOption = {
+ label: string;
+ value: string;
+};
+
const groupByControl: SharedControlConfig<'SelectControl', ColumnMeta> = {
type: 'SelectControl',
label: t('Group by'),
@@ -434,29 +439,36 @@ const size: SharedControlConfig<'MetricsControl'> = {
default: null,
};
-const y_axis_format: SharedControlConfig<'SelectControl'> = {
- type: 'SelectControl',
- freeForm: true,
- label: t('Y Axis Format'),
- renderTrigger: true,
- default: DEFAULT_NUMBER_FORMAT,
- choices: D3_FORMAT_OPTIONS,
- description: D3_FORMAT_DOCS,
- mapStateToProps: state => {
- const showWarning = state.controls?.comparison_type?.value === 'percentage';
- return {
- warning: showWarning
- ? t(
- 'When `Calculation type` is set to "Percentage change", the Y ' +
- 'Axis Format is forced to `.1%`',
- )
- : null,
- disabled: showWarning,
- };
- },
-};
-
-const x_axis_time_format: SharedControlConfig<'SelectControl'> = {
+const y_axis_format: SharedControlConfig<'SelectControl', SelectDefaultOption> =
+ {
+ type: 'SelectControl',
+ freeForm: true,
+ label: t('Y Axis Format'),
+ renderTrigger: true,
+ default: DEFAULT_NUMBER_FORMAT,
+ choices: D3_FORMAT_OPTIONS,
+ description: D3_FORMAT_DOCS,
+ filterOption: ({ data: option }, search) =>
+ option.label.includes(search) || option.value.includes(search),
+ mapStateToProps: state => {
+ const showWarning =
+ state.controls?.comparison_type?.value === 'percentage';
+ return {
+ warning: showWarning
+ ? t(
+ 'When `Calculation type` is set to "Percentage change", the Y ' +
+ 'Axis Format is forced to `.1%`',
+ )
+ : null,
+ disabled: showWarning,
+ };
+ },
+ };
+
+const x_axis_time_format: SharedControlConfig<
+ 'SelectControl',
+ SelectDefaultOption
+> = {
type: 'SelectControl',
freeForm: true,
label: t('Time format'),
@@ -464,6 +476,8 @@ const x_axis_time_format: SharedControlConfig<'SelectControl'> = {
default: DEFAULT_TIME_FORMAT,
choices: D3_TIME_FORMAT_OPTIONS,
description: D3_TIME_FORMAT_DOCS,
+ filterOption: ({ data: option }, search) =>
+ option.label.includes(search) || option.value.includes(search),
};
const adhoc_filters: SharedControlConfig<'AdhocFilterControl'> = {
diff --git a/superset-frontend/packages/superset-ui-core/src/chart/models/ChartMetadata.ts b/superset-frontend/packages/superset-ui-core/src/chart/models/ChartMetadata.ts
index 1013eeee2d3b3..7a25fe86208d1 100644
--- a/superset-frontend/packages/superset-ui-core/src/chart/models/ChartMetadata.ts
+++ b/superset-frontend/packages/superset-ui-core/src/chart/models/ChartMetadata.ts
@@ -43,10 +43,11 @@ export interface ChartMetadataConfig {
exampleGallery?: ExampleImage[];
tags?: string[];
category?: string | null;
- label?: {
- name?: ChartLabel;
- description?: string;
- } | null;
+ // deprecated: true hides a chart from all viz picker interactions.
+ deprecated?: boolean;
+ // label: ChartLabel.DEPRECATED which will display a "deprecated" label on the chart.
+ label?: ChartLabel | null;
+ labelExplanation?: string | null;
}
export default class ChartMetadata {
@@ -80,10 +81,11 @@ export default class ChartMetadata {
category: string | null;
- label?: {
- name?: ChartLabel;
- description?: string;
- } | null;
+ deprecated?: boolean;
+
+ label?: ChartLabel | null;
+
+ labelExplanation?: string | null;
constructor(config: ChartMetadataConfig) {
const {
@@ -101,7 +103,9 @@ export default class ChartMetadata {
exampleGallery = [],
tags = [],
category = null,
+ deprecated = false,
label = null,
+ labelExplanation = null,
} = config;
this.name = name;
@@ -127,7 +131,9 @@ export default class ChartMetadata {
this.exampleGallery = exampleGallery;
this.tags = tags;
this.category = category;
+ this.deprecated = deprecated;
this.label = label;
+ this.labelExplanation = labelExplanation;
}
canBeAnnotationType(type: string): boolean {
diff --git a/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts b/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts
index aad547ca2aa5d..0bfae7777e7df 100644
--- a/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts
+++ b/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts
@@ -53,18 +53,21 @@ export interface PlainObject {
}
export enum ChartLabel {
- VERIFIED = 'VERIFIED',
DEPRECATED = 'DEPRECATED',
FEATURED = 'FEATURED',
}
-export const ChartLabelWeight = {
+export const chartLabelExplanations: Record = {
+ [ChartLabel.DEPRECATED]:
+ 'This chart uses features or modules which are no longer actively maintained. It will eventually be replaced or removed.',
+ [ChartLabel.FEATURED]:
+ 'This chart was tested and verified, so the overall experience should be stable.',
+};
+
+export const chartLabelWeight: Record = {
[ChartLabel.DEPRECATED]: {
weight: -0.1,
},
- [ChartLabel.VERIFIED]: {
- weight: 0.2,
- },
[ChartLabel.FEATURED]: {
weight: 0.1,
},
diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.js b/superset-frontend/src/SqlLab/actions/sqlLab.js
index f89a6a8535df7..e13e4263a36cd 100644
--- a/superset-frontend/src/SqlLab/actions/sqlLab.js
+++ b/superset-frontend/src/SqlLab/actions/sqlLab.js
@@ -1018,28 +1018,13 @@ function getTableMetadata(table, query, dispatch) {
),
})
.then(({ json }) => {
- const dataPreviewQuery = {
- id: shortid.generate(),
- dbId: query.dbId,
- sql: json.selectStar,
- tableName: table.name,
- sqlEditorId: null,
- tab: '',
- runAsync: false,
- ctas: false,
- isDataPreview: true,
- };
const newTable = {
...table,
...json,
expanded: true,
isMetadataLoading: false,
- dataPreviewQueryId: dataPreviewQuery.id,
};
- Promise.all([
- dispatch(mergeTable(newTable, dataPreviewQuery)), // Merge table to tables in state
- dispatch(runQuery(dataPreviewQuery)), // Run query to get preview data for table
- ]);
+ dispatch(mergeTable(newTable)); // Merge table to tables in state
return newTable;
})
.catch(() =>
@@ -1082,7 +1067,7 @@ function getTableExtendedMetadata(table, query, dispatch) {
);
}
-export function addTable(query, tableName, schemaName) {
+export function addTable(query, database, tableName, schemaName) {
return function (dispatch) {
const table = {
dbId: query.dbId,
@@ -1110,6 +1095,32 @@ export function addTable(query, tableName, schemaName) {
})
: Promise.resolve({ json: { id: shortid.generate() } });
+ if (!database.disable_data_preview && database.id === query.dbId) {
+ const dataPreviewQuery = {
+ id: shortid.generate(),
+ dbId: query.dbId,
+ sql: newTable.selectStar,
+ tableName: table.name,
+ sqlEditorId: null,
+ tab: '',
+ runAsync: database.allow_run_async,
+ ctas: false,
+ isDataPreview: true,
+ };
+ Promise.all([
+ dispatch(
+ mergeTable(
+ {
+ ...newTable,
+ dataPreviewQueryId: dataPreviewQuery.id,
+ },
+ dataPreviewQuery,
+ ),
+ ),
+ dispatch(runQuery(dataPreviewQuery)),
+ ]);
+ }
+
return sync
.then(({ json: resultJson }) =>
dispatch(mergeTable({ ...table, id: resultJson.id })),
diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.test.js b/superset-frontend/src/SqlLab/actions/sqlLab.test.js
index d04d8b90ab1a8..789ae986bfbe7 100644
--- a/superset-frontend/src/SqlLab/actions/sqlLab.test.js
+++ b/superset-frontend/src/SqlLab/actions/sqlLab.test.js
@@ -727,28 +727,60 @@ describe('async actions', () => {
it('updates the table schema state in the backend', () => {
expect.assertions(5);
+ const database = { disable_data_preview: true };
+ const tableName = 'table';
+ const schemaName = 'schema';
+ const store = mockStore({});
+ const expectedActionTypes = [
+ actions.MERGE_TABLE, // addTable
+ actions.MERGE_TABLE, // getTableMetadata
+ actions.MERGE_TABLE, // getTableExtendedMetadata
+ actions.MERGE_TABLE, // addTable
+ ];
+ return store
+ .dispatch(actions.addTable(query, database, tableName, schemaName))
+ .then(() => {
+ expect(store.getActions().map(a => a.type)).toEqual(
+ expectedActionTypes,
+ );
+ expect(fetchMock.calls(updateTableSchemaEndpoint)).toHaveLength(1);
+ expect(fetchMock.calls(getTableMetadataEndpoint)).toHaveLength(1);
+ expect(fetchMock.calls(getExtraTableMetadataEndpoint)).toHaveLength(
+ 1,
+ );
+
+ // tab state is not updated, since no query was run
+ expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(0);
+ });
+ });
+
+ it('updates and runs data preview query when configured', () => {
+ expect.assertions(5);
+
const results = {
data: mockBigNumber,
- query: { sqlEditorId: 'null' },
+ query: { sqlEditorId: 'null', dbId: 1 },
query_id: 'efgh',
};
fetchMock.post(runQueryEndpoint, JSON.stringify(results), {
overwriteRoutes: true,
});
+ const database = { disable_data_preview: false, id: 1 };
const tableName = 'table';
const schemaName = 'schema';
const store = mockStore({});
const expectedActionTypes = [
actions.MERGE_TABLE, // addTable
actions.MERGE_TABLE, // getTableMetadata
- actions.START_QUERY, // runQuery (data preview)
actions.MERGE_TABLE, // getTableExtendedMetadata
- actions.QUERY_SUCCESS, // querySuccess
+ actions.MERGE_TABLE, // addTable (data preview)
+ actions.START_QUERY, // runQuery (data preview)
actions.MERGE_TABLE, // addTable
+ actions.QUERY_SUCCESS, // querySuccess
];
return store
- .dispatch(actions.addTable(query, tableName, schemaName))
+ .dispatch(actions.addTable(query, database, tableName, schemaName))
.then(() => {
expect(store.getActions().map(a => a.type)).toEqual(
expectedActionTypes,
@@ -758,7 +790,6 @@ describe('async actions', () => {
expect(fetchMock.calls(getExtraTableMetadataEndpoint)).toHaveLength(
1,
);
-
// tab state is not updated, since the query is a data preview
expect(fetchMock.calls(updateTabStateEndpoint)).toHaveLength(0);
});
diff --git a/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx b/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx
index ce201e89d904c..25da49137ad60 100644
--- a/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx
+++ b/superset-frontend/src/SqlLab/components/AceEditorWrapper/index.tsx
@@ -43,11 +43,17 @@ interface Props {
actions: {
queryEditorSetSelectedText: (edit: any, text: null | string) => void;
queryEditorSetFunctionNames: (queryEditor: object, dbId: number) => void;
- addTable: (queryEditor: any, value: any, schema: any) => void;
+ addTable: (
+ queryEditor: any,
+ database: any,
+ value: any,
+ schema: any,
+ ) => void;
};
autocomplete: boolean;
onBlur: (sql: string) => void;
sql: string;
+ database: any;
schemas: any[];
tables: any[];
functionNames: string[];
@@ -171,17 +177,20 @@ class AceEditorWrapper extends React.PureComponent {
meta: 'schema',
}));
const columns = {};
- const tables = props.extendedTables || props.tables || [];
+
+ const tables = props.tables || [];
+ const extendedTables = props.extendedTables || [];
const tableWords = tables.map(t => {
- const tableName = t.name;
- const cols = t.columns || [];
+ const tableName = t.value;
+ const extendedTable = extendedTables.find(et => et.name === tableName);
+ const cols = (extendedTable && extendedTable.columns) || [];
cols.forEach(col => {
columns[col.name] = null; // using an object as a unique set
});
return {
- name: tableName,
+ name: t.label,
value: tableName,
score: TABLE_AUTOCOMPLETE_SCORE,
meta: 'table',
@@ -207,6 +216,7 @@ class AceEditorWrapper extends React.PureComponent {
if (data.meta === 'table') {
this.props.actions.addTable(
this.props.queryEditor,
+ this.props.database,
data.value,
this.props.queryEditor.schema,
);
diff --git a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx
index 93cea6d08d65f..06bf187e1185a 100644
--- a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx
+++ b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx
@@ -17,14 +17,12 @@
* under the License.
*/
import React from 'react';
-import { render } from 'spec/helpers/testing-library';
-import { ThemeProvider, supersetTheme } from '@superset-ui/core';
+import { shallow } from 'enzyme';
+import sinon from 'sinon';
import thunk from 'redux-thunk';
import configureStore from 'redux-mock-store';
import QueryAutoRefresh from 'src/SqlLab/components/QueryAutoRefresh';
import { initialState, runningQuery } from 'src/SqlLab/fixtures';
-import fetchMock from 'fetch-mock';
-import * as actions from 'src/SqlLab/actions/sqlLab';
describe('QueryAutoRefresh', () => {
const middlewares = [thunk];
@@ -40,29 +38,31 @@ describe('QueryAutoRefresh', () => {
sqlLab,
};
const store = mockStore(state);
- const setup = (overrides = {}) => (
-
-
-
- );
-
- const mockFetch = fetchMock.get('glob:*/superset/queries/*', {});
+ const getWrapper = () =>
+ shallow( )
+ .dive()
+ .dive();
+ let wrapper;
it('shouldCheckForQueries', () => {
- render(setup(), {
- useRedux: true,
- });
-
- expect(mockFetch.called()).toBe(true);
+ wrapper = getWrapper();
+ expect(wrapper.instance().shouldCheckForQueries()).toBe(true);
});
it('setUserOffline', () => {
- const spy = jest.spyOn(actions, 'setUserOffline');
+ wrapper = getWrapper();
+ const spy = sinon.spy(wrapper.instance().props.actions, 'setUserOffline');
- render(setup(), {
- useRedux: true,
+ // state not changed
+ wrapper.setState({
+ offline: false,
});
+ expect(spy.called).toBe(false);
- expect(spy).toHaveBeenCalled();
+ // state is changed
+ wrapper.setState({
+ offline: true,
+ });
+ expect(spy.callCount).toBe(1);
});
});
diff --git a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx
index 43f6c5d8a7d6e..b54936b691efe 100644
--- a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx
+++ b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-import { useState, useEffect } from 'react';
+import React from 'react';
import PropTypes from 'prop-types';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
@@ -28,12 +28,31 @@ const QUERY_UPDATE_BUFFER_MS = 5000;
const MAX_QUERY_AGE_TO_POLL = 21600000;
const QUERY_TIMEOUT_LIMIT = 10000;
-function QueryAutoRefresh({ offline, queries, queriesLastUpdate, actions }) {
- const [offlineState, setOfflineState] = useState(offline);
- let timer = null;
+class QueryAutoRefresh extends React.PureComponent {
+ constructor(props) {
+ super(props);
+ this.state = {
+ offline: props.offline,
+ };
+ }
+
+ UNSAFE_componentWillMount() {
+ this.startTimer();
+ }
+
+ componentDidUpdate(prevProps) {
+ if (prevProps.offline !== this.state.offline) {
+ this.props.actions.setUserOffline(this.state.offline);
+ }
+ }
+
+ componentWillUnmount() {
+ this.stopTimer();
+ }
- const shouldCheckForQueries = () => {
+ shouldCheckForQueries() {
// if there are started or running queries, this method should return true
+ const { queries } = this.props;
const now = new Date().getTime();
const isQueryRunning = q =>
['running', 'started', 'pending', 'fetching'].indexOf(q.state) >= 0;
@@ -41,57 +60,46 @@ function QueryAutoRefresh({ offline, queries, queriesLastUpdate, actions }) {
return Object.values(queries).some(
q => isQueryRunning(q) && now - q.startDttm < MAX_QUERY_AGE_TO_POLL,
);
- };
+ }
+
+ startTimer() {
+ if (!this.timer) {
+ this.timer = setInterval(this.stopwatch.bind(this), QUERY_UPDATE_FREQ);
+ }
+ }
- const stopwatch = () => {
+ stopTimer() {
+ clearInterval(this.timer);
+ this.timer = null;
+ }
+
+ stopwatch() {
// only poll /superset/queries/ if there are started or running queries
- if (shouldCheckForQueries()) {
+ if (this.shouldCheckForQueries()) {
SupersetClient.get({
endpoint: `/superset/queries/${
- queriesLastUpdate - QUERY_UPDATE_BUFFER_MS
+ this.props.queriesLastUpdate - QUERY_UPDATE_BUFFER_MS
}`,
timeout: QUERY_TIMEOUT_LIMIT,
})
.then(({ json }) => {
if (Object.keys(json).length > 0) {
- actions.refreshQueries(json);
+ this.props.actions.refreshQueries(json);
}
-
- setOfflineState(false);
+ this.setState({ offline: false });
})
.catch(() => {
- setOfflineState(true);
+ this.setState({ offline: true });
});
} else {
- setOfflineState(false);
+ this.setState({ offline: false });
}
- };
-
- const startTimer = () => {
- if (!timer) {
- timer = setInterval(stopwatch(), QUERY_UPDATE_FREQ);
- }
- };
-
- const stopTimer = () => {
- clearInterval(timer);
- timer = null;
- };
-
- useEffect(() => {
- startTimer();
- return () => {
- stopTimer();
- };
- }, []);
+ }
- useEffect(() => {
- actions.setUserOffline(offlineState);
- }, [offlineState]);
-
- return null;
+ render() {
+ return null;
+ }
}
-
QueryAutoRefresh.propTypes = {
offline: PropTypes.bool.isRequired,
queries: PropTypes.object.isRequired,
diff --git a/superset-frontend/src/hooks/useUrlShortener.ts b/superset-frontend/src/SqlLab/components/QueryHistory/QueryHistory.test.tsx
similarity index 52%
rename from superset-frontend/src/hooks/useUrlShortener.ts
rename to superset-frontend/src/SqlLab/components/QueryHistory/QueryHistory.test.tsx
index 33cb636b4527c..782b147839186 100644
--- a/superset-frontend/src/hooks/useUrlShortener.ts
+++ b/superset-frontend/src/SqlLab/components/QueryHistory/QueryHistory.test.tsx
@@ -16,24 +16,35 @@
* specific language governing permissions and limitations
* under the License.
*/
-import { useState, useEffect } from 'react';
-import { getShortUrl as getShortUrlUtil } from 'src/utils/urlUtils';
+import React from 'react';
+import { render, screen } from 'spec/helpers/testing-library';
+import QueryHistory from 'src/SqlLab/components/QueryHistory';
-export function useUrlShortener(url: string): Function {
- const [update, setUpdate] = useState(false);
- const [shortUrl, setShortUrl] = useState('');
+const NOOP = () => {};
+const mockedProps = {
+ queries: [],
+ actions: {
+ queryEditorSetSql: NOOP,
+ cloneQueryToNewTab: NOOP,
+ fetchQueryResults: NOOP,
+ clearQueryResults: NOOP,
+ removeQuery: NOOP,
+ },
+ displayLimit: 1000,
+};
- async function getShortUrl(urlOverride?: string) {
- if (update) {
- const newShortUrl = await getShortUrlUtil(urlOverride || url);
- setShortUrl(newShortUrl);
- setUpdate(false);
- return newShortUrl;
- }
- return shortUrl;
- }
+const setup = (overrides = {}) => (
+
+);
- useEffect(() => setUpdate(true), [url]);
+describe('QueryHistory', () => {
+ it('Renders an empty state for query history', () => {
+ render(setup());
- return getShortUrl;
-}
+ const emptyStateText = screen.getByText(
+ /run a query to display query history/i,
+ );
+
+ expect(emptyStateText).toBeVisible();
+ });
+});
diff --git a/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx b/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx
index e2d0453bb297a..7cf9d6ba657dd 100644
--- a/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx
+++ b/superset-frontend/src/SqlLab/components/QueryHistory/index.tsx
@@ -17,8 +17,8 @@
* under the License.
*/
import React from 'react';
-import Alert from 'src/components/Alert';
-import { t } from '@superset-ui/core';
+import { EmptyStateMedium } from 'src/components/EmptyState';
+import { t, styled } from '@superset-ui/core';
import { Query } from 'src/SqlLab/types';
import QueryTable from 'src/SqlLab/components/QueryTable';
@@ -34,6 +34,17 @@ interface QueryHistoryProps {
displayLimit: number;
}
+const StyledEmptyStateWrapper = styled.div`
+ height: 100%;
+ .ant-empty-image img {
+ margin-right: 28px;
+ }
+
+ p {
+ margin-right: 28px;
+ }
+`;
+
const QueryHistory = ({ queries, actions, displayLimit }: QueryHistoryProps) =>
queries.length > 0 ? (
displayLimit={displayLimit}
/>
) : (
-
+
+
+
);
export default QueryHistory;
diff --git a/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx b/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx
index dbf81cfcf282d..1786a6cf313a6 100644
--- a/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx
+++ b/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx
@@ -20,11 +20,15 @@ import React from 'react';
import configureStore from 'redux-mock-store';
import thunk from 'redux-thunk';
import { styledShallow as shallow } from 'spec/helpers/theming';
+import { render, screen, act } from 'spec/helpers/testing-library';
import SouthPaneContainer from 'src/SqlLab/components/SouthPane/state';
import ResultSet from 'src/SqlLab/components/ResultSet';
import '@testing-library/jest-dom/extend-expect';
import { STATUS_OPTIONS } from 'src/SqlLab/constants';
import { initialState } from 'src/SqlLab/fixtures';
+import { UserWithPermissionsAndRoles } from 'src/types/bootstrapTypes';
+
+const NOOP = () => {};
const mockedProps = {
editorQueries: [
@@ -71,13 +75,36 @@ const mockedProps = {
offline: false,
};
+const mockedEmptyProps = {
+ editorQueries: [],
+ latestQueryId: '',
+ dataPreviewQueries: [],
+ actions: {
+ queryEditorSetSql: NOOP,
+ cloneQueryToNewTab: NOOP,
+ fetchQueryResults: NOOP,
+ clearQueryResults: NOOP,
+ removeQuery: NOOP,
+ setActiveSouthPaneTab: NOOP,
+ },
+ activeSouthPaneTab: '',
+ height: 100,
+ databases: '',
+ offline: false,
+ displayLimit: 100,
+ user: UserWithPermissionsAndRoles,
+ defaultQueryLimit: 100,
+};
+
const middlewares = [thunk];
const mockStore = configureStore(middlewares);
const store = mockStore(initialState);
+const setup = (overrides = {}) => (
+
+);
-describe('SouthPane', () => {
- const getWrapper = () =>
- shallow( ).dive();
+describe('SouthPane - Enzyme', () => {
+ const getWrapper = () => shallow(setup()).dive();
let wrapper;
@@ -95,3 +122,20 @@ describe('SouthPane', () => {
);
});
});
+
+describe('SouthPane - RTL', () => {
+ const renderAndWait = overrides => {
+ const mounted = act(async () => {
+ render(setup(overrides));
+ });
+
+ return mounted;
+ };
+ it('Renders an empty state for results', async () => {
+ await renderAndWait(mockedEmptyProps);
+
+ const emptyStateText = screen.getByText(/run a query to display results/i);
+
+ expect(emptyStateText).toBeVisible();
+ });
+});
diff --git a/superset-frontend/src/SqlLab/components/SouthPane/index.tsx b/superset-frontend/src/SqlLab/components/SouthPane/index.tsx
index f7efc04f34725..3fb0f9c5261e4 100644
--- a/superset-frontend/src/SqlLab/components/SouthPane/index.tsx
+++ b/superset-frontend/src/SqlLab/components/SouthPane/index.tsx
@@ -20,6 +20,7 @@ import React, { createRef } from 'react';
import shortid from 'shortid';
import Alert from 'src/components/Alert';
import Tabs from 'src/components/Tabs';
+import { EmptyStateMedium } from 'src/components/EmptyState';
import { t, styled } from '@superset-ui/core';
import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags';
@@ -93,6 +94,17 @@ const StyledPane = styled.div`
}
`;
+const StyledEmptyStateWrapper = styled.div`
+ height: 100%;
+ .ant-empty-image img {
+ margin-right: 28px;
+ }
+
+ p {
+ margin-right: 28px;
+ }
+`;
+
export default function SouthPane({
editorQueries,
latestQueryId,
@@ -161,7 +173,12 @@ export default function SouthPane({
}
} else {
results = (
-
+
+
+
);
}
return results;
diff --git a/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx b/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx
index 168a53d52b1a0..7899cbf71908a 100644
--- a/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx
+++ b/superset-frontend/src/SqlLab/components/SqlEditor/index.jsx
@@ -514,6 +514,7 @@ class SqlEditor extends React.PureComponent {
onChange={this.onSqlChanged}
queryEditor={this.props.queryEditor}
sql={this.props.queryEditor.sql}
+ database={this.props.database}
schemas={this.props.queryEditor.schemaOptions}
tables={this.props.queryEditor.tableOptions}
functionNames={this.props.queryEditor.functionNames}
diff --git a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx
index 7bbdfcf6345ec..f9e8c2da9f98f 100644
--- a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx
+++ b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-import React from 'react';
+import React, { useEffect, useRef, useCallback } from 'react';
import Button from 'src/components/Button';
import { t, styled, css, SupersetTheme } from '@superset-ui/core';
import Collapse from 'src/components/Collapse';
@@ -36,12 +36,15 @@ interface actionsTypes {
queryEditorSetFunctionNames: (queryEditor: QueryEditor, dbId: number) => void;
collapseTable: (table: Table) => void;
expandTable: (table: Table) => void;
- addTable: (queryEditor: any, value: any, schema: any) => void;
+ addTable: (queryEditor: any, database: any, value: any, schema: any) => void;
setDatabases: (arg0: any) => {};
addDangerToast: (msg: string) => void;
queryEditorSetSchema: (queryEditor: QueryEditor, schema?: string) => void;
queryEditorSetSchemaOptions: () => void;
- queryEditorSetTableOptions: (options: Array) => void;
+ queryEditorSetTableOptions: (
+ queryEditor: QueryEditor,
+ options: Array<any>,
+ ) => void;
resetState: () => void;
}
@@ -86,6 +89,13 @@ export default function SqlEditorLeftBar({
tables = [],
height = 500,
}: SqlEditorLeftBarProps) {
+ // Ref needed to avoid infinite rerenders on handlers
+ // that require and modify the queryEditor
+ const queryEditorRef = useRef(queryEditor);
+ useEffect(() => {
+ queryEditorRef.current = queryEditor;
+ }, [queryEditor]);
+
const onDbChange = ({ id: dbId }: { id: number }) => {
actions.queryEditorSetDb(queryEditor, dbId);
actions.queryEditorSetFunctionNames(queryEditor, dbId);
@@ -93,7 +103,7 @@ export default function SqlEditorLeftBar({
const onTableChange = (tableName: string, schemaName: string) => {
if (tableName && schemaName) {
- actions.addTable(queryEditor, tableName, schemaName);
+ actions.addTable(queryEditor, database, tableName, schemaName);
}
};
@@ -132,9 +142,23 @@ export default function SqlEditorLeftBar({
const shouldShowReset = window.location.search === '?reset=1';
const tableMetaDataHeight = height - 130; // 130 is the height of the selects above
- const onSchemaChange = (schema: string) => {
- actions.queryEditorSetSchema(queryEditor, schema);
- };
+ const handleSchemaChange = useCallback(
+ (schema: string) => {
+ if (queryEditorRef.current) {
+ actions.queryEditorSetSchema(queryEditorRef.current, schema);
+ }
+ },
+ [actions],
+ );
+
+ const handleTablesLoad = React.useCallback(
+ (options: Array<any>) => {
+ if (queryEditorRef.current) {
+ actions.queryEditorSetTableOptions(queryEditorRef.current, options);
+ }
+ },
+ [actions],
+ );
return (
@@ -143,10 +167,10 @@ export default function SqlEditorLeftBar({
getDbList={actions.setDatabases}
handleError={actions.addDangerToast}
onDbChange={onDbChange}
- onSchemaChange={onSchemaChange}
+ onSchemaChange={handleSchemaChange}
onSchemasLoad={actions.queryEditorSetSchemaOptions}
onTableChange={onTableChange}
- onTablesLoad={actions.queryEditorSetTableOptions}
+ onTablesLoad={handleTablesLoad}
schema={queryEditor.schema}
sqlLabMode
/>
diff --git a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx
index 11c6fa8b6c097..8c20a493b0876 100644
--- a/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx
+++ b/superset-frontend/src/SqlLab/components/TabbedSqlEditors/index.jsx
@@ -386,9 +386,7 @@ class TabbedSqlEditors extends React.PureComponent {
);
const tabHeader = (
-
-
-
+
{qe.title} {' '}
);
diff --git a/superset-frontend/src/components/AnchorLink/AnchorLink.test.jsx b/superset-frontend/src/components/AnchorLink/AnchorLink.test.jsx
index 9f0c05a8eb87a..3f05416b1c0c5 100644
--- a/superset-frontend/src/components/AnchorLink/AnchorLink.test.jsx
+++ b/superset-frontend/src/components/AnchorLink/AnchorLink.test.jsx
@@ -25,6 +25,7 @@ import URLShortLinkButton from 'src/components/URLShortLinkButton';
describe('AnchorLink', () => {
const props = {
anchorLinkId: 'CHART-123',
+ dashboardId: 10,
};
const globalLocation = window.location;
@@ -64,8 +65,9 @@ describe('AnchorLink', () => {
expect(wrapper.find(URLShortLinkButton)).toExist();
expect(wrapper.find(URLShortLinkButton)).toHaveProp({ placement: 'right' });
- const targetUrl = wrapper.find(URLShortLinkButton).prop('url');
- const hash = targetUrl.slice(targetUrl.indexOf('#') + 1);
- expect(hash).toBe(props.anchorLinkId);
+ const anchorLinkId = wrapper.find(URLShortLinkButton).prop('anchorLinkId');
+ const dashboardId = wrapper.find(URLShortLinkButton).prop('dashboardId');
+ expect(anchorLinkId).toBe(props.anchorLinkId);
+ expect(dashboardId).toBe(props.dashboardId);
});
});
diff --git a/superset-frontend/src/components/AnchorLink/index.jsx b/superset-frontend/src/components/AnchorLink/index.jsx
index 743cb3a3c6493..71ba76dff7a07 100644
--- a/superset-frontend/src/components/AnchorLink/index.jsx
+++ b/superset-frontend/src/components/AnchorLink/index.jsx
@@ -21,11 +21,11 @@ import PropTypes from 'prop-types';
import { t } from '@superset-ui/core';
import URLShortLinkButton from 'src/components/URLShortLinkButton';
-import getDashboardUrl from 'src/dashboard/util/getDashboardUrl';
import getLocationHash from 'src/dashboard/util/getLocationHash';
const propTypes = {
anchorLinkId: PropTypes.string.isRequired,
+ dashboardId: PropTypes.number,
filters: PropTypes.object,
showShortLinkButton: PropTypes.bool,
inFocus: PropTypes.bool,
@@ -70,17 +70,14 @@ class AnchorLink extends React.PureComponent {
}
render() {
- const { anchorLinkId, filters, showShortLinkButton, placement } =
+ const { anchorLinkId, dashboardId, showShortLinkButton, placement } =
this.props;
return (
{showShortLinkButton && (
void;
style?: React.CSSProperties;
@@ -49,5 +49,3 @@ export default function Checkbox({ checked, onChange, style }: CheckboxProps) {
);
}
-
-export type { CheckboxProps };
diff --git a/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx b/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx
index d8d4e23eb1651..2387c2e2517fe 100644
--- a/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx
+++ b/superset-frontend/src/components/DatabaseSelector/DatabaseSelector.test.tsx
@@ -76,6 +76,7 @@ beforeEach(() => {
allows_cost_estimate: 'Allows Cost Estimate',
allows_subquery: 'Allows Subquery',
allows_virtual_table_explore: 'Allows Virtual Table Explore',
+ disable_data_preview: 'Disables SQL Lab Data Preview',
backend: 'Backend',
changed_on: 'Changed On',
changed_on_delta_humanized: 'Changed On Delta Humanized',
@@ -97,6 +98,7 @@ beforeEach(() => {
'allows_cost_estimate',
'allows_subquery',
'allows_virtual_table_explore',
+ 'disable_data_preview',
'backend',
'changed_on',
'changed_on_delta_humanized',
@@ -130,6 +132,7 @@ beforeEach(() => {
allows_cost_estimate: null,
allows_subquery: true,
allows_virtual_table_explore: true,
+ disable_data_preview: false,
backend: 'postgresql',
changed_on: '2021-03-09T19:02:07.141095',
changed_on_delta_humanized: 'a day ago',
@@ -150,6 +153,7 @@ beforeEach(() => {
allows_cost_estimate: null,
allows_subquery: true,
allows_virtual_table_explore: true,
+ disable_data_preview: false,
backend: 'mysql',
changed_on: '2021-03-09T19:02:07.141095',
changed_on_delta_humanized: 'a day ago',
diff --git a/superset-frontend/src/components/Dropdown/index.tsx b/superset-frontend/src/components/Dropdown/index.tsx
index fdfa9f945c6c2..e5d5f9f8526c5 100644
--- a/superset-frontend/src/components/Dropdown/index.tsx
+++ b/superset-frontend/src/components/Dropdown/index.tsx
@@ -72,7 +72,7 @@ export interface DropdownProps {
export const Dropdown = ({ overlay, ...rest }: DropdownProps) => (
-
+
diff --git a/superset-frontend/src/components/Popover/index.tsx b/superset-frontend/src/components/Popover/index.tsx
index 880e457913a2e..bccc31c35c4bb 100644
--- a/superset-frontend/src/components/Popover/index.tsx
+++ b/superset-frontend/src/components/Popover/index.tsx
@@ -18,6 +18,9 @@
*/
import { Popover } from 'antd';
+export { PopoverProps } from 'antd/lib/popover';
+export { TooltipPlacement } from 'antd/lib/tooltip';
+
// Eventually Popover can be wrapped and customized in this file
// for now we're just redirecting
export default Popover;
diff --git a/superset-frontend/src/components/Select/utils.ts b/superset-frontend/src/components/Select/utils.ts
index f62b93ade3668..f3880f52f7d01 100644
--- a/superset-frontend/src/components/Select/utils.ts
+++ b/superset-frontend/src/components/Select/utils.ts
@@ -60,8 +60,10 @@ export function findValue(
return (Array.isArray(value) ? value : [value]).map(find);
}
-export function getValue(option: string | number | { value: string | number }) {
- return typeof option === 'object' ? option.value : option;
+export function getValue(
+ option: string | number | { value: string | number | null } | null,
+) {
+ return option && typeof option === 'object' ? option.value : option;
}
type LabeledValue = { label?: ReactNode; value?: V };
diff --git a/superset-frontend/src/components/TableSelector/TableSelector.test.tsx b/superset-frontend/src/components/TableSelector/TableSelector.test.tsx
index cd7b51d4b124d..013e937edeb41 100644
--- a/superset-frontend/src/components/TableSelector/TableSelector.test.tsx
+++ b/superset-frontend/src/components/TableSelector/TableSelector.test.tsx
@@ -20,12 +20,13 @@
import React from 'react';
import { render, screen, waitFor } from 'spec/helpers/testing-library';
import { SupersetClient } from '@superset-ui/core';
+import { act } from 'react-dom/test-utils';
import userEvent from '@testing-library/user-event';
import TableSelector from '.';
const SupersetClientGet = jest.spyOn(SupersetClient, 'get');
-const createProps = () => ({
+const createProps = (props = {}) => ({
database: {
id: 1,
database_name: 'main',
@@ -34,23 +35,33 @@ const createProps = () => ({
},
schema: 'test_schema',
handleError: jest.fn(),
+ ...props,
});
-beforeAll(() => {
- SupersetClientGet.mockImplementation(
- async () =>
- ({
- json: {
- options: [
- { label: 'table_a', value: 'table_a' },
- { label: 'table_b', value: 'table_b' },
- ],
- },
- } as any),
- );
+afterEach(() => {
+ jest.clearAllMocks();
});
+const getSchemaMockFunction = async () =>
+ ({
+ json: {
+ result: ['schema_a', 'schema_b'],
+ },
+ } as any);
+
+const getTableMockFunction = async () =>
+ ({
+ json: {
+ options: [
+ { label: 'table_a', value: 'table_a' },
+ { label: 'table_b', value: 'table_b' },
+ ],
+ },
+ } as any);
+
test('renders with default props', async () => {
+ SupersetClientGet.mockImplementation(getTableMockFunction);
+
const props = createProps();
render( , { useRedux: true });
const databaseSelect = screen.getByRole('combobox', {
@@ -70,6 +81,8 @@ test('renders with default props', async () => {
});
test('renders table options', async () => {
+ SupersetClientGet.mockImplementation(getTableMockFunction);
+
const props = createProps();
render( , { useRedux: true });
const tableSelect = screen.getByRole('combobox', {
@@ -85,6 +98,8 @@ test('renders table options', async () => {
});
test('renders disabled without schema', async () => {
+ SupersetClientGet.mockImplementation(getTableMockFunction);
+
const props = createProps();
render( , { useRedux: true });
const tableSelect = screen.getByRole('combobox', {
@@ -94,3 +109,42 @@ test('renders disabled without schema', async () => {
expect(tableSelect).toBeDisabled();
});
});
+
+test('table options are notified after schema selection', async () => {
+ SupersetClientGet.mockImplementation(getSchemaMockFunction);
+
+ const callback = jest.fn();
+ const props = createProps({
+ onTablesLoad: callback,
+ schema: undefined,
+ });
+ render( , { useRedux: true });
+
+ const schemaSelect = screen.getByRole('combobox', {
+ name: 'Select schema or type schema name',
+ });
+ expect(schemaSelect).toBeInTheDocument();
+ expect(callback).not.toHaveBeenCalled();
+
+ userEvent.click(schemaSelect);
+
+ expect(
+ await screen.findByRole('option', { name: 'schema_a' }),
+ ).toBeInTheDocument();
+ expect(
+ await screen.findByRole('option', { name: 'schema_b' }),
+ ).toBeInTheDocument();
+
+ SupersetClientGet.mockImplementation(getTableMockFunction);
+
+ act(() => {
+ userEvent.click(screen.getAllByText('schema_a')[1]);
+ });
+
+ await waitFor(() => {
+ expect(callback).toHaveBeenCalledWith([
+ { label: 'table_a', value: 'table_a' },
+ { label: 'table_b', value: 'table_b' },
+ ]);
+ });
+});
diff --git a/superset-frontend/src/components/TableSelector/index.tsx b/superset-frontend/src/components/TableSelector/index.tsx
index 88ac9cefba47f..50804f7d920ce 100644
--- a/superset-frontend/src/components/TableSelector/index.tsx
+++ b/superset-frontend/src/components/TableSelector/index.tsx
@@ -208,15 +208,14 @@ const TableSelector: FunctionComponent = ({
currentTable = option;
}
});
- if (onTablesLoad) {
- onTablesLoad(json.options);
- }
+
+ onTablesLoad?.(json.options);
setTableOptions(options);
setCurrentTable(currentTable);
setLoadingTables(false);
if (forceRefresh) addSuccessToast('List updated');
})
- .catch(e => {
+ .catch(() => {
setLoadingTables(false);
handleError(t('There was an error loading the tables'));
});
diff --git a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.stories.tsx b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.stories.tsx
index cf9d1d6e730ed..6bf0d438daca6 100644
--- a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.stories.tsx
+++ b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.stories.tsx
@@ -18,7 +18,7 @@
*/
import React from 'react';
import { useArgs } from '@storybook/client-api';
-import TimezoneSelector, { TimezoneProps } from './index';
+import TimezoneSelector, { TimezoneSelectorProps } from './index';
export default {
title: 'TimezoneSelector',
@@ -26,7 +26,7 @@ export default {
};
// eslint-disable-next-line @typescript-eslint/no-unused-vars
-export const InteractiveTimezoneSelector = (args: TimezoneProps) => {
+export const InteractiveTimezoneSelector = (args: TimezoneSelectorProps) => {
const [{ timezone }, updateArgs] = useArgs();
const onTimezoneChange = (value: string) => {
updateArgs({ timezone: value });
diff --git a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx
index 035cff842c9e2..19c713adf4f13 100644
--- a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx
+++ b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx
@@ -20,21 +20,42 @@ import React from 'react';
import moment from 'moment-timezone';
import { render, screen, waitFor } from 'spec/helpers/testing-library';
import userEvent from '@testing-library/user-event';
-import TimezoneSelector from './index';
+import type { TimezoneSelectorProps } from './index';
-jest.spyOn(moment.tz, 'guess').mockReturnValue('America/New_York');
+const loadComponent = (mockCurrentTime?: string) => {
+ if (mockCurrentTime) {
+ jest.useFakeTimers('modern');
+ jest.setSystemTime(new Date(mockCurrentTime));
+ }
+ return new Promise>(resolve => {
+ jest.isolateModules(() => {
+ const { default: TimezoneSelector } = module.require('./index');
+ resolve(TimezoneSelector);
+ jest.useRealTimers();
+ });
+ });
+};
const getSelectOptions = () =>
waitFor(() => document.querySelectorAll('.ant-select-item-option-content'));
-it('use the timezone from `moment` if no timezone provided', () => {
+const openSelectMenu = async () => {
+ const searchInput = screen.getByRole('combobox');
+ userEvent.click(searchInput);
+};
+
+jest.spyOn(moment.tz, 'guess').mockReturnValue('America/New_York');
+
+test('use the timezone from `moment` if no timezone provided', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
render( );
expect(onTimezoneChange).toHaveBeenCalledTimes(1);
expect(onTimezoneChange).toHaveBeenCalledWith('America/Nassau');
});
-it('update to closest deduped timezone when timezone is provided', async () => {
+test('update to closest deduped timezone when timezone is provided', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
render(
{
expect(onTimezoneChange).toHaveBeenLastCalledWith('America/Vancouver');
});
-it('use the default timezone when an invalid timezone is provided', async () => {
+test('use the default timezone when an invalid timezone is provided', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
render(
,
@@ -55,7 +77,8 @@ it('use the default timezone when an invalid timezone is provided', async () =>
expect(onTimezoneChange).toHaveBeenLastCalledWith('Africa/Abidjan');
});
-it('can select a timezone values and returns canonical value', async () => {
+test('render timezones in correct order for standard time', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
render(
{
timezone="America/Nassau"
/>,
);
-
- const searchInput = screen.getByRole('combobox', {
- name: 'Timezone selector',
- });
- expect(searchInput).toBeInTheDocument();
- userEvent.click(searchInput);
- const isDaylight = moment(moment.now()).isDST();
-
- const selectedTimezone = isDaylight
- ? 'GMT -04:00 (Eastern Daylight Time)'
- : 'GMT -05:00 (Eastern Standard Time)';
-
- // selected option ranks first
+ await openSelectMenu();
const options = await getSelectOptions();
- expect(options[0]).toHaveTextContent(selectedTimezone);
-
- // others are ranked by offset
+ expect(options[0]).toHaveTextContent('GMT -05:00 (Eastern Standard Time)');
expect(options[1]).toHaveTextContent('GMT -11:00 (Pacific/Pago_Pago)');
expect(options[2]).toHaveTextContent('GMT -10:00 (Hawaii Standard Time)');
expect(options[3]).toHaveTextContent('GMT -10:00 (America/Adak)');
+});
+
+test('render timezones in correct order for daylight saving time', async () => {
+ const TimezoneSelector = await loadComponent('2022-07-01');
+ const onTimezoneChange = jest.fn();
+ render(
+ ,
+ );
+ await openSelectMenu();
+ const options = await getSelectOptions();
+ // first option is always current timezone
+ expect(options[0]).toHaveTextContent('GMT -04:00 (Eastern Daylight Time)');
+ expect(options[1]).toHaveTextContent('GMT -11:00 (Pacific/Pago_Pago)');
+ expect(options[2]).toHaveTextContent('GMT -10:00 (Hawaii Standard Time)');
+ expect(options[3]).toHaveTextContent('GMT -09:30 (Pacific/Marquesas)');
+});
+test('can select a timezone value and returns canonical timezone name', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
+ const onTimezoneChange = jest.fn();
+ render(
+ ,
+ );
+
+ await openSelectMenu();
+
+ const searchInput = screen.getByRole('combobox');
// search for mountain time
await userEvent.type(searchInput, 'mou', { delay: 10 });
-
- const findTitle = isDaylight
- ? 'GMT -06:00 (Mountain Daylight Time)'
- : 'GMT -07:00 (Mountain Standard Time)';
+ const findTitle = 'GMT -07:00 (Mountain Standard Time)';
const selectOption = await screen.findByTitle(findTitle);
- expect(selectOption).toBeInTheDocument();
userEvent.click(selectOption);
expect(onTimezoneChange).toHaveBeenCalledTimes(1);
expect(onTimezoneChange).toHaveBeenLastCalledWith('America/Cambridge_Bay');
});
-it('can update props and rerender with different values', async () => {
+test('can update props and rerender with different values', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
const { rerender } = render(
{
);
};
-export interface TimezoneProps {
- onTimezoneChange: (value: string) => void;
- timezone?: string | null;
-}
-
const ALL_ZONES = moment.tz
.countries()
.map(country => moment.tz.zonesForCountry(country, true))
@@ -106,7 +101,15 @@ const matchTimezoneToOptions = (timezone: string) =>
TIMEZONE_OPTIONS.find(option => option.offsets === getOffsetKey(timezone))
?.value || DEFAULT_TIMEZONE.value;
-const TimezoneSelector = ({ onTimezoneChange, timezone }: TimezoneProps) => {
+export type TimezoneSelectorProps = {
+ onTimezoneChange: (value: string) => void;
+ timezone?: string | null;
+};
+
+export default function TimezoneSelector({
+ onTimezoneChange,
+ timezone,
+}: TimezoneSelectorProps) {
const validTimezone = useMemo(
() => matchTimezoneToOptions(timezone || moment.tz.guess()),
[timezone],
@@ -129,6 +132,4 @@ const TimezoneSelector = ({ onTimezoneChange, timezone }: TimezoneProps) => {
sortComparator={TIMEZONE_OPTIONS_SORT_COMPARATOR}
/>
);
-};
-
-export default TimezoneSelector;
+}
diff --git a/superset-frontend/src/components/URLShortLinkButton/URLShortLinkButton.test.tsx b/superset-frontend/src/components/URLShortLinkButton/URLShortLinkButton.test.tsx
index f54a2ba364fbb..36ffc9e339432 100644
--- a/superset-frontend/src/components/URLShortLinkButton/URLShortLinkButton.test.tsx
+++ b/superset-frontend/src/components/URLShortLinkButton/URLShortLinkButton.test.tsx
@@ -23,48 +23,76 @@ import fetchMock from 'fetch-mock';
import URLShortLinkButton from 'src/components/URLShortLinkButton';
import ToastContainer from 'src/components/MessageToasts/ToastContainer';
-const fakeUrl = 'http://fakeurl.com';
+const DASHBOARD_ID = 10;
+const PERMALINK_PAYLOAD = {
+ key: '123',
+ url: 'http://fakeurl.com/123',
+};
+const FILTER_STATE_PAYLOAD = {
+ value: '{}',
+};
-fetchMock.post('glob:*/r/shortner/', fakeUrl);
+const props = {
+ dashboardId: DASHBOARD_ID,
+};
+
+fetchMock.get(
+ `glob:*/api/v1/dashboard/${DASHBOARD_ID}/filter_state*`,
+ FILTER_STATE_PAYLOAD,
+);
+
+fetchMock.post(
+ `glob:*/api/v1/dashboard/${DASHBOARD_ID}/permalink`,
+ PERMALINK_PAYLOAD,
+);
test('renders with default props', () => {
- render( , { useRedux: true });
+ render( , { useRedux: true });
expect(screen.getByRole('button')).toBeInTheDocument();
});
test('renders overlay on click', async () => {
- render( , { useRedux: true });
+ render( , { useRedux: true });
userEvent.click(screen.getByRole('button'));
expect(await screen.findByRole('tooltip')).toBeInTheDocument();
});
test('obtains short url', async () => {
- render( , { useRedux: true });
+ render( , { useRedux: true });
userEvent.click(screen.getByRole('button'));
- expect(await screen.findByRole('tooltip')).toHaveTextContent(fakeUrl);
+ expect(await screen.findByRole('tooltip')).toHaveTextContent(
+ PERMALINK_PAYLOAD.url,
+ );
});
test('creates email anchor', async () => {
const subject = 'Subject';
const content = 'Content';
- render( , {
- useRedux: true,
- });
+ render(
+ ,
+ {
+ useRedux: true,
+ },
+ );
- const href = `mailto:?Subject=${subject}%20&Body=${content}${fakeUrl}`;
+ const href = `mailto:?Subject=${subject}%20&Body=${content}${PERMALINK_PAYLOAD.url}`;
userEvent.click(screen.getByRole('button'));
expect(await screen.findByRole('link')).toHaveAttribute('href', href);
});
test('renders error message on short url error', async () => {
- fetchMock.mock('glob:*/r/shortner/', 500, {
+ fetchMock.mock(`glob:*/api/v1/dashboard/${DASHBOARD_ID}/permalink`, 500, {
overwriteRoutes: true,
});
render(
<>
-
+
>,
{ useRedux: true },
diff --git a/superset-frontend/src/components/URLShortLinkButton/index.jsx b/superset-frontend/src/components/URLShortLinkButton/index.jsx
index 1678471b61f79..35795f81a11fa 100644
--- a/superset-frontend/src/components/URLShortLinkButton/index.jsx
+++ b/superset-frontend/src/components/URLShortLinkButton/index.jsx
@@ -21,14 +21,17 @@ import PropTypes from 'prop-types';
import { t } from '@superset-ui/core';
import Popover from 'src/components/Popover';
import CopyToClipboard from 'src/components/CopyToClipboard';
-import { getShortUrl } from 'src/utils/urlUtils';
+import { getDashboardPermalink, getUrlParam } from 'src/utils/urlUtils';
import withToasts from 'src/components/MessageToasts/withToasts';
+import { URL_PARAMS } from 'src/constants';
+import { getFilterValue } from 'src/dashboard/components/nativeFilters/FilterBar/keyValue';
const propTypes = {
- url: PropTypes.string,
+ addDangerToast: PropTypes.func.isRequired,
+ anchorLinkId: PropTypes.string,
+ dashboardId: PropTypes.number,
emailSubject: PropTypes.string,
emailContent: PropTypes.string,
- addDangerToast: PropTypes.func.isRequired,
placement: PropTypes.oneOf(['right', 'left', 'top', 'bottom']),
};
@@ -50,9 +53,20 @@ class URLShortLinkButton extends React.Component {
getCopyUrl(e) {
e.stopPropagation();
- getShortUrl(this.props.url)
- .then(this.onShortUrlSuccess)
- .catch(this.props.addDangerToast);
+ const nativeFiltersKey = getUrlParam(URL_PARAMS.nativeFiltersKey);
+ if (this.props.dashboardId) {
+ getFilterValue(this.props.dashboardId, nativeFiltersKey)
+ .then(filterState =>
+ getDashboardPermalink(
+ String(this.props.dashboardId),
+ filterState,
+ this.props.anchorLinkId,
+ )
+ .then(this.onShortUrlSuccess)
+ .catch(this.props.addDangerToast),
+ )
+ .catch(this.props.addDangerToast);
+ }
}
renderPopover() {
@@ -96,7 +110,6 @@ class URLShortLinkButton extends React.Component {
}
URLShortLinkButton.defaultProps = {
- url: window.location.href.substring(window.location.origin.length),
placement: 'left',
emailSubject: '',
emailContent: '',
diff --git a/superset-frontend/src/constants.ts b/superset-frontend/src/constants.ts
index b54fc1173c28f..777d5f2a4e434 100644
--- a/superset-frontend/src/constants.ts
+++ b/superset-frontend/src/constants.ts
@@ -71,8 +71,24 @@ export const URL_PARAMS = {
name: 'force',
type: 'boolean',
},
+ permalinkKey: {
+ name: 'permalink_key',
+ type: 'string',
+ },
} as const;
+export const RESERVED_CHART_URL_PARAMS: string[] = [
+ URL_PARAMS.formDataKey.name,
+ URL_PARAMS.sliceId.name,
+ URL_PARAMS.datasetId.name,
+];
+export const RESERVED_DASHBOARD_URL_PARAMS: string[] = [
+ URL_PARAMS.nativeFilters.name,
+ URL_PARAMS.nativeFiltersKey.name,
+ URL_PARAMS.permalinkKey.name,
+ URL_PARAMS.preselectFilters.name,
+];
+
/**
* Faster debounce delay for inputs without expensive operation.
*/
diff --git a/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/HeaderActionsDropdown.test.tsx b/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/HeaderActionsDropdown.test.tsx
index 8996645079472..d1f87ec999e0c 100644
--- a/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/HeaderActionsDropdown.test.tsx
+++ b/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/HeaderActionsDropdown.test.tsx
@@ -135,8 +135,8 @@ test('should show the share actions', async () => {
};
render(setup(canShareProps));
await openDropdown();
- expect(screen.getByText('Copy dashboard URL')).toBeInTheDocument();
- expect(screen.getByText('Share dashboard by email')).toBeInTheDocument();
+ expect(screen.getByText('Copy permalink to clipboard')).toBeInTheDocument();
+ expect(screen.getByText('Share permalink by email')).toBeInTheDocument();
});
test('should render the "Save Modal" when user can save', async () => {
diff --git a/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx b/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx
index b7d368ec32db2..9375c684af90a 100644
--- a/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx
+++ b/superset-frontend/src/dashboard/components/Header/HeaderActionsDropdown/index.jsx
@@ -257,8 +257,8 @@ class HeaderActionsDropdown extends React.PureComponent {
{userCanShare && (
;
onExploreChart: () => void;
forceRefresh: (sliceId: number, dashboardId: number) => void;
@@ -309,8 +310,8 @@ class SliceHeaderControls extends React.PureComponent<
{supersetCanShare && (
= 5 ? 'left' : 'right'}
diff --git a/superset-frontend/src/dashboard/components/menu/ShareMenuItems/ShareMenuItems.test.tsx b/superset-frontend/src/dashboard/components/menu/ShareMenuItems/ShareMenuItems.test.tsx
index da7d196bd8b50..579f9d4b69077 100644
--- a/superset-frontend/src/dashboard/components/menu/ShareMenuItems/ShareMenuItems.test.tsx
+++ b/superset-frontend/src/dashboard/components/menu/ShareMenuItems/ShareMenuItems.test.tsx
@@ -31,7 +31,7 @@ const DASHBOARD_ID = '26';
const createProps = () => ({
addDangerToast: jest.fn(),
addSuccessToast: jest.fn(),
- url: `/superset/dashboard/${DASHBOARD_ID}/?preselect_filters=%7B%7D`,
+ url: `/superset/dashboard/${DASHBOARD_ID}`,
copyMenuItemTitle: 'Copy dashboard URL',
emailMenuItemTitle: 'Share dashboard by email',
emailSubject: 'Superset dashboard COVID Vaccine Dashboard',
@@ -45,10 +45,10 @@ beforeAll((): void => {
// @ts-ignore
delete window.location;
fetchMock.post(
- 'http://localhost/r/shortner/',
- { body: 'http://localhost:8088/r/3' },
+ `http://localhost/api/v1/dashboard/${DASHBOARD_ID}/permalink`,
+ { key: '123', url: 'http://localhost/superset/dashboard/p/123/' },
{
- sendAsJson: false,
+ sendAsJson: true,
},
);
});
@@ -104,7 +104,7 @@ test('Click on "Copy dashboard URL" and succeed', async () => {
await waitFor(() => {
expect(spy).toBeCalledTimes(1);
- expect(spy).toBeCalledWith('http://localhost:8088/r/3');
+ expect(spy).toBeCalledWith('http://localhost/superset/dashboard/p/123/');
expect(props.addSuccessToast).toBeCalledTimes(1);
expect(props.addSuccessToast).toBeCalledWith('Copied to clipboard!');
expect(props.addDangerToast).toBeCalledTimes(0);
@@ -130,7 +130,7 @@ test('Click on "Copy dashboard URL" and fail', async () => {
await waitFor(() => {
expect(spy).toBeCalledTimes(1);
- expect(spy).toBeCalledWith('http://localhost:8088/r/3');
+ expect(spy).toBeCalledWith('http://localhost/superset/dashboard/p/123/');
expect(props.addSuccessToast).toBeCalledTimes(0);
expect(props.addDangerToast).toBeCalledTimes(1);
expect(props.addDangerToast).toBeCalledWith(
@@ -159,14 +159,14 @@ test('Click on "Share dashboard by email" and succeed', async () => {
await waitFor(() => {
expect(props.addDangerToast).toBeCalledTimes(0);
expect(window.location.href).toBe(
- 'mailto:?Subject=Superset%20dashboard%20COVID%20Vaccine%20Dashboard%20&Body=Check%20out%20this%20dashboard%3A%20http%3A%2F%2Flocalhost%3A8088%2Fr%2F3',
+ 'mailto:?Subject=Superset%20dashboard%20COVID%20Vaccine%20Dashboard%20&Body=Check%20out%20this%20dashboard%3A%20http%3A%2F%2Flocalhost%2Fsuperset%2Fdashboard%2Fp%2F123%2F',
);
});
});
test('Click on "Share dashboard by email" and fail', async () => {
fetchMock.post(
- 'http://localhost/r/shortner/',
+ `http://localhost/api/v1/dashboard/${DASHBOARD_ID}/permalink`,
{ status: 404 },
{ overwriteRoutes: true },
);
diff --git a/superset-frontend/src/dashboard/components/menu/ShareMenuItems/index.tsx b/superset-frontend/src/dashboard/components/menu/ShareMenuItems/index.tsx
index cb31503ac8611..c70e47dc3d01d 100644
--- a/superset-frontend/src/dashboard/components/menu/ShareMenuItems/index.tsx
+++ b/superset-frontend/src/dashboard/components/menu/ShareMenuItems/index.tsx
@@ -17,19 +17,16 @@
* under the License.
*/
import React from 'react';
-import { useUrlShortener } from 'src/hooks/useUrlShortener';
import copyTextToClipboard from 'src/utils/copy';
-import { t, logging } from '@superset-ui/core';
+import { t, logging, QueryFormData } from '@superset-ui/core';
import { Menu } from 'src/components/Menu';
-import { getUrlParam } from 'src/utils/urlUtils';
-import { postFormData } from 'src/explore/exploreUtils/formData';
-import { useTabId } from 'src/hooks/useTabId';
-import { URL_PARAMS } from 'src/constants';
-import { mountExploreUrl } from 'src/explore/exploreUtils';
import {
- createFilterKey,
- getFilterValue,
-} from 'src/dashboard/components/nativeFilters/FilterBar/keyValue';
+ getChartPermalink,
+ getDashboardPermalink,
+ getUrlParam,
+} from 'src/utils/urlUtils';
+import { RESERVED_DASHBOARD_URL_PARAMS, URL_PARAMS } from 'src/constants';
+import { getFilterValue } from 'src/dashboard/components/nativeFilters/FilterBar/keyValue';
interface ShareMenuItemProps {
url?: string;
@@ -40,12 +37,11 @@ interface ShareMenuItemProps {
addDangerToast: Function;
addSuccessToast: Function;
dashboardId?: string;
- formData?: { slice_id: number; datasource: string };
+ formData?: Pick;
}
const ShareMenuItems = (props: ShareMenuItemProps) => {
const {
- url,
copyMenuItemTitle,
emailMenuItemTitle,
emailSubject,
@@ -57,47 +53,25 @@ const ShareMenuItems = (props: ShareMenuItemProps) => {
...rest
} = props;
- const tabId = useTabId();
-
- const getShortUrl = useUrlShortener(url || '');
-
- async function getCopyUrl() {
- const risonObj = getUrlParam(URL_PARAMS.nativeFilters);
- if (typeof risonObj === 'object' || !dashboardId) return null;
- const prevData = await getFilterValue(
- dashboardId,
- getUrlParam(URL_PARAMS.nativeFiltersKey),
- );
- const newDataMaskKey = await createFilterKey(
- dashboardId,
- JSON.stringify(prevData),
- tabId,
- );
- const newUrl = new URL(`${window.location.origin}${url}`);
- newUrl.searchParams.set(URL_PARAMS.nativeFilters.name, newDataMaskKey);
- return `${newUrl.pathname}${newUrl.search}`;
- }
-
async function generateUrl() {
+ // chart
if (formData) {
- const key = await postFormData(
- parseInt(formData.datasource.split('_')[0], 10),
- formData,
- formData.slice_id,
- tabId,
- );
- return `${window.location.origin}${mountExploreUrl(null, {
- [URL_PARAMS.formDataKey.name]: key,
- [URL_PARAMS.sliceId.name]: formData.slice_id,
- })}`;
+ // we need to remove reserved dashboard url params
+ return getChartPermalink(formData, RESERVED_DASHBOARD_URL_PARAMS);
+ }
+ // dashboard
+ const nativeFiltersKey = getUrlParam(URL_PARAMS.nativeFiltersKey);
+ let filterState = {};
+ if (nativeFiltersKey && dashboardId) {
+ filterState = await getFilterValue(dashboardId, nativeFiltersKey);
}
- const copyUrl = await getCopyUrl();
- return getShortUrl(copyUrl);
+ return getDashboardPermalink(String(dashboardId), filterState);
}
async function onCopyLink() {
try {
- await copyTextToClipboard(await generateUrl());
+ const url = await generateUrl();
+ await copyTextToClipboard(url);
addSuccessToast(t('Copied to clipboard!'));
} catch (error) {
logging.error(error);
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx
index 73a589312acc6..309d75dac9a80 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx
@@ -165,6 +165,11 @@ export interface FiltersBarProps {
offset: number;
}
+const EXCLUDED_URL_PARAMS: string[] = [
+ URL_PARAMS.nativeFilters.name,
+ URL_PARAMS.permalinkKey.name,
+];
+
const publishDataMask = debounce(
async (
history,
@@ -177,9 +182,9 @@ const publishDataMask = debounce(
const { search } = location;
const previousParams = new URLSearchParams(search);
const newParams = new URLSearchParams();
- let dataMaskKey: string;
+ let dataMaskKey: string | null;
previousParams.forEach((value, key) => {
- if (key !== URL_PARAMS.nativeFilters.name) {
+ if (!EXCLUDED_URL_PARAMS.includes(key)) {
newParams.append(key, value);
}
});
@@ -200,7 +205,9 @@ const publishDataMask = debounce(
} else {
dataMaskKey = await createFilterKey(dashboardId, dataMask, tabId);
}
- newParams.set(URL_PARAMS.nativeFiltersKey.name, dataMaskKey);
+ if (dataMaskKey) {
+ newParams.set(URL_PARAMS.nativeFiltersKey.name, dataMaskKey);
+ }
// pathname could be updated somewhere else through window.history
// keep react router history in sync with window history
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/keyValue.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/keyValue.tsx
index 9682fdb7b8f0e..ec9735f091690 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/keyValue.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/keyValue.tsx
@@ -17,6 +17,7 @@
* under the License.
*/
import { SupersetClient, logging } from '@superset-ui/core';
+import { DashboardPermalinkValue } from 'src/dashboard/types';
const assembleEndpoint = (
dashId: string | number,
@@ -58,7 +59,7 @@ export const createFilterKey = (
endpoint: assembleEndpoint(dashId, undefined, tabId),
jsonPayload: { value },
})
- .then(r => r.json.key)
+ .then(r => r.json.key as string)
.catch(err => {
logging.error(err);
return null;
@@ -73,3 +74,13 @@ export const getFilterValue = (dashId: string | number, key: string) =>
logging.error(err);
return null;
});
+
+export const getPermalinkValue = (key: string) =>
+ SupersetClient.get({
+ endpoint: `/api/v1/dashboard/permalink/${key}`,
+ })
+ .then(({ json }) => json as DashboardPermalinkValue)
+ .catch(err => {
+ logging.error(err);
+ return null;
+ });
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/DraggableFilter.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/DraggableFilter.tsx
index 188655355ca45..7a4827c80bcf0 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/DraggableFilter.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/DraggableFilter.tsx
@@ -57,7 +57,7 @@ const DragIcon = styled(Icons.Drag, {
interface FilterTabTitleProps {
index: number;
filterIds: string[];
- onRearrage: (dragItemIndex: number, targetIndex: number) => void;
+ onRearrange: (dragItemIndex: number, targetIndex: number) => void;
}
interface DragItem {
@@ -68,7 +68,7 @@ interface DragItem {
export const DraggableFilter: React.FC = ({
index,
- onRearrage,
+ onRearrange,
filterIds,
children,
}) => {
@@ -120,7 +120,7 @@ export const DraggableFilter: React.FC = ({
return;
}
- onRearrage(dragIndex, hoverIndex);
+ onRearrange(dragIndex, hoverIndex);
// Note: we're mutating the monitor item here.
// Generally it's better to avoid mutations,
// but it's good here for the sake of performance
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterConfigPane.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterConfigPane.test.tsx
index 78c4d77da1918..3742d536326fb 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterConfigPane.test.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterConfigPane.test.tsx
@@ -22,6 +22,9 @@ import { buildNativeFilter } from 'spec/fixtures/mockNativeFilters';
import { act, fireEvent, render, screen } from 'spec/helpers/testing-library';
import FilterConfigPane from './FilterConfigurePane';
+const scrollMock = jest.fn();
+Element.prototype.scroll = scrollMock;
+
const defaultProps = {
children: jest.fn(),
getFilterTitle: (id: string) => id,
@@ -56,6 +59,10 @@ function defaultRender(initialState: any = defaultState, props = defaultProps) {
});
}
+beforeEach(() => {
+ scrollMock.mockClear();
+});
+
test('renders form', async () => {
await act(async () => {
defaultRender();
@@ -65,7 +72,7 @@ test('renders form', async () => {
test('drag and drop', async () => {
defaultRender();
- // Drag the state and contry filter above the product filter
+ // Drag the state and country filter above the product filter
const [countryStateFilter, productFilter] = document.querySelectorAll(
'div[draggable=true]',
);
@@ -132,3 +139,41 @@ test('add divider', async () => {
});
expect(defaultProps.onAdd).toHaveBeenCalledWith('DIVIDER');
});
+
+test('filter container should scroll to bottom when adding items', async () => {
+ const state = {
+ dashboardInfo: {
+ metadata: {
+ native_filter_configuration: new Array(35)
+ .fill(0)
+ .map((_, index) =>
+ buildNativeFilter(`NATIVE_FILTER-${index}`, `filter-${index}`, []),
+ ),
+ },
+ },
+ dashboardLayout,
+ };
+ const props = {
+ ...defaultProps,
+ filters: new Array(35).fill(0).map((_, index) => `NATIVE_FILTER-${index}`),
+ };
+
+ defaultRender(state, props);
+
+ const addButton = screen.getByText('Add filters and dividers')!;
+ fireEvent.mouseOver(addButton);
+
+ const addFilterButton = await screen.findByText('Filter');
+ await act(async () => {
+ fireEvent(
+ addFilterButton,
+ new MouseEvent('click', {
+ bubbles: true,
+ cancelable: true,
+ }),
+ );
+ });
+
+ const containerElement = screen.getByTestId('filter-title-container');
+ expect(containerElement.scroll).toHaveBeenCalled();
+});
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterConfigurePane.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterConfigurePane.tsx
index a65e167fdf39b..dba7e6bb30250 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterConfigurePane.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterConfigurePane.tsx
@@ -50,7 +50,7 @@ const TitlesContainer = styled.div`
border-right: 1px solid ${({ theme }) => theme.colors.grayscale.light2};
`;
-const FiltureConfigurePane: React.FC = ({
+const FilterConfigurePane: React.FC = ({
getFilterTitle,
onChange,
onRemove,
@@ -75,7 +75,7 @@ const FiltureConfigurePane: React.FC = ({
getFilterTitle={getFilterTitle}
onChange={onChange}
onAdd={(type: NativeFilterType) => onAdd(type)}
- onRearrage={onRearrange}
+ onRearrange={onRearrange}
onRemove={(id: string) => onRemove(id)}
restoreFilter={restoreFilter}
/>
@@ -98,4 +98,4 @@ const FiltureConfigurePane: React.FC = ({
);
};
-export default FiltureConfigurePane;
+export default FilterConfigurePane;
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitleContainer.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitleContainer.tsx
index 6ef40d8303b57..f5fe459e4b260 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitleContainer.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitleContainer.tsx
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-import React from 'react';
+import React, { forwardRef } from 'react';
import { styled, t } from '@superset-ui/core';
import Icons from 'src/components/Icons';
import { FilterRemoval } from './types';
@@ -72,124 +72,134 @@ interface Props {
removedFilters: Record;
onRemove: (id: string) => void;
restoreFilter: (id: string) => void;
- onRearrage: (dragIndex: number, targetIndex: number) => void;
+ onRearrange: (dragIndex: number, targetIndex: number) => void;
filters: string[];
erroredFilters: string[];
}
-const FilterTitleContainer: React.FC = ({
- getFilterTitle,
- onChange,
- onRemove,
- restoreFilter,
- onRearrage,
- currentFilterId,
- removedFilters,
- filters,
- erroredFilters = [],
-}) => {
- const renderComponent = (id: string) => {
- const isRemoved = !!removedFilters[id];
- const isErrored = erroredFilters.includes(id);
- const isActive = currentFilterId === id;
- const classNames = [];
- if (isErrored) {
- classNames.push('errored');
- }
- if (isActive) {
- classNames.push('active');
- }
- return (
- onChange(id)}
- className={classNames.join(' ')}
- >
-
-
- {isRemoved ? t('(Removed)') : getFilterTitle(id)}
+const FilterTitleContainer = forwardRef
(
+ (
+ {
+ getFilterTitle,
+ onChange,
+ onRemove,
+ restoreFilter,
+ onRearrange,
+ currentFilterId,
+ removedFilters,
+ filters,
+ erroredFilters = [],
+ },
+ ref,
+ ) => {
+ const renderComponent = (id: string) => {
+ const isRemoved = !!removedFilters[id];
+ const isErrored = erroredFilters.includes(id);
+ const isActive = currentFilterId === id;
+ const classNames = [];
+ if (isErrored) {
+ classNames.push('errored');
+ }
+ if (isActive) {
+ classNames.push('active');
+ }
+ return (
+ onChange(id)}
+ className={classNames.join(' ')}
+ >
+
+
+ {isRemoved ? t('(Removed)') : getFilterTitle(id)}
+
+ {!removedFilters[id] && isErrored && (
+
+ )}
+ {isRemoved && (
+
{
+ e.preventDefault();
+ restoreFilter(id);
+ }}
+ >
+ {t('Undo?')}
+
+ )}
- {!removedFilters[id] && isErrored && (
-
- )}
- {isRemoved && (
- {
- e.preventDefault();
- restoreFilter(id);
- }}
- >
- {t('Undo?')}
-
- )}
-
-
- {isRemoved ? null : (
- {
- event.stopPropagation();
- onRemove(id);
- }}
- alt="RemoveFilter"
- />
- )}
-
-
- );
- };
- const recursivelyRender = (
- elementId: string,
- nodeList: Array<{ id: string; parentId: string | null }>,
- rendered: Array
,
- ): React.ReactNode => {
- const didAlreadyRender = rendered.indexOf(elementId) >= 0;
- if (didAlreadyRender) {
- return null;
- }
- let parent = null;
- const element = nodeList.filter(el => el.id === elementId)[0];
- if (!element) {
- return null;
- }
+
+ {isRemoved ? null : (
+ {
+ event.stopPropagation();
+ onRemove(id);
+ }}
+ alt="RemoveFilter"
+ />
+ )}
+
+
+ );
+ };
+ const recursivelyRender = (
+ elementId: string,
+ nodeList: Array<{ id: string; parentId: string | null }>,
+ rendered: Array,
+ ): React.ReactNode => {
+ const didAlreadyRender = rendered.indexOf(elementId) >= 0;
+ if (didAlreadyRender) {
+ return null;
+ }
+ let parent = null;
+ const element = nodeList.filter(el => el.id === elementId)[0];
+ if (!element) {
+ return null;
+ }
+
+ rendered.push(elementId);
+ if (element.parentId) {
+ parent = recursivelyRender(element.parentId, nodeList, rendered);
+ }
+ const children = nodeList
+ .filter(item => item.parentId === elementId)
+ .map(item => recursivelyRender(item.id, nodeList, rendered));
+ return (
+ <>
+ {parent}
+ {renderComponent(elementId)}
+ {children}
+ >
+ );
+ };
+
+ const renderFilterGroups = () => {
+ const items: React.ReactNode[] = [];
+ filters.forEach((item, index) => {
+ items.push(
+
+ {renderComponent(item)}
+ ,
+ );
+ });
+ return items;
+ };
- rendered.push(elementId);
- if (element.parentId) {
- parent = recursivelyRender(element.parentId, nodeList, rendered);
- }
- const children = nodeList
- .filter(item => item.parentId === elementId)
- .map(item => recursivelyRender(item.id, nodeList, rendered));
return (
- <>
- {parent}
- {renderComponent(elementId)}
- {children}
- >
+
+ {renderFilterGroups()}
+
);
- };
-
- const renderFilterGroups = () => {
- const items: React.ReactNode[] = [];
- filters.forEach((item, index) => {
- items.push(
-
- {renderComponent(item)}
- ,
- );
- });
- return items;
- };
- return {renderFilterGroups()} ;
-};
+ },
+);
export default FilterTitleContainer;
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitlePane.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitlePane.tsx
index 5681a41717666..79dc4148349aa 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitlePane.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitlePane.tsx
@@ -16,8 +16,8 @@
* specific language governing permissions and limitations
* under the License.
*/
+import React, { useRef } from 'react';
import { NativeFilterType, styled, t, useTheme } from '@superset-ui/core';
-import React from 'react';
import { AntdDropdown } from 'src/components';
import { MainNav as Menu } from 'src/components/Menu';
import FilterTitleContainer from './FilterTitleContainer';
@@ -26,7 +26,7 @@ import { FilterRemoval } from './types';
interface Props {
restoreFilter: (id: string) => void;
getFilterTitle: (id: string) => string;
- onRearrage: (dragIndex: number, targetIndex: number) => void;
+ onRearrange: (dragIndex: number, targetIndex: number) => void;
onRemove: (id: string) => void;
onChange: (id: string) => void;
onAdd: (type: NativeFilterType) => void;
@@ -52,23 +52,26 @@ const TabsContainer = styled.div`
flex-direction: column;
`;
+const options = [
+ { label: 'Filter', type: NativeFilterType.NATIVE_FILTER },
+ { label: 'Divider', type: NativeFilterType.DIVIDER },
+];
+
const FilterTitlePane: React.FC = ({
getFilterTitle,
onChange,
onAdd,
onRemove,
- onRearrage,
+ onRearrange,
restoreFilter,
currentFilterId,
filters,
removedFilters,
erroredFilters,
}) => {
+ const filtersContainerRef = useRef(null);
const theme = useTheme();
- const options = [
- { label: 'Filter', type: NativeFilterType.NATIVE_FILTER },
- { label: 'Divider', type: NativeFilterType.DIVIDER },
- ];
+
const handleOnAdd = (type: NativeFilterType) => {
onAdd(type);
setTimeout(() => {
@@ -77,6 +80,11 @@ const FilterTitlePane: React.FC = ({
const navList = element.getElementsByClassName('ant-tabs-nav-list')[0];
navList.scrollTop = navList.scrollHeight;
}
+
+ filtersContainerRef?.current?.scroll?.({
+ top: filtersContainerRef.current.scrollHeight,
+ behavior: 'smooth',
+ });
}, 0);
};
const menu = (
@@ -109,6 +117,7 @@ const FilterTitlePane: React.FC = ({
}}
>
= ({
erroredFilters={erroredFilters}
onChange={onChange}
onRemove={onRemove}
- onRearrage={onRearrage}
+ onRearrange={onRearrange}
restoreFilter={restoreFilter}
/>
diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx
index fd3ac06ea6db9..d258b34fa7489 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx
@@ -38,7 +38,7 @@ import ErrorBoundary from 'src/components/ErrorBoundary';
import { StyledModal } from 'src/components/Modal';
import { testWithId } from 'src/utils/testUtils';
import { useFilterConfigMap, useFilterConfiguration } from '../state';
-import FiltureConfigurePane from './FilterConfigurePane';
+import FilterConfigurePane from './FilterConfigurePane';
import FiltersConfigForm, {
FilterPanels,
} from './FiltersConfigForm/FiltersConfigForm';
@@ -379,7 +379,7 @@ export function FiltersConfigModal({
handleConfirmCancel();
}
};
- const onRearrage = (dragIndex: number, targetIndex: number) => {
+ const onRearrange = (dragIndex: number, targetIndex: number) => {
const newOrderedFilter = [...orderedFilters];
const removed = newOrderedFilter.splice(dragIndex, 1)[0];
newOrderedFilter.splice(targetIndex, 0, removed);
@@ -522,7 +522,7 @@ export function FiltersConfigModal({
onValuesChange={onValuesChange}
layout="vertical"
>
-
{(id: string) => getForm(id)}
-
+
diff --git a/superset-frontend/src/dashboard/containers/DashboardPage.tsx b/superset-frontend/src/dashboard/containers/DashboardPage.tsx
index 878f3d68695ae..e5fff328724d5 100644
--- a/superset-frontend/src/dashboard/containers/DashboardPage.tsx
+++ b/superset-frontend/src/dashboard/containers/DashboardPage.tsx
@@ -49,7 +49,10 @@ import { URL_PARAMS } from 'src/constants';
import { getUrlParam } from 'src/utils/urlUtils';
import { canUserEditDashboard } from 'src/dashboard/util/findPermission';
import { getFilterSets } from '../actions/nativeFilters';
-import { getFilterValue } from '../components/nativeFilters/FilterBar/keyValue';
+import {
+ getFilterValue,
+ getPermalinkValue,
+} from '../components/nativeFilters/FilterBar/keyValue';
import { filterCardPopoverStyle } from '../styles';
export const MigrationContext = React.createContext(
@@ -161,12 +164,17 @@ const DashboardPage: FC = () => {
useEffect(() => {
// eslint-disable-next-line consistent-return
async function getDataMaskApplied() {
+ const permalinkKey = getUrlParam(URL_PARAMS.permalinkKey);
const nativeFilterKeyValue = getUrlParam(URL_PARAMS.nativeFiltersKey);
let dataMaskFromUrl = nativeFilterKeyValue || {};
const isOldRison = getUrlParam(URL_PARAMS.nativeFilters);
- // check if key from key_value api and get datamask
- if (nativeFilterKeyValue) {
+ if (permalinkKey) {
+ const permalinkValue = await getPermalinkValue(permalinkKey);
+ if (permalinkValue) {
+ dataMaskFromUrl = permalinkValue.state.filterState;
+ }
+ } else if (nativeFilterKeyValue) {
dataMaskFromUrl = await getFilterValue(id, nativeFilterKeyValue);
}
if (isOldRison) {
diff --git a/superset-frontend/src/dashboard/types.ts b/superset-frontend/src/dashboard/types.ts
index fbdf362eea709..dffbd9fbe0be8 100644
--- a/superset-frontend/src/dashboard/types.ts
+++ b/superset-frontend/src/dashboard/types.ts
@@ -144,3 +144,11 @@ type ActiveFilter = {
export type ActiveFilters = {
[key: string]: ActiveFilter;
};
+
+export type DashboardPermalinkValue = {
+ dashboardId: string;
+ state: {
+ filterState: DataMaskStateWithId;
+ hash: string;
+ };
+};
diff --git a/superset-frontend/src/explore/components/EmbedCodeButton.jsx b/superset-frontend/src/explore/components/EmbedCodeButton.jsx
index 57e6d30de4532..71f77a4621fa2 100644
--- a/superset-frontend/src/explore/components/EmbedCodeButton.jsx
+++ b/superset-frontend/src/explore/components/EmbedCodeButton.jsx
@@ -25,6 +25,7 @@ import Icons from 'src/components/Icons';
import { Tooltip } from 'src/components/Tooltip';
import CopyToClipboard from 'src/components/CopyToClipboard';
import { URL_PARAMS } from 'src/constants';
+import { getChartPermalink } from 'src/utils/urlUtils';
export default class EmbedCodeButton extends React.Component {
constructor(props) {
@@ -32,8 +33,11 @@ export default class EmbedCodeButton extends React.Component {
this.state = {
height: '400',
width: '600',
+ url: '',
+ errorMessage: '',
};
this.handleInputChange = this.handleInputChange.bind(this);
+ this.updateUrl = this.updateUrl.bind(this);
}
handleInputChange(e) {
@@ -43,8 +47,21 @@ export default class EmbedCodeButton extends React.Component {
this.setState(data);
}
+ updateUrl() {
+ this.setState({ url: '' });
+ getChartPermalink(this.props.formData)
+ .then(url => this.setState({ errorMessage: '', url }))
+ .catch(() => {
+ this.setState({ errorMessage: t('Error') });
+ this.props.addDangerToast(
+ t('Sorry, something went wrong. Try again later.'),
+ );
+ });
+ }
+
generateEmbedHTML() {
- const srcLink = `${window.location.href}&${URL_PARAMS.standalone.name}=1&height=${this.state.height}`;
+ if (!this.state.url) return '';
+ const srcLink = `${this.state.url}?${URL_PARAMS.standalone.name}=1&height=${this.state.height}`;
return (
'
-
+
+
+
+
+
+
+
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/SqlAlchemyForm.tsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/SqlAlchemyForm.tsx
index 7226efdcb4b37..96a0bfef07cc2 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/SqlAlchemyForm.tsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/SqlAlchemyForm.tsx
@@ -45,7 +45,10 @@ const SqlAlchemyTab = ({
fallbackDocsUrl =
SupersetText.DB_MODAL_SQLALCHEMY_FORM?.SQLALCHEMY_DOCS_URL;
fallbackDisplayText =
- SupersetText.DB_MODAL_SQLALCHEMY_FORM?.SQLALCHEMY_DOCS_URL;
+ SupersetText.DB_MODAL_SQLALCHEMY_FORM?.SQLALCHEMY_DISPLAY_TEXT;
+ } else {
+ fallbackDocsUrl = 'https://docs.sqlalchemy.org/en/13/core/engines.html';
+ fallbackDisplayText = 'SQLAlchemy docs';
}
return (
<>
diff --git a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.test.jsx b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.test.jsx
index 6fcbfdad43848..9db2333573dfa 100644
--- a/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.test.jsx
+++ b/superset-frontend/src/views/CRUD/data/database/DatabaseModal/index.test.jsx
@@ -591,6 +591,15 @@ describe('DatabaseModal', () => {
const allowDbExplorationText = screen.getByText(
/allow this database to be explored/i,
);
+ const disableSQLLabDataPreviewQueriesCheckbox = screen.getByRole(
+ 'checkbox',
+ {
+ name: /Disable SQL Lab data preview queries/i,
+ },
+ );
+ const disableSQLLabDataPreviewQueriesText = screen.getByText(
+ /Disable SQL Lab data preview queries/i,
+ );
// ---------- Assertions ----------
const visibleComponents = [
@@ -610,6 +619,7 @@ describe('DatabaseModal', () => {
checkboxOffSVGs[4],
checkboxOffSVGs[5],
checkboxOffSVGs[6],
+ checkboxOffSVGs[7],
tooltipIcons[0],
tooltipIcons[1],
tooltipIcons[2],
@@ -617,6 +627,7 @@ describe('DatabaseModal', () => {
tooltipIcons[4],
tooltipIcons[5],
tooltipIcons[6],
+ tooltipIcons[7],
exposeInSQLLabText,
allowCTASText,
allowCVASText,
@@ -627,6 +638,7 @@ describe('DatabaseModal', () => {
allowMultiSchemaMDFetchText,
enableQueryCostEstimationText,
allowDbExplorationText,
+ disableSQLLabDataPreviewQueriesText,
];
// These components exist in the DOM but are not visible
const invisibleComponents = [
@@ -637,6 +649,7 @@ describe('DatabaseModal', () => {
allowMultiSchemaMDFetchCheckbox,
enableQueryCostEstimationCheckbox,
allowDbExplorationCheckbox,
+ disableSQLLabDataPreviewQueriesCheckbox,
];
visibleComponents.forEach(component => {
@@ -645,8 +658,8 @@ describe('DatabaseModal', () => {
invisibleComponents.forEach(component => {
expect(component).not.toBeVisible();
});
- expect(checkboxOffSVGs).toHaveLength(7);
- expect(tooltipIcons).toHaveLength(7);
+ expect(checkboxOffSVGs).toHaveLength(8);
+ expect(tooltipIcons).toHaveLength(8);
});
it('renders the "Advanced" - PERFORMANCE tab correctly', async () => {
diff --git a/superset-frontend/src/views/CRUD/data/database/types.ts b/superset-frontend/src/views/CRUD/data/database/types.ts
index 4ffb69535cbe2..c03891689e90b 100644
--- a/superset-frontend/src/views/CRUD/data/database/types.ts
+++ b/superset-frontend/src/views/CRUD/data/database/types.ts
@@ -92,6 +92,7 @@ export type DatabaseObject = {
version?: string;
cost_estimate_enabled?: boolean; // in SQL Lab
+ disable_data_preview?: boolean; // in SQL Lab
};
// Temporary storage
diff --git a/superset-frontend/src/views/CRUD/utils.tsx b/superset-frontend/src/views/CRUD/utils.tsx
index d9d21c8565d17..c2ae0d8cbed13 100644
--- a/superset-frontend/src/views/CRUD/utils.tsx
+++ b/superset-frontend/src/views/CRUD/utils.tsx
@@ -412,12 +412,11 @@ export const hasTerminalValidation = (errors: Record[]) =>
);
export const checkUploadExtensions = (
- perm: Array | string | undefined | boolean,
- cons: Array,
+ perm: Array,
+ cons: Array,
) => {
if (perm !== undefined) {
- if (typeof perm === 'boolean') return perm;
- return intersection(perm, cons).length;
+ return intersection(perm, cons).length > 0;
}
return false;
};
diff --git a/superset-frontend/src/views/components/Menu.tsx b/superset-frontend/src/views/components/Menu.tsx
index 068cad8e363d1..77a074f71b6c7 100644
--- a/superset-frontend/src/views/components/Menu.tsx
+++ b/superset-frontend/src/views/components/Menu.tsx
@@ -74,7 +74,7 @@ interface MenuObjectChildProps {
index?: number;
url?: string;
isFrontendRoute?: boolean;
- perm?: string | Array | boolean;
+ perm?: string | boolean;
view?: string;
}
diff --git a/superset-frontend/src/views/components/MenuRight.tsx b/superset-frontend/src/views/components/MenuRight.tsx
index 531ed53847346..ab5b5d8d82a7e 100644
--- a/superset-frontend/src/views/components/MenuRight.tsx
+++ b/superset-frontend/src/views/components/MenuRight.tsx
@@ -79,7 +79,6 @@ const RightMenu = ({
ALLOWED_EXTENSIONS,
HAS_GSHEETS_INSTALLED,
} = useSelector(state => state.common.conf);
-
const [showModal, setShowModal] = useState(false);
const [engine, setEngine] = useState('');
const canSql = findPermission('can_sqllab', 'Superset', roles);
@@ -124,19 +123,25 @@ const RightMenu = ({
label: t('Upload CSV to database'),
name: 'Upload a CSV',
url: '/csvtodatabaseview/form',
- perm: CSV_EXTENSIONS && canUploadCSV,
+ perm:
+ checkUploadExtensions(CSV_EXTENSIONS, ALLOWED_EXTENSIONS) &&
+ canUploadCSV,
},
{
label: t('Upload columnar file to database'),
name: 'Upload a Columnar file',
url: '/columnartodatabaseview/form',
- perm: COLUMNAR_EXTENSIONS && canUploadColumnar,
+ perm:
+ checkUploadExtensions(COLUMNAR_EXTENSIONS, ALLOWED_EXTENSIONS) &&
+ canUploadColumnar,
},
{
label: t('Upload Excel file to database'),
name: 'Upload Excel',
url: '/exceltodatabaseview/form',
- perm: EXCEL_EXTENSIONS && canUploadExcel,
+ perm:
+ checkUploadExtensions(EXCEL_EXTENSIONS, ALLOWED_EXTENSIONS) &&
+ canUploadExcel,
},
],
},
@@ -209,9 +214,7 @@ const RightMenu = ({
title={menuIconAndLabel(menu)}
>
{menu.childs.map((item, idx) =>
- typeof item !== 'string' &&
- item.name &&
- checkUploadExtensions(item.perm, ALLOWED_EXTENSIONS) ? (
+ typeof item !== 'string' && item.name && item.perm ? (
<>
{idx === 2 && }
diff --git a/superset-frontend/webpack.config.js b/superset-frontend/webpack.config.js
index a4c1b86b482eb..6b3c4e30a8b13 100644
--- a/superset-frontend/webpack.config.js
+++ b/superset-frontend/webpack.config.js
@@ -95,10 +95,10 @@ const plugins = [
entryFiles[entry] = {
css: chunks
.filter(x => x.endsWith('.css'))
- .map(x => path.join(output.publicPath, x)),
+ .map(x => `${output.publicPath}${x}`),
js: chunks
.filter(x => x.endsWith('.js'))
- .map(x => path.join(output.publicPath, x)),
+ .map(x => `${output.publicPath}${x}`),
};
});
diff --git a/superset/charts/commands/export.py b/superset/charts/commands/export.py
index 35f70a82eab65..9b3a06c473585 100644
--- a/superset/charts/commands/export.py
+++ b/superset/charts/commands/export.py
@@ -26,7 +26,7 @@
from superset.charts.commands.exceptions import ChartNotFoundError
from superset.charts.dao import ChartDAO
from superset.datasets.commands.export import ExportDatasetsCommand
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.models.slice import Slice
from superset.utils.dict_import_export import EXPORT_VERSION
@@ -43,7 +43,7 @@ class ExportChartsCommand(ExportModelsCommand):
not_found = ChartNotFoundError
@staticmethod
- def _export(model: Slice) -> Iterator[Tuple[str, str]]:
+ def _export(model: Slice, export_related: bool = True) -> Iterator[Tuple[str, str]]:
chart_slug = secure_filename(model.slice_name)
file_name = f"charts/{chart_slug}_{model.id}.yaml"
@@ -72,5 +72,5 @@ def _export(model: Slice) -> Iterator[Tuple[str, str]]:
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
- if model.table:
+ if model.table and export_related:
yield from ExportDatasetsCommand([model.table.id]).run()
diff --git a/superset/charts/data/api.py b/superset/charts/data/api.py
index dc92d97458c41..73468d651cbc5 100644
--- a/superset/charts/data/api.py
+++ b/superset/charts/data/api.py
@@ -306,16 +306,13 @@ def _run_async(
Execute command as an async query.
"""
# First, look for the chart query results in the cache.
+ result = None
try:
result = command.run(force_cached=True)
+ if result is not None:
+ return self._send_chart_response(result)
except ChartDataCacheLoadError:
- result = None # type: ignore
-
- already_cached_result = result is not None
-
- # If the chart query has already been cached, return it immediately.
- if already_cached_result:
- return self._send_chart_response(result)
+ pass
# Otherwise, kick off a background job to run the chart query.
# Clients will either poll or be notified of query completion,
diff --git a/superset/cli/main.py b/superset/cli/main.py
index 45b4c9e46a101..a1a03e9de26d0 100755
--- a/superset/cli/main.py
+++ b/superset/cli/main.py
@@ -45,7 +45,7 @@ def make_shell_context() -> Dict[str, Any]:
# add sub-commands
for load, module_name, is_pkg in pkgutil.walk_packages(
- cli.__path__, cli.__name__ + "." # type: ignore
+ cli.__path__, cli.__name__ + "."
):
module = importlib.import_module(module_name)
for attribute in module.__dict__.values():
diff --git a/superset/key_value/commands/__init__.py b/superset/commands/export/__init__.py
similarity index 100%
rename from superset/key_value/commands/__init__.py
rename to superset/commands/export/__init__.py
diff --git a/superset/commands/export/assets.py b/superset/commands/export/assets.py
new file mode 100644
index 0000000000000..8711cac4dd01c
--- /dev/null
+++ b/superset/commands/export/assets.py
@@ -0,0 +1,64 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from datetime import datetime, timezone
+from typing import Iterator, Tuple
+
+import yaml
+
+from superset.charts.commands.export import ExportChartsCommand
+from superset.commands.base import BaseCommand
+from superset.dashboards.commands.export import ExportDashboardsCommand
+from superset.databases.commands.export import ExportDatabasesCommand
+from superset.datasets.commands.export import ExportDatasetsCommand
+from superset.queries.saved_queries.commands.export import ExportSavedQueriesCommand
+from superset.utils.dict_import_export import EXPORT_VERSION
+
+METADATA_FILE_NAME = "metadata.yaml"
+
+
+class ExportAssetsCommand(BaseCommand):
+ """
+ Command that exports all databases, datasets, charts, dashboards and saved queries.
+ """
+
+ def run(self) -> Iterator[Tuple[str, str]]:
+
+ metadata = {
+ "version": EXPORT_VERSION,
+ "type": "assets",
+ "timestamp": datetime.now(tz=timezone.utc).isoformat(),
+ }
+ yield METADATA_FILE_NAME, yaml.safe_dump(metadata, sort_keys=False)
+ seen = {METADATA_FILE_NAME}
+
+ commands = [
+ ExportDatabasesCommand,
+ ExportDatasetsCommand,
+ ExportChartsCommand,
+ ExportDashboardsCommand,
+ ExportSavedQueriesCommand,
+ ]
+ for command in commands:
+ ids = [model.id for model in command.dao.find_all()]
+ for file_name, file_content in command(ids, export_related=False).run():
+ if file_name not in seen:
+ yield file_name, file_content
+ seen.add(file_name)
+
+ def validate(self) -> None:
+ pass
diff --git a/superset/commands/export.py b/superset/commands/export/models.py
similarity index 86%
rename from superset/commands/export.py
rename to superset/commands/export/models.py
index 2b54de87852e9..dd4ff3bc57172 100644
--- a/superset/commands/export.py
+++ b/superset/commands/export/models.py
@@ -14,10 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-# isort:skip_file
-from datetime import datetime
-from datetime import timezone
+from datetime import datetime, timezone
from typing import Iterator, List, Tuple, Type
import yaml
@@ -36,14 +34,15 @@ class ExportModelsCommand(BaseCommand):
dao: Type[BaseDAO] = BaseDAO
not_found: Type[CommandException] = CommandException
- def __init__(self, model_ids: List[int]):
+ def __init__(self, model_ids: List[int], export_related: bool = True):
self.model_ids = model_ids
+ self.export_related = export_related
# this will be set when calling validate()
self._models: List[Model] = []
@staticmethod
- def _export(model: Model) -> Iterator[Tuple[str, str]]:
+ def _export(model: Model, export_related: bool = True) -> Iterator[Tuple[str, str]]:
raise NotImplementedError("Subclasses MUST implement _export")
def run(self) -> Iterator[Tuple[str, str]]:
@@ -58,7 +57,7 @@ def run(self) -> Iterator[Tuple[str, str]]:
seen = {METADATA_FILE_NAME}
for model in self._models:
- for file_name, file_content in self._export(model):
+ for file_name, file_content in self._export(model, self.export_related):
if file_name not in seen:
yield file_name, file_content
seen.add(file_name)
diff --git a/superset/commands/importers/v1/__init__.py b/superset/commands/importers/v1/__init__.py
index 89891f48e597a..c620ec9f2ac85 100644
--- a/superset/commands/importers/v1/__init__.py
+++ b/superset/commands/importers/v1/__init__.py
@@ -24,9 +24,11 @@
from superset.commands.base import BaseCommand
from superset.commands.exceptions import CommandException, CommandInvalidError
from superset.commands.importers.v1.utils import (
+ load_configs,
load_metadata,
load_yaml,
METADATA_FILE_NAME,
+ validate_metadata_type,
)
from superset.dao.base import BaseDAO
from superset.models.core import Database
@@ -78,9 +80,13 @@ def validate(self) -> None:
except ValidationError as exc:
exceptions.append(exc)
metadata = None
+ if self.dao.model_cls:
+ validate_metadata_type(metadata, self.dao.model_cls.__name__, exceptions)
- self._validate_metadata_type(metadata, exceptions)
- self._load__configs(exceptions)
+ # load the configs and make sure we have confirmation to overwrite existing models
+ self._configs = load_configs(
+ self.contents, self.schemas, self.passwords, exceptions
+ )
self._prevent_overwrite_existing_model(exceptions)
if exceptions:
@@ -88,49 +94,6 @@ def validate(self) -> None:
exception.add_list(exceptions)
raise exception
- def _validate_metadata_type(
- self, metadata: Optional[Dict[str, str]], exceptions: List[ValidationError]
- ) -> None:
- """Validate that the type declared in METADATA_FILE_NAME is correct"""
- if metadata and "type" in metadata:
- type_validator = validate.Equal(self.dao.model_cls.__name__) # type: ignore
- try:
- type_validator(metadata["type"])
- except ValidationError as exc:
- exc.messages = {METADATA_FILE_NAME: {"type": exc.messages}}
- exceptions.append(exc)
-
- def _load__configs(self, exceptions: List[ValidationError]) -> None:
- # load existing databases so we can apply the password validation
- db_passwords: Dict[str, str] = {
- str(uuid): password
- for uuid, password in db.session.query(
- Database.uuid, Database.password
- ).all()
- }
- for file_name, content in self.contents.items():
- # skip directories
- if not content:
- continue
-
- prefix = file_name.split("/")[0]
- schema = self.schemas.get(f"{prefix}/")
- if schema:
- try:
- config = load_yaml(file_name, content)
-
- # populate passwords from the request or from existing DBs
- if file_name in self.passwords:
- config["password"] = self.passwords[file_name]
- elif prefix == "databases" and config["uuid"] in db_passwords:
- config["password"] = db_passwords[config["uuid"]]
-
- schema.load(config)
- self._configs[file_name] = config
- except ValidationError as exc:
- exc.messages = {file_name: exc.messages}
- exceptions.append(exc)
-
def _prevent_overwrite_existing_model( # pylint: disable=invalid-name
self, exceptions: List[ValidationError]
) -> None:
diff --git a/superset/commands/importers/v1/assets.py b/superset/commands/importers/v1/assets.py
new file mode 100644
index 0000000000000..9f945c560af5f
--- /dev/null
+++ b/superset/commands/importers/v1/assets.py
@@ -0,0 +1,164 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any, Dict, List, Optional, Tuple
+
+from marshmallow import Schema
+from marshmallow.exceptions import ValidationError
+from sqlalchemy.orm import Session
+from sqlalchemy.sql import select
+
+from superset import db
+from superset.charts.commands.importers.v1.utils import import_chart
+from superset.charts.schemas import ImportV1ChartSchema
+from superset.commands.base import BaseCommand
+from superset.commands.exceptions import CommandInvalidError, ImportFailedError
+from superset.commands.importers.v1.utils import (
+ load_configs,
+ load_metadata,
+ validate_metadata_type,
+)
+from superset.dashboards.commands.importers.v1.utils import (
+ find_chart_uuids,
+ import_dashboard,
+ update_id_refs,
+)
+from superset.dashboards.schemas import ImportV1DashboardSchema
+from superset.databases.commands.importers.v1.utils import import_database
+from superset.databases.schemas import ImportV1DatabaseSchema
+from superset.datasets.commands.importers.v1.utils import import_dataset
+from superset.datasets.schemas import ImportV1DatasetSchema
+from superset.models.dashboard import dashboard_slices
+from superset.queries.saved_queries.commands.importers.v1.utils import (
+ import_saved_query,
+)
+from superset.queries.saved_queries.schemas import ImportV1SavedQuerySchema
+
+
+class ImportAssetsCommand(BaseCommand):
+ """
+ Command for importing databases, datasets, charts, dashboards and saved queries.
+
+ This command is used for managing Superset assets externally under source control,
+ and will overwrite everything.
+ """
+
+ schemas: Dict[str, Schema] = {
+ "charts/": ImportV1ChartSchema(),
+ "dashboards/": ImportV1DashboardSchema(),
+ "datasets/": ImportV1DatasetSchema(),
+ "databases/": ImportV1DatabaseSchema(),
+ "queries/": ImportV1SavedQuerySchema(),
+ }
+
+ # pylint: disable=unused-argument
+ def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
+ self.contents = contents
+ self.passwords: Dict[str, str] = kwargs.get("passwords") or {}
+ self._configs: Dict[str, Any] = {}
+
+ # pylint: disable=too-many-locals
+ @staticmethod
+ def _import(session: Session, configs: Dict[str, Any]) -> None:
+ # import databases first
+ database_ids: Dict[str, int] = {}
+ for file_name, config in configs.items():
+ if file_name.startswith("databases/"):
+ database = import_database(session, config, overwrite=True)
+ database_ids[str(database.uuid)] = database.id
+
+ # import saved queries
+ for file_name, config in configs.items():
+ if file_name.startswith("queries/"):
+ config["db_id"] = database_ids[config["database_uuid"]]
+ import_saved_query(session, config, overwrite=True)
+
+ # import datasets
+ dataset_info: Dict[str, Dict[str, Any]] = {}
+ for file_name, config in configs.items():
+ if file_name.startswith("datasets/"):
+ config["database_id"] = database_ids[config["database_uuid"]]
+ dataset = import_dataset(session, config, overwrite=True)
+ dataset_info[str(dataset.uuid)] = {
+ "datasource_id": dataset.id,
+ "datasource_type": dataset.datasource_type,
+ "datasource_name": dataset.table_name,
+ }
+
+ # import charts
+ chart_ids: Dict[str, int] = {}
+ for file_name, config in configs.items():
+ if file_name.startswith("charts/"):
+ config.update(dataset_info[config["dataset_uuid"]])
+ chart = import_chart(session, config, overwrite=True)
+ chart_ids[str(chart.uuid)] = chart.id
+
+ # store the existing relationship between dashboards and charts
+ existing_relationships = session.execute(
+ select([dashboard_slices.c.dashboard_id, dashboard_slices.c.slice_id])
+ ).fetchall()
+
+ # import dashboards
+ dashboard_chart_ids: List[Tuple[int, int]] = []
+ for file_name, config in configs.items():
+ if file_name.startswith("dashboards/"):
+ config = update_id_refs(config, chart_ids, dataset_info)
+ dashboard = import_dashboard(session, config, overwrite=True)
+ for uuid in find_chart_uuids(config["position"]):
+ if uuid not in chart_ids:
+ break
+ chart_id = chart_ids[uuid]
+ if (dashboard.id, chart_id) not in existing_relationships:
+ dashboard_chart_ids.append((dashboard.id, chart_id))
+
+ # set ref in the dashboard_slices table
+ values = [
+ {"dashboard_id": dashboard_id, "slice_id": chart_id}
+ for (dashboard_id, chart_id) in dashboard_chart_ids
+ ]
+ # pylint: disable=no-value-for-parameter # sqlalchemy/issues/4656
+ session.execute(dashboard_slices.insert(), values)
+
+ def run(self) -> None:
+ self.validate()
+
+ # rollback to prevent partial imports
+ try:
+ self._import(db.session, self._configs)
+ db.session.commit()
+ except Exception as ex:
+ db.session.rollback()
+ raise ImportFailedError() from ex
+
+ def validate(self) -> None:
+ exceptions: List[ValidationError] = []
+
+ # verify that the metadata file is present and valid
+ try:
+ metadata: Optional[Dict[str, str]] = load_metadata(self.contents)
+ except ValidationError as exc:
+ exceptions.append(exc)
+ metadata = None
+ validate_metadata_type(metadata, "assets", exceptions)
+
+ self._configs = load_configs(
+ self.contents, self.schemas, self.passwords, exceptions
+ )
+
+ if exceptions:
+ exception = CommandInvalidError("Error importing assets")
+ exception.add_list(exceptions)
+ raise exception
diff --git a/superset/commands/importers/v1/utils.py b/superset/commands/importers/v1/utils.py
index 15bec8278ca75..de86e3f3cc6ab 100644
--- a/superset/commands/importers/v1/utils.py
+++ b/superset/commands/importers/v1/utils.py
@@ -15,14 +15,16 @@
import logging
from pathlib import Path
-from typing import Any, Dict
+from typing import Any, Dict, List, Optional
from zipfile import ZipFile
import yaml
from marshmallow import fields, Schema, validate
from marshmallow.exceptions import ValidationError
+from superset import db
from superset.commands.importers.exceptions import IncorrectVersionError
+from superset.models.core import Database
METADATA_FILE_NAME = "metadata.yaml"
IMPORT_VERSION = "1.0.0"
@@ -76,6 +78,58 @@ def load_metadata(contents: Dict[str, str]) -> Dict[str, str]:
return metadata
+def validate_metadata_type(
+ metadata: Optional[Dict[str, str]], type_: str, exceptions: List[ValidationError],
+) -> None:
+ """Validate that the type declared in METADATA_FILE_NAME is correct"""
+ if metadata and "type" in metadata:
+ type_validator = validate.Equal(type_)
+ try:
+ type_validator(metadata["type"])
+ except ValidationError as exc:
+ exc.messages = {METADATA_FILE_NAME: {"type": exc.messages}}
+ exceptions.append(exc)
+
+
+def load_configs(
+ contents: Dict[str, str],
+ schemas: Dict[str, Schema],
+ passwords: Dict[str, str],
+ exceptions: List[ValidationError],
+) -> Dict[str, Any]:
+ configs: Dict[str, Any] = {}
+
+ # load existing databases so we can apply the password validation
+ db_passwords: Dict[str, str] = {
+ str(uuid): password
+ for uuid, password in db.session.query(Database.uuid, Database.password).all()
+ }
+ for file_name, content in contents.items():
+ # skip directories
+ if not content:
+ continue
+
+ prefix = file_name.split("/")[0]
+ schema = schemas.get(f"{prefix}/")
+ if schema:
+ try:
+ config = load_yaml(file_name, content)
+
+ # populate passwords from the request or from existing DBs
+ if file_name in passwords:
+ config["password"] = passwords[file_name]
+ elif prefix == "databases" and config["uuid"] in db_passwords:
+ config["password"] = db_passwords[config["uuid"]]
+
+ schema.load(config)
+ configs[file_name] = config
+ except ValidationError as exc:
+ exc.messages = {file_name: exc.messages}
+ exceptions.append(exc)
+
+ return configs
+
+
def is_valid_config(file_name: str) -> bool:
path = Path(file_name)
diff --git a/superset/common/query_object.py b/superset/common/query_object.py
index 2a40155d1ca4f..fd988a36fac05 100644
--- a/superset/common/query_object.py
+++ b/superset/common/query_object.py
@@ -31,7 +31,7 @@
QueryObjectValidationError,
)
from superset.sql_parse import validate_filter_clause
-from superset.typing import Column, Metric, OrderBy
+from superset.superset_typing import Column, Metric, OrderBy
from superset.utils import pandas_postprocessing
from superset.utils.core import (
DTTM_ALIAS,
diff --git a/superset/config.py b/superset/config.py
index 6579719ea81cc..775765d08f0aa 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -44,8 +44,9 @@
from superset.constants import CHANGE_ME_SECRET_KEY
from superset.jinja_context import BaseTemplateProcessor
+from superset.key_value.types import KeyType
from superset.stats_logger import DummyStatsLogger
-from superset.typing import CacheConfig
+from superset.superset_typing import CacheConfig
from superset.utils.core import is_test, parse_boolean_string
from superset.utils.encrypt import SQLAlchemyUtilsAdapter
from superset.utils.log import DBEventLogger
@@ -390,7 +391,7 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:
"REMOVE_SLICE_LEVEL_LABEL_COLORS": False,
"SHARE_QUERIES_VIA_KV_STORE": False,
"TAGGING_SYSTEM": False,
- "SQLLAB_BACKEND_PERSISTENCE": False,
+ "SQLLAB_BACKEND_PERSISTENCE": True,
"LISTVIEWS_DEFAULT_CARD_VIEW": False,
# Enables the replacement React views for all the FAB views (list, edit, show) with
# designs introduced in https://github.com/apache/superset/issues/8976
@@ -407,7 +408,7 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:
"DASHBOARD_NATIVE_FILTERS_SET": False,
"DASHBOARD_FILTERS_EXPERIMENTAL": False,
"GLOBAL_ASYNC_QUERIES": False,
- "VERSIONED_EXPORT": False,
+ "VERSIONED_EXPORT": True,
# Note that: RowLevelSecurityFilter is only given by default to the Admin role
# and the Admin Role does have the all_datasources security permission.
# But, if users create a specific role with access to RowLevelSecurityFilter MVC
@@ -611,6 +612,8 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]:
# store cache keys by datasource UID (via CacheKey) for custom processing/invalidation
STORE_CACHE_KEYS_IN_METADATA_DB = False
+PERMALINK_KEY_TYPE: KeyType = "uuid"
+
# CORS Options
ENABLE_CORS = False
CORS_OPTIONS: Dict[Any, Any] = {}
@@ -1249,6 +1252,10 @@ def SQL_QUERY_MUTATOR( # pylint: disable=invalid-name,unused-argument
# SQLALCHEMY_DATABASE_URI by default if set to `None`
SQLALCHEMY_EXAMPLES_URI = None
+# Optional prefix to be added to all static asset paths when rendering the UI.
+# This is useful for hosting assets in an external CDN, for example
+STATIC_ASSETS_PREFIX = ""
+
# Some sqlalchemy connection strings can open Superset to security risks.
# Typically these should not be allowed.
PREVENT_UNSAFE_DB_CONNECTIONS = True
@@ -1336,10 +1343,6 @@ def SQL_QUERY_MUTATOR( # pylint: disable=invalid-name,unused-argument
# Do not show user info or profile in the menu
MENU_HIDE_USER_INFO = False
-# SQLalchemy link doc reference
-SQLALCHEMY_DOCS_URL = "https://docs.sqlalchemy.org/en/13/core/engines.html"
-SQLALCHEMY_DISPLAY_TEXT = "SQLAlchemy docs"
-
# Set to False to only allow viewing own recent activity
ENABLE_BROAD_ACTIVITY_ACCESS = True
diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py
index 967235f328c2e..939a1fc1c7c27 100644
--- a/superset/connectors/base/models.py
+++ b/superset/connectors/base/models.py
@@ -29,7 +29,7 @@
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.models.helpers import AuditMixinNullable, ImportExportMixin, QueryResult
from superset.models.slice import Slice
-from superset.typing import FilterValue, FilterValues, QueryObjectDict
+from superset.superset_typing import FilterValue, FilterValues, QueryObjectDict
from superset.utils import core as utils
from superset.utils.core import GenericDataType
@@ -339,11 +339,14 @@ def data_for_slices( # pylint: disable=too-many-locals
or []
)
else:
- column_names.update(
- column
+ _columns = [
+ utils.get_column_name(column)
+ if utils.is_adhoc_column(column)
+ else column
for column_param in COLUMN_FORM_DATA_PARAMS
for column in utils.get_iterable(form_data.get(column_param) or [])
- )
+ ]
+ column_names.update(_columns)
filtered_metrics = [
metric
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index 32edb695279c0..3a17ec5319374 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -58,7 +58,7 @@
from superset.extensions import encrypted_field_factory
from superset.models.core import Database
from superset.models.helpers import AuditMixinNullable, ImportExportMixin, QueryResult
-from superset.typing import (
+from superset.superset_typing import (
AdhocMetric,
AdhocMetricColumn,
FilterValues,
diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py
index 03a3a42ec08cc..cd7e5d279ba25 100644
--- a/superset/connectors/druid/views.py
+++ b/superset/connectors/druid/views.py
@@ -34,7 +34,7 @@
from superset.connectors.connector_registry import ConnectorRegistry
from superset.connectors.druid import models
from superset.constants import RouteMethod
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.views.base import (
BaseSupersetView,
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 9cc2f8a78136b..99cbc50997559 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -96,8 +96,14 @@
QueryResult,
)
from superset.sql_parse import ParsedQuery
+from superset.superset_typing import (
+ AdhocColumn,
+ AdhocMetric,
+ Metric,
+ OrderBy,
+ QueryObjectDict,
+)
from superset.tables.models import Table as NewTable
-from superset.typing import AdhocColumn, AdhocMetric, Metric, OrderBy, QueryObjectDict
from superset.utils import core as utils
from superset.utils.core import (
GenericDataType,
@@ -308,7 +314,7 @@ def get_time_filter(
if start_dttm:
l.append(col >= self.table.text(self.dttm_sql_literal(start_dttm)))
if end_dttm:
- l.append(col <= self.table.text(self.dttm_sql_literal(end_dttm)))
+ l.append(col < self.table.text(self.dttm_sql_literal(end_dttm)))
return and_(*l)
def get_timestamp_expression(
diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py
index fef8a2d8a4356..a16ffa49f62ba 100644
--- a/superset/connectors/sqla/views.py
+++ b/superset/connectors/sqla/views.py
@@ -36,7 +36,7 @@
from superset.connectors.base.views import DatasourceModelView
from superset.connectors.sqla import models
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.views.base import (
check_ownership,
diff --git a/superset/dashboards/commands/export.py b/superset/dashboards/commands/export.py
index a0467972d5feb..87408bab37706 100644
--- a/superset/dashboards/commands/export.py
+++ b/superset/dashboards/commands/export.py
@@ -29,7 +29,7 @@
from superset.dashboards.commands.exceptions import DashboardNotFoundError
from superset.dashboards.commands.importers.v1.utils import find_chart_uuids
from superset.dashboards.dao import DashboardDAO
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.datasets.commands.export import ExportDatasetsCommand
from superset.datasets.dao import DatasetDAO
from superset.models.dashboard import Dashboard
@@ -106,8 +106,11 @@ class ExportDashboardsCommand(ExportModelsCommand):
dao = DashboardDAO
not_found = DashboardNotFoundError
+ # pylint: disable=too-many-locals
@staticmethod
- def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
+ def _export(
+ model: Dashboard, export_related: bool = True
+ ) -> Iterator[Tuple[str, str]]:
dashboard_slug = secure_filename(model.dashboard_title)
file_name = f"dashboards/{dashboard_slug}.yaml"
@@ -138,7 +141,8 @@ def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
if dataset_id is not None:
dataset = DatasetDAO.find_by_id(dataset_id)
target["datasetUuid"] = str(dataset.uuid)
- yield from ExportDatasetsCommand([dataset_id]).run()
+ if export_related:
+ yield from ExportDatasetsCommand([dataset_id]).run()
# the mapping between dashboard -> charts is inferred from the position
# attribute, so if it's not present we need to add a default config
@@ -160,5 +164,6 @@ def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
- chart_ids = [chart.id for chart in model.slices]
- yield from ExportChartsCommand(chart_ids).run()
+ if export_related:
+ chart_ids = [chart.id for chart in model.slices]
+ yield from ExportChartsCommand(chart_ids).run()
diff --git a/superset/dashboards/filter_state/api.py b/superset/dashboards/filter_state/api.py
index 6add94558fc6a..efe6e69178301 100644
--- a/superset/dashboards/filter_state/api.py
+++ b/superset/dashboards/filter_state/api.py
@@ -25,12 +25,12 @@
from superset.dashboards.filter_state.commands.get import GetFilterStateCommand
from superset.dashboards.filter_state.commands.update import UpdateFilterStateCommand
from superset.extensions import event_logger
-from superset.key_value.api import KeyValueRestApi
+from superset.temporary_cache.api import TemporaryCacheRestApi
logger = logging.getLogger(__name__)
-class DashboardFilterStateRestApi(KeyValueRestApi):
+class DashboardFilterStateRestApi(TemporaryCacheRestApi):
class_permission_name = "DashboardFilterStateRestApi"
resource_name = "dashboard"
openapi_spec_tag = "Dashboard Filter State"
@@ -74,7 +74,7 @@ def post(self, pk: int) -> Response:
content:
application/json:
schema:
- $ref: '#/components/schemas/KeyValuePostSchema'
+ $ref: '#/components/schemas/TemporaryCachePostSchema'
responses:
201:
description: The value was stored successfully.
@@ -128,7 +128,7 @@ def put(self, pk: int, key: str) -> Response:
content:
application/json:
schema:
- $ref: '#/components/schemas/KeyValuePutSchema'
+ $ref: '#/components/schemas/TemporaryCachePutSchema'
responses:
200:
description: The value was stored successfully.
diff --git a/superset/dashboards/filter_state/commands/create.py b/superset/dashboards/filter_state/commands/create.py
index f1abe8a5f0e92..a37ee072fa75e 100644
--- a/superset/dashboards/filter_state/commands/create.py
+++ b/superset/dashboards/filter_state/commands/create.py
@@ -18,13 +18,13 @@
from superset.dashboards.dao import DashboardDAO
from superset.extensions import cache_manager
-from superset.key_value.commands.create import CreateKeyValueCommand
-from superset.key_value.commands.entry import Entry
-from superset.key_value.commands.parameters import CommandParameters
-from superset.key_value.utils import cache_key, random_key
+from superset.temporary_cache.commands.create import CreateTemporaryCacheCommand
+from superset.temporary_cache.commands.entry import Entry
+from superset.temporary_cache.commands.parameters import CommandParameters
+from superset.temporary_cache.utils import cache_key, random_key
-class CreateFilterStateCommand(CreateKeyValueCommand):
+class CreateFilterStateCommand(CreateTemporaryCacheCommand):
def create(self, cmd_params: CommandParameters) -> str:
resource_id = cmd_params.resource_id
actor = cmd_params.actor
diff --git a/superset/dashboards/filter_state/commands/delete.py b/superset/dashboards/filter_state/commands/delete.py
index 1ad3f5e547367..155c63f1084c6 100644
--- a/superset/dashboards/filter_state/commands/delete.py
+++ b/superset/dashboards/filter_state/commands/delete.py
@@ -18,14 +18,14 @@
from superset.dashboards.dao import DashboardDAO
from superset.extensions import cache_manager
-from superset.key_value.commands.delete import DeleteKeyValueCommand
-from superset.key_value.commands.entry import Entry
-from superset.key_value.commands.exceptions import KeyValueAccessDeniedError
-from superset.key_value.commands.parameters import CommandParameters
-from superset.key_value.utils import cache_key
+from superset.temporary_cache.commands.delete import DeleteTemporaryCacheCommand
+from superset.temporary_cache.commands.entry import Entry
+from superset.temporary_cache.commands.exceptions import TemporaryCacheAccessDeniedError
+from superset.temporary_cache.commands.parameters import CommandParameters
+from superset.temporary_cache.utils import cache_key
-class DeleteFilterStateCommand(DeleteKeyValueCommand):
+class DeleteFilterStateCommand(DeleteTemporaryCacheCommand):
def delete(self, cmd_params: CommandParameters) -> bool:
resource_id = cmd_params.resource_id
actor = cmd_params.actor
@@ -35,7 +35,7 @@ def delete(self, cmd_params: CommandParameters) -> bool:
entry: Entry = cache_manager.filter_state_cache.get(key)
if entry:
if entry["owner"] != actor.get_user_id():
- raise KeyValueAccessDeniedError()
+ raise TemporaryCacheAccessDeniedError()
tab_id = cmd_params.tab_id
contextual_key = cache_key(session.get("_id"), tab_id, resource_id)
cache_manager.filter_state_cache.delete(contextual_key)
diff --git a/superset/dashboards/filter_state/commands/get.py b/superset/dashboards/filter_state/commands/get.py
index 509960fdb3829..9cdd5bcddcb48 100644
--- a/superset/dashboards/filter_state/commands/get.py
+++ b/superset/dashboards/filter_state/commands/get.py
@@ -20,12 +20,12 @@
from superset.dashboards.dao import DashboardDAO
from superset.extensions import cache_manager
-from superset.key_value.commands.get import GetKeyValueCommand
-from superset.key_value.commands.parameters import CommandParameters
-from superset.key_value.utils import cache_key
+from superset.temporary_cache.commands.get import GetTemporaryCacheCommand
+from superset.temporary_cache.commands.parameters import CommandParameters
+from superset.temporary_cache.utils import cache_key
-class GetFilterStateCommand(GetKeyValueCommand):
+class GetFilterStateCommand(GetTemporaryCacheCommand):
def __init__(self, cmd_params: CommandParameters) -> None:
super().__init__(cmd_params)
config = app.config["FILTER_STATE_CACHE_CONFIG"]
diff --git a/superset/dashboards/filter_state/commands/update.py b/superset/dashboards/filter_state/commands/update.py
index 5d8d9151dd8cd..6a9cd3931c9c8 100644
--- a/superset/dashboards/filter_state/commands/update.py
+++ b/superset/dashboards/filter_state/commands/update.py
@@ -20,14 +20,14 @@
from superset.dashboards.dao import DashboardDAO
from superset.extensions import cache_manager
-from superset.key_value.commands.entry import Entry
-from superset.key_value.commands.exceptions import KeyValueAccessDeniedError
-from superset.key_value.commands.parameters import CommandParameters
-from superset.key_value.commands.update import UpdateKeyValueCommand
-from superset.key_value.utils import cache_key, random_key
+from superset.temporary_cache.commands.entry import Entry
+from superset.temporary_cache.commands.exceptions import TemporaryCacheAccessDeniedError
+from superset.temporary_cache.commands.parameters import CommandParameters
+from superset.temporary_cache.commands.update import UpdateTemporaryCacheCommand
+from superset.temporary_cache.utils import cache_key, random_key
-class UpdateFilterStateCommand(UpdateKeyValueCommand):
+class UpdateFilterStateCommand(UpdateTemporaryCacheCommand):
def update(self, cmd_params: CommandParameters) -> Optional[str]:
resource_id = cmd_params.resource_id
actor = cmd_params.actor
@@ -41,7 +41,7 @@ def update(self, cmd_params: CommandParameters) -> Optional[str]:
if entry:
user_id = actor.get_user_id()
if entry["owner"] != user_id:
- raise KeyValueAccessDeniedError()
+ raise TemporaryCacheAccessDeniedError()
# Generate a new key if tab_id changes or equals 0
contextual_key = cache_key(
diff --git a/tests/unit_tests/explore/form_data/__init__.py b/superset/dashboards/permalink/__init__.py
similarity index 100%
rename from tests/unit_tests/explore/form_data/__init__.py
rename to superset/dashboards/permalink/__init__.py
diff --git a/superset/dashboards/permalink/api.py b/superset/dashboards/permalink/api.py
new file mode 100644
index 0000000000000..2236c4de2a1af
--- /dev/null
+++ b/superset/dashboards/permalink/api.py
@@ -0,0 +1,171 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+
+from flask import current_app, g, request, Response
+from flask_appbuilder.api import BaseApi, expose, protect, safe
+from marshmallow import ValidationError
+
+from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
+from superset.dashboards.commands.exceptions import (
+ DashboardAccessDeniedError,
+ DashboardNotFoundError,
+)
+from superset.dashboards.permalink.commands.create import (
+ CreateDashboardPermalinkCommand,
+)
+from superset.dashboards.permalink.commands.get import GetDashboardPermalinkCommand
+from superset.dashboards.permalink.exceptions import DashboardPermalinkInvalidStateError
+from superset.dashboards.permalink.schemas import DashboardPermalinkPostSchema
+from superset.extensions import event_logger
+from superset.key_value.exceptions import KeyValueAccessDeniedError
+from superset.views.base_api import requires_json
+
+logger = logging.getLogger(__name__)
+
+
+class DashboardPermalinkRestApi(BaseApi):
+ add_model_schema = DashboardPermalinkPostSchema()
+ method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
+ include_route_methods = {
+ RouteMethod.POST,
+ RouteMethod.PUT,
+ RouteMethod.GET,
+ RouteMethod.DELETE,
+ }
+ allow_browser_login = True
+ class_permission_name = "DashboardPermalinkRestApi"
+ resource_name = "dashboard"
+ openapi_spec_tag = "Dashboard Permanent Link"
+ openapi_spec_component_schemas = (DashboardPermalinkPostSchema,)
+
+ @expose("//permalink", methods=["POST"])
+ @protect()
+ @safe
+ @event_logger.log_this_with_context(
+ action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.post",
+ log_to_statsd=False,
+ )
+ @requires_json
+ def post(self, pk: str) -> Response:
+ """Stores a new permanent link.
+ ---
+ post:
+ description: >-
+ Stores a new permanent link.
+ parameters:
+ - in: path
+ schema:
+ type: string
+ name: pk
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/DashboardPermalinkPostSchema'
+ responses:
+ 201:
+ description: The permanent link was stored successfully.
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ key:
+ type: string
+ description: The key to retrieve the permanent link data.
+ url:
+ type: string
+ description: permanent link.
+ 400:
+ $ref: '#/components/responses/400'
+ 401:
+ $ref: '#/components/responses/401'
+ 422:
+ $ref: '#/components/responses/422'
+ 500:
+ $ref: '#/components/responses/500'
+ """
+ key_type = current_app.config["PERMALINK_KEY_TYPE"]
+ try:
+ state = self.add_model_schema.load(request.json)
+ key = CreateDashboardPermalinkCommand(
+ actor=g.user, dashboard_id=pk, state=state, key_type=key_type,
+ ).run()
+ http_origin = request.headers.environ.get("HTTP_ORIGIN")
+ url = f"{http_origin}/superset/dashboard/p/{key}/"
+ return self.response(201, key=key, url=url)
+ except (ValidationError, DashboardPermalinkInvalidStateError) as ex:
+ return self.response(400, message=str(ex))
+ except (DashboardAccessDeniedError, KeyValueAccessDeniedError,) as ex:
+ return self.response(403, message=str(ex))
+ except DashboardNotFoundError as ex:
+ return self.response(404, message=str(ex))
+
+ @expose("/permalink/", methods=["GET"])
+ @protect()
+ @safe
+ @event_logger.log_this_with_context(
+ action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get",
+ log_to_statsd=False,
+ )
+ def get(self, key: str) -> Response:
+        """Retrieves permanent link state for a dashboard.
+ ---
+ get:
+ description: >-
+          Retrieves dashboard state associated with a permanent link.
+ parameters:
+ - in: path
+ schema:
+ type: string
+ name: key
+ responses:
+ 200:
+ description: Returns the stored state.
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ state:
+ type: object
+ description: The stored state
+ 400:
+ $ref: '#/components/responses/400'
+ 401:
+ $ref: '#/components/responses/401'
+ 404:
+ $ref: '#/components/responses/404'
+ 422:
+ $ref: '#/components/responses/422'
+ 500:
+ $ref: '#/components/responses/500'
+ """
+ try:
+ key_type = current_app.config["PERMALINK_KEY_TYPE"]
+ value = GetDashboardPermalinkCommand(
+ actor=g.user, key=key, key_type=key_type
+ ).run()
+ if not value:
+ return self.response_404()
+ return self.response(200, **value)
+ except DashboardAccessDeniedError as ex:
+ return self.response(403, message=str(ex))
+ except DashboardNotFoundError as ex:
+ return self.response(404, message=str(ex))
diff --git a/superset/dashboards/permalink/commands/__init__.py b/superset/dashboards/permalink/commands/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/superset/dashboards/permalink/commands/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/dashboards/permalink/commands/base.py b/superset/dashboards/permalink/commands/base.py
new file mode 100644
index 0000000000000..2c0343810e024
--- /dev/null
+++ b/superset/dashboards/permalink/commands/base.py
@@ -0,0 +1,23 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from abc import ABC
+
+from superset.commands.base import BaseCommand
+
+
+class BaseDashboardPermalinkCommand(BaseCommand, ABC):
+ resource = "dashboard_permalink"
diff --git a/superset/dashboards/permalink/commands/create.py b/superset/dashboards/permalink/commands/create.py
new file mode 100644
index 0000000000000..954c08a7b123f
--- /dev/null
+++ b/superset/dashboards/permalink/commands/create.py
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+
+from flask_appbuilder.security.sqla.models import User
+from sqlalchemy.exc import SQLAlchemyError
+
+from superset.dashboards.dao import DashboardDAO
+from superset.dashboards.permalink.commands.base import BaseDashboardPermalinkCommand
+from superset.dashboards.permalink.exceptions import DashboardPermalinkCreateFailedError
+from superset.dashboards.permalink.types import DashboardPermalinkState
+from superset.key_value.commands.create import CreateKeyValueCommand
+from superset.key_value.types import KeyType
+
+logger = logging.getLogger(__name__)
+
+
+class CreateDashboardPermalinkCommand(BaseDashboardPermalinkCommand):
+ def __init__(
+ self,
+ actor: User,
+ dashboard_id: str,
+ state: DashboardPermalinkState,
+ key_type: KeyType,
+ ):
+ self.actor = actor
+ self.dashboard_id = dashboard_id
+ self.state = state
+ self.key_type = key_type
+
+ def run(self) -> str:
+ self.validate()
+ try:
+ DashboardDAO.get_by_id_or_slug(self.dashboard_id)
+ value = {
+ "dashboardId": self.dashboard_id,
+ "state": self.state,
+ }
+ return CreateKeyValueCommand(
+ self.actor, self.resource, value, self.key_type
+ ).run()
+ except SQLAlchemyError as ex:
+ logger.exception("Error running create command")
+ raise DashboardPermalinkCreateFailedError() from ex
+
+ def validate(self) -> None:
+ pass
diff --git a/superset/dashboards/permalink/commands/get.py b/superset/dashboards/permalink/commands/get.py
new file mode 100644
index 0000000000000..c82ade64d1a46
--- /dev/null
+++ b/superset/dashboards/permalink/commands/get.py
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+from typing import Optional
+
+from flask_appbuilder.security.sqla.models import User
+from sqlalchemy.exc import SQLAlchemyError
+
+from superset.dashboards.commands.exceptions import DashboardNotFoundError
+from superset.dashboards.dao import DashboardDAO
+from superset.dashboards.permalink.commands.base import BaseDashboardPermalinkCommand
+from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError
+from superset.dashboards.permalink.types import DashboardPermalinkValue
+from superset.key_value.commands.get import GetKeyValueCommand
+from superset.key_value.exceptions import KeyValueGetFailedError, KeyValueParseKeyError
+from superset.key_value.types import KeyType
+
+logger = logging.getLogger(__name__)
+
+
+class GetDashboardPermalinkCommand(BaseDashboardPermalinkCommand):
+ def __init__(
+ self, actor: User, key: str, key_type: KeyType,
+ ):
+ self.actor = actor
+ self.key = key
+ self.key_type = key_type
+
+ def run(self) -> Optional[DashboardPermalinkValue]:
+ self.validate()
+ try:
+ command = GetKeyValueCommand(
+ self.resource, self.key, key_type=self.key_type
+ )
+ value: Optional[DashboardPermalinkValue] = command.run()
+ if value:
+ DashboardDAO.get_by_id_or_slug(value["dashboardId"])
+ return value
+ return None
+ except (
+ DashboardNotFoundError,
+ KeyValueGetFailedError,
+ KeyValueParseKeyError,
+ ) as ex:
+ raise DashboardPermalinkGetFailedError(message=ex.message) from ex
+ except SQLAlchemyError as ex:
+ logger.exception("Error running get command")
+ raise DashboardPermalinkGetFailedError() from ex
+
+ def validate(self) -> None:
+ pass
diff --git a/superset/dashboards/permalink/exceptions.py b/superset/dashboards/permalink/exceptions.py
new file mode 100644
index 0000000000000..c234d614f1a27
--- /dev/null
+++ b/superset/dashboards/permalink/exceptions.py
@@ -0,0 +1,31 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from flask_babel import lazy_gettext as _
+
+from superset.commands.exceptions import CommandException, CreateFailedError
+
+
+class DashboardPermalinkInvalidStateError(CommandException):
+ message = _("Invalid state.")
+
+
+class DashboardPermalinkCreateFailedError(CreateFailedError):
+ message = _("An error occurred while creating the value.")
+
+
+class DashboardPermalinkGetFailedError(CommandException):
+ message = _("An error occurred while accessing the value.")
diff --git a/superset/dashboards/permalink/schemas.py b/superset/dashboards/permalink/schemas.py
new file mode 100644
index 0000000000000..91d60b02c23b7
--- /dev/null
+++ b/superset/dashboards/permalink/schemas.py
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from marshmallow import fields, Schema
+
+
+class DashboardPermalinkPostSchema(Schema):
+ filterState = fields.Dict(
+ required=True, allow_none=False, description="Native filter state",
+ )
+ urlParams = fields.List(
+ fields.Tuple(
+ (
+ fields.String(required=True, allow_none=True, description="Key"),
+ fields.String(required=True, allow_none=True, description="Value"),
+ ),
+ required=False,
+ allow_none=True,
+ description="URL Parameter key-value pair",
+ ),
+ required=False,
+ allow_none=True,
+ description="URL Parameters",
+ )
+ hash = fields.String(
+ required=False, allow_none=True, description="Optional anchor link"
+ )
diff --git a/superset/key_value/schemas.py b/superset/dashboards/permalink/types.py
similarity index 69%
rename from superset/key_value/schemas.py
rename to superset/dashboards/permalink/types.py
index 3583a0cb769b6..815c5bfe91d47 100644
--- a/superset/key_value/schemas.py
+++ b/superset/dashboards/permalink/types.py
@@ -14,16 +14,15 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-from marshmallow import fields, Schema
+from typing import Any, Dict, List, Optional, Tuple, TypedDict
-class KeyValuePostSchema(Schema):
- value = fields.String(
- required=True, allow_none=False, description="Any type of JSON supported text."
- )
+class DashboardPermalinkState(TypedDict):
+ filterState: Dict[str, Any]
+ hash: Optional[str]
+ urlParams: Optional[List[Tuple[str, str]]]
-class KeyValuePutSchema(Schema):
- value = fields.String(
- required=True, allow_none=False, description="Any type of JSON supported text."
- )
+class DashboardPermalinkValue(TypedDict):
+ dashboardId: str
+ state: DashboardPermalinkState
diff --git a/superset/dashboards/schemas.py b/superset/dashboards/schemas.py
index d2f55d2e15ba8..b1831fdcbbe70 100644
--- a/superset/dashboards/schemas.py
+++ b/superset/dashboards/schemas.py
@@ -175,6 +175,7 @@ class DatabaseSchema(Schema):
allows_subquery = fields.Bool()
allows_cost_estimate = fields.Bool()
allows_virtual_table_explore = fields.Bool()
+ disable_data_preview = fields.Bool()
explore_database_id = fields.Int()
diff --git a/superset/databases/api.py b/superset/databases/api.py
index 1b8b408c1ca91..3737addedeb92 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -70,7 +70,7 @@
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.extensions import security_manager
from superset.models.core import Database
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils.core import error_msg_from_exception
from superset.views.base_api import (
BaseSupersetModelRestApi,
@@ -145,6 +145,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
"extra",
"force_ctas_schema",
"id",
+ "disable_data_preview",
]
add_columns = [
"database_name",
diff --git a/superset/databases/commands/export.py b/superset/databases/commands/export.py
index 134bda580c7e5..9e8cb7e374426 100644
--- a/superset/databases/commands/export.py
+++ b/superset/databases/commands/export.py
@@ -25,7 +25,7 @@
from superset.databases.commands.exceptions import DatabaseNotFoundError
from superset.databases.dao import DatabaseDAO
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.models.core import Database
from superset.utils.dict_import_export import EXPORT_VERSION
@@ -55,7 +55,9 @@ class ExportDatabasesCommand(ExportModelsCommand):
not_found = DatabaseNotFoundError
@staticmethod
- def _export(model: Database) -> Iterator[Tuple[str, str]]:
+ def _export(
+ model: Database, export_related: bool = True
+ ) -> Iterator[Tuple[str, str]]:
database_slug = secure_filename(model.database_name)
file_name = f"databases/{database_slug}.yaml"
@@ -90,18 +92,19 @@ def _export(model: Database) -> Iterator[Tuple[str, str]]:
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
- for dataset in model.tables:
- dataset_slug = secure_filename(dataset.table_name)
- file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
-
- payload = dataset.export_to_dict(
- recursive=True,
- include_parent_ref=False,
- include_defaults=True,
- export_uuids=True,
- )
- payload["version"] = EXPORT_VERSION
- payload["database_uuid"] = str(model.uuid)
-
- file_content = yaml.safe_dump(payload, sort_keys=False)
- yield file_name, file_content
+ if export_related:
+ for dataset in model.tables:
+ dataset_slug = secure_filename(dataset.table_name)
+ file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
+
+ payload = dataset.export_to_dict(
+ recursive=True,
+ include_parent_ref=False,
+ include_defaults=True,
+ export_uuids=True,
+ )
+ payload["version"] = EXPORT_VERSION
+ payload["database_uuid"] = str(model.uuid)
+
+ file_content = yaml.safe_dump(payload, sort_keys=False)
+ yield file_name, file_content
diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py
index e030a7e06add8..4483b051f1be0 100644
--- a/superset/databases/schemas.py
+++ b/superset/databases/schemas.py
@@ -115,10 +115,12 @@
'["public", "csv_upload"]**. '
"If database flavor does not support schema or any schema is allowed "
"to be accessed, just leave the list empty "
- "4. the ``version`` field is a string specifying the this db's version. "
+ "4. The ``version`` field is a string specifying the this db's version. "
"This should be used with Presto DBs so that the syntax is correct "
"5. The ``allows_virtual_table_explore`` field is a boolean specifying "
- "whether or not the Explore button in SQL Lab results is shown.",
+ "whether or not the Explore button in SQL Lab results is shown. "
+ "6. The ``disable_data_preview`` field is a boolean specifying whether or not data "
+ "preview queries will be run when fetching table metadata in SQL Lab.",
True,
)
get_export_ids_schema = {"type": "array", "items": {"type": "integer"}}
diff --git a/superset/datasets/commands/export.py b/superset/datasets/commands/export.py
index 4e3843a0daee9..be9210a06c669 100644
--- a/superset/datasets/commands/export.py
+++ b/superset/datasets/commands/export.py
@@ -23,7 +23,7 @@
import yaml
from werkzeug.utils import secure_filename
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.datasets.dao import DatasetDAO
@@ -40,7 +40,9 @@ class ExportDatasetsCommand(ExportModelsCommand):
not_found = DatasetNotFoundError
@staticmethod
- def _export(model: SqlaTable) -> Iterator[Tuple[str, str]]:
+ def _export(
+ model: SqlaTable, export_related: bool = True
+ ) -> Iterator[Tuple[str, str]]:
database_slug = secure_filename(model.database.database_name)
dataset_slug = secure_filename(model.table_name)
file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
@@ -76,23 +78,24 @@ def _export(model: SqlaTable) -> Iterator[Tuple[str, str]]:
yield file_name, file_content
# include database as well
- file_name = f"databases/{database_slug}.yaml"
-
- payload = model.database.export_to_dict(
- recursive=False,
- include_parent_ref=False,
- include_defaults=True,
- export_uuids=True,
- )
- # TODO (betodealmeida): move this logic to export_to_dict once this
- # becomes the default export endpoint
- if payload.get("extra"):
- try:
- payload["extra"] = json.loads(payload["extra"])
- except json.decoder.JSONDecodeError:
- logger.info("Unable to decode `extra` field: %s", payload["extra"])
+ if export_related:
+ file_name = f"databases/{database_slug}.yaml"
+
+ payload = model.database.export_to_dict(
+ recursive=False,
+ include_parent_ref=False,
+ include_defaults=True,
+ export_uuids=True,
+ )
+ # TODO (betodealmeida): move this logic to export_to_dict once this
+ # becomes the default export endpoint
+ if payload.get("extra"):
+ try:
+ payload["extra"] = json.loads(payload["extra"])
+ except json.decoder.JSONDecodeError:
+ logger.info("Unable to decode `extra` field: %s", payload["extra"])
- payload["version"] = EXPORT_VERSION
+ payload["version"] = EXPORT_VERSION
- file_content = yaml.safe_dump(payload, sort_keys=False)
- yield file_name, file_content
+ file_content = yaml.safe_dump(payload, sort_keys=False)
+ yield file_name, file_content
diff --git a/superset/explore/form_data/api.py b/superset/explore/form_data/api.py
index 7cbb22715c700..dc6ee7ea94cc3 100644
--- a/superset/explore/form_data/api.py
+++ b/superset/explore/form_data/api.py
@@ -37,7 +37,7 @@
from superset.explore.form_data.commands.update import UpdateFormDataCommand
from superset.explore.form_data.schemas import FormDataPostSchema, FormDataPutSchema
from superset.extensions import event_logger
-from superset.key_value.commands.exceptions import KeyValueAccessDeniedError
+from superset.temporary_cache.commands.exceptions import TemporaryCacheAccessDeniedError
from superset.views.base_api import requires_json
logger = logging.getLogger(__name__)
@@ -121,7 +121,7 @@ def post(self) -> Response:
except (
ChartAccessDeniedError,
DatasetAccessDeniedError,
- KeyValueAccessDeniedError,
+ TemporaryCacheAccessDeniedError,
) as ex:
return self.response(403, message=str(ex))
except (ChartNotFoundError, DatasetNotFoundError) as ex:
@@ -198,7 +198,7 @@ def put(self, key: str) -> Response:
except (
ChartAccessDeniedError,
DatasetAccessDeniedError,
- KeyValueAccessDeniedError,
+ TemporaryCacheAccessDeniedError,
) as ex:
return self.response(403, message=str(ex))
except (ChartNotFoundError, DatasetNotFoundError) as ex:
@@ -253,7 +253,7 @@ def get(self, key: str) -> Response:
except (
ChartAccessDeniedError,
DatasetAccessDeniedError,
- KeyValueAccessDeniedError,
+ TemporaryCacheAccessDeniedError,
) as ex:
return self.response(403, message=str(ex))
except (ChartNotFoundError, DatasetNotFoundError) as ex:
@@ -309,7 +309,7 @@ def delete(self, key: str) -> Response:
except (
ChartAccessDeniedError,
DatasetAccessDeniedError,
- KeyValueAccessDeniedError,
+ TemporaryCacheAccessDeniedError,
) as ex:
return self.response(403, message=str(ex))
except (ChartNotFoundError, DatasetNotFoundError) as ex:
diff --git a/superset/explore/form_data/commands/create.py b/superset/explore/form_data/commands/create.py
index c1fa61e14a31b..a325241c6641d 100644
--- a/superset/explore/form_data/commands/create.py
+++ b/superset/explore/form_data/commands/create.py
@@ -22,10 +22,11 @@
from superset.commands.base import BaseCommand
from superset.explore.form_data.commands.parameters import CommandParameters
from superset.explore.form_data.commands.state import TemporaryExploreState
-from superset.explore.form_data.utils import check_access
+from superset.explore.utils import check_access
from superset.extensions import cache_manager
-from superset.key_value.commands.exceptions import KeyValueCreateFailedError
-from superset.key_value.utils import cache_key, random_key
+from superset.temporary_cache.commands.exceptions import TemporaryCacheCreateFailedError
+from superset.temporary_cache.utils import cache_key, random_key
+from superset.utils.schema import validate_json
logger = logging.getLogger(__name__)
@@ -35,6 +36,7 @@ def __init__(self, cmd_params: CommandParameters):
self._cmd_params = cmd_params
def run(self) -> str:
+ self.validate()
try:
dataset_id = self._cmd_params.dataset_id
chart_id = self._cmd_params.chart_id
@@ -58,7 +60,8 @@ def run(self) -> str:
return key
except SQLAlchemyError as ex:
logger.exception("Error running create command")
- raise KeyValueCreateFailedError() from ex
+ raise TemporaryCacheCreateFailedError() from ex
def validate(self) -> None:
- pass
+ if self._cmd_params.form_data:
+ validate_json(self._cmd_params.form_data)
diff --git a/superset/explore/form_data/commands/delete.py b/superset/explore/form_data/commands/delete.py
index 80193186ea311..ec537313d2ba0 100644
--- a/superset/explore/form_data/commands/delete.py
+++ b/superset/explore/form_data/commands/delete.py
@@ -23,13 +23,13 @@
from superset.commands.base import BaseCommand
from superset.explore.form_data.commands.parameters import CommandParameters
from superset.explore.form_data.commands.state import TemporaryExploreState
-from superset.explore.form_data.utils import check_access
+from superset.explore.utils import check_access
from superset.extensions import cache_manager
-from superset.key_value.commands.exceptions import (
- KeyValueAccessDeniedError,
- KeyValueDeleteFailedError,
+from superset.temporary_cache.commands.exceptions import (
+ TemporaryCacheAccessDeniedError,
+ TemporaryCacheDeleteFailedError,
)
-from superset.key_value.utils import cache_key
+from superset.temporary_cache.utils import cache_key
logger = logging.getLogger(__name__)
@@ -50,7 +50,7 @@ def run(self) -> bool:
chart_id = state["chart_id"]
check_access(dataset_id, chart_id, actor)
if state["owner"] != actor.get_user_id():
- raise KeyValueAccessDeniedError()
+ raise TemporaryCacheAccessDeniedError()
tab_id = self._cmd_params.tab_id
contextual_key = cache_key(
session.get("_id"), tab_id, dataset_id, chart_id
@@ -60,7 +60,7 @@ def run(self) -> bool:
return False
except SQLAlchemyError as ex:
logger.exception("Error running delete command")
- raise KeyValueDeleteFailedError() from ex
+ raise TemporaryCacheDeleteFailedError() from ex
def validate(self) -> None:
pass
diff --git a/superset/explore/form_data/commands/get.py b/superset/explore/form_data/commands/get.py
index 2252387b34084..5b582008218cc 100644
--- a/superset/explore/form_data/commands/get.py
+++ b/superset/explore/form_data/commands/get.py
@@ -24,9 +24,9 @@
from superset.commands.base import BaseCommand
from superset.explore.form_data.commands.parameters import CommandParameters
from superset.explore.form_data.commands.state import TemporaryExploreState
-from superset.explore.form_data.utils import check_access
+from superset.explore.utils import check_access
from superset.extensions import cache_manager
-from superset.key_value.commands.exceptions import KeyValueGetFailedError
+from superset.temporary_cache.commands.exceptions import TemporaryCacheGetFailedError
logger = logging.getLogger(__name__)
@@ -52,7 +52,7 @@ def run(self) -> Optional[str]:
return None
except SQLAlchemyError as ex:
logger.exception("Error running get command")
- raise KeyValueGetFailedError() from ex
+ raise TemporaryCacheGetFailedError() from ex
def validate(self) -> None:
pass
diff --git a/superset/explore/form_data/commands/update.py b/superset/explore/form_data/commands/update.py
index 8c8b6a500bcbc..0c986ee102cb0 100644
--- a/superset/explore/form_data/commands/update.py
+++ b/superset/explore/form_data/commands/update.py
@@ -24,13 +24,14 @@
from superset.commands.base import BaseCommand
from superset.explore.form_data.commands.parameters import CommandParameters
from superset.explore.form_data.commands.state import TemporaryExploreState
-from superset.explore.form_data.utils import check_access
+from superset.explore.utils import check_access
from superset.extensions import cache_manager
-from superset.key_value.commands.exceptions import (
- KeyValueAccessDeniedError,
- KeyValueUpdateFailedError,
+from superset.temporary_cache.commands.exceptions import (
+ TemporaryCacheAccessDeniedError,
+ TemporaryCacheUpdateFailedError,
)
-from superset.key_value.utils import cache_key, random_key
+from superset.temporary_cache.utils import cache_key, random_key
+from superset.utils.schema import validate_json
logger = logging.getLogger(__name__)
@@ -42,6 +43,7 @@ def __init__(
self._cmd_params = cmd_params
def run(self) -> Optional[str]:
+ self.validate()
try:
dataset_id = self._cmd_params.dataset_id
chart_id = self._cmd_params.chart_id
@@ -55,7 +57,7 @@ def run(self) -> Optional[str]:
if state and form_data:
user_id = actor.get_user_id()
if state["owner"] != user_id:
- raise KeyValueAccessDeniedError()
+ raise TemporaryCacheAccessDeniedError()
# Generate a new key if tab_id changes or equals 0
tab_id = self._cmd_params.tab_id
@@ -77,7 +79,8 @@ def run(self) -> Optional[str]:
return key
except SQLAlchemyError as ex:
logger.exception("Error running update command")
- raise KeyValueUpdateFailedError() from ex
+ raise TemporaryCacheUpdateFailedError() from ex
def validate(self) -> None:
- pass
+ if self._cmd_params.form_data:
+ validate_json(self._cmd_params.form_data)
diff --git a/superset/explore/permalink/__init__.py b/superset/explore/permalink/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/superset/explore/permalink/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/explore/permalink/api.py b/superset/explore/permalink/api.py
new file mode 100644
index 0000000000000..025b1a45481c8
--- /dev/null
+++ b/superset/explore/permalink/api.py
@@ -0,0 +1,174 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+
+from flask import current_app, g, request, Response
+from flask_appbuilder.api import BaseApi, expose, protect, safe
+from marshmallow import ValidationError
+
+from superset.charts.commands.exceptions import (
+ ChartAccessDeniedError,
+ ChartNotFoundError,
+)
+from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
+from superset.datasets.commands.exceptions import (
+ DatasetAccessDeniedError,
+ DatasetNotFoundError,
+)
+from superset.explore.permalink.commands.create import CreateExplorePermalinkCommand
+from superset.explore.permalink.commands.get import GetExplorePermalinkCommand
+from superset.explore.permalink.exceptions import ExplorePermalinkInvalidStateError
+from superset.explore.permalink.schemas import ExplorePermalinkPostSchema
+from superset.extensions import event_logger
+from superset.key_value.exceptions import KeyValueAccessDeniedError
+from superset.views.base_api import requires_json
+
+logger = logging.getLogger(__name__)
+
+
+class ExplorePermalinkRestApi(BaseApi):
+ add_model_schema = ExplorePermalinkPostSchema()
+ method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
+ include_route_methods = {
+ RouteMethod.POST,
+ RouteMethod.PUT,
+ RouteMethod.GET,
+ RouteMethod.DELETE,
+ }
+ allow_browser_login = True
+ class_permission_name = "ExplorePermalinkRestApi"
+ resource_name = "explore"
+ openapi_spec_tag = "Explore Permanent Link"
+ openapi_spec_component_schemas = (ExplorePermalinkPostSchema,)
+
+ @expose("/permalink", methods=["POST"])
+ @protect()
+ @safe
+ @event_logger.log_this_with_context(
+ action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.post",
+ log_to_statsd=False,
+ )
+ @requires_json
+ def post(self) -> Response:
+ """Stores a new permanent link.
+ ---
+ post:
+ description: >-
+ Stores a new permanent link.
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ExplorePermalinkPostSchema'
+ responses:
+ 201:
+ description: The permanent link was stored successfully.
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ key:
+ type: string
+ description: The key to retrieve the permanent link data.
+ url:
+ type: string
+                        description: permanent link.
+ 400:
+ $ref: '#/components/responses/400'
+ 401:
+ $ref: '#/components/responses/401'
+ 422:
+ $ref: '#/components/responses/422'
+ 500:
+ $ref: '#/components/responses/500'
+ """
+ key_type = current_app.config["PERMALINK_KEY_TYPE"]
+ try:
+ state = self.add_model_schema.load(request.json)
+ key = CreateExplorePermalinkCommand(
+ actor=g.user, state=state, key_type=key_type,
+ ).run()
+ http_origin = request.headers.environ.get("HTTP_ORIGIN")
+ url = f"{http_origin}/superset/explore/p/{key}/"
+ return self.response(201, key=key, url=url)
+ except ValidationError as ex:
+ return self.response(400, message=ex.messages)
+ except (
+ ChartAccessDeniedError,
+ DatasetAccessDeniedError,
+ KeyValueAccessDeniedError,
+ ) as ex:
+ return self.response(403, message=str(ex))
+ except (ChartNotFoundError, DatasetNotFoundError) as ex:
+ return self.response(404, message=str(ex))
+
+ @expose("/permalink/", methods=["GET"])
+ @protect()
+ @safe
+ @event_logger.log_this_with_context(
+ action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get",
+ log_to_statsd=False,
+ )
+ def get(self, key: str) -> Response:
+        """Retrieves permanent link state for chart.
+ ---
+ get:
+ description: >-
+            Retrieves chart state associated with a permanent link.
+ parameters:
+ - in: path
+ schema:
+ type: string
+ name: key
+ responses:
+ 200:
+              description: Returns the stored state.
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ state:
+ type: object
+ description: The stored state
+ 400:
+ $ref: '#/components/responses/400'
+ 401:
+ $ref: '#/components/responses/401'
+ 404:
+ $ref: '#/components/responses/404'
+ 422:
+ $ref: '#/components/responses/422'
+ 500:
+ $ref: '#/components/responses/500'
+ """
+ try:
+ key_type = current_app.config["PERMALINK_KEY_TYPE"]
+ value = GetExplorePermalinkCommand(
+ actor=g.user, key=key, key_type=key_type
+ ).run()
+ if not value:
+ return self.response_404()
+ return self.response(200, **value)
+ except ExplorePermalinkInvalidStateError as ex:
+ return self.response(400, message=str(ex))
+ except (ChartAccessDeniedError, DatasetAccessDeniedError,) as ex:
+ return self.response(403, message=str(ex))
+ except (ChartNotFoundError, DatasetNotFoundError) as ex:
+ return self.response(404, message=str(ex))
diff --git a/superset/explore/permalink/commands/__init__.py b/superset/explore/permalink/commands/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/superset/explore/permalink/commands/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/explore/permalink/commands/base.py b/superset/explore/permalink/commands/base.py
new file mode 100644
index 0000000000000..01a96405da026
--- /dev/null
+++ b/superset/explore/permalink/commands/base.py
@@ -0,0 +1,23 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from abc import ABC
+
+from superset.commands.base import BaseCommand
+
+
+class BaseExplorePermalinkCommand(BaseCommand, ABC):
+ resource = "explore_permalink"
diff --git a/superset/explore/permalink/commands/create.py b/superset/explore/permalink/commands/create.py
new file mode 100644
index 0000000000000..177aa7ae1f091
--- /dev/null
+++ b/superset/explore/permalink/commands/create.py
@@ -0,0 +1,60 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+from typing import Any, Dict, Optional
+
+from flask_appbuilder.security.sqla.models import User
+from sqlalchemy.exc import SQLAlchemyError
+
+from superset.explore.permalink.commands.base import BaseExplorePermalinkCommand
+from superset.explore.permalink.exceptions import ExplorePermalinkCreateFailedError
+from superset.explore.utils import check_access
+from superset.key_value.commands.create import CreateKeyValueCommand
+from superset.key_value.types import KeyType
+
+logger = logging.getLogger(__name__)
+
+
+class CreateExplorePermalinkCommand(BaseExplorePermalinkCommand):
+ def __init__(self, actor: User, state: Dict[str, Any], key_type: KeyType):
+ self.actor = actor
+ self.chart_id: Optional[int] = state["formData"].get("slice_id")
+ self.datasource: str = state["formData"]["datasource"]
+ self.state = state
+ self.key_type = key_type
+
+ def run(self) -> str:
+ self.validate()
+ try:
+ dataset_id = int(self.datasource.split("__")[0])
+ check_access(dataset_id, self.chart_id, self.actor)
+ value = {
+ "chartId": self.chart_id,
+ "datasetId": dataset_id,
+ "datasource": self.datasource,
+ "state": self.state,
+ }
+ command = CreateKeyValueCommand(
+ self.actor, self.resource, value, self.key_type
+ )
+ return command.run()
+ except SQLAlchemyError as ex:
+ logger.exception("Error running create command")
+ raise ExplorePermalinkCreateFailedError() from ex
+
+ def validate(self) -> None:
+ pass
diff --git a/superset/explore/permalink/commands/get.py b/superset/explore/permalink/commands/get.py
new file mode 100644
index 0000000000000..0db9da164bc6b
--- /dev/null
+++ b/superset/explore/permalink/commands/get.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+from typing import Optional
+
+from flask_appbuilder.security.sqla.models import User
+from sqlalchemy.exc import SQLAlchemyError
+
+from superset.datasets.commands.exceptions import DatasetNotFoundError
+from superset.explore.permalink.commands.base import BaseExplorePermalinkCommand
+from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError
+from superset.explore.permalink.types import ExplorePermalinkValue
+from superset.explore.utils import check_access
+from superset.key_value.commands.get import GetKeyValueCommand
+from superset.key_value.exceptions import KeyValueGetFailedError, KeyValueParseKeyError
+from superset.key_value.types import KeyType
+
+logger = logging.getLogger(__name__)
+
+
+class GetExplorePermalinkCommand(BaseExplorePermalinkCommand):
+ def __init__(
+ self, actor: User, key: str, key_type: KeyType,
+ ):
+ self.actor = actor
+ self.key = key
+ self.key_type = key_type
+
+ def run(self) -> Optional[ExplorePermalinkValue]:
+ self.validate()
+ try:
+ value: Optional[ExplorePermalinkValue] = GetKeyValueCommand(
+ self.resource, self.key, key_type=self.key_type
+ ).run()
+ if value:
+ chart_id: Optional[int] = value.get("chartId")
+ dataset_id = value["datasetId"]
+ check_access(dataset_id, chart_id, self.actor)
+ return value
+ return None
+ except (
+ DatasetNotFoundError,
+ KeyValueGetFailedError,
+ KeyValueParseKeyError,
+ ) as ex:
+ raise ExplorePermalinkGetFailedError(message=ex.message) from ex
+ except SQLAlchemyError as ex:
+ logger.exception("Error running get command")
+ raise ExplorePermalinkGetFailedError() from ex
+
+ def validate(self) -> None:
+ pass
diff --git a/superset/explore/permalink/exceptions.py b/superset/explore/permalink/exceptions.py
new file mode 100644
index 0000000000000..7ea607ab08c8a
--- /dev/null
+++ b/superset/explore/permalink/exceptions.py
@@ -0,0 +1,31 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from flask_babel import lazy_gettext as _
+
+from superset.commands.exceptions import CommandException, CreateFailedError
+
+
+class ExplorePermalinkInvalidStateError(CreateFailedError):
+ message = _("Invalid state.")
+
+
+class ExplorePermalinkCreateFailedError(CreateFailedError):
+ message = _("An error occurred while creating the value.")
+
+
+class ExplorePermalinkGetFailedError(CommandException):
+ message = _("An error occurred while accessing the value.")
diff --git a/superset/explore/permalink/schemas.py b/superset/explore/permalink/schemas.py
new file mode 100644
index 0000000000000..7392c2deda250
--- /dev/null
+++ b/superset/explore/permalink/schemas.py
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from marshmallow import fields, Schema
+
+
+class ExplorePermalinkPostSchema(Schema):
+ formData = fields.Dict(
+ required=True, allow_none=False, description="Chart form data",
+ )
+ urlParams = fields.List(
+ fields.Tuple(
+ (
+ fields.String(required=True, allow_none=True, description="Key"),
+ fields.String(required=True, allow_none=True, description="Value"),
+ ),
+ required=False,
+ allow_none=True,
+ description="URL Parameter key-value pair",
+ ),
+ required=False,
+ allow_none=True,
+ description="URL Parameters",
+ )
diff --git a/superset/explore/permalink/types.py b/superset/explore/permalink/types.py
new file mode 100644
index 0000000000000..b396e335104b0
--- /dev/null
+++ b/superset/explore/permalink/types.py
@@ -0,0 +1,29 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Any, Dict, List, Optional, Tuple, TypedDict
+
+
+class ExplorePermalinkState(TypedDict, total=False):
+ formData: Dict[str, Any]
+ urlParams: Optional[List[Tuple[str, str]]]
+
+
+class ExplorePermalinkValue(TypedDict):
+ chartId: Optional[int]
+ datasetId: int
+ datasource: str
+ state: ExplorePermalinkState
diff --git a/superset/explore/form_data/utils.py b/superset/explore/utils.py
similarity index 100%
rename from superset/explore/form_data/utils.py
rename to superset/explore/utils.py
diff --git a/superset/extensions.py b/superset/extensions.py
index 33dc1706a6b78..742182b078d1b 100644
--- a/superset/extensions.py
+++ b/superset/extensions.py
@@ -63,22 +63,26 @@ def init_app(self, app: Flask) -> None:
self.app = app
# Preload the cache
self.parse_manifest_json()
-
- @app.context_processor
- def get_manifest() -> Dict[str, Callable[[str], List[str]]]:
- loaded_chunks = set()
-
- def get_files(bundle: str, asset_type: str = "js") -> List[str]:
- files = self.get_manifest_files(bundle, asset_type)
- filtered_files = [f for f in files if f not in loaded_chunks]
- for f in filtered_files:
- loaded_chunks.add(f)
- return filtered_files
-
- return dict(
- js_manifest=lambda bundle: get_files(bundle, "js"),
- css_manifest=lambda bundle: get_files(bundle, "css"),
- )
+ self.register_processor(app)
+
+ def register_processor(self, app: Flask) -> None:
+ app.template_context_processors[None].append(self.get_manifest)
+
+ def get_manifest(self) -> Dict[str, Callable[[str], List[str]]]:
+ loaded_chunks = set()
+
+ def get_files(bundle: str, asset_type: str = "js") -> List[str]:
+ files = self.get_manifest_files(bundle, asset_type)
+ filtered_files = [f for f in files if f not in loaded_chunks]
+ for f in filtered_files:
+ loaded_chunks.add(f)
+ return filtered_files
+
+ return dict(
+ js_manifest=lambda bundle: get_files(bundle, "js"),
+ css_manifest=lambda bundle: get_files(bundle, "css"),
+ assets_prefix=self.app.config["STATIC_ASSETS_PREFIX"] if self.app else "",
+ )
def parse_manifest_json(self) -> None:
try:
diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py
index 59e204d6d65e8..6e2d927efd522 100644
--- a/superset/initialization/__init__.py
+++ b/superset/initialization/__init__.py
@@ -49,7 +49,7 @@
talisman,
)
from superset.security import SupersetSecurityManager
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils.core import pessimistic_connection_handling
from superset.utils.log import DBEventLogger, get_event_logger_from_cfg_value
@@ -136,11 +136,13 @@ def init_views(self) -> None:
from superset.dashboards.api import DashboardRestApi
from superset.dashboards.filter_sets.api import FilterSetRestApi
from superset.dashboards.filter_state.api import DashboardFilterStateRestApi
+ from superset.dashboards.permalink.api import DashboardPermalinkRestApi
from superset.databases.api import DatabaseRestApi
from superset.datasets.api import DatasetRestApi
from superset.datasets.columns.api import DatasetColumnsRestApi
from superset.datasets.metrics.api import DatasetMetricRestApi
from superset.explore.form_data.api import ExploreFormDataRestApi
+ from superset.explore.permalink.api import ExplorePermalinkRestApi
from superset.queries.api import QueryRestApi
from superset.queries.saved_queries.api import SavedQueryRestApi
from superset.reports.api import ReportScheduleRestApi
@@ -208,12 +210,14 @@ def init_views(self) -> None:
appbuilder.add_api(CssTemplateRestApi)
appbuilder.add_api(CurrentUserRestApi)
appbuilder.add_api(DashboardFilterStateRestApi)
+ appbuilder.add_api(DashboardPermalinkRestApi)
appbuilder.add_api(DashboardRestApi)
appbuilder.add_api(DatabaseRestApi)
appbuilder.add_api(DatasetRestApi)
appbuilder.add_api(DatasetColumnsRestApi)
appbuilder.add_api(DatasetMetricRestApi)
appbuilder.add_api(ExploreFormDataRestApi)
+ appbuilder.add_api(ExplorePermalinkRestApi)
appbuilder.add_api(FilterSetRestApi)
appbuilder.add_api(QueryRestApi)
appbuilder.add_api(ReportScheduleRestApi)
diff --git a/superset/key_value/commands/create.py b/superset/key_value/commands/create.py
index 987c02bee6cb2..5f5bf67318958 100644
--- a/superset/key_value/commands/create.py
+++ b/superset/key_value/commands/create.py
@@ -15,24 +15,56 @@
# specific language governing permissions and limitations
# under the License.
import logging
-from abc import ABC, abstractmethod
+import pickle
+from datetime import datetime
+from typing import Any, Optional
+from flask_appbuilder.security.sqla.models import User
from sqlalchemy.exc import SQLAlchemyError
+from superset import db
from superset.commands.base import BaseCommand
-from superset.key_value.commands.exceptions import KeyValueCreateFailedError
-from superset.key_value.commands.parameters import CommandParameters
+from superset.key_value.exceptions import KeyValueCreateFailedError
+from superset.key_value.models import KeyValueEntry
+from superset.key_value.types import KeyType
+from superset.key_value.utils import extract_key
logger = logging.getLogger(__name__)
-class CreateKeyValueCommand(BaseCommand, ABC):
- def __init__(self, cmd_params: CommandParameters):
- self._cmd_params = cmd_params
+class CreateKeyValueCommand(BaseCommand):
+ actor: User
+ resource: str
+ value: Any
+ key_type: KeyType
+ expires_on: Optional[datetime]
+
+ def __init__(
+ self,
+ actor: User,
+ resource: str,
+ value: Any,
+ key_type: KeyType,
+ expires_on: Optional[datetime] = None,
+ ):
+ """
+ Create a new key-value pair
+
+ :param resource: the resource (dashboard, chart etc)
+ :param value: the value to persist in the key-value store
+ :param key_type: the type of the key to return
+ :param expires_on: entry expiration time
+ :return: the key associated with the persisted value
+ """
+ self.resource = resource
+ self.actor = actor
+ self.value = value
+ self.key_type = key_type
+ self.expires_on = expires_on
def run(self) -> str:
try:
- return self.create(self._cmd_params)
+ return self.create()
except SQLAlchemyError as ex:
logger.exception("Error running create command")
raise KeyValueCreateFailedError() from ex
@@ -40,6 +72,14 @@ def run(self) -> str:
def validate(self) -> None:
pass
- @abstractmethod
- def create(self, cmd_params: CommandParameters) -> str:
- ...
+ def create(self) -> str:
+ entry = KeyValueEntry(
+ resource=self.resource,
+ value=pickle.dumps(self.value),
+ created_on=datetime.now(),
+ created_by_fk=None if self.actor.is_anonymous else self.actor.id,
+ expires_on=self.expires_on,
+ )
+ db.session.add(entry)
+ db.session.commit()
+ return extract_key(entry, self.key_type)
diff --git a/superset/key_value/commands/delete.py b/superset/key_value/commands/delete.py
index 6d2983c063784..6eb340e87fd91 100644
--- a/superset/key_value/commands/delete.py
+++ b/superset/key_value/commands/delete.py
@@ -15,24 +15,45 @@
# specific language governing permissions and limitations
# under the License.
import logging
-from abc import ABC, abstractmethod
+from flask_appbuilder.security.sqla.models import User
from sqlalchemy.exc import SQLAlchemyError
+from superset import db
from superset.commands.base import BaseCommand
-from superset.key_value.commands.exceptions import KeyValueDeleteFailedError
-from superset.key_value.commands.parameters import CommandParameters
+from superset.key_value.exceptions import KeyValueDeleteFailedError
+from superset.key_value.models import KeyValueEntry
+from superset.key_value.types import KeyType
+from superset.key_value.utils import get_filter
logger = logging.getLogger(__name__)
-class DeleteKeyValueCommand(BaseCommand, ABC):
- def __init__(self, cmd_params: CommandParameters):
- self._cmd_params = cmd_params
+class DeleteKeyValueCommand(BaseCommand):
+ actor: User
+ key: str
+ key_type: KeyType
+ resource: str
+
+ def __init__(
+ self, actor: User, resource: str, key: str, key_type: KeyType = "uuid"
+ ):
+ """
+ Delete a key-value pair
+
+ :param resource: the resource (dashboard, chart etc)
+ :param key: the key to delete
+ :param key_type: the type of key
+ :return: was the entry deleted or not
+ """
+ self.resource = resource
+ self.actor = actor
+ self.key = key
+ self.key_type = key_type
def run(self) -> bool:
try:
- return self.delete(self._cmd_params)
+ return self.delete()
except SQLAlchemyError as ex:
logger.exception("Error running delete command")
raise KeyValueDeleteFailedError() from ex
@@ -40,6 +61,16 @@ def run(self) -> bool:
def validate(self) -> None:
pass
- @abstractmethod
- def delete(self, cmd_params: CommandParameters) -> bool:
- ...
+ def delete(self) -> bool:
+ filter_ = get_filter(self.resource, self.key, self.key_type)
+ entry = (
+ db.session.query(KeyValueEntry)
+ .filter_by(**filter_)
+ .autoflush(False)
+ .first()
+ )
+ if entry:
+ db.session.delete(entry)
+ db.session.commit()
+ return True
+ return False
diff --git a/superset/key_value/commands/get.py b/superset/key_value/commands/get.py
index a697c808c01df..b0530b976ba7c 100644
--- a/superset/key_value/commands/get.py
+++ b/superset/key_value/commands/get.py
@@ -14,26 +14,45 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+
import logging
-from abc import ABC, abstractmethod
-from typing import Optional
+import pickle
+from datetime import datetime
+from typing import Any, Optional
from sqlalchemy.exc import SQLAlchemyError
+from superset import db
from superset.commands.base import BaseCommand
-from superset.key_value.commands.exceptions import KeyValueGetFailedError
-from superset.key_value.commands.parameters import CommandParameters
+from superset.key_value.exceptions import KeyValueGetFailedError
+from superset.key_value.models import KeyValueEntry
+from superset.key_value.types import KeyType
+from superset.key_value.utils import get_filter
logger = logging.getLogger(__name__)
-class GetKeyValueCommand(BaseCommand, ABC):
- def __init__(self, cmd_params: CommandParameters):
- self._cmd_params = cmd_params
+class GetKeyValueCommand(BaseCommand):
+ key: str
+ key_type: KeyType
+ resource: str
+
+ def __init__(self, resource: str, key: str, key_type: KeyType = "uuid"):
+ """
+ Retrieve a key value entry
+
+ :param resource: the resource (dashboard, chart etc)
+ :param key: the key to retrieve
+ :param key_type: the type of the key to retrieve
+ :return: the value associated with the key if present
+ """
+ self.resource = resource
+ self.key = key
+ self.key_type = key_type
- def run(self) -> Optional[str]:
+ def run(self) -> Any:
try:
- return self.get(self._cmd_params)
+ return self.get()
except SQLAlchemyError as ex:
logger.exception("Error running get command")
raise KeyValueGetFailedError() from ex
@@ -41,6 +60,14 @@ def run(self) -> Optional[str]:
def validate(self) -> None:
pass
- @abstractmethod
- def get(self, cmd_params: CommandParameters) -> Optional[str]:
- ...
+ def get(self) -> Optional[Any]:
+ filter_ = get_filter(self.resource, self.key, self.key_type)
+ entry = (
+ db.session.query(KeyValueEntry)
+ .filter_by(**filter_)
+ .autoflush(False)
+ .first()
+ )
+ if entry and (entry.expires_on is None or entry.expires_on > datetime.now()):
+ return pickle.loads(entry.value)
+ return None
diff --git a/superset/key_value/commands/update.py b/superset/key_value/commands/update.py
index b0949193064ac..aed4c292e68ca 100644
--- a/superset/key_value/commands/update.py
+++ b/superset/key_value/commands/update.py
@@ -14,28 +14,62 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+
import logging
-from abc import ABC, abstractmethod
-from typing import Optional
+import pickle
+from datetime import datetime
+from typing import Any, Optional
+from flask_appbuilder.security.sqla.models import User
from sqlalchemy.exc import SQLAlchemyError
+from superset import db
from superset.commands.base import BaseCommand
-from superset.key_value.commands.exceptions import KeyValueUpdateFailedError
-from superset.key_value.commands.parameters import CommandParameters
+from superset.key_value.exceptions import KeyValueUpdateFailedError
+from superset.key_value.models import KeyValueEntry
+from superset.key_value.types import KeyType
+from superset.key_value.utils import extract_key, get_filter
logger = logging.getLogger(__name__)
-class UpdateKeyValueCommand(BaseCommand, ABC):
+class UpdateKeyValueCommand(BaseCommand):
+ actor: User
+ resource: str
+ value: Any
+ key: str
+ key_type: KeyType
+ expires_on: Optional[datetime]
+
def __init__(
- self, cmd_params: CommandParameters,
+ self,
+ actor: User,
+ resource: str,
+ key: str,
+ value: Any,
+ key_type: KeyType = "uuid",
+ expires_on: Optional[datetime] = None,
):
- self._parameters = cmd_params
+ """
+ Update a key value entry
+
+ :param resource: the resource (dashboard, chart etc)
+ :param key: the key to update
+ :param value: the value to persist in the key-value store
+ :param key_type: the type of the key to update
+ :param expires_on: entry expiration time
+ :return: the key associated with the updated value
+ """
+ self.actor = actor
+ self.resource = resource
+ self.key = key
+ self.value = value
+ self.key_type = key_type
+ self.expires_on = expires_on
def run(self) -> Optional[str]:
try:
- return self.update(self._parameters)
+ return self.update()
except SQLAlchemyError as ex:
logger.exception("Error running update command")
raise KeyValueUpdateFailedError() from ex
@@ -43,6 +77,20 @@ def run(self) -> Optional[str]:
def validate(self) -> None:
pass
- @abstractmethod
- def update(self, cmd_params: CommandParameters) -> Optional[str]:
- ...
+ def update(self) -> Optional[str]:
+ filter_ = get_filter(self.resource, self.key, self.key_type)
+ entry: KeyValueEntry = (
+ db.session.query(KeyValueEntry)
+ .filter_by(**filter_)
+ .autoflush(False)
+ .first()
+ )
+ if entry:
+ entry.value = pickle.dumps(self.value)
+ entry.expires_on = self.expires_on
+ entry.changed_on = datetime.now()
+ entry.changed_by_fk = None if self.actor.is_anonymous else self.actor.id
+ db.session.merge(entry)
+ db.session.commit()
+ return extract_key(entry, self.key_type)
+ return None
diff --git a/superset/key_value/commands/exceptions.py b/superset/key_value/exceptions.py
similarity index 90%
rename from superset/key_value/commands/exceptions.py
rename to superset/key_value/exceptions.py
index 780f705e8ad56..fc66d24c2f323 100644
--- a/superset/key_value/commands/exceptions.py
+++ b/superset/key_value/exceptions.py
@@ -23,6 +23,11 @@
ForbiddenError,
UpdateFailedError,
)
+from superset.exceptions import SupersetException
+
+
+class KeyValueParseKeyError(SupersetException):
+ message = _("An error occurred while parsing the key.")
class KeyValueCreateFailedError(CreateFailedError):
diff --git a/superset/key_value/models.py b/superset/key_value/models.py
new file mode 100644
index 0000000000000..33e749ca364e1
--- /dev/null
+++ b/superset/key_value/models.py
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from flask_appbuilder import Model
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, LargeBinary, String
+from sqlalchemy.orm import relationship
+
+from superset import security_manager
+from superset.models.helpers import AuditMixinNullable, ImportExportMixin
+
+
+class KeyValueEntry(Model, AuditMixinNullable, ImportExportMixin):
+ """Key value store entity"""
+
+ __tablename__ = "key_value"
+ id = Column(Integer, primary_key=True)
+ resource = Column(String(32), nullable=False)
+ value = Column(LargeBinary(), nullable=False)
+ created_on = Column(DateTime, nullable=True)
+ created_by_fk = Column(Integer, ForeignKey("ab_user.id"), nullable=True)
+ changed_on = Column(DateTime, nullable=True)
+ expires_on = Column(DateTime, nullable=True)
+ changed_by_fk = Column(Integer, ForeignKey("ab_user.id"), nullable=True)
+ created_by = relationship(security_manager.user_model, foreign_keys=[created_by_fk])
+ changed_by = relationship(security_manager.user_model, foreign_keys=[changed_by_fk])
diff --git a/superset/key_value/types.py b/superset/key_value/types.py
new file mode 100644
index 0000000000000..d36520ddbfb75
--- /dev/null
+++ b/superset/key_value/types.py
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from dataclasses import dataclass
+from typing import Literal, Optional, TypedDict
+from uuid import UUID
+
+
+@dataclass
+class Key:
+ id: Optional[int]
+ uuid: Optional[UUID]
+
+
+KeyType = Literal["id", "uuid"]
+
+
+class KeyValueFilter(TypedDict, total=False):
+ resource: str
+ id: Optional[int]
+ uuid: Optional[UUID]
diff --git a/superset/key_value/utils.py b/superset/key_value/utils.py
index 2f2f71f957e08..50aa34918e434 100644
--- a/superset/key_value/utils.py
+++ b/superset/key_value/utils.py
@@ -14,15 +14,44 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-from secrets import token_urlsafe
-from typing import Any
+from typing import Literal
+from uuid import UUID
-SEPARATOR = ";"
+from flask import current_app
+from superset.key_value.exceptions import KeyValueParseKeyError
+from superset.key_value.models import KeyValueEntry
+from superset.key_value.types import Key, KeyType, KeyValueFilter
-def cache_key(*args: Any) -> str:
- return SEPARATOR.join(str(arg) for arg in args)
+def parse_permalink_key(key: str) -> Key:
+ key_type: Literal["id", "uuid"] = current_app.config["PERMALINK_KEY_TYPE"]
+ if key_type == "id":
+ return Key(id=int(key), uuid=None)
+ return Key(id=None, uuid=UUID(key))
-def random_key() -> str:
- return token_urlsafe(48)
+
+def format_permalink_key(key: Key) -> str:
+ """
+    Return the string representation of the key.
+
+ :param key: a key object with either a numerical or uuid key
+ :return: a formatted string
+ """
+ return str(key.id if key.id is not None else key.uuid)
+
+
+def extract_key(entry: KeyValueEntry, key_type: KeyType) -> str:
+ return str(entry.id if key_type == "id" else entry.uuid)
+
+
+def get_filter(resource: str, key: str, key_type: KeyType) -> KeyValueFilter:
+ try:
+ filter_: KeyValueFilter = {"resource": resource}
+ if key_type == "uuid":
+ filter_["uuid"] = UUID(key)
+ else:
+ filter_["id"] = int(key)
+ return filter_
+ except ValueError as ex:
+ raise KeyValueParseKeyError() from ex
diff --git a/superset/migrations/versions/58df9d617f14_add_on_saved_query_delete_tab_state_.py b/superset/migrations/versions/58df9d617f14_add_on_saved_query_delete_tab_state_.py
new file mode 100644
index 0000000000000..220370f828049
--- /dev/null
+++ b/superset/migrations/versions/58df9d617f14_add_on_saved_query_delete_tab_state_.py
@@ -0,0 +1,66 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""add_on_saved_query_delete_tab_state_null_constraint
+
+Revision ID: 58df9d617f14
+Revises: 6766938c6065
+Create Date: 2022-03-16 23:24:40.278937
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = "58df9d617f14"
+down_revision = "6766938c6065"
+
+import sqlalchemy as sa
+from alembic import op
+
+from superset.utils.core import generic_find_fk_constraint_name
+
+
+def upgrade():
+ bind = op.get_bind()
+ insp = sa.engine.reflection.Inspector.from_engine(bind)
+
+ with op.batch_alter_table("tab_state") as batch_op:
+ batch_op.drop_constraint(
+ generic_find_fk_constraint_name("tab_state", {"id"}, "saved_query", insp),
+ type_="foreignkey",
+ )
+
+ batch_op.create_foreign_key(
+ "saved_query_id",
+ "saved_query",
+ ["saved_query_id"],
+ ["id"],
+ ondelete="SET NULL",
+ )
+
+
+def downgrade():
+ bind = op.get_bind()
+ insp = sa.engine.reflection.Inspector.from_engine(bind)
+
+ with op.batch_alter_table("tab_state") as batch_op:
+ batch_op.drop_constraint(
+ generic_find_fk_constraint_name("tab_state", {"id"}, "saved_query", insp),
+ type_="foreignkey",
+ )
+
+ batch_op.create_foreign_key(
+ "saved_query_id", "saved_query", ["saved_query_id"], ["id"],
+ )
diff --git a/superset/migrations/versions/6766938c6065_add_key_value_store.py b/superset/migrations/versions/6766938c6065_add_key_value_store.py
new file mode 100644
index 0000000000000..0a756386aee98
--- /dev/null
+++ b/superset/migrations/versions/6766938c6065_add_key_value_store.py
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""add key-value store
+
+Revision ID: 6766938c6065
+Revises: 7293b0ca7944
+Create Date: 2022-03-04 09:59:26.922329
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = "6766938c6065"
+down_revision = "7293b0ca7944"
+
+from uuid import uuid4
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy_utils import UUIDType
+
+
+def upgrade():
+ op.create_table(
+ "key_value",
+ sa.Column("id", sa.Integer(), nullable=False),
+ sa.Column("resource", sa.String(32), nullable=False),
+ sa.Column("value", sa.LargeBinary(), nullable=False),
+ sa.Column("uuid", UUIDType(binary=True), default=uuid4),
+ sa.Column("created_on", sa.DateTime(), nullable=True),
+ sa.Column("created_by_fk", sa.Integer(), nullable=True),
+ sa.Column("changed_on", sa.DateTime(), nullable=True),
+ sa.Column("changed_by_fk", sa.Integer(), nullable=True),
+ sa.Column("expires_on", sa.DateTime(), nullable=True),
+ sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"]),
+ sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]),
+ sa.PrimaryKeyConstraint("id"),
+ )
+ op.create_index(op.f("ix_key_value_uuid"), "key_value", ["uuid"], unique=True)
+ op.create_index(
+ op.f("ix_key_value_expires_on"), "key_value", ["expires_on"], unique=False
+ )
+
+
+def downgrade():
+ op.drop_index(op.f("ix_key_value_expires_on"), table_name="key_value")
+ op.drop_index(op.f("ix_key_value_uuid"), table_name="key_value")
+ op.drop_table("key_value")
diff --git a/superset/models/core.py b/superset/models/core.py
index 7798ddf05930d..51f0731009950 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -212,6 +212,13 @@ def allows_virtual_table_explore(self) -> bool:
def explore_database_id(self) -> int:
return self.get_extra().get("explore_database_id", self.id)
+ @property
+ def disable_data_preview(self) -> bool:
+        # treat anything other than a literal True (e.g. stray truthy strings in `extra`) as False
+ if self.get_extra().get("disable_data_preview", False) is not True:
+ return False
+ return True
+
@property
def data(self) -> Dict[str, Any]:
return {
@@ -225,6 +232,7 @@ def data(self) -> Dict[str, Any]:
"allows_virtual_table_explore": self.allows_virtual_table_explore,
"explore_database_id": self.explore_database_id,
"parameters": self.parameters,
+ "disable_data_preview": self.disable_data_preview,
"parameters_schema": self.parameters_schema,
}
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index f1adadfbc453f..86ac2c1a98717 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -221,7 +221,7 @@ def import_from_dict(
if not obj:
is_new_obj = True
# Create new DB object
- obj = cls(**dict_rep) # type: ignore
+ obj = cls(**dict_rep)
logger.info("Importing new %s %s", obj.__tablename__, str(obj))
if cls.export_parent and parent:
setattr(obj, cls.export_parent, parent)
diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py
index d2e9b3fefb018..6a3b4ad8bfd7c 100644
--- a/superset/models/sql_lab.py
+++ b/superset/models/sql_lab.py
@@ -291,7 +291,9 @@ class TabState(Model, AuditMixinNullable, ExtraJSONMixin):
hide_left_bar = Column(Boolean, default=False)
# any saved queries that are associated with the Tab State
- saved_query_id = Column(Integer, ForeignKey("saved_query.id"), nullable=True)
+ saved_query_id = Column(
+ Integer, ForeignKey("saved_query.id", ondelete="SET NULL"), nullable=True
+ )
saved_query = relationship("SavedQuery", foreign_keys=[saved_query_id])
def to_dict(self) -> Dict[str, Any]:
diff --git a/superset/queries/saved_queries/commands/export.py b/superset/queries/saved_queries/commands/export.py
index ca2cfe5de9679..e209ae8ad2fd8 100644
--- a/superset/queries/saved_queries/commands/export.py
+++ b/superset/queries/saved_queries/commands/export.py
@@ -23,7 +23,7 @@
import yaml
from werkzeug.utils import secure_filename
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.models.sql_lab import SavedQuery
from superset.queries.saved_queries.commands.exceptions import SavedQueryNotFoundError
from superset.queries.saved_queries.dao import SavedQueryDAO
@@ -38,7 +38,9 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
not_found = SavedQueryNotFoundError
@staticmethod
- def _export(model: SavedQuery) -> Iterator[Tuple[str, str]]:
+ def _export(
+ model: SavedQuery, export_related: bool = True
+ ) -> Iterator[Tuple[str, str]]:
# build filename based on database, optional schema, and label
database_slug = secure_filename(model.database.database_name)
schema_slug = secure_filename(model.schema)
@@ -58,23 +60,24 @@ def _export(model: SavedQuery) -> Iterator[Tuple[str, str]]:
yield file_name, file_content
# include database as well
- file_name = f"databases/{database_slug}.yaml"
+ if export_related:
+ file_name = f"databases/{database_slug}.yaml"
- payload = model.database.export_to_dict(
- recursive=False,
- include_parent_ref=False,
- include_defaults=True,
- export_uuids=True,
- )
- # TODO (betodealmeida): move this logic to export_to_dict once this
- # becomes the default export endpoint
- if "extra" in payload:
- try:
- payload["extra"] = json.loads(payload["extra"])
- except json.decoder.JSONDecodeError:
- logger.info("Unable to decode `extra` field: %s", payload["extra"])
+ payload = model.database.export_to_dict(
+ recursive=False,
+ include_parent_ref=False,
+ include_defaults=True,
+ export_uuids=True,
+ )
+ # TODO (betodealmeida): move this logic to export_to_dict once this
+ # becomes the default export endpoint
+ if "extra" in payload:
+ try:
+ payload["extra"] = json.loads(payload["extra"])
+ except json.decoder.JSONDecodeError:
+ logger.info("Unable to decode `extra` field: %s", payload["extra"])
- payload["version"] = EXPORT_VERSION
+ payload["version"] = EXPORT_VERSION
- file_content = yaml.safe_dump(payload, sort_keys=False)
- yield file_name, file_content
+ file_content = yaml.safe_dump(payload, sort_keys=False)
+ yield file_name, file_content
diff --git a/superset/reports/commands/alert.py b/superset/reports/commands/alert.py
index e00ac9f2df5c1..f5879a037f378 100644
--- a/superset/reports/commands/alert.py
+++ b/superset/reports/commands/alert.py
@@ -77,8 +77,7 @@ def run(self) -> bool:
threshold = json.loads(self._report_schedule.validator_config_json)[
"threshold"
]
-
- return OPERATOR_FUNCTIONS[operator](self._result, threshold)
+ return OPERATOR_FUNCTIONS[operator](self._result, threshold) # type: ignore
except (KeyError, json.JSONDecodeError) as ex:
raise AlertValidatorConfigError() from ex
diff --git a/superset/reports/notifications/base.py b/superset/reports/notifications/base.py
index 3331e51297a75..06bfecf790144 100644
--- a/superset/reports/notifications/base.py
+++ b/superset/reports/notifications/base.py
@@ -50,7 +50,7 @@ class BaseNotification: # pylint: disable=too-few-public-methods
"""
def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
- super().__init_subclass__(*args, **kwargs) # type: ignore
+ super().__init_subclass__(*args, **kwargs)
cls.plugins.append(cls)
def __init__(
diff --git a/superset/result_set.py b/superset/result_set.py
index b95b5e680d7db..19035b6d23788 100644
--- a/superset/result_set.py
+++ b/superset/result_set.py
@@ -26,7 +26,7 @@
import pyarrow as pa
from superset.db_engine_specs import BaseEngineSpec
-from superset.typing import DbapiDescription, DbapiResult
+from superset.superset_typing import DbapiDescription, DbapiResult
from superset.utils import core as utils
logger = logging.getLogger(__name__)
diff --git a/superset/typing.py b/superset/superset_typing.py
similarity index 100%
rename from superset/typing.py
rename to superset/superset_typing.py
diff --git a/superset/templates/superset/base.html b/superset/templates/superset/base.html
index a861c659e7034..e3c3d35dfe503 100644
--- a/superset/templates/superset/base.html
+++ b/superset/templates/superset/base.html
@@ -21,7 +21,7 @@
{% block head_css %}
{{ super() }}
-
+
{{ css_bundle("theme") }}
{% endblock %}
diff --git a/superset/templates/superset/basic.html b/superset/templates/superset/basic.html
index 902fc8c328de4..fff57fdb9fa18 100644
--- a/superset/templates/superset/basic.html
+++ b/superset/templates/superset/basic.html
@@ -40,11 +40,11 @@
rel="{{favicon.rel if favicon.rel else "icon"}}"
type="{{favicon.type if favicon.type else "image/png"}}"
{% if favicon.sizes %}sizes={{favicon.sizes}}{% endif %}
- href="{{favicon.href}}"
+ href="{{ assets_prefix }}{{favicon.href}}"
>
{% endfor %}
-
-
+
+
{{ css_bundle("theme") }}
@@ -73,7 +73,7 @@
{% block body %}
-
+
{% endblock %}
diff --git a/superset/templates/superset/theme.html b/superset/templates/superset/theme.html
index feac56f895980..856796a4c4b21 100644
--- a/superset/templates/superset/theme.html
+++ b/superset/templates/superset/theme.html
@@ -1342,5 +1342,5 @@ Source Code
{{ super() }}
-
+
{% endblock %}
diff --git a/superset/temporary_cache/__init__.py b/superset/temporary_cache/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/superset/temporary_cache/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/key_value/api.py b/superset/temporary_cache/api.py
similarity index 87%
rename from superset/key_value/api.py
rename to superset/temporary_cache/api.py
index f01cb363e2e6d..b1c5999630b68 100644
--- a/superset/key_value/api.py
+++ b/superset/temporary_cache/api.py
@@ -37,17 +37,20 @@
DatasetAccessDeniedError,
DatasetNotFoundError,
)
-from superset.key_value.commands.exceptions import KeyValueAccessDeniedError
-from superset.key_value.commands.parameters import CommandParameters
-from superset.key_value.schemas import KeyValuePostSchema, KeyValuePutSchema
+from superset.temporary_cache.commands.exceptions import TemporaryCacheAccessDeniedError
+from superset.temporary_cache.commands.parameters import CommandParameters
+from superset.temporary_cache.schemas import (
+ TemporaryCachePostSchema,
+ TemporaryCachePutSchema,
+)
from superset.views.base_api import requires_json
logger = logging.getLogger(__name__)
-class KeyValueRestApi(BaseApi, ABC):
- add_model_schema = KeyValuePostSchema()
- edit_model_schema = KeyValuePutSchema()
+class TemporaryCacheRestApi(BaseApi, ABC):
+ add_model_schema = TemporaryCachePostSchema()
+ edit_model_schema = TemporaryCachePutSchema()
method_permission_name = MODEL_API_RW_METHOD_PERMISSION_MAP
include_route_methods = {
RouteMethod.POST,
@@ -60,10 +63,10 @@ class KeyValueRestApi(BaseApi, ABC):
def add_apispec_components(self, api_spec: APISpec) -> None:
try:
api_spec.components.schema(
- KeyValuePostSchema.__name__, schema=KeyValuePostSchema,
+ TemporaryCachePostSchema.__name__, schema=TemporaryCachePostSchema,
)
api_spec.components.schema(
- KeyValuePutSchema.__name__, schema=KeyValuePutSchema,
+ TemporaryCachePutSchema.__name__, schema=TemporaryCachePutSchema,
)
except DuplicateComponentNameError:
pass
@@ -85,7 +88,7 @@ def post(self, pk: int) -> Response:
ChartAccessDeniedError,
DashboardAccessDeniedError,
DatasetAccessDeniedError,
- KeyValueAccessDeniedError,
+ TemporaryCacheAccessDeniedError,
) as ex:
return self.response(403, message=str(ex))
except (ChartNotFoundError, DashboardNotFoundError, DatasetNotFoundError) as ex:
@@ -111,7 +114,7 @@ def put(self, pk: int, key: str) -> Response:
ChartAccessDeniedError,
DashboardAccessDeniedError,
DatasetAccessDeniedError,
- KeyValueAccessDeniedError,
+ TemporaryCacheAccessDeniedError,
) as ex:
return self.response(403, message=str(ex))
except (ChartNotFoundError, DashboardNotFoundError, DatasetNotFoundError) as ex:
@@ -128,7 +131,7 @@ def get(self, pk: int, key: str) -> Response:
ChartAccessDeniedError,
DashboardAccessDeniedError,
DatasetAccessDeniedError,
- KeyValueAccessDeniedError,
+ TemporaryCacheAccessDeniedError,
) as ex:
return self.response(403, message=str(ex))
except (ChartNotFoundError, DashboardNotFoundError, DatasetNotFoundError) as ex:
@@ -145,7 +148,7 @@ def delete(self, pk: int, key: str) -> Response:
ChartAccessDeniedError,
DashboardAccessDeniedError,
DatasetAccessDeniedError,
- KeyValueAccessDeniedError,
+ TemporaryCacheAccessDeniedError,
) as ex:
return self.response(403, message=str(ex))
except (ChartNotFoundError, DashboardNotFoundError, DatasetNotFoundError) as ex:
diff --git a/superset/temporary_cache/commands/__init__.py b/superset/temporary_cache/commands/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/superset/temporary_cache/commands/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/temporary_cache/commands/create.py b/superset/temporary_cache/commands/create.py
new file mode 100644
index 0000000000000..af3b5350f652f
--- /dev/null
+++ b/superset/temporary_cache/commands/create.py
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+from abc import ABC, abstractmethod
+
+from sqlalchemy.exc import SQLAlchemyError
+
+from superset.commands.base import BaseCommand
+from superset.temporary_cache.commands.exceptions import TemporaryCacheCreateFailedError
+from superset.temporary_cache.commands.parameters import CommandParameters
+
+logger = logging.getLogger(__name__)
+
+
+class CreateTemporaryCacheCommand(BaseCommand, ABC):
+ def __init__(self, cmd_params: CommandParameters):
+ self._cmd_params = cmd_params
+
+ def run(self) -> str:
+ try:
+ return self.create(self._cmd_params)
+ except SQLAlchemyError as ex:
+ logger.exception("Error running create command")
+ raise TemporaryCacheCreateFailedError() from ex
+
+ def validate(self) -> None:
+ pass
+
+ @abstractmethod
+ def create(self, cmd_params: CommandParameters) -> str:
+ ...
diff --git a/superset/temporary_cache/commands/delete.py b/superset/temporary_cache/commands/delete.py
new file mode 100644
index 0000000000000..1281c8debf1fe
--- /dev/null
+++ b/superset/temporary_cache/commands/delete.py
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+from abc import ABC, abstractmethod
+
+from sqlalchemy.exc import SQLAlchemyError
+
+from superset.commands.base import BaseCommand
+from superset.temporary_cache.commands.exceptions import TemporaryCacheDeleteFailedError
+from superset.temporary_cache.commands.parameters import CommandParameters
+
+logger = logging.getLogger(__name__)
+
+
+class DeleteTemporaryCacheCommand(BaseCommand, ABC):
+ def __init__(self, cmd_params: CommandParameters):
+ self._cmd_params = cmd_params
+
+ def run(self) -> bool:
+ try:
+ return self.delete(self._cmd_params)
+ except SQLAlchemyError as ex:
+ logger.exception("Error running delete command")
+ raise TemporaryCacheDeleteFailedError() from ex
+
+ def validate(self) -> None:
+ pass
+
+ @abstractmethod
+ def delete(self, cmd_params: CommandParameters) -> bool:
+ ...
diff --git a/superset/key_value/commands/entry.py b/superset/temporary_cache/commands/entry.py
similarity index 100%
rename from superset/key_value/commands/entry.py
rename to superset/temporary_cache/commands/entry.py
diff --git a/superset/temporary_cache/commands/exceptions.py b/superset/temporary_cache/commands/exceptions.py
new file mode 100644
index 0000000000000..0f8c44cb18fd9
--- /dev/null
+++ b/superset/temporary_cache/commands/exceptions.py
@@ -0,0 +1,45 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from flask_babel import lazy_gettext as _
+
+from superset.commands.exceptions import (
+ CommandException,
+ CreateFailedError,
+ DeleteFailedError,
+ ForbiddenError,
+ UpdateFailedError,
+)
+
+
+class TemporaryCacheCreateFailedError(CreateFailedError):
+ message = _("An error occurred while creating the value.")
+
+
+class TemporaryCacheGetFailedError(CommandException):
+ message = _("An error occurred while accessing the value.")
+
+
+class TemporaryCacheDeleteFailedError(DeleteFailedError):
+ message = _("An error occurred while deleting the value.")
+
+
+class TemporaryCacheUpdateFailedError(UpdateFailedError):
+ message = _("An error occurred while updating the value.")
+
+
+class TemporaryCacheAccessDeniedError(ForbiddenError):
+ message = _("You don't have permission to modify the value.")
diff --git a/superset/temporary_cache/commands/get.py b/superset/temporary_cache/commands/get.py
new file mode 100644
index 0000000000000..8c220b9c04583
--- /dev/null
+++ b/superset/temporary_cache/commands/get.py
@@ -0,0 +1,46 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+from abc import ABC, abstractmethod
+from typing import Optional
+
+from sqlalchemy.exc import SQLAlchemyError
+
+from superset.commands.base import BaseCommand
+from superset.temporary_cache.commands.exceptions import TemporaryCacheGetFailedError
+from superset.temporary_cache.commands.parameters import CommandParameters
+
+logger = logging.getLogger(__name__)
+
+
+class GetTemporaryCacheCommand(BaseCommand, ABC):
+ def __init__(self, cmd_params: CommandParameters):
+ self._cmd_params = cmd_params
+
+ def run(self) -> Optional[str]:
+ try:
+ return self.get(self._cmd_params)
+ except SQLAlchemyError as ex:
+ logger.exception("Error running get command")
+ raise TemporaryCacheGetFailedError() from ex
+
+ def validate(self) -> None:
+ pass
+
+ @abstractmethod
+ def get(self, cmd_params: CommandParameters) -> Optional[str]:
+ ...
diff --git a/superset/key_value/commands/parameters.py b/superset/temporary_cache/commands/parameters.py
similarity index 100%
rename from superset/key_value/commands/parameters.py
rename to superset/temporary_cache/commands/parameters.py
diff --git a/superset/temporary_cache/commands/update.py b/superset/temporary_cache/commands/update.py
new file mode 100644
index 0000000000000..584e16690b61f
--- /dev/null
+++ b/superset/temporary_cache/commands/update.py
@@ -0,0 +1,48 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import logging
+from abc import ABC, abstractmethod
+from typing import Optional
+
+from sqlalchemy.exc import SQLAlchemyError
+
+from superset.commands.base import BaseCommand
+from superset.temporary_cache.commands.exceptions import TemporaryCacheUpdateFailedError
+from superset.temporary_cache.commands.parameters import CommandParameters
+
+logger = logging.getLogger(__name__)
+
+
+class UpdateTemporaryCacheCommand(BaseCommand, ABC):
+ def __init__(
+ self, cmd_params: CommandParameters,
+ ):
+ self._parameters = cmd_params
+
+ def run(self) -> Optional[str]:
+ try:
+ return self.update(self._parameters)
+ except SQLAlchemyError as ex:
+ logger.exception("Error running update command")
+ raise TemporaryCacheUpdateFailedError() from ex
+
+ def validate(self) -> None:
+ pass
+
+ @abstractmethod
+ def update(self, cmd_params: CommandParameters) -> Optional[str]:
+ ...
diff --git a/superset/temporary_cache/schemas.py b/superset/temporary_cache/schemas.py
new file mode 100644
index 0000000000000..474d88b5fcf70
--- /dev/null
+++ b/superset/temporary_cache/schemas.py
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from marshmallow import fields, Schema
+
+from superset.utils.schema import validate_json
+
+
+class TemporaryCachePostSchema(Schema):
+ value = fields.String(
+ required=True,
+ allow_none=False,
+ description="Any type of JSON supported text.",
+ validate=validate_json,
+ )
+
+
+class TemporaryCachePutSchema(Schema):
+ value = fields.String(
+ required=True,
+ allow_none=False,
+ description="Any type of JSON supported text.",
+ validate=validate_json,
+ )
diff --git a/superset/temporary_cache/utils.py b/superset/temporary_cache/utils.py
new file mode 100644
index 0000000000000..2f2f71f957e08
--- /dev/null
+++ b/superset/temporary_cache/utils.py
@@ -0,0 +1,28 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from secrets import token_urlsafe
+from typing import Any
+
+SEPARATOR = ";"
+
+
+def cache_key(*args: Any) -> str:
+ return SEPARATOR.join(str(arg) for arg in args)
+
+
+def random_key() -> str:
+ return token_urlsafe(48)
diff --git a/superset/utils/core.py b/superset/utils/core.py
index 2fdbc278adb70..36d59333d2ed3 100644
--- a/superset/utils/core.py
+++ b/superset/utils/core.py
@@ -98,7 +98,7 @@
SupersetException,
SupersetTimeoutException,
)
-from superset.typing import (
+from superset.superset_typing import (
AdhocColumn,
AdhocMetric,
AdhocMetricColumn,
diff --git a/superset/views/alerts.py b/superset/views/alerts.py
index e96f701c3d1b7..416966fbe7c35 100644
--- a/superset/views/alerts.py
+++ b/superset/views/alerts.py
@@ -30,8 +30,8 @@
from superset import is_feature_enabled
from superset.constants import RouteMethod
from superset.models.alerts import Alert, AlertLog, SQLObservation
+from superset.superset_typing import FlaskResponse
from superset.tasks.alerts.validator import check_validator
-from superset.typing import FlaskResponse
from superset.utils import core as utils
from superset.utils.core import get_email_address_str, markdown
diff --git a/superset/views/annotations.py b/superset/views/annotations.py
index 4fa83c0ca4be4..dc1df5642af35 100644
--- a/superset/views/annotations.py
+++ b/superset/views/annotations.py
@@ -26,7 +26,7 @@
from superset import is_feature_enabled
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.models.annotations import Annotation, AnnotationLayer
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.views.base import SupersetModelView
diff --git a/superset/views/api.py b/superset/views/api.py
index d4d94ce72346c..bde25236460da 100644
--- a/superset/views/api.py
+++ b/superset/views/api.py
@@ -31,7 +31,7 @@
)
from superset.legacy import update_time_range
from superset.models.slice import Slice
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.utils.date_parser import get_since_until
from superset.views.base import api, BaseSupersetView, handle_api_exception
diff --git a/superset/views/base.py b/superset/views/base.py
index 1249bc43cc4fb..3024c4490d167 100644
--- a/superset/views/base.py
+++ b/superset/views/base.py
@@ -73,8 +73,8 @@
)
from superset.models.helpers import ImportExportMixin
from superset.models.reports import ReportRecipientType
+from superset.superset_typing import FlaskResponse
from superset.translations.utils import get_language_pack
-from superset.typing import FlaskResponse
from superset.utils import core as utils
from .utils import bootstrap_user_data
diff --git a/superset/views/base_api.py b/superset/views/base_api.py
index 87e99e7c74a7b..260e5731788bc 100644
--- a/superset/views/base_api.py
+++ b/superset/views/base_api.py
@@ -37,7 +37,7 @@
from superset.schemas import error_payload_content
from superset.sql_lab import Query as SqllabQuery
from superset.stats_logger import BaseStatsLogger
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils.core import time_function
logger = logging.getLogger(__name__)
diff --git a/superset/views/chart/views.py b/superset/views/chart/views.py
index 37ef9a043e881..9ecc69f7b9e8e 100644
--- a/superset/views/chart/views.py
+++ b/superset/views/chart/views.py
@@ -24,7 +24,7 @@
from superset import is_feature_enabled
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.models.slice import Slice
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.views.base import (
check_ownership,
diff --git a/superset/views/core.py b/superset/views/core.py
index f69ee77bef2f0..6957296ab634c 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -57,6 +57,7 @@
sql_lab,
viz,
)
+from superset.charts.commands.exceptions import ChartNotFoundError
from superset.charts.dao import ChartDAO
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.common.db_query_status import QueryStatus
@@ -70,6 +71,8 @@
)
from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
from superset.dashboards.dao import DashboardDAO
+from superset.dashboards.permalink.commands.get import GetDashboardPermalinkCommand
+from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError
from superset.databases.dao import DatabaseDAO
from superset.databases.filters import DatabaseFilter
from superset.datasets.commands.exceptions import DatasetNotFoundError
@@ -88,6 +91,8 @@
)
from superset.explore.form_data.commands.get import GetFormDataCommand
from superset.explore.form_data.commands.parameters import CommandParameters
+from superset.explore.permalink.commands.get import GetExplorePermalinkCommand
+from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError
from superset.extensions import async_query_manager, cache_manager
from superset.jinja_context import get_template_processor
from superset.models.core import Database, FavStar, Log
@@ -118,8 +123,8 @@
from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext
from superset.sqllab.utils import apply_display_max_row_configuration_if_require
from superset.sqllab.validators import CanAccessQueryValidatorImpl
+from superset.superset_typing import FlaskResponse
from superset.tasks.async_queries import load_explore_json_into_cache
-from superset.typing import FlaskResponse
from superset.utils import core as utils, csv
from superset.utils.async_query_manager import AsyncQueryTokenException
from superset.utils.cache import etag_cache
@@ -177,6 +182,7 @@
"expose_in_sqllab",
"force_ctas_schema",
"id",
+ "disable_data_preview",
]
DATASOURCE_MISSING_ERR = __("The data source seems to have been deleted")
@@ -647,14 +653,11 @@ def explore_json(
force=force,
)
payload = viz_obj.get_payload()
+ # If the chart query has already been cached, return it immediately.
+ if payload is not None:
+ return self.send_data_payload_response(viz_obj, payload)
except CacheLoadError:
- payload = None # type: ignore
-
- already_cached_result = payload is not None
-
- # If the chart query has already been cached, return it immediately.
- if already_cached_result:
- return self.send_data_payload_response(viz_obj, payload)
+ pass
# Otherwise, kick off a background job to run the chart query.
# Clients will either poll or be notified of query completion,
@@ -733,14 +736,36 @@ def import_dashboards(self) -> FlaskResponse:
@event_logger.log_this
@expose("/explore///", methods=["GET", "POST"])
@expose("/explore/", methods=["GET", "POST"])
+ @expose("/explore/p//", methods=["GET"])
# pylint: disable=too-many-locals,too-many-branches,too-many-statements
def explore(
- self, datasource_type: Optional[str] = None, datasource_id: Optional[int] = None
+ self,
+ datasource_type: Optional[str] = None,
+ datasource_id: Optional[int] = None,
+ key: Optional[str] = None,
) -> FlaskResponse:
initial_form_data = {}
form_data_key = request.args.get("form_data_key")
- if form_data_key:
+ if key is not None:
+ key_type = config["PERMALINK_KEY_TYPE"]
+ command = GetExplorePermalinkCommand(g.user, key, key_type)
+ try:
+ permalink_value = command.run()
+ if permalink_value:
+ state = permalink_value["state"]
+ initial_form_data = state["formData"]
+ url_params = state.get("urlParams")
+ if url_params:
+ initial_form_data["url_params"] = dict(url_params)
+ else:
+ return json_error_response(
+ _("Error: permalink state not found"), status=404
+ )
+ except (ChartNotFoundError, ExplorePermalinkGetFailedError) as ex:
+ flash(__("Error: %(msg)s", msg=ex.message), "danger")
+ return redirect("/chart/list/")
+ elif form_data_key:
parameters = CommandParameters(actor=g.user, key=form_data_key)
value = GetFormDataCommand(parameters).run()
initial_form_data = json.loads(value) if value else {}
@@ -1978,6 +2003,30 @@ def dashboard(
),
)
+ @has_access
+ @expose("/dashboard/p//", methods=["GET"])
+ def dashboard_permalink( # pylint: disable=no-self-use
+ self, key: str,
+ ) -> FlaskResponse:
+ key_type = config["PERMALINK_KEY_TYPE"]
+ try:
+ value = GetDashboardPermalinkCommand(g.user, key, key_type).run()
+ except DashboardPermalinkGetFailedError as ex:
+ flash(__("Error: %(msg)s", msg=ex.message), "danger")
+ return redirect("/dashboard/list/")
+ if not value:
+ return json_error_response(_("permalink state not found"), status=404)
+ dashboard_id = value["dashboardId"]
+ url = f"/superset/dashboard/{dashboard_id}?permalink_key={key}"
+ url_params = value["state"].get("urlParams")
+ if url_params:
+ params = parse.urlencode(url_params)
+ url = f"{url}&{params}"
+ hash_ = value["state"].get("hash")
+ if hash_:
+ url = f"{url}#{hash_}"
+ return redirect(url)
+
@api
@has_access
@event_logger.log_this
diff --git a/superset/views/css_templates.py b/superset/views/css_templates.py
index d26acea5cfac7..2cfbd43ae962a 100644
--- a/superset/views/css_templates.py
+++ b/superset/views/css_templates.py
@@ -22,7 +22,7 @@
from superset import is_feature_enabled
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.models import core as models
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.views.base import DeleteMixin, SupersetModelView
diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py
index 99782def38a68..49ba61d08e0d2 100644
--- a/superset/views/dashboard/views.py
+++ b/superset/views/dashboard/views.py
@@ -29,7 +29,7 @@
from superset import db, event_logger, is_feature_enabled, security_manager
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.models.dashboard import Dashboard as DashboardModel
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.views.base import (
BaseSupersetView,
diff --git a/superset/views/database/mixins.py b/superset/views/database/mixins.py
index 5382181d2a3ff..d5a5157ef4f7b 100644
--- a/superset/views/database/mixins.py
+++ b/superset/views/database/mixins.py
@@ -145,7 +145,10 @@ class DatabaseMixin:
"4. the ``version`` field is a string specifying the this db's version. "
"This should be used with Presto DBs so that the syntax is correct "
"5. The ``allows_virtual_table_explore`` field is a boolean specifying "
- "whether or not the Explore button in SQL Lab results is shown.",
+ "whether or not the Explore button in SQL Lab results is shown "
+ "6. The ``disable_data_preview`` field is a boolean specifying whether or"
+ "not data preview queries will be run when fetching table metadata in"
+ "SQL Lab.",
True,
),
"encrypted_extra": utils.markdown(
diff --git a/superset/views/database/views.py b/superset/views/database/views.py
index 115d168ed636a..aea4e04383570 100644
--- a/superset/views/database/views.py
+++ b/superset/views/database/views.py
@@ -37,7 +37,7 @@
from superset.exceptions import CertificateException
from superset.extensions import event_logger
from superset.sql_parse import Table
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.views.base import DeleteMixin, SupersetModelView, YamlExportMixin
diff --git a/superset/views/datasource/views.py b/superset/views/datasource/views.py
index e2cb204082dd6..7e1ffa0468e90 100644
--- a/superset/views/datasource/views.py
+++ b/superset/views/datasource/views.py
@@ -38,7 +38,7 @@
from superset.exceptions import SupersetException, SupersetSecurityException
from superset.extensions import security_manager
from superset.models.core import Database
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.views.base import (
api,
BaseSupersetView,
diff --git a/superset/views/health.py b/superset/views/health.py
index 876e7a5e130be..cf85b8927899d 100644
--- a/superset/views/health.py
+++ b/superset/views/health.py
@@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.
from superset import app, talisman
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
@talisman(force_https=False)
diff --git a/superset/views/key_value.py b/superset/views/key_value.py
index 8f8aa99787a21..da39f094b812f 100644
--- a/superset/views/key_value.py
+++ b/superset/views/key_value.py
@@ -23,7 +23,7 @@
from superset import db, event_logger, is_feature_enabled
from superset.models import core as models
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from superset.views.base import BaseSupersetView, json_error_response
diff --git a/superset/views/redirects.py b/superset/views/redirects.py
index da9613f3a0bb8..831fc978b9473 100644
--- a/superset/views/redirects.py
+++ b/superset/views/redirects.py
@@ -17,14 +17,13 @@
import logging
from typing import Optional
-from flask import flash, request, Response
+from flask import flash
from flask_appbuilder import expose
-from flask_appbuilder.security.decorators import has_access_api
from werkzeug.utils import redirect
from superset import db, event_logger
from superset.models import core as models
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.views.base import BaseSupersetView
logger = logging.getLogger(__name__)
@@ -58,21 +57,3 @@ def index(self, url_id: int) -> FlaskResponse:
flash("URL to nowhere...", "danger")
return redirect("/")
-
- @event_logger.log_this
- @has_access_api
- @expose("/shortner/", methods=["POST"])
- def shortner(self) -> FlaskResponse:
- url = request.form.get("data")
- if not self._validate_url(url):
- logger.warning("Invalid URL")
- return Response("Invalid URL", 400)
- obj = models.Url(url=url)
- db.session.add(obj)
- db.session.commit()
- return Response(
- "{scheme}://{request.headers[Host]}/r/{obj.id}".format(
- scheme=request.scheme, request=request, obj=obj
- ),
- mimetype="text/plain",
- )
diff --git a/superset/views/schedules.py b/superset/views/schedules.py
index d1c59f413c839..39d4af9b8b259 100644
--- a/superset/views/schedules.py
+++ b/superset/views/schedules.py
@@ -42,8 +42,8 @@
SliceEmailSchedule,
)
from superset.models.slice import Slice
+from superset.superset_typing import FlaskResponse
from superset.tasks.schedules import schedule_email_report
-from superset.typing import FlaskResponse
from superset.utils.core import get_email_address_list, json_iso_dttm_ser
from superset.views.core import json_success
diff --git a/superset/views/sql_lab.py b/superset/views/sql_lab.py
index 6a5ce26d38ad9..49336a84a18d6 100644
--- a/superset/views/sql_lab.py
+++ b/superset/views/sql_lab.py
@@ -20,11 +20,12 @@
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_appbuilder.security.decorators import has_access, has_access_api
from flask_babel import lazy_gettext as _
+from sqlalchemy import and_
from superset import db, is_feature_enabled
from superset.constants import MODEL_VIEW_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.models.sql_lab import Query, SavedQuery, TableSchema, TabState
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from superset.utils import core as utils
from .base import BaseSupersetView, DeleteMixin, json_success, SupersetModelView
@@ -228,6 +229,29 @@ def migrate_query( # pylint: disable=no-self-use
def delete_query( # pylint: disable=no-self-use
self, tab_state_id: int, client_id: str
) -> FlaskResponse:
+ # Before deleting the query, ensure it's not tied to any
+ # active tab as the last query. If so, replace the query
+ # with the latest one created in that tab
+ tab_state_query = db.session.query(TabState).filter_by(
+ id=tab_state_id, latest_query_id=client_id
+ )
+ if tab_state_query.count():
+ query = (
+ db.session.query(Query)
+ .filter(
+ and_(
+ Query.client_id != client_id,
+ Query.user_id == g.user.get_id(),
+ Query.sql_editor_id == str(tab_state_id),
+ ),
+ )
+ .order_by(Query.id.desc())
+ .first()
+ )
+ tab_state_query.update(
+ {"latest_query_id": query.client_id if query else None}
+ )
+
db.session.query(Query).filter_by(
client_id=client_id,
user_id=g.user.get_id(),
diff --git a/superset/views/tags.py b/superset/views/tags.py
index c6fac2ff77145..8ab2798f5d84c 100644
--- a/superset/views/tags.py
+++ b/superset/views/tags.py
@@ -33,7 +33,7 @@
from superset.models.slice import Slice
from superset.models.sql_lab import SavedQuery
from superset.models.tags import ObjectTypes, Tag, TaggedObject, TagTypes
-from superset.typing import FlaskResponse
+from superset.superset_typing import FlaskResponse
from .base import BaseSupersetView, json_success
diff --git a/superset/views/utils.py b/superset/views/utils.py
index 17ec6ea1088c9..62639174f647e 100644
--- a/superset/views/utils.py
+++ b/superset/views/utils.py
@@ -46,7 +46,7 @@
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.models.sql_lab import Query
-from superset.typing import FormData
+from superset.superset_typing import FormData
from superset.utils.decorators import stats_timing
from superset.viz import BaseViz
diff --git a/superset/viz.py b/superset/viz.py
index 9a1086442be62..7c0f8e134875b 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -70,7 +70,13 @@
from superset.extensions import cache_manager, security_manager
from superset.models.helpers import QueryResult
from superset.sql_parse import validate_filter_clause
-from superset.typing import Column, Metric, QueryObjectDict, VizData, VizPayload
+from superset.superset_typing import (
+ Column,
+ Metric,
+ QueryObjectDict,
+ VizData,
+ VizPayload,
+)
from superset.utils import core as utils, csv
from superset.utils.cache import set_and_log_cache
from superset.utils.core import (
diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py
index bf4fe4fe79bd4..ec205b6a6abee 100644
--- a/tests/integration_tests/charts/commands_tests.py
+++ b/tests/integration_tests/charts/commands_tests.py
@@ -176,6 +176,26 @@ def test_export_chart_command_key_order(self, mock_g):
"dataset_uuid",
]
+ @patch("superset.security.manager.g")
+ @pytest.mark.usefixtures("load_energy_table_with_slice")
+ def test_export_chart_command_no_related(self, mock_g):
+ """
+ Test that only the chart is exported when export_related=False.
+ """
+ mock_g.user = security_manager.find_user("admin")
+
+ example_chart = (
+ db.session.query(Slice).filter_by(slice_name="Energy Sankey").one()
+ )
+ command = ExportChartsCommand([example_chart.id], export_related=False)
+ contents = dict(command.run())
+
+ expected = [
+ "metadata.yaml",
+ f"charts/Energy_Sankey_{example_chart.id}.yaml",
+ ]
+ assert expected == list(contents.keys())
+
class TestImportChartsCommand(SupersetTestCase):
@patch("superset.charts.commands.importers.v1.utils.g")
diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py
index 4f63ad51b65d7..bc8ec74feb0d6 100644
--- a/tests/integration_tests/charts/data/api_tests.py
+++ b/tests/integration_tests/charts/data/api_tests.py
@@ -47,7 +47,7 @@
from superset.extensions import async_query_manager, db
from superset.models.annotations import AnnotationLayer
from superset.models.slice import Slice
-from superset.typing import AdhocColumn
+from superset.superset_typing import AdhocColumn
from superset.utils.core import (
AnnotationType,
get_example_default_schema,
diff --git a/tests/integration_tests/cli_tests.py b/tests/integration_tests/cli_tests.py
index 3f4725640e3c3..7426d90ea88af 100644
--- a/tests/integration_tests/cli_tests.py
+++ b/tests/integration_tests/cli_tests.py
@@ -47,6 +47,9 @@ def assert_cli_fails_properly(response, caplog):
assert caplog.records[-1].levelname == "ERROR"
+@mock.patch.dict(
+ "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
+)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_export_dashboards_original(app_context, fs):
"""
@@ -73,6 +76,9 @@ def test_export_dashboards_original(app_context, fs):
json.loads(contents)
+@mock.patch.dict(
+ "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
+)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_export_datasources_original(app_context, fs):
"""
@@ -91,6 +97,7 @@ def test_export_datasources_original(app_context, fs):
)
assert response.exit_code == 0
+
assert Path("datasources.yaml").exists()
# check that file is valid JSON
@@ -336,7 +343,7 @@ def test_import_datasets_versioned_export(import_datasets_command, app_context,
@mock.patch.dict(
- "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": False}, clear=True
+ "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
)
@mock.patch("superset.datasets.commands.importers.v0.ImportDatasetsCommand")
def test_import_datasets_sync_argument_columns_metrics(
@@ -371,7 +378,7 @@ def test_import_datasets_sync_argument_columns_metrics(
@mock.patch.dict(
- "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": False}, clear=True
+ "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
)
@mock.patch("superset.datasets.commands.importers.v0.ImportDatasetsCommand")
def test_import_datasets_sync_argument_columns(
@@ -406,7 +413,7 @@ def test_import_datasets_sync_argument_columns(
@mock.patch.dict(
- "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": False}, clear=True
+ "superset.cli.lib.feature_flags", {"VERSIONED_EXPORT": False}, clear=True
)
@mock.patch("superset.datasets.commands.importers.v0.ImportDatasetsCommand")
def test_import_datasets_sync_argument_metrics(
diff --git a/tests/integration_tests/commands_test.py b/tests/integration_tests/commands_test.py
index 1adf5bd646288..5ff18b02a93e4 100644
--- a/tests/integration_tests/commands_test.py
+++ b/tests/integration_tests/commands_test.py
@@ -14,9 +14,31 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+import copy
+import json
+from unittest.mock import patch
+
+import yaml
+
+from superset import db, security_manager
from superset.commands.exceptions import CommandInvalidError
+from superset.commands.importers.v1.assets import ImportAssetsCommand
from superset.commands.importers.v1.utils import is_valid_config
+from superset.models.dashboard import Dashboard
+from superset.models.slice import Slice
from tests.integration_tests.base_tests import SupersetTestCase
+from tests.integration_tests.fixtures.importexport import (
+ chart_config,
+ dashboard_config,
+ database_config,
+ dataset_config,
+)
+
+metadata_config = {
+ "version": "1.0.0",
+ "type": "assets",
+ "timestamp": "2020-11-04T21:27:44.423819+00:00",
+}
class TestCommandsExceptions(SupersetTestCase):
@@ -33,3 +55,148 @@ def test_is_valid_config(self):
assert not is_valid_config(
"__MACOSX/chart_export_20210111T145253/databases/._examples.yaml"
)
+
+
+class TestImportAssetsCommand(SupersetTestCase):
+ @patch("superset.dashboards.commands.importers.v1.utils.g")
+ def test_import_assets(self, mock_g):
+ """Test that we can import multiple assets"""
+ mock_g.user = security_manager.find_user("admin")
+ contents = {
+ "metadata.yaml": yaml.safe_dump(metadata_config),
+ "databases/imported_database.yaml": yaml.safe_dump(database_config),
+ "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
+ "charts/imported_chart.yaml": yaml.safe_dump(chart_config),
+ "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config),
+ }
+ command = ImportAssetsCommand(contents)
+ command.run()
+
+ dashboard = (
+ db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one()
+ )
+
+ assert len(dashboard.slices) == 1
+ chart = dashboard.slices[0]
+ assert str(chart.uuid) == chart_config["uuid"]
+ new_chart_id = chart.id
+
+ assert dashboard.dashboard_title == "Test dash"
+ assert dashboard.description is None
+ assert dashboard.css == ""
+ assert dashboard.slug is None
+ assert json.loads(dashboard.position_json) == {
+ "CHART-SVAlICPOSJ": {
+ "children": [],
+ "id": "CHART-SVAlICPOSJ",
+ "meta": {
+ "chartId": new_chart_id,
+ "height": 50,
+ "sliceName": "Number of California Births",
+ "uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1",
+ "width": 4,
+ },
+ "parents": ["ROOT_ID", "GRID_ID", "ROW-dP_CHaK2q"],
+ "type": "CHART",
+ },
+ "DASHBOARD_VERSION_KEY": "v2",
+ "GRID_ID": {
+ "children": ["ROW-dP_CHaK2q"],
+ "id": "GRID_ID",
+ "parents": ["ROOT_ID"],
+ "type": "GRID",
+ },
+ "HEADER_ID": {
+ "id": "HEADER_ID",
+ "meta": {"text": "Test dash"},
+ "type": "HEADER",
+ },
+ "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"},
+ "ROW-dP_CHaK2q": {
+ "children": ["CHART-SVAlICPOSJ"],
+ "id": "ROW-dP_CHaK2q",
+ "meta": {"0": "ROOT_ID", "background": "BACKGROUND_TRANSPARENT"},
+ "parents": ["ROOT_ID", "GRID_ID"],
+ "type": "ROW",
+ },
+ }
+ assert json.loads(dashboard.json_metadata) == {
+ "color_scheme": None,
+ "default_filters": "{}",
+ "expanded_slices": {str(new_chart_id): True},
+ "filter_scopes": {
+ str(new_chart_id): {
+ "region": {"scope": ["ROOT_ID"], "immune": [new_chart_id]}
+ },
+ },
+ "import_time": 1604342885,
+ "refresh_frequency": 0,
+ "remote_id": 7,
+ "timed_refresh_immune_slices": [new_chart_id],
+ }
+
+ dataset = chart.table
+ assert str(dataset.uuid) == dataset_config["uuid"]
+
+ database = dataset.database
+ assert str(database.uuid) == database_config["uuid"]
+
+ assert dashboard.owners == [mock_g.user]
+
+ dashboard.owners = []
+ chart.owners = []
+ dataset.owners = []
+ database.owners = []
+ db.session.delete(dashboard)
+ db.session.delete(chart)
+ db.session.delete(dataset)
+ db.session.delete(database)
+ db.session.commit()
+
+ @patch("superset.dashboards.commands.importers.v1.utils.g")
+ def test_import_v1_dashboard_overwrite(self, mock_g):
+ """Test that assets can be overwritten"""
+ mock_g.user = security_manager.find_user("admin")
+
+ contents = {
+ "metadata.yaml": yaml.safe_dump(metadata_config),
+ "databases/imported_database.yaml": yaml.safe_dump(database_config),
+ "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
+ "charts/imported_chart.yaml": yaml.safe_dump(chart_config),
+ "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config),
+ }
+ command = ImportAssetsCommand(contents)
+ command.run()
+ chart = db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one()
+ assert chart.cache_timeout is None
+
+ modified_chart_config = copy.deepcopy(chart_config)
+ modified_chart_config["cache_timeout"] = 3600
+ contents = {
+ "metadata.yaml": yaml.safe_dump(metadata_config),
+ "databases/imported_database.yaml": yaml.safe_dump(database_config),
+ "datasets/imported_dataset.yaml": yaml.safe_dump(dataset_config),
+ "charts/imported_chart.yaml": yaml.safe_dump(modified_chart_config),
+ "dashboards/imported_dashboard.yaml": yaml.safe_dump(dashboard_config),
+ }
+ command = ImportAssetsCommand(contents)
+ command.run()
+ chart = db.session.query(Slice).filter_by(uuid=chart_config["uuid"]).one()
+ assert chart.cache_timeout == 3600
+
+ dashboard = (
+ db.session.query(Dashboard).filter_by(uuid=dashboard_config["uuid"]).one()
+ )
+ chart = dashboard.slices[0]
+ dataset = chart.table
+ database = dataset.database
+ dashboard.owners = []
+
+ chart.owners = []
+ dataset.owners = []
+ database.owners = []
+ db.session.delete(dashboard)
+ db.session.delete(chart)
+ db.session.delete(dataset)
+ db.session.delete(database)
+ db.session.commit()
diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py
index dd8f3a910d764..26674054ae394 100644
--- a/tests/integration_tests/core_tests.py
+++ b/tests/integration_tests/core_tests.py
@@ -690,30 +690,6 @@ def test_cache_logging(self):
assert ck.datasource_uid == f"{girls_slice.table.id}__table"
app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] = store_cache_keys
- def test_shortner(self):
- self.login(username="admin")
- data = (
- "//superset/explore/table/1/?viz_type=sankey&groupby=source&"
- "groupby=target&metric=sum__value&row_limit=5000&where=&having=&"
- "flt_col_0=source&flt_op_0=in&flt_eq_0=&slice_id=78&slice_name="
- "Energy+Sankey&collapsed_fieldsets=&action=&datasource_name="
- "energy_usage&datasource_id=1&datasource_type=table&"
- "previous_viz_type=sankey"
- )
- resp = self.client.post("/r/shortner/", data=dict(data=data))
- assert re.search(r"\/r\/[0-9]+", resp.data.decode("utf-8"))
-
- def test_shortner_invalid(self):
- self.login(username="admin")
- invalid_urls = [
- "hhttp://invalid.com",
- "hhttps://invalid.com",
- "www.invalid.com",
- ]
- for invalid_url in invalid_urls:
- resp = self.client.post("/r/shortner/", data=dict(data=invalid_url))
- assert resp.status_code == 400
-
def test_redirect_invalid(self):
model_url = models.Url(url="hhttp://invalid.com")
db.session.add(model_url)
@@ -1541,6 +1517,29 @@ def test_virtual_table_explore_visibility(self):
database.extra = json.dumps(extra)
self.assertEqual(database.allows_virtual_table_explore, True)
+ def test_data_preview_visibility(self):
+ # test that default visibility is allowed
+ database = utils.get_example_database()
+ self.assertEqual(database.disable_data_preview, False)
+
+ # test that visibility is disabled when extra is set to true
+ extra = database.get_extra()
+ extra["disable_data_preview"] = True
+ database.extra = json.dumps(extra)
+ self.assertEqual(database.disable_data_preview, True)
+
+ # test that visibility is enabled when extra is set to false
+ extra = database.get_extra()
+ extra["disable_data_preview"] = False
+ database.extra = json.dumps(extra)
+ self.assertEqual(database.disable_data_preview, False)
+
+ # test that visibility is not broken with bad values
+ extra = database.get_extra()
+ extra["disable_data_preview"] = "trash value"
+ database.extra = json.dumps(extra)
+ self.assertEqual(database.disable_data_preview, False)
+
def test_explore_database_id(self):
database = superset.utils.database.get_example_database()
explore_database = superset.utils.database.get_example_database()
diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py
index 755eb3776016d..2ed627a257247 100644
--- a/tests/integration_tests/dashboards/api_tests.py
+++ b/tests/integration_tests/dashboards/api_tests.py
@@ -1331,6 +1331,11 @@ def test_update_dashboard_not_owned(self):
db.session.delete(user_alpha2)
db.session.commit()
+ @patch.dict(
+ "superset.extensions.feature_flag_manager._feature_flags",
+ {"VERSIONED_EXPORT": False},
+ clear=True,
+ )
@pytest.mark.usefixtures(
"load_world_bank_dashboard_with_slices",
"load_birth_names_dashboard_with_slices",
@@ -1376,11 +1381,6 @@ def test_export_not_allowed(self):
db.session.delete(dashboard)
db.session.commit()
- @patch.dict(
- "superset.extensions.feature_flag_manager._feature_flags",
- {"VERSIONED_EXPORT": True},
- clear=True,
- )
def test_export_bundle(self):
"""
Dashboard API: Test dashboard export
@@ -1396,11 +1396,6 @@ def test_export_bundle(self):
buf = BytesIO(rv.data)
assert is_zipfile(buf)
- @patch.dict(
- "superset.extensions.feature_flag_manager._feature_flags",
- {"VERSIONED_EXPORT": True},
- clear=True,
- )
def test_export_bundle_not_found(self):
"""
Dashboard API: Test dashboard export not found
@@ -1411,11 +1406,6 @@ def test_export_bundle_not_found(self):
rv = self.client.get(uri)
assert rv.status_code == 404
- @patch.dict(
- "superset.extensions.feature_flag_manager._feature_flags",
- {"VERSIONED_EXPORT": True},
- clear=True,
- )
def test_export_bundle_not_allowed(self):
"""
Dashboard API: Test dashboard export not allowed
diff --git a/tests/integration_tests/dashboards/commands_tests.py b/tests/integration_tests/dashboards/commands_tests.py
index 7c7a2046f0592..ae18c741583e7 100644
--- a/tests/integration_tests/dashboards/commands_tests.py
+++ b/tests/integration_tests/dashboards/commands_tests.py
@@ -423,6 +423,28 @@ def test_append_charts(self, mock_suffix):
"DASHBOARD_VERSION_KEY": "v2",
}
+ @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
+ @patch("superset.security.manager.g")
+ @patch("superset.views.base.g")
+ def test_export_dashboard_command_no_related(self, mock_g1, mock_g2):
+ """
+ Test that only the dashboard is exported when export_related=False.
+ """
+ mock_g1.user = security_manager.find_user("admin")
+ mock_g2.user = security_manager.find_user("admin")
+
+ example_dashboard = (
+ db.session.query(Dashboard).filter_by(slug="world_health").one()
+ )
+ command = ExportDashboardsCommand([example_dashboard.id], export_related=False)
+ contents = dict(command.run())
+
+ expected_paths = {
+ "metadata.yaml",
+ "dashboards/World_Banks_Data.yaml",
+ }
+ assert expected_paths == set(contents.keys())
+
class TestImportDashboardsCommand(SupersetTestCase):
def test_import_v0_dashboard_cli_export(self):
diff --git a/tests/integration_tests/dashboards/filter_state/api_tests.py b/tests/integration_tests/dashboards/filter_state/api_tests.py
index 3816f6ac87337..ea00f2e6714f5 100644
--- a/tests/integration_tests/dashboards/filter_state/api_tests.py
+++ b/tests/integration_tests/dashboards/filter_state/api_tests.py
@@ -23,25 +23,20 @@
from superset.dashboards.commands.exceptions import DashboardAccessDeniedError
from superset.extensions import cache_manager
-from superset.key_value.commands.entry import Entry
-from superset.key_value.utils import cache_key
from superset.models.dashboard import Dashboard
+from superset.temporary_cache.commands.entry import Entry
+from superset.temporary_cache.utils import cache_key
from tests.integration_tests.base_tests import login
+from tests.integration_tests.fixtures.client import client
from tests.integration_tests.fixtures.world_bank_dashboard import (
load_world_bank_dashboard_with_slices,
load_world_bank_data,
)
from tests.integration_tests.test_app import app
-key = "test-key"
-value = "test"
-
-
-@pytest.fixture
-def client():
- with app.test_client() as client:
- with app.app_context():
- yield client
+KEY = "test-key"
+INITIAL_VALUE = json.dumps({"test": "initial value"})
+UPDATED_VALUE = json.dumps({"test": "updated value"})
@pytest.fixture
@@ -62,20 +57,20 @@ def admin_id() -> int:
@pytest.fixture(autouse=True)
def cache(dashboard_id, admin_id):
- entry: Entry = {"owner": admin_id, "value": value}
- cache_manager.filter_state_cache.set(cache_key(dashboard_id, key), entry)
+ entry: Entry = {"owner": admin_id, "value": INITIAL_VALUE}
+ cache_manager.filter_state_cache.set(cache_key(dashboard_id, KEY), entry)
def test_post(client, dashboard_id: int):
login(client, "admin")
payload = {
- "value": value,
+ "value": INITIAL_VALUE,
}
resp = client.post(f"api/v1/dashboard/{dashboard_id}/filter_state", json=payload)
assert resp.status_code == 201
-def test_post_bad_request(client, dashboard_id: int):
+def test_post_bad_request_non_string(client, dashboard_id: int):
login(client, "admin")
payload = {
"value": 1234,
@@ -84,12 +79,21 @@ def test_post_bad_request(client, dashboard_id: int):
assert resp.status_code == 400
+def test_post_bad_request_non_json_string(client, dashboard_id: int):
+ login(client, "admin")
+ payload = {
+ "value": "foo",
+ }
+ resp = client.post(f"api/v1/dashboard/{dashboard_id}/filter_state", json=payload)
+ assert resp.status_code == 400
+
+
@patch("superset.security.SupersetSecurityManager.raise_for_dashboard_access")
def test_post_access_denied(mock_raise_for_dashboard_access, client, dashboard_id: int):
login(client, "admin")
mock_raise_for_dashboard_access.side_effect = DashboardAccessDeniedError()
payload = {
- "value": value,
+ "value": INITIAL_VALUE,
}
resp = client.post(f"api/v1/dashboard/{dashboard_id}/filter_state", json=payload)
assert resp.status_code == 403
@@ -98,7 +102,7 @@ def test_post_access_denied(mock_raise_for_dashboard_access, client, dashboard_i
def test_post_same_key_for_same_tab_id(client, dashboard_id: int):
login(client, "admin")
payload = {
- "value": value,
+ "value": INITIAL_VALUE,
}
resp = client.post(
f"api/v1/dashboard/{dashboard_id}/filter_state?tab_id=1", json=payload
@@ -116,7 +120,7 @@ def test_post_same_key_for_same_tab_id(client, dashboard_id: int):
def test_post_different_key_for_different_tab_id(client, dashboard_id: int):
login(client, "admin")
payload = {
- "value": value,
+ "value": INITIAL_VALUE,
}
resp = client.post(
f"api/v1/dashboard/{dashboard_id}/filter_state?tab_id=1", json=payload
@@ -134,7 +138,7 @@ def test_post_different_key_for_different_tab_id(client, dashboard_id: int):
def test_post_different_key_for_no_tab_id(client, dashboard_id: int):
login(client, "admin")
payload = {
- "value": value,
+ "value": INITIAL_VALUE,
}
resp = client.post(f"api/v1/dashboard/{dashboard_id}/filter_state", json=payload)
data = json.loads(resp.data.decode("utf-8"))
@@ -148,10 +152,10 @@ def test_post_different_key_for_no_tab_id(client, dashboard_id: int):
def test_put(client, dashboard_id: int):
login(client, "admin")
payload = {
- "value": "new value",
+ "value": UPDATED_VALUE,
}
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", json=payload
)
assert resp.status_code == 200
@@ -159,15 +163,15 @@ def test_put(client, dashboard_id: int):
def test_put_same_key_for_same_tab_id(client, dashboard_id: int):
login(client, "admin")
payload = {
- "value": value,
+ "value": INITIAL_VALUE,
}
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}?tab_id=1", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}?tab_id=1", json=payload
)
data = json.loads(resp.data.decode("utf-8"))
first_key = data.get("key")
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}?tab_id=1", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}?tab_id=1", json=payload
)
data = json.loads(resp.data.decode("utf-8"))
second_key = data.get("key")
@@ -177,15 +181,15 @@ def test_put_same_key_for_same_tab_id(client, dashboard_id: int):
def test_put_different_key_for_different_tab_id(client, dashboard_id: int):
login(client, "admin")
payload = {
- "value": value,
+ "value": INITIAL_VALUE,
}
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}?tab_id=1", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}?tab_id=1", json=payload
)
data = json.loads(resp.data.decode("utf-8"))
first_key = data.get("key")
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}?tab_id=2", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}?tab_id=2", json=payload
)
data = json.loads(resp.data.decode("utf-8"))
second_key = data.get("key")
@@ -195,28 +199,39 @@ def test_put_different_key_for_different_tab_id(client, dashboard_id: int):
def test_put_different_key_for_no_tab_id(client, dashboard_id: int):
login(client, "admin")
payload = {
- "value": value,
+ "value": INITIAL_VALUE,
}
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", json=payload
)
data = json.loads(resp.data.decode("utf-8"))
first_key = data.get("key")
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", json=payload
)
data = json.loads(resp.data.decode("utf-8"))
second_key = data.get("key")
assert first_key != second_key
-def test_put_bad_request(client, dashboard_id: int):
+def test_put_bad_request_non_string(client, dashboard_id: int):
login(client, "admin")
payload = {
"value": 1234,
}
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", json=payload
+ )
+ assert resp.status_code == 400
+
+
+def test_put_bad_request_non_json_string(client, dashboard_id: int):
+ login(client, "admin")
+ payload = {
+ "value": "foo",
+ }
+ resp = client.put(
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", json=payload
)
assert resp.status_code == 400
@@ -226,10 +241,10 @@ def test_put_access_denied(mock_raise_for_dashboard_access, client, dashboard_id
login(client, "admin")
mock_raise_for_dashboard_access.side_effect = DashboardAccessDeniedError()
payload = {
- "value": "new value",
+ "value": UPDATED_VALUE,
}
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", json=payload
)
assert resp.status_code == 403
@@ -237,10 +252,10 @@ def test_put_access_denied(mock_raise_for_dashboard_access, client, dashboard_id
def test_put_not_owner(client, dashboard_id: int):
login(client, "gamma")
payload = {
- "value": "new value",
+ "value": UPDATED_VALUE,
}
resp = client.put(
- f"api/v1/dashboard/{dashboard_id}/filter_state/{key}", json=payload
+ f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}", json=payload
)
assert resp.status_code == 403
@@ -253,29 +268,29 @@ def test_get_key_not_found(client, dashboard_id: int):
def test_get_dashboard_not_found(client):
login(client, "admin")
- resp = client.get(f"api/v1/dashboard/{-1}/filter_state/{key}")
+ resp = client.get(f"api/v1/dashboard/{-1}/filter_state/{KEY}")
assert resp.status_code == 404
def test_get(client, dashboard_id: int):
login(client, "admin")
- resp = client.get(f"api/v1/dashboard/{dashboard_id}/filter_state/{key}")
+ resp = client.get(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}")
assert resp.status_code == 200
data = json.loads(resp.data.decode("utf-8"))
- assert value == data.get("value")
+ assert INITIAL_VALUE == data.get("value")
@patch("superset.security.SupersetSecurityManager.raise_for_dashboard_access")
def test_get_access_denied(mock_raise_for_dashboard_access, client, dashboard_id):
login(client, "admin")
mock_raise_for_dashboard_access.side_effect = DashboardAccessDeniedError()
- resp = client.get(f"api/v1/dashboard/{dashboard_id}/filter_state/{key}")
+ resp = client.get(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}")
assert resp.status_code == 403
def test_delete(client, dashboard_id: int):
login(client, "admin")
- resp = client.delete(f"api/v1/dashboard/{dashboard_id}/filter_state/{key}")
+ resp = client.delete(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}")
assert resp.status_code == 200
@@ -285,11 +300,11 @@ def test_delete_access_denied(
):
login(client, "admin")
mock_raise_for_dashboard_access.side_effect = DashboardAccessDeniedError()
- resp = client.delete(f"api/v1/dashboard/{dashboard_id}/filter_state/{key}")
+ resp = client.delete(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}")
assert resp.status_code == 403
def test_delete_not_owner(client, dashboard_id: int):
login(client, "gamma")
- resp = client.delete(f"api/v1/dashboard/{dashboard_id}/filter_state/{key}")
+ resp = client.delete(f"api/v1/dashboard/{dashboard_id}/filter_state/{KEY}")
assert resp.status_code == 403
diff --git a/tests/integration_tests/dashboards/permalink/__init__.py b/tests/integration_tests/dashboards/permalink/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/integration_tests/dashboards/permalink/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/integration_tests/dashboards/permalink/api_tests.py b/tests/integration_tests/dashboards/permalink/api_tests.py
new file mode 100644
index 0000000000000..bd821165dc337
--- /dev/null
+++ b/tests/integration_tests/dashboards/permalink/api_tests.py
@@ -0,0 +1,90 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import json
+from unittest.mock import patch
+
+import pytest
+from flask_appbuilder.security.sqla.models import User
+from sqlalchemy.orm import Session
+
+from superset import db
+from superset.dashboards.commands.exceptions import DashboardAccessDeniedError
+from superset.key_value.models import KeyValueEntry
+from superset.models.dashboard import Dashboard
+from superset.models.slice import Slice
+from tests.integration_tests.base_tests import login
+from tests.integration_tests.fixtures.client import client
+from tests.integration_tests.fixtures.world_bank_dashboard import (
+ load_world_bank_dashboard_with_slices,
+ load_world_bank_data,
+)
+from tests.integration_tests.test_app import app
+
+STATE = {
+ "filterState": {"FILTER_1": "foo",},
+ "hash": "my-anchor",
+}
+
+
+@pytest.fixture
+def dashboard_id(load_world_bank_dashboard_with_slices) -> int:
+ with app.app_context() as ctx:
+ session: Session = ctx.app.appbuilder.get_session
+ dashboard = session.query(Dashboard).filter_by(slug="world_health").one()
+ return dashboard.id
+
+
+def test_post(client, dashboard_id: int):
+ login(client, "admin")
+ resp = client.post(f"api/v1/dashboard/{dashboard_id}/permalink", json=STATE)
+ assert resp.status_code == 201
+ data = json.loads(resp.data.decode("utf-8"))
+ key = data["key"]
+ url = data["url"]
+ assert key in url
+ db.session.query(KeyValueEntry).filter_by(uuid=key).delete()
+ db.session.commit()
+
+
+@patch("superset.security.SupersetSecurityManager.raise_for_dashboard_access")
+def test_post_access_denied(mock_raise_for_dashboard_access, client, dashboard_id: int):
+ login(client, "admin")
+ mock_raise_for_dashboard_access.side_effect = DashboardAccessDeniedError()
+ resp = client.post(f"api/v1/dashboard/{dashboard_id}/permalink", json=STATE)
+ assert resp.status_code == 403
+
+
+def test_post_invalid_schema(client, dashboard_id: int):
+ login(client, "admin")
+ resp = client.post(
+ f"api/v1/dashboard/{dashboard_id}/permalink", json={"foo": "bar"}
+ )
+ assert resp.status_code == 400
+
+
+def test_get(client, dashboard_id: int):
+ login(client, "admin")
+ resp = client.post(f"api/v1/dashboard/{dashboard_id}/permalink", json=STATE)
+ data = json.loads(resp.data.decode("utf-8"))
+ key = data["key"]
+ resp = client.get(f"api/v1/dashboard/permalink/{key}")
+ assert resp.status_code == 200
+ result = json.loads(resp.data.decode("utf-8"))
+ assert result["dashboardId"] == str(dashboard_id)
+ assert result["state"] == STATE
+ db.session.query(KeyValueEntry).filter_by(uuid=key).delete()
+ db.session.commit()
diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py
index 78d7285e010e7..928f3d595730d 100644
--- a/tests/integration_tests/databases/api_tests.py
+++ b/tests/integration_tests/databases/api_tests.py
@@ -179,12 +179,14 @@ def test_get_items(self):
"changed_on_delta_humanized",
"created_by",
"database_name",
+ "disable_data_preview",
"explore_database_id",
"expose_in_sqllab",
"extra",
"force_ctas_schema",
"id",
]
+
self.assertGreater(response["count"], 0)
self.assertEqual(list(response["result"][0].keys()), expected_columns)
diff --git a/tests/integration_tests/databases/commands_tests.py b/tests/integration_tests/databases/commands_tests.py
index c90550ed69903..59fcf39a86727 100644
--- a/tests/integration_tests/databases/commands_tests.py
+++ b/tests/integration_tests/databases/commands_tests.py
@@ -358,6 +358,26 @@ def test_export_database_command_key_order(self, mock_g):
"version",
]
+ @patch("superset.security.manager.g")
+ @pytest.mark.usefixtures(
+ "load_birth_names_dashboard_with_slices", "load_energy_table_with_slice"
+ )
+ def test_export_database_command_no_related(self, mock_g):
+ """
+ Test that only databases are exported when export_related=False.
+ """
+ mock_g.user = security_manager.find_user("admin")
+
+ example_db = get_example_database()
+ db_uuid = example_db.uuid
+
+ command = ExportDatabasesCommand([example_db.id], export_related=False)
+ contents = dict(command.run())
+ prefixes = {path.split("/")[0] for path in contents}
+ assert "metadata.yaml" in prefixes
+ assert "databases" in prefixes
+ assert "datasets" not in prefixes
+
class TestImportDatabasesCommand(SupersetTestCase):
def test_import_v1_database(self):
diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py
index eeda824500fe6..7626de677bf01 100644
--- a/tests/integration_tests/datasets/api_tests.py
+++ b/tests/integration_tests/datasets/api_tests.py
@@ -1455,11 +1455,6 @@ def test_export_dataset_gamma(self):
rv = self.client.get(uri)
assert rv.status_code == 200
- @patch.dict(
- "superset.extensions.feature_flag_manager._feature_flags",
- {"VERSIONED_EXPORT": True},
- clear=True,
- )
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_export_dataset_bundle(self):
"""
@@ -1482,11 +1477,6 @@ def test_export_dataset_bundle(self):
buf = BytesIO(rv.data)
assert is_zipfile(buf)
- @patch.dict(
- "superset.extensions.feature_flag_manager._feature_flags",
- {"VERSIONED_EXPORT": True},
- clear=True,
- )
def test_export_dataset_bundle_not_found(self):
"""
Dataset API: Test export dataset not found
@@ -1499,11 +1489,6 @@ def test_export_dataset_bundle_not_found(self):
assert rv.status_code == 404
- @patch.dict(
- "superset.extensions.feature_flag_manager._feature_flags",
- {"VERSIONED_EXPORT": True},
- clear=True,
- )
@pytest.mark.usefixtures("create_datasets")
def test_export_dataset_bundle_gamma(self):
"""
diff --git a/tests/integration_tests/datasets/commands_tests.py b/tests/integration_tests/datasets/commands_tests.py
index af22d9319b27c..784b1f19e39d9 100644
--- a/tests/integration_tests/datasets/commands_tests.py
+++ b/tests/integration_tests/datasets/commands_tests.py
@@ -219,6 +219,26 @@ def test_export_dataset_command_key_order(self, mock_g):
"database_uuid",
]
+ @patch("superset.security.manager.g")
+ @pytest.mark.usefixtures("load_energy_table_with_slice")
+ def test_export_dataset_command_no_related(self, mock_g):
+ """
+ Test that only datasets are exported when export_related=False.
+ """
+ mock_g.user = security_manager.find_user("admin")
+
+ example_db = get_example_database()
+ example_dataset = _get_table_from_list_by_name(
+ "energy_usage", example_db.tables
+ )
+ command = ExportDatasetsCommand([example_dataset.id], export_related=False)
+ contents = dict(command.run())
+
+ assert list(contents.keys()) == [
+ "metadata.yaml",
+ "datasets/examples/energy_usage.yaml",
+ ]
+
class TestImportDatasetsCommand(SupersetTestCase):
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
diff --git a/tests/integration_tests/explore/form_data/api_tests.py b/tests/integration_tests/explore/form_data/api_tests.py
index 4b646a03586de..c05be00e96186 100644
--- a/tests/integration_tests/explore/form_data/api_tests.py
+++ b/tests/integration_tests/explore/form_data/api_tests.py
@@ -27,21 +27,16 @@
from superset.extensions import cache_manager
from superset.models.slice import Slice
from tests.integration_tests.base_tests import login
+from tests.integration_tests.fixtures.client import client
from tests.integration_tests.fixtures.world_bank_dashboard import (
load_world_bank_dashboard_with_slices,
load_world_bank_data,
)
from tests.integration_tests.test_app import app
-key = "test-key"
-form_data = "test"
-
-
-@pytest.fixture
-def client():
- with app.test_client() as client:
- with app.app_context():
- yield client
+KEY = "test-key"
+INITIAL_FORM_DATA = json.dumps({"test": "initial value"})
+UPDATED_FORM_DATA = json.dumps({"test": "updated value"})
@pytest.fixture
@@ -78,9 +73,9 @@ def cache(chart_id, admin_id, dataset_id):
"owner": admin_id,
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": form_data,
+ "form_data": INITIAL_FORM_DATA,
}
- cache_manager.explore_form_data_cache.set(key, entry)
+ cache_manager.explore_form_data_cache.set(KEY, entry)
def test_post(client, chart_id: int, dataset_id: int):
@@ -88,13 +83,13 @@ def test_post(client, chart_id: int, dataset_id: int):
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": form_data,
+ "form_data": INITIAL_FORM_DATA,
}
resp = client.post("api/v1/explore/form_data", json=payload)
assert resp.status_code == 201
-def test_post_bad_request(client, chart_id: int, dataset_id: int):
+def test_post_bad_request_non_string(client, chart_id: int, dataset_id: int):
login(client, "admin")
payload = {
"dataset_id": dataset_id,
@@ -105,12 +100,23 @@ def test_post_bad_request(client, chart_id: int, dataset_id: int):
assert resp.status_code == 400
+def test_post_bad_request_non_json_string(client, chart_id: int, dataset_id: int):
+ login(client, "admin")
+ payload = {
+ "dataset_id": dataset_id,
+ "chart_id": chart_id,
+ "form_data": "foo",
+ }
+ resp = client.post("api/v1/explore/form_data", json=payload)
+ assert resp.status_code == 400
+
+
def test_post_access_denied(client, chart_id: int, dataset_id: int):
login(client, "gamma")
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": form_data,
+ "form_data": INITIAL_FORM_DATA,
}
resp = client.post("api/v1/explore/form_data", json=payload)
assert resp.status_code == 404
@@ -121,7 +127,7 @@ def test_post_same_key_for_same_context(client, chart_id: int, dataset_id: int):
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": UPDATED_FORM_DATA,
}
resp = client.post("api/v1/explore/form_data?tab_id=1", json=payload)
data = json.loads(resp.data.decode("utf-8"))
@@ -139,14 +145,14 @@ def test_post_different_key_for_different_context(
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": UPDATED_FORM_DATA,
}
resp = client.post("api/v1/explore/form_data?tab_id=1", json=payload)
data = json.loads(resp.data.decode("utf-8"))
first_key = data.get("key")
payload = {
"dataset_id": dataset_id,
- "form_data": "new form_data",
+ "form_data": json.dumps({"test": "initial value"}),
}
resp = client.post("api/v1/explore/form_data?tab_id=1", json=payload)
data = json.loads(resp.data.decode("utf-8"))
@@ -159,7 +165,7 @@ def test_post_same_key_for_same_tab_id(client, chart_id: int, dataset_id: int):
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": json.dumps({"test": "initial value"}),
}
resp = client.post("api/v1/explore/form_data?tab_id=1", json=payload)
data = json.loads(resp.data.decode("utf-8"))
@@ -177,7 +183,7 @@ def test_post_different_key_for_different_tab_id(
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": json.dumps({"test": "initial value"}),
}
resp = client.post("api/v1/explore/form_data?tab_id=1", json=payload)
data = json.loads(resp.data.decode("utf-8"))
@@ -193,7 +199,7 @@ def test_post_different_key_for_no_tab_id(client, chart_id: int, dataset_id: int
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": INITIAL_FORM_DATA,
}
resp = client.post("api/v1/explore/form_data", json=payload)
data = json.loads(resp.data.decode("utf-8"))
@@ -209,9 +215,9 @@ def test_put(client, chart_id: int, dataset_id: int):
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": UPDATED_FORM_DATA,
}
- resp = client.put(f"api/v1/explore/form_data/{key}", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}", json=payload)
assert resp.status_code == 200
@@ -220,12 +226,12 @@ def test_put_same_key_for_same_tab_id(client, chart_id: int, dataset_id: int):
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": UPDATED_FORM_DATA,
}
- resp = client.put(f"api/v1/explore/form_data/{key}?tab_id=1", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}?tab_id=1", json=payload)
data = json.loads(resp.data.decode("utf-8"))
first_key = data.get("key")
- resp = client.put(f"api/v1/explore/form_data/{key}?tab_id=1", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}?tab_id=1", json=payload)
data = json.loads(resp.data.decode("utf-8"))
second_key = data.get("key")
assert first_key == second_key
@@ -236,12 +242,12 @@ def test_put_different_key_for_different_tab_id(client, chart_id: int, dataset_i
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": UPDATED_FORM_DATA,
}
- resp = client.put(f"api/v1/explore/form_data/{key}?tab_id=1", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}?tab_id=1", json=payload)
data = json.loads(resp.data.decode("utf-8"))
first_key = data.get("key")
- resp = client.put(f"api/v1/explore/form_data/{key}?tab_id=2", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}?tab_id=2", json=payload)
data = json.loads(resp.data.decode("utf-8"))
second_key = data.get("key")
assert first_key != second_key
@@ -252,12 +258,12 @@ def test_put_different_key_for_no_tab_id(client, chart_id: int, dataset_id: int)
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": UPDATED_FORM_DATA,
}
- resp = client.put(f"api/v1/explore/form_data/{key}", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}", json=payload)
data = json.loads(resp.data.decode("utf-8"))
first_key = data.get("key")
- resp = client.put(f"api/v1/explore/form_data/{key}", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}", json=payload)
data = json.loads(resp.data.decode("utf-8"))
second_key = data.get("key")
assert first_key != second_key
@@ -270,7 +276,29 @@ def test_put_bad_request(client, chart_id: int, dataset_id: int):
"chart_id": chart_id,
"form_data": 1234,
}
- resp = client.put(f"api/v1/explore/form_data/{key}", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}", json=payload)
+ assert resp.status_code == 400
+
+
+def test_put_bad_request_non_string(client, chart_id: int, dataset_id: int):
+ login(client, "admin")
+ payload = {
+ "dataset_id": dataset_id,
+ "chart_id": chart_id,
+ "form_data": 1234,
+ }
+ resp = client.put(f"api/v1/explore/form_data/{KEY}", json=payload)
+ assert resp.status_code == 400
+
+
+def test_put_bad_request_non_json_string(client, chart_id: int, dataset_id: int):
+ login(client, "admin")
+ payload = {
+ "dataset_id": dataset_id,
+ "chart_id": chart_id,
+ "form_data": "foo",
+ }
+ resp = client.put(f"api/v1/explore/form_data/{KEY}", json=payload)
assert resp.status_code == 400
@@ -279,9 +307,9 @@ def test_put_access_denied(client, chart_id: int, dataset_id: int):
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": UPDATED_FORM_DATA,
}
- resp = client.put(f"api/v1/explore/form_data/{key}", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}", json=payload)
assert resp.status_code == 404
@@ -290,9 +318,9 @@ def test_put_not_owner(client, chart_id: int, dataset_id: int):
payload = {
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": "new form_data",
+ "form_data": UPDATED_FORM_DATA,
}
- resp = client.put(f"api/v1/explore/form_data/{key}", json=payload)
+ resp = client.put(f"api/v1/explore/form_data/{KEY}", json=payload)
assert resp.status_code == 404
@@ -304,15 +332,15 @@ def test_get_key_not_found(client):
def test_get(client):
login(client, "admin")
- resp = client.get(f"api/v1/explore/form_data/{key}")
+ resp = client.get(f"api/v1/explore/form_data/{KEY}")
assert resp.status_code == 200
data = json.loads(resp.data.decode("utf-8"))
- assert form_data == data.get("form_data")
+ assert INITIAL_FORM_DATA == data.get("form_data")
def test_get_access_denied(client):
login(client, "gamma")
- resp = client.get(f"api/v1/explore/form_data/{key}")
+ resp = client.get(f"api/v1/explore/form_data/{KEY}")
assert resp.status_code == 404
@@ -320,19 +348,19 @@ def test_get_access_denied(client):
def test_get_dataset_access_denied(mock_can_access_datasource, client):
mock_can_access_datasource.side_effect = DatasetAccessDeniedError()
login(client, "admin")
- resp = client.get(f"api/v1/explore/form_data/{key}")
+ resp = client.get(f"api/v1/explore/form_data/{KEY}")
assert resp.status_code == 403
def test_delete(client):
login(client, "admin")
- resp = client.delete(f"api/v1/explore/form_data/{key}")
+ resp = client.delete(f"api/v1/explore/form_data/{KEY}")
assert resp.status_code == 200
def test_delete_access_denied(client):
login(client, "gamma")
- resp = client.delete(f"api/v1/explore/form_data/{key}")
+ resp = client.delete(f"api/v1/explore/form_data/{KEY}")
assert resp.status_code == 404
@@ -343,7 +371,7 @@ def test_delete_not_owner(client, chart_id: int, dataset_id: int, admin_id: int)
"owner": another_owner,
"dataset_id": dataset_id,
"chart_id": chart_id,
- "form_data": form_data,
+ "form_data": INITIAL_FORM_DATA,
}
cache_manager.explore_form_data_cache.set(another_key, entry)
login(client, "admin")
diff --git a/tests/integration_tests/explore/permalink/__init__.py b/tests/integration_tests/explore/permalink/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/integration_tests/explore/permalink/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/integration_tests/explore/permalink/api_tests.py b/tests/integration_tests/explore/permalink/api_tests.py
new file mode 100644
index 0000000000000..37b0d2455d80c
--- /dev/null
+++ b/tests/integration_tests/explore/permalink/api_tests.py
@@ -0,0 +1,117 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import json
+import pickle
+from typing import Any, Dict
+from uuid import UUID
+
+import pytest
+from sqlalchemy.orm import Session
+
+from superset import db
+from superset.key_value.models import KeyValueEntry
+from superset.models.slice import Slice
+from tests.integration_tests.base_tests import login
+from tests.integration_tests.fixtures.client import client
+from tests.integration_tests.fixtures.world_bank_dashboard import (
+ load_world_bank_dashboard_with_slices,
+ load_world_bank_data,
+)
+from tests.integration_tests.test_app import app
+
+
+@pytest.fixture
+def chart(load_world_bank_dashboard_with_slices) -> Slice:
+ with app.app_context() as ctx:
+ session: Session = ctx.app.appbuilder.get_session
+ chart = session.query(Slice).filter_by(slice_name="World's Population").one()
+ return chart
+
+
+@pytest.fixture
+def form_data(chart) -> Dict[str, Any]:
+ datasource = f"{chart.datasource.id}__{chart.datasource.type}"
+ return {
+ "chart_id": chart.id,
+ "datasource": datasource,
+ }
+
+
+def test_post(client, form_data):
+ login(client, "admin")
+ resp = client.post(f"api/v1/explore/permalink", json={"formData": form_data})
+ assert resp.status_code == 201
+ data = json.loads(resp.data.decode("utf-8"))
+ key = data["key"]
+ url = data["url"]
+ assert key in url
+ db.session.query(KeyValueEntry).filter_by(uuid=key).delete()
+ db.session.commit()
+
+
+def test_post_access_denied(client, form_data):
+ login(client, "gamma")
+ resp = client.post(f"api/v1/explore/permalink", json={"formData": form_data})
+ assert resp.status_code == 404
+
+
+def test_get_missing_chart(client, chart):
+ from superset.key_value.models import KeyValueEntry
+
+ key = 1234
+ uuid_key = "e2ea9d19-7988-4862-aa69-c3a1a7628cb9"
+ entry = KeyValueEntry(
+ id=int(key),
+ uuid=UUID("e2ea9d19-7988-4862-aa69-c3a1a7628cb9"),
+ resource="explore_permalink",
+ value=pickle.dumps(
+ {
+ "chartId": key,
+ "datasetId": chart.datasource.id,
+ "formData": {
+ "slice_id": key,
+ "datasource": f"{chart.datasource.id}__{chart.datasource.type}",
+ },
+ }
+ ),
+ )
+ db.session.add(entry)
+ db.session.commit()
+ login(client, "admin")
+ resp = client.get(f"api/v1/explore/permalink/{uuid_key}")
+ assert resp.status_code == 404
+ db.session.delete(entry)
+ db.session.commit()
+
+
+def test_post_invalid_schema(client):
+ login(client, "admin")
+ resp = client.post(f"api/v1/explore/permalink", json={"abc": 123})
+ assert resp.status_code == 400
+
+
+def test_get(client, form_data):
+ login(client, "admin")
+ resp = client.post(f"api/v1/explore/permalink", json={"formData": form_data})
+ data = json.loads(resp.data.decode("utf-8"))
+ key = data["key"]
+ resp = client.get(f"api/v1/explore/permalink/{key}")
+ assert resp.status_code == 200
+ result = json.loads(resp.data.decode("utf-8"))
+ assert result["state"]["formData"] == form_data
+ db.session.query(KeyValueEntry).filter_by(uuid=key).delete()
+ db.session.commit()
diff --git a/tests/integration_tests/fixtures/client.py b/tests/integration_tests/fixtures/client.py
new file mode 100644
index 0000000000000..f532f438fda4f
--- /dev/null
+++ b/tests/integration_tests/fixtures/client.py
@@ -0,0 +1,26 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import pytest
+
+from tests.integration_tests.test_app import app
+
+
+@pytest.fixture
+def client():
+ with app.test_client() as client:
+ with app.app_context():
+ yield client
diff --git a/tests/integration_tests/key_value/__init__.py b/tests/integration_tests/key_value/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/integration_tests/key_value/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/integration_tests/key_value/commands/__init__.py b/tests/integration_tests/key_value/commands/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/integration_tests/key_value/commands/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/integration_tests/key_value/commands/create_test.py b/tests/integration_tests/key_value/commands/create_test.py
new file mode 100644
index 0000000000000..22a1b517485c8
--- /dev/null
+++ b/tests/integration_tests/key_value/commands/create_test.py
@@ -0,0 +1,64 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import pickle
+from uuid import UUID
+
+from flask.ctx import AppContext
+from flask_appbuilder.security.sqla.models import User
+
+from superset.extensions import db
+from tests.integration_tests.key_value.commands.fixtures import (
+ admin,
+ ID_KEY,
+ RESOURCE,
+ UUID_KEY,
+ VALUE,
+)
+
+
+def test_create_id_entry(app_context: AppContext, admin: User) -> None:
+ from superset.key_value.commands.create import CreateKeyValueCommand
+ from superset.key_value.models import KeyValueEntry
+
+ key = CreateKeyValueCommand(
+ actor=admin, resource=RESOURCE, value=VALUE, key_type="id",
+ ).run()
+ entry = (
+ db.session.query(KeyValueEntry).filter_by(id=int(key)).autoflush(False).one()
+ )
+ assert pickle.loads(entry.value) == VALUE
+ assert entry.created_by_fk == admin.id
+ db.session.delete(entry)
+ db.session.commit()
+
+
+def test_create_uuid_entry(app_context: AppContext, admin: User) -> None:
+ from superset.key_value.commands.create import CreateKeyValueCommand
+ from superset.key_value.models import KeyValueEntry
+
+ key = CreateKeyValueCommand(
+ actor=admin, resource=RESOURCE, value=VALUE, key_type="uuid",
+ ).run()
+ entry = (
+ db.session.query(KeyValueEntry).filter_by(uuid=UUID(key)).autoflush(False).one()
+ )
+ assert pickle.loads(entry.value) == VALUE
+ assert entry.created_by_fk == admin.id
+ db.session.delete(entry)
+ db.session.commit()
diff --git a/tests/integration_tests/key_value/commands/delete_test.py b/tests/integration_tests/key_value/commands/delete_test.py
new file mode 100644
index 0000000000000..3a25cb7d0edc7
--- /dev/null
+++ b/tests/integration_tests/key_value/commands/delete_test.py
@@ -0,0 +1,91 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import pickle
+from typing import TYPE_CHECKING
+from uuid import UUID
+
+import pytest
+from flask.ctx import AppContext
+from flask_appbuilder.security.sqla.models import User
+
+from superset.extensions import db
+from tests.integration_tests.key_value.commands.fixtures import admin, RESOURCE, VALUE
+
+if TYPE_CHECKING:
+ from superset.key_value.models import KeyValueEntry
+
+ID_KEY = "234"
+UUID_KEY = "5aae143c-44f1-478e-9153-ae6154df333a"
+
+
+@pytest.fixture
+def key_value_entry() -> KeyValueEntry:
+ from superset.key_value.models import KeyValueEntry
+
+ entry = KeyValueEntry(
+ id=int(ID_KEY),
+ uuid=UUID(UUID_KEY),
+ resource=RESOURCE,
+ value=pickle.dumps(VALUE),
+ )
+ db.session.add(entry)
+ db.session.commit()
+ return entry
+
+
+def test_delete_id_entry(
+ app_context: AppContext, admin: User, key_value_entry: KeyValueEntry,
+) -> None:
+ from superset.key_value.commands.delete import DeleteKeyValueCommand
+ from superset.key_value.models import KeyValueEntry
+
+ assert (
+ DeleteKeyValueCommand(
+ actor=admin, resource=RESOURCE, key=ID_KEY, key_type="id",
+ ).run()
+ is True
+ )
+
+
+def test_delete_uuid_entry(
+ app_context: AppContext, admin: User, key_value_entry: KeyValueEntry,
+) -> None:
+ from superset.key_value.commands.delete import DeleteKeyValueCommand
+ from superset.key_value.models import KeyValueEntry
+
+ assert (
+ DeleteKeyValueCommand(
+ actor=admin, resource=RESOURCE, key=UUID_KEY, key_type="uuid",
+ ).run()
+ is True
+ )
+
+
+def test_delete_entry_missing(
+ app_context: AppContext, admin: User, key_value_entry: KeyValueEntry,
+) -> None:
+ from superset.key_value.commands.delete import DeleteKeyValueCommand
+ from superset.key_value.models import KeyValueEntry
+
+ assert (
+ DeleteKeyValueCommand(
+ actor=admin, resource=RESOURCE, key="456", key_type="id",
+ ).run()
+ is False
+ )
diff --git a/tests/integration_tests/key_value/commands/fixtures.py b/tests/integration_tests/key_value/commands/fixtures.py
new file mode 100644
index 0000000000000..44e12f7854cb2
--- /dev/null
+++ b/tests/integration_tests/key_value/commands/fixtures.py
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import pickle
+from typing import Generator, TYPE_CHECKING
+from uuid import UUID
+
+import pytest
+from flask_appbuilder.security.sqla.models import User
+from sqlalchemy.orm import Session
+
+from superset.extensions import db
+from tests.integration_tests.test_app import app
+
+if TYPE_CHECKING:
+ from superset.key_value.models import KeyValueEntry
+
+ID_KEY = "123"
+UUID_KEY = "3e7a2ab8-bcaf-49b0-a5df-dfb432f291cc"
+RESOURCE = "my_resource"
+VALUE = {"foo": "bar"}
+
+
@pytest.fixture
def key_value_entry() -> Generator[KeyValueEntry, None, None]:
    """Yield a persisted ``KeyValueEntry`` row, deleting it on teardown."""
    from superset.key_value.models import KeyValueEntry

    session = db.session
    row = KeyValueEntry(
        id=int(ID_KEY),
        uuid=UUID(UUID_KEY),
        resource=RESOURCE,
        value=pickle.dumps(VALUE),
    )
    session.add(row)
    session.commit()
    yield row
    # Teardown: remove the fixture row so state does not leak between tests.
    session.delete(row)
    session.commit()
+
+
@pytest.fixture
def admin() -> User:
    """Return the built-in ``admin`` user from the test app's database."""
    with app.app_context() as ctx:
        session: Session = ctx.app.appbuilder.get_session
        return session.query(User).filter_by(username="admin").one()
diff --git a/tests/integration_tests/key_value/commands/get_test.py b/tests/integration_tests/key_value/commands/get_test.py
new file mode 100644
index 0000000000000..20efa9dfbd4c5
--- /dev/null
+++ b/tests/integration_tests/key_value/commands/get_test.py
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import pickle
+import uuid
+from datetime import datetime, timedelta
+from typing import TYPE_CHECKING
+
+from flask.ctx import AppContext
+
+from superset.extensions import db
+from tests.integration_tests.key_value.commands.fixtures import (
+ ID_KEY,
+ key_value_entry,
+ RESOURCE,
+ UUID_KEY,
+ VALUE,
+)
+
+if TYPE_CHECKING:
+ from superset.key_value.models import KeyValueEntry
+
+
def test_get_id_entry(app_context: AppContext, key_value_entry: KeyValueEntry) -> None:
    """Fetching the fixture row by numeric id returns the stored value."""
    from superset.key_value.commands.get import GetKeyValueCommand

    command = GetKeyValueCommand(resource=RESOURCE, key=ID_KEY, key_type="id")
    assert command.run() == VALUE
+
+
def test_get_uuid_entry(
    app_context: AppContext, key_value_entry: KeyValueEntry
) -> None:
    """Fetching the fixture row by uuid returns the stored value."""
    from superset.key_value.commands.get import GetKeyValueCommand

    command = GetKeyValueCommand(resource=RESOURCE, key=UUID_KEY, key_type="uuid")
    assert command.run() == VALUE
+
+
def test_get_id_entry_missing(
    app_context: AppContext, key_value_entry: KeyValueEntry,
) -> None:
    """Fetching a key that does not exist yields None."""
    from superset.key_value.commands.get import GetKeyValueCommand

    command = GetKeyValueCommand(resource=RESOURCE, key="456", key_type="id")
    assert command.run() is None
+
+
def test_get_expired_entry(app_context: AppContext) -> None:
    """An entry whose ``expires_on`` is in the past must not be returned.

    Bug fix: the lookup previously used ``ID_KEY`` ("123"), which does not
    exist in this test (the ``key_value_entry`` fixture is not loaded here),
    so the assertion passed vacuously and the expiry path was never
    exercised. The lookup now targets the expired entry created below.
    """
    from superset.key_value.commands.get import GetKeyValueCommand
    from superset.key_value.models import KeyValueEntry

    id_ = 678
    entry = KeyValueEntry(
        id=id_,
        uuid=uuid.uuid4(),
        resource=RESOURCE,
        value=pickle.dumps(VALUE),
        expires_on=datetime.now() - timedelta(days=1),
    )
    db.session.add(entry)
    db.session.commit()
    try:
        value = GetKeyValueCommand(
            resource=RESOURCE, key=str(id_), key_type="id"
        ).run()
        assert value is None
    finally:
        # Clean up even when the assertion fails so the row doesn't leak.
        db.session.delete(entry)
        db.session.commit()
+
+
def test_get_future_expiring_entry(app_context: AppContext) -> None:
    """An entry whose ``expires_on`` lies in the future is still returned."""
    from superset.key_value.commands.get import GetKeyValueCommand
    from superset.key_value.models import KeyValueEntry

    id_ = 789
    entry = KeyValueEntry(
        id=id_,
        uuid=uuid.uuid4(),
        resource=RESOURCE,
        value=pickle.dumps(VALUE),
        expires_on=datetime.now() + timedelta(days=1),
    )
    db.session.add(entry)
    db.session.commit()
    try:
        value = GetKeyValueCommand(
            resource=RESOURCE, key=str(id_), key_type="id"
        ).run()
        assert value == VALUE
    finally:
        # Clean up even when the assertion fails so the row doesn't leak
        # into other tests.
        db.session.delete(entry)
        db.session.commit()
diff --git a/tests/integration_tests/key_value/commands/update_test.py b/tests/integration_tests/key_value/commands/update_test.py
new file mode 100644
index 0000000000000..1fbc84d59e332
--- /dev/null
+++ b/tests/integration_tests/key_value/commands/update_test.py
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import pickle
+from typing import TYPE_CHECKING
+from uuid import UUID
+
+from flask.ctx import AppContext
+from flask_appbuilder.security.sqla.models import User
+
+from superset.extensions import db
+from tests.integration_tests.key_value.commands.fixtures import (
+ admin,
+ ID_KEY,
+ key_value_entry,
+ RESOURCE,
+ UUID_KEY,
+)
+
+if TYPE_CHECKING:
+ from superset.key_value.models import KeyValueEntry
+
+
+NEW_VALUE = "new value"
+
+
def test_update_id_entry(
    app_context: AppContext, admin: User, key_value_entry: KeyValueEntry,
) -> None:
    """Updating by numeric id stores the new value and records the actor."""
    from superset.key_value.commands.update import UpdateKeyValueCommand
    from superset.key_value.models import KeyValueEntry

    command = UpdateKeyValueCommand(
        actor=admin, resource=RESOURCE, key=ID_KEY, value=NEW_VALUE, key_type="id",
    )
    assert command.run() == ID_KEY

    # Re-read the row (autoflush off so the stale fixture instance isn't
    # flushed back) and check both the payload and the audit column.
    query = db.session.query(KeyValueEntry).filter_by(id=int(ID_KEY))
    entry = query.autoflush(False).one()
    assert pickle.loads(entry.value) == NEW_VALUE
    assert entry.changed_by_fk == admin.id
+
+
def test_update_uuid_entry(
    app_context: AppContext, admin: User, key_value_entry: KeyValueEntry,
) -> None:
    """Updating by uuid stores the new value and records the actor."""
    from superset.key_value.commands.update import UpdateKeyValueCommand
    from superset.key_value.models import KeyValueEntry

    command = UpdateKeyValueCommand(
        actor=admin, resource=RESOURCE, key=UUID_KEY, value=NEW_VALUE, key_type="uuid",
    )
    assert command.run() == UUID_KEY

    # Re-read the row and check both the payload and the audit column.
    query = db.session.query(KeyValueEntry).filter_by(uuid=UUID(UUID_KEY))
    entry = query.autoflush(False).one()
    assert pickle.loads(entry.value) == NEW_VALUE
    assert entry.changed_by_fk == admin.id
+
+
def test_update_missing_entry(
    app_context: AppContext, admin: User, key_value_entry: KeyValueEntry,
) -> None:
    """Updating a key that does not exist yields None."""
    from superset.key_value.commands.update import UpdateKeyValueCommand

    result = UpdateKeyValueCommand(
        actor=admin, resource=RESOURCE, key="456", value=NEW_VALUE, key_type="id",
    ).run()
    assert result is None
diff --git a/tests/integration_tests/model_tests.py b/tests/integration_tests/model_tests.py
index 6371a06123ae8..5ffa65e583ead 100644
--- a/tests/integration_tests/model_tests.py
+++ b/tests/integration_tests/model_tests.py
@@ -15,10 +15,12 @@
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
+import json
import textwrap
import unittest
from unittest import mock
+from superset.connectors.sqla.models import SqlaTable
from superset.exceptions import SupersetException
from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_dashboard_with_slices,
@@ -578,6 +580,38 @@ def test_data_for_slices_with_query_context(self):
"state",
}
    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
    def test_data_for_slices_with_adhoc_column(self):
        """``data_for_slices()`` must handle a legacy chart whose ``groupby``
        mixes a plain column name with an adhoc (sqlExpression) column dict.
        """
        # should perform sqla.model.BaseDatasource.data_for_slices() with adhoc
        # column and legacy chart
        tbl = self.get_table(name="birth_names")
        dashboard = self.get_dash_by_slug("births")
        # Legacy (params-based, not query_context) chart: "groupby" carries
        # both a physical column ("name") and an adhoc column definition.
        slc = Slice(
            slice_name="slice with adhoc column",
            datasource_type="table",
            viz_type="table",
            params=json.dumps(
                {
                    "adhoc_filters": [],
                    "granularity_sqla": "ds",
                    "groupby": [
                        "name",
                        {"label": "adhoc_column", "sqlExpression": "name"},
                    ],
                    "metrics": ["sum__num"],
                    "time_range": "No filter",
                    "viz_type": "table",
                }
            ),
            datasource_id=tbl.id,
        )
        dashboard.slices.append(slc)
        # The call must succeed and return the datasource payload; "database"
        # is a required key of that payload.
        datasource_info = slc.datasource.data_for_slices([slc])
        assert "database" in datasource_info

        # clean up and auto commit
        metadata_db.session.delete(slc)
+
def test_literal_dttm_type_factory():
orig_type = DateTime()
diff --git a/tests/integration_tests/queries/saved_queries/commands_tests.py b/tests/integration_tests/queries/saved_queries/commands_tests.py
index f90924ba0e66a..bd90419155422 100644
--- a/tests/integration_tests/queries/saved_queries/commands_tests.py
+++ b/tests/integration_tests/queries/saved_queries/commands_tests.py
@@ -83,6 +83,24 @@ def test_export_query_command(self, mock_g):
"database_uuid": str(self.example_database.uuid),
}
+ @patch("superset.queries.saved_queries.filters.g")
+ def test_export_query_command_no_related(self, mock_g):
+ """
+ Test that only the query is exported when export_related=False.
+ """
+ mock_g.user = security_manager.find_user("admin")
+
+ command = ExportSavedQueriesCommand(
+ [self.example_query.id], export_related=False
+ )
+ contents = dict(command.run())
+
+ expected = [
+ "metadata.yaml",
+ "queries/examples/schema1/The_answer.yaml",
+ ]
+ assert expected == list(contents.keys())
+
@patch("superset.queries.saved_queries.filters.g")
def test_export_query_command_no_access(self, mock_g):
"""Test that users can't export datasets they don't have access to"""
diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py
index 14480f10bcc69..54779fcbbff9b 100644
--- a/tests/integration_tests/sqla_models_tests.py
+++ b/tests/integration_tests/sqla_models_tests.py
@@ -41,6 +41,7 @@
FilterOperator,
GenericDataType,
TemporalType,
+ backend,
)
from superset.utils.database import get_example_database
from tests.integration_tests.fixtures.birth_names_dashboard import (
@@ -605,6 +606,50 @@ def test_filter_on_text_column(text_column_table):
assert result_object.df["count"][0] == 1
def test_should_generate_closed_and_open_time_filter_range():
    """The generated time filter must be closed at the start (``>=``) and
    open at the end (``<``) so consecutive ranges never double-count rows
    that fall exactly on a boundary.
    """
    with app.app_context():
        # The expected SQL below uses Postgres-specific TO_TIMESTAMP syntax.
        if backend() != "postgresql":
            pytest.skip(f"{backend()} has different dialect for datetime column")

        # Virtual (SQL-defined) table with five timestamps straddling the
        # [2022-01-01, 2023-01-01) window boundaries.
        table = SqlaTable(
            table_name="temporal_column_table",
            sql=(
                "SELECT '2021-12-31'::timestamp as datetime_col "
                "UNION SELECT '2022-01-01'::timestamp "
                "UNION SELECT '2022-03-10'::timestamp "
                "UNION SELECT '2023-01-01'::timestamp "
                "UNION SELECT '2023-03-10'::timestamp "
            ),
            database=get_example_database(),
        )
        TableColumn(
            column_name="datetime_col", type="TIMESTAMP", table=table, is_dttm=True,
        )
        SqlMetric(metric_name="count", expression="count(*)", table=table)
        result_object = table.query(
            {
                "metrics": ["count"],
                "is_timeseries": False,
                "filter": [],
                "from_dttm": datetime(2022, 1, 1),
                "to_dttm": datetime(2023, 1, 1),
                "granularity": "datetime_col",
            }
        )
        """ >>> result_object.query
        SELECT count(*) AS count
        FROM
          (SELECT '2021-12-31'::timestamp as datetime_col
           UNION SELECT '2022-01-01'::timestamp
           UNION SELECT '2022-03-10'::timestamp
           UNION SELECT '2023-01-01'::timestamp
           UNION SELECT '2023-03-10'::timestamp) AS virtual_table
        WHERE datetime_col >= TO_TIMESTAMP('2022-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
          AND datetime_col < TO_TIMESTAMP('2023-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
        """
        # Only 2022-01-01 and 2022-03-10 fall inside the half-open range:
        # 2023-01-01 is excluded by the open upper bound.
        assert result_object.df.iloc[0]["count"] == 2
+
+
@pytest.mark.parametrize(
"row,dimension,result",
[
diff --git a/tests/integration_tests/superset_test_config_sqllab_backend_persist.py b/tests/integration_tests/superset_test_config_sqllab_backend_persist_off.py
similarity index 94%
rename from tests/integration_tests/superset_test_config_sqllab_backend_persist.py
rename to tests/integration_tests/superset_test_config_sqllab_backend_persist_off.py
index 41a720deb6953..9f6dd2ead1fa2 100644
--- a/tests/integration_tests/superset_test_config_sqllab_backend_persist.py
+++ b/tests/integration_tests/superset_test_config_sqllab_backend_persist_off.py
@@ -21,4 +21,4 @@
from .superset_test_config import *
-FEATURE_FLAGS = {"SQLLAB_BACKEND_PERSISTENCE": True}
+FEATURE_FLAGS = {"SQLLAB_BACKEND_PERSISTENCE": False}
diff --git a/tests/unit_tests/commands/__init__.py b/tests/unit_tests/commands/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/unit_tests/commands/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/unit_tests/commands/export_test.py b/tests/unit_tests/commands/export_test.py
new file mode 100644
index 0000000000000..91aebf1b684eb
--- /dev/null
+++ b/tests/unit_tests/commands/export_test.py
@@ -0,0 +1,94 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# pylint: disable=invalid-name, unused-argument, import-outside-toplevel
+
+from freezegun import freeze_time
+from pytest_mock import MockFixture
+
+
def test_export_assets_command(mocker: MockFixture, app_context: None) -> None:
    """
    Test that all assets are exported correctly.
    """
    from superset.commands.export.assets import ExportAssetsCommand

    # Each sub-command is mocked to emit its own metadata.yaml plus a single
    # asset file; the assets command must replace those with one combined
    # "assets" metadata.yaml and concatenate the asset files in order.
    sub_commands = [
        ("ExportDatabasesCommand", "Database", "databases/example.yaml"),
        ("ExportDatasetsCommand", "Dataset", "datasets/example/dataset.yaml"),
        ("ExportChartsCommand", "Slice", "charts/pie.yaml"),
        ("ExportDashboardsCommand", "Dashboard", "dashboards/sales.yaml"),
        ("ExportSavedQueriesCommand", "SavedQuery", "queries/example/metric.yaml"),
    ]
    for class_name, type_, path in sub_commands:
        mock_command = mocker.patch(
            f"superset.commands.export.assets.{class_name}"
        )
        mock_command.return_value.run.return_value = [
            (
                "metadata.yaml",
                f"version: 1.0.0\ntype: {type_}\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
            ),
            (path, ""),
        ]

    with freeze_time("2022-01-01T00:00:00Z"):
        output = list(ExportAssetsCommand().run())

    assert output == [
        (
            "metadata.yaml",
            "version: 1.0.0\ntype: assets\ntimestamp: '2022-01-01T00:00:00+00:00'\n",
        ),
        ("databases/example.yaml", ""),
        ("datasets/example/dataset.yaml", ""),
        ("charts/pie.yaml", ""),
        ("dashboards/sales.yaml", ""),
        ("queries/example/metric.yaml", ""),
    ]
diff --git a/tests/unit_tests/dataframe_test.py b/tests/unit_tests/dataframe_test.py
index 3e986a5e43a7f..79625cffe63de 100644
--- a/tests/unit_tests/dataframe_test.py
+++ b/tests/unit_tests/dataframe_test.py
@@ -16,7 +16,7 @@
# under the License.
# pylint: disable=unused-argument, import-outside-toplevel
from superset.dataframe import df_to_records
-from superset.typing import DbapiDescription
+from superset.superset_typing import DbapiDescription
def test_df_to_records(app_context: None) -> None:
diff --git a/tests/unit_tests/explore/form_data/utils_test.py b/tests/unit_tests/explore/utils_test.py
similarity index 88%
rename from tests/unit_tests/explore/form_data/utils_test.py
rename to tests/unit_tests/explore/utils_test.py
index 4025dc951e11c..3d12f5e911ee9 100644
--- a/tests/unit_tests/explore/form_data/utils_test.py
+++ b/tests/unit_tests/explore/utils_test.py
@@ -30,8 +30,8 @@
dataset_find_by_id = "superset.datasets.dao.DatasetDAO.find_by_id"
chart_find_by_id = "superset.charts.dao.ChartDAO.find_by_id"
-is_user_admin = "superset.explore.form_data.utils.is_user_admin"
-is_owner = "superset.explore.form_data.utils.is_owner"
+is_user_admin = "superset.explore.utils.is_user_admin"
+is_owner = "superset.explore.utils.is_owner"
can_access_datasource = (
"superset.security.SupersetSecurityManager.can_access_datasource"
)
@@ -39,7 +39,7 @@
def test_unsaved_chart_no_dataset_id(app_context: AppContext) -> None:
- from superset.explore.form_data.utils import check_access
+ from superset.explore.utils import check_access
with raises(DatasetNotFoundError):
check_access(dataset_id=0, chart_id=0, actor=User())
@@ -48,7 +48,7 @@ def test_unsaved_chart_no_dataset_id(app_context: AppContext) -> None:
def test_unsaved_chart_unknown_dataset_id(
mocker: MockFixture, app_context: AppContext
) -> None:
- from superset.explore.form_data.utils import check_access
+ from superset.explore.utils import check_access
with raises(DatasetNotFoundError):
mocker.patch(dataset_find_by_id, return_value=None)
@@ -59,7 +59,7 @@ def test_unsaved_chart_unauthorized_dataset(
mocker: MockFixture, app_context: AppContext
) -> None:
from superset.connectors.sqla.models import SqlaTable
- from superset.explore.form_data import utils
+ from superset.explore import utils
with raises(DatasetAccessDeniedError):
mocker.patch(dataset_find_by_id, return_value=SqlaTable())
@@ -71,7 +71,7 @@ def test_unsaved_chart_authorized_dataset(
mocker: MockFixture, app_context: AppContext
) -> None:
from superset.connectors.sqla.models import SqlaTable
- from superset.explore.form_data.utils import check_access
+ from superset.explore.utils import check_access
mocker.patch(dataset_find_by_id, return_value=SqlaTable())
mocker.patch(can_access_datasource, return_value=True)
@@ -82,7 +82,7 @@ def test_saved_chart_unknown_chart_id(
mocker: MockFixture, app_context: AppContext
) -> None:
from superset.connectors.sqla.models import SqlaTable
- from superset.explore.form_data.utils import check_access
+ from superset.explore.utils import check_access
with raises(ChartNotFoundError):
mocker.patch(dataset_find_by_id, return_value=SqlaTable())
@@ -95,7 +95,7 @@ def test_saved_chart_unauthorized_dataset(
mocker: MockFixture, app_context: AppContext
) -> None:
from superset.connectors.sqla.models import SqlaTable
- from superset.explore.form_data import utils
+ from superset.explore import utils
with raises(DatasetAccessDeniedError):
mocker.patch(dataset_find_by_id, return_value=SqlaTable())
@@ -105,19 +105,19 @@ def test_saved_chart_unauthorized_dataset(
def test_saved_chart_is_admin(mocker: MockFixture, app_context: AppContext) -> None:
from superset.connectors.sqla.models import SqlaTable
- from superset.explore.form_data.utils import check_access
+ from superset.explore.utils import check_access
from superset.models.slice import Slice
mocker.patch(dataset_find_by_id, return_value=SqlaTable())
mocker.patch(can_access_datasource, return_value=True)
mocker.patch(is_user_admin, return_value=True)
mocker.patch(chart_find_by_id, return_value=Slice())
- assert check_access(dataset_id=1, chart_id=1, actor=User()) == True
+ assert check_access(dataset_id=1, chart_id=1, actor=User()) is True
def test_saved_chart_is_owner(mocker: MockFixture, app_context: AppContext) -> None:
from superset.connectors.sqla.models import SqlaTable
- from superset.explore.form_data.utils import check_access
+ from superset.explore.utils import check_access
from superset.models.slice import Slice
mocker.patch(dataset_find_by_id, return_value=SqlaTable())
@@ -130,7 +130,7 @@ def test_saved_chart_is_owner(mocker: MockFixture, app_context: AppContext) -> N
def test_saved_chart_has_access(mocker: MockFixture, app_context: AppContext) -> None:
from superset.connectors.sqla.models import SqlaTable
- from superset.explore.form_data.utils import check_access
+ from superset.explore.utils import check_access
from superset.models.slice import Slice
mocker.patch(dataset_find_by_id, return_value=SqlaTable())
@@ -144,7 +144,7 @@ def test_saved_chart_has_access(mocker: MockFixture, app_context: AppContext) ->
def test_saved_chart_no_access(mocker: MockFixture, app_context: AppContext) -> None:
from superset.connectors.sqla.models import SqlaTable
- from superset.explore.form_data.utils import check_access
+ from superset.explore.utils import check_access
from superset.models.slice import Slice
with raises(ChartAccessDeniedError):
diff --git a/tests/unit_tests/extension_tests.py b/tests/unit_tests/extension_tests.py
new file mode 100644
index 0000000000000..724b03f01a2ab
--- /dev/null
+++ b/tests/unit_tests/extension_tests.py
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from os.path import dirname
+from unittest.mock import Mock
+
+from superset.extensions import UIManifestProcessor
+
+APP_DIR = f"{dirname(__file__)}/fixtures"
+
+
def test_get_manifest_with_prefix():
    """Manifest lookups resolve each bundle and expose the configured prefix."""
    app = Mock(
        config={"STATIC_ASSETS_PREFIX": "https://cool.url/here"},
        template_context_processors={None: []},
    )
    processor = UIManifestProcessor(APP_DIR)
    processor.init_app(app)
    manifest = processor.get_manifest()

    expectations = [
        ("js_manifest", "main", ["/static/dist/main-js.js"]),
        ("css_manifest", "main", ["/static/dist/main-css.css"]),
        ("js_manifest", "styles", ["/static/dist/styles-js.js"]),
        # "styles" has no css entry in the fixture manifest.
        ("css_manifest", "styles", []),
    ]
    for kind, bundle, expected in expectations:
        assert manifest[kind](bundle) == expected
    assert manifest["assets_prefix"] == "https://cool.url/here"
+
+
def test_get_manifest_no_prefix():
    """Manifest lookups work the same when no assets prefix is configured."""
    app = Mock(
        config={"STATIC_ASSETS_PREFIX": ""}, template_context_processors={None: []}
    )
    processor = UIManifestProcessor(APP_DIR)
    processor.init_app(app)
    manifest = processor.get_manifest()

    expectations = [
        ("js_manifest", "main", ["/static/dist/main-js.js"]),
        ("css_manifest", "main", ["/static/dist/main-css.css"]),
        ("js_manifest", "styles", ["/static/dist/styles-js.js"]),
        ("css_manifest", "styles", []),
    ]
    for kind, bundle, expected in expectations:
        assert manifest[kind](bundle) == expected
    assert manifest["assets_prefix"] == ""
diff --git a/tests/unit_tests/fixtures/static/assets/manifest.json b/tests/unit_tests/fixtures/static/assets/manifest.json
new file mode 100644
index 0000000000000..7482a04eac74e
--- /dev/null
+++ b/tests/unit_tests/fixtures/static/assets/manifest.json
@@ -0,0 +1,20 @@
+{
+ "entrypoints": {
+ "styles": {
+ "js": [
+ "/static/dist/styles-js.js"
+ ]
+ },
+ "main": {
+ "css": [
+ "/static/dist/main-css.css"
+ ],
+ "js": [
+ "/static/dist/main-js.js"
+ ]
+ }
+ },
+ "main.css": "/static/dist/main.b51d3f6225194da423d6.entry.css",
+ "main.js": "/static/dist/main.b51d3f6225194da423d6.entry.js",
+ "styles.js": "/static/dist/styles.35840b4bbf794f902b7c.entry.js"
+}
diff --git a/tests/unit_tests/key_value/__init__.py b/tests/unit_tests/key_value/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/tests/unit_tests/key_value/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/tests/unit_tests/key_value/utils_test.py b/tests/unit_tests/key_value/utils_test.py
new file mode 100644
index 0000000000000..f5ad0958bc749
--- /dev/null
+++ b/tests/unit_tests/key_value/utils_test.py
@@ -0,0 +1,117 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import json
+from typing import TYPE_CHECKING
+from unittest.mock import patch
+from uuid import UUID
+
+if TYPE_CHECKING:
+ from superset.key_value.models import KeyValueEntry
+
+import pytest
+from flask.ctx import AppContext
+
+from superset.key_value.types import Key
+
+RESOURCE = "my-resource"
+UUID_KEY = "3e7a2ab8-bcaf-49b0-a5df-dfb432f291cc"
+ID_KEY = "123"
+
+
@pytest.fixture
def key_value_entry(app_context: AppContext):
    """Build an unpersisted ``KeyValueEntry`` with a known id and uuid."""
    from superset.key_value.models import KeyValueEntry

    return KeyValueEntry(
        id=int(ID_KEY),
        uuid=UUID(UUID_KEY),
        value=json.dumps({"foo": "bar"}),
    )
+
+
def test_parse_permalink_key_uuid_valid(app_context: AppContext) -> None:
    """With the default key type, a uuid string parses into a uuid-based Key."""
    from superset.key_value.utils import parse_permalink_key

    key = parse_permalink_key(UUID_KEY)
    assert key == Key(id=None, uuid=UUID(UUID_KEY))
+
+
def test_parse_permalink_key_id_invalid(app_context: AppContext) -> None:
    """With the default (uuid) key type, a numeric id string is rejected."""
    from superset.key_value.utils import parse_permalink_key

    with pytest.raises(ValueError):
        parse_permalink_key(ID_KEY)
+
+
@patch("superset.key_value.utils.current_app.config", {"PERMALINK_KEY_TYPE": "id"})
def test_parse_permalink_key_id_valid(app_context: AppContext) -> None:
    """When the configured key type is id, a numeric string parses to an id Key."""
    from superset.key_value.utils import parse_permalink_key

    key = parse_permalink_key(ID_KEY)
    assert key == Key(id=int(ID_KEY), uuid=None)
+
+
@patch("superset.key_value.utils.current_app.config", {"PERMALINK_KEY_TYPE": "id"})
def test_parse_permalink_key_uuid_invalid(app_context: AppContext) -> None:
    """When the configured key type is id, a uuid string is rejected."""
    from superset.key_value.utils import parse_permalink_key

    with pytest.raises(ValueError):
        parse_permalink_key(UUID_KEY)
+
+
def test_format_permalink_key_uuid(app_context: AppContext) -> None:
    """A uuid-based Key formats back to its canonical uuid string."""
    from superset.key_value.utils import format_permalink_key

    formatted = format_permalink_key(Key(id=None, uuid=UUID(UUID_KEY)))
    assert formatted == UUID_KEY
+
+
def test_format_permalink_key_id(app_context: AppContext) -> None:
    """An id-based Key formats back to its decimal string."""
    from superset.key_value.utils import format_permalink_key

    formatted = format_permalink_key(Key(id=int(ID_KEY), uuid=None))
    assert formatted == ID_KEY
+
+
def test_extract_key_uuid(
    app_context: AppContext, key_value_entry: KeyValueEntry,
) -> None:
    """Extracting with key_type "uuid" yields the entry's uuid string."""
    from superset.key_value.utils import extract_key

    # Bug fix: the body was swapped with test_extract_key_id — this test
    # previously extracted with key_type "id", so its name didn't match
    # what it exercised.
    assert extract_key(key_value_entry, "uuid") == UUID_KEY
+
+
def test_extract_key_id(
    app_context: AppContext, key_value_entry: KeyValueEntry,
) -> None:
    """Extracting with key_type "id" yields the entry's id as a string."""
    from superset.key_value.utils import extract_key

    # Bug fix: the body was swapped with test_extract_key_uuid — this test
    # previously extracted with key_type "uuid".
    assert extract_key(key_value_entry, "id") == ID_KEY
+
+
def test_get_filter_uuid(app_context: AppContext) -> None:
    """get_filter builds a uuid-based lookup dict for a uuid key."""
    from superset.key_value.utils import get_filter

    filter_ = get_filter(resource=RESOURCE, key=UUID_KEY, key_type="uuid")
    assert filter_ == {"resource": RESOURCE, "uuid": UUID(UUID_KEY)}
+
+
def test_get_filter_id(app_context: AppContext) -> None:
    """get_filter builds an id-based lookup dict for a numeric key."""
    from superset.key_value.utils import get_filter

    filter_ = get_filter(resource=RESOURCE, key=ID_KEY, key_type="id")
    assert filter_ == {"resource": RESOURCE, "id": int(ID_KEY)}