diff --git a/superset-frontend/cypress-base/cypress.json b/superset-frontend/cypress-base/cypress.json
index 8e023d8a1a24b..f9729be1c3c91 100644
--- a/superset-frontend/cypress-base/cypress.json
+++ b/superset-frontend/cypress-base/cypress.json
@@ -1,7 +1,7 @@
{
"baseUrl": "http://localhost:8088",
"chromeWebSecurity": false,
- "defaultCommandTimeout": 5000,
+ "defaultCommandTimeout": 8000,
"numTestsKeptInMemory": 0,
"experimentalFetchPolyfill": true,
"requestTimeout": 10000,
diff --git a/superset-frontend/cypress-base/cypress/integration/dashboard/key_value.test.ts b/superset-frontend/cypress-base/cypress/integration/dashboard/key_value.test.ts
index ba27bf30163a2..24b6ff0aa7a62 100644
--- a/superset-frontend/cypress-base/cypress/integration/dashboard/key_value.test.ts
+++ b/superset-frontend/cypress-base/cypress/integration/dashboard/key_value.test.ts
@@ -27,16 +27,19 @@ interface QueryString {
native_filters_key: string;
}
-describe('nativefiler url param key', () => {
+xdescribe('nativefiler url param key', () => {
// const urlParams = { param1: '123', param2: 'abc' };
before(() => {
cy.login();
- cy.visit(WORLD_HEALTH_DASHBOARD);
- WORLD_HEALTH_CHARTS.forEach(waitForChartLoad);
- cy.wait(1000); // wait for key to be published (debounced)
});
+
let initialFilterKey: string;
it('should have cachekey in nativefilter param', () => {
+ // things in `before` will not retry and the `waitForChartLoad` check is
+ // especially flaky and may need more retries
+ cy.visit(WORLD_HEALTH_DASHBOARD);
+ WORLD_HEALTH_CHARTS.forEach(waitForChartLoad);
+ cy.wait(1000); // wait for key to be published (debounced)
cy.location().then(loc => {
const queryParams = qs.parse(loc.search) as QueryString;
expect(typeof queryParams.native_filters_key).eq('string');
@@ -44,6 +47,9 @@ describe('nativefiler url param key', () => {
});
it('should have different key when page reloads', () => {
+ cy.visit(WORLD_HEALTH_DASHBOARD);
+ WORLD_HEALTH_CHARTS.forEach(waitForChartLoad);
+ cy.wait(1000); // wait for key to be published (debounced)
cy.location().then(loc => {
const queryParams = qs.parse(loc.search) as QueryString;
expect(queryParams.native_filters_key).not.equal(initialFilterKey);
diff --git a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx
index 93cea6d08d65f..06bf187e1185a 100644
--- a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx
+++ b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/QueryAutoRefresh.test.jsx
@@ -17,14 +17,12 @@
* under the License.
*/
import React from 'react';
-import { render } from 'spec/helpers/testing-library';
-import { ThemeProvider, supersetTheme } from '@superset-ui/core';
+import { shallow } from 'enzyme';
+import sinon from 'sinon';
import thunk from 'redux-thunk';
import configureStore from 'redux-mock-store';
import QueryAutoRefresh from 'src/SqlLab/components/QueryAutoRefresh';
import { initialState, runningQuery } from 'src/SqlLab/fixtures';
-import fetchMock from 'fetch-mock';
-import * as actions from 'src/SqlLab/actions/sqlLab';
describe('QueryAutoRefresh', () => {
const middlewares = [thunk];
@@ -40,29 +38,31 @@ describe('QueryAutoRefresh', () => {
sqlLab,
};
const store = mockStore(state);
- const setup = (overrides = {}) => (
-
-
-
- );
-
- const mockFetch = fetchMock.get('glob:*/superset/queries/*', {});
+ const getWrapper = () =>
+ shallow()
+ .dive()
+ .dive();
+ let wrapper;
it('shouldCheckForQueries', () => {
- render(setup(), {
- useRedux: true,
- });
-
- expect(mockFetch.called()).toBe(true);
+ wrapper = getWrapper();
+ expect(wrapper.instance().shouldCheckForQueries()).toBe(true);
});
it('setUserOffline', () => {
- const spy = jest.spyOn(actions, 'setUserOffline');
+ wrapper = getWrapper();
+ const spy = sinon.spy(wrapper.instance().props.actions, 'setUserOffline');
- render(setup(), {
- useRedux: true,
+ // state not changed
+ wrapper.setState({
+ offline: false,
});
+ expect(spy.called).toBe(false);
- expect(spy).toHaveBeenCalled();
+ // state is changed
+ wrapper.setState({
+ offline: true,
+ });
+ expect(spy.callCount).toBe(1);
});
});
diff --git a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx
index 43f6c5d8a7d6e..b54936b691efe 100644
--- a/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx
+++ b/superset-frontend/src/SqlLab/components/QueryAutoRefresh/index.jsx
@@ -16,7 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
-import { useState, useEffect } from 'react';
+import React from 'react';
import PropTypes from 'prop-types';
import { bindActionCreators } from 'redux';
import { connect } from 'react-redux';
@@ -28,12 +28,31 @@ const QUERY_UPDATE_BUFFER_MS = 5000;
const MAX_QUERY_AGE_TO_POLL = 21600000;
const QUERY_TIMEOUT_LIMIT = 10000;
-function QueryAutoRefresh({ offline, queries, queriesLastUpdate, actions }) {
- const [offlineState, setOfflineState] = useState(offline);
- let timer = null;
+class QueryAutoRefresh extends React.PureComponent {
+ constructor(props) {
+ super(props);
+ this.state = {
+ offline: props.offline,
+ };
+ }
+
+ UNSAFE_componentWillMount() {
+ this.startTimer();
+ }
+
+ componentDidUpdate(prevProps) {
+ if (prevProps.offline !== this.state.offline) {
+ this.props.actions.setUserOffline(this.state.offline);
+ }
+ }
+
+ componentWillUnmount() {
+ this.stopTimer();
+ }
- const shouldCheckForQueries = () => {
+ shouldCheckForQueries() {
// if there are started or running queries, this method should return true
+ const { queries } = this.props;
const now = new Date().getTime();
const isQueryRunning = q =>
['running', 'started', 'pending', 'fetching'].indexOf(q.state) >= 0;
@@ -41,57 +60,46 @@ function QueryAutoRefresh({ offline, queries, queriesLastUpdate, actions }) {
return Object.values(queries).some(
q => isQueryRunning(q) && now - q.startDttm < MAX_QUERY_AGE_TO_POLL,
);
- };
+ }
+
+ startTimer() {
+ if (!this.timer) {
+ this.timer = setInterval(this.stopwatch.bind(this), QUERY_UPDATE_FREQ);
+ }
+ }
- const stopwatch = () => {
+ stopTimer() {
+ clearInterval(this.timer);
+ this.timer = null;
+ }
+
+ stopwatch() {
// only poll /superset/queries/ if there are started or running queries
- if (shouldCheckForQueries()) {
+ if (this.shouldCheckForQueries()) {
SupersetClient.get({
endpoint: `/superset/queries/${
- queriesLastUpdate - QUERY_UPDATE_BUFFER_MS
+ this.props.queriesLastUpdate - QUERY_UPDATE_BUFFER_MS
}`,
timeout: QUERY_TIMEOUT_LIMIT,
})
.then(({ json }) => {
if (Object.keys(json).length > 0) {
- actions.refreshQueries(json);
+ this.props.actions.refreshQueries(json);
}
-
- setOfflineState(false);
+ this.setState({ offline: false });
})
.catch(() => {
- setOfflineState(true);
+ this.setState({ offline: true });
});
} else {
- setOfflineState(false);
+ this.setState({ offline: false });
}
- };
-
- const startTimer = () => {
- if (!timer) {
- timer = setInterval(stopwatch(), QUERY_UPDATE_FREQ);
- }
- };
-
- const stopTimer = () => {
- clearInterval(timer);
- timer = null;
- };
-
- useEffect(() => {
- startTimer();
- return () => {
- stopTimer();
- };
- }, []);
+ }
- useEffect(() => {
- actions.setUserOffline(offlineState);
- }, [offlineState]);
-
- return null;
+ render() {
+ return null;
+ }
}
-
QueryAutoRefresh.propTypes = {
offline: PropTypes.bool.isRequired,
queries: PropTypes.object.isRequired,
diff --git a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.stories.tsx b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.stories.tsx
index cf9d1d6e730ed..6bf0d438daca6 100644
--- a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.stories.tsx
+++ b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.stories.tsx
@@ -18,7 +18,7 @@
*/
import React from 'react';
import { useArgs } from '@storybook/client-api';
-import TimezoneSelector, { TimezoneProps } from './index';
+import TimezoneSelector, { TimezoneSelectorProps } from './index';
export default {
title: 'TimezoneSelector',
@@ -26,7 +26,7 @@ export default {
};
// eslint-disable-next-line @typescript-eslint/no-unused-vars
-export const InteractiveTimezoneSelector = (args: TimezoneProps) => {
+export const InteractiveTimezoneSelector = (args: TimezoneSelectorProps) => {
const [{ timezone }, updateArgs] = useArgs();
const onTimezoneChange = (value: string) => {
updateArgs({ timezone: value });
diff --git a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx
index 79830fd820921..19c713adf4f13 100644
--- a/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx
+++ b/superset-frontend/src/components/TimezoneSelector/TimezoneSelector.test.tsx
@@ -20,21 +20,42 @@ import React from 'react';
import moment from 'moment-timezone';
import { render, screen, waitFor } from 'spec/helpers/testing-library';
import userEvent from '@testing-library/user-event';
-import TimezoneSelector from './index';
+import type { TimezoneSelectorProps } from './index';
-jest.spyOn(moment.tz, 'guess').mockReturnValue('America/New_York');
+const loadComponent = (mockCurrentTime?: string) => {
+ if (mockCurrentTime) {
+ jest.useFakeTimers('modern');
+ jest.setSystemTime(new Date(mockCurrentTime));
+ }
+ return new Promise>(resolve => {
+ jest.isolateModules(() => {
+ const { default: TimezoneSelector } = module.require('./index');
+ resolve(TimezoneSelector);
+ jest.useRealTimers();
+ });
+ });
+};
const getSelectOptions = () =>
waitFor(() => document.querySelectorAll('.ant-select-item-option-content'));
-it('use the timezone from `moment` if no timezone provided', () => {
+const openSelectMenu = async () => {
+ const searchInput = screen.getByRole('combobox');
+ userEvent.click(searchInput);
+};
+
+jest.spyOn(moment.tz, 'guess').mockReturnValue('America/New_York');
+
+test('use the timezone from `moment` if no timezone provided', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
render();
expect(onTimezoneChange).toHaveBeenCalledTimes(1);
expect(onTimezoneChange).toHaveBeenCalledWith('America/Nassau');
});
-it('update to closest deduped timezone when timezone is provided', async () => {
+test('update to closest deduped timezone when timezone is provided', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
render(
{
expect(onTimezoneChange).toHaveBeenLastCalledWith('America/Vancouver');
});
-it('use the default timezone when an invalid timezone is provided', async () => {
+test('use the default timezone when an invalid timezone is provided', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
render(
,
@@ -55,7 +77,8 @@ it('use the default timezone when an invalid timezone is provided', async () =>
expect(onTimezoneChange).toHaveBeenLastCalledWith('Africa/Abidjan');
});
-it.skip('can select a timezone values and returns canonical value', async () => {
+test('render timezones in correct order for standard time', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
render(
timezone="America/Nassau"
/>,
);
-
- const searchInput = screen.getByRole('combobox', {
- name: 'Timezone selector',
- });
- expect(searchInput).toBeInTheDocument();
- userEvent.click(searchInput);
- const isDaylight = moment(moment.now()).isDST();
-
- const selectedTimezone = isDaylight
- ? 'GMT -04:00 (Eastern Daylight Time)'
- : 'GMT -05:00 (Eastern Standard Time)';
-
- // selected option ranks first
+ await openSelectMenu();
const options = await getSelectOptions();
- expect(options[0]).toHaveTextContent(selectedTimezone);
-
- // others are ranked by offset
+ expect(options[0]).toHaveTextContent('GMT -05:00 (Eastern Standard Time)');
expect(options[1]).toHaveTextContent('GMT -11:00 (Pacific/Pago_Pago)');
expect(options[2]).toHaveTextContent('GMT -10:00 (Hawaii Standard Time)');
expect(options[3]).toHaveTextContent('GMT -10:00 (America/Adak)');
+});
+
+test('render timezones in correct order for daylight saving time', async () => {
+ const TimezoneSelector = await loadComponent('2022-07-01');
+ const onTimezoneChange = jest.fn();
+ render(
+ ,
+ );
+ await openSelectMenu();
+ const options = await getSelectOptions();
+ // first option is always current timezone
+ expect(options[0]).toHaveTextContent('GMT -04:00 (Eastern Daylight Time)');
+ expect(options[1]).toHaveTextContent('GMT -11:00 (Pacific/Pago_Pago)');
+ expect(options[2]).toHaveTextContent('GMT -10:00 (Hawaii Standard Time)');
+ expect(options[3]).toHaveTextContent('GMT -09:30 (Pacific/Marquesas)');
+});
+test('can select a timezone value and returns canonical timezone name', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
+ const onTimezoneChange = jest.fn();
+ render(
+ ,
+ );
+
+ await openSelectMenu();
+
+ const searchInput = screen.getByRole('combobox');
// search for mountain time
await userEvent.type(searchInput, 'mou', { delay: 10 });
-
- const findTitle = isDaylight
- ? 'GMT -06:00 (Mountain Daylight Time)'
- : 'GMT -07:00 (Mountain Standard Time)';
+ const findTitle = 'GMT -07:00 (Mountain Standard Time)';
const selectOption = await screen.findByTitle(findTitle);
- expect(selectOption).toBeInTheDocument();
userEvent.click(selectOption);
expect(onTimezoneChange).toHaveBeenCalledTimes(1);
expect(onTimezoneChange).toHaveBeenLastCalledWith('America/Cambridge_Bay');
});
-it('can update props and rerender with different values', async () => {
+test('can update props and rerender with different values', async () => {
+ const TimezoneSelector = await loadComponent('2022-01-01');
const onTimezoneChange = jest.fn();
const { rerender } = render(
{
);
};
-export interface TimezoneProps {
- onTimezoneChange: (value: string) => void;
- timezone?: string | null;
-}
-
const ALL_ZONES = moment.tz
.countries()
.map(country => moment.tz.zonesForCountry(country, true))
@@ -106,7 +101,15 @@ const matchTimezoneToOptions = (timezone: string) =>
TIMEZONE_OPTIONS.find(option => option.offsets === getOffsetKey(timezone))
?.value || DEFAULT_TIMEZONE.value;
-const TimezoneSelector = ({ onTimezoneChange, timezone }: TimezoneProps) => {
+export type TimezoneSelectorProps = {
+ onTimezoneChange: (value: string) => void;
+ timezone?: string | null;
+};
+
+export default function TimezoneSelector({
+ onTimezoneChange,
+ timezone,
+}: TimezoneSelectorProps) {
const validTimezone = useMemo(
() => matchTimezoneToOptions(timezone || moment.tz.guess()),
[timezone],
@@ -129,6 +132,4 @@ const TimezoneSelector = ({ onTimezoneChange, timezone }: TimezoneProps) => {
sortComparator={TIMEZONE_OPTIONS_SORT_COMPARATOR}
/>
);
-};
-
-export default TimezoneSelector;
+}
diff --git a/superset-frontend/src/views/CRUD/utils.tsx b/superset-frontend/src/views/CRUD/utils.tsx
index d9d21c8565d17..c2ae0d8cbed13 100644
--- a/superset-frontend/src/views/CRUD/utils.tsx
+++ b/superset-frontend/src/views/CRUD/utils.tsx
@@ -412,12 +412,11 @@ export const hasTerminalValidation = (errors: Record[]) =>
);
export const checkUploadExtensions = (
- perm: Array | string | undefined | boolean,
- cons: Array,
+ perm: Array,
+ cons: Array,
) => {
if (perm !== undefined) {
- if (typeof perm === 'boolean') return perm;
- return intersection(perm, cons).length;
+ return intersection(perm, cons).length > 0;
}
return false;
};
diff --git a/superset-frontend/src/views/components/Menu.tsx b/superset-frontend/src/views/components/Menu.tsx
index 068cad8e363d1..77a074f71b6c7 100644
--- a/superset-frontend/src/views/components/Menu.tsx
+++ b/superset-frontend/src/views/components/Menu.tsx
@@ -74,7 +74,7 @@ interface MenuObjectChildProps {
index?: number;
url?: string;
isFrontendRoute?: boolean;
- perm?: string | Array | boolean;
+ perm?: string | boolean;
view?: string;
}
diff --git a/superset-frontend/src/views/components/MenuRight.tsx b/superset-frontend/src/views/components/MenuRight.tsx
index 531ed53847346..ab5b5d8d82a7e 100644
--- a/superset-frontend/src/views/components/MenuRight.tsx
+++ b/superset-frontend/src/views/components/MenuRight.tsx
@@ -79,7 +79,6 @@ const RightMenu = ({
ALLOWED_EXTENSIONS,
HAS_GSHEETS_INSTALLED,
} = useSelector(state => state.common.conf);
-
const [showModal, setShowModal] = useState(false);
const [engine, setEngine] = useState('');
const canSql = findPermission('can_sqllab', 'Superset', roles);
@@ -124,19 +123,25 @@ const RightMenu = ({
label: t('Upload CSV to database'),
name: 'Upload a CSV',
url: '/csvtodatabaseview/form',
- perm: CSV_EXTENSIONS && canUploadCSV,
+ perm:
+ checkUploadExtensions(CSV_EXTENSIONS, ALLOWED_EXTENSIONS) &&
+ canUploadCSV,
},
{
label: t('Upload columnar file to database'),
name: 'Upload a Columnar file',
url: '/columnartodatabaseview/form',
- perm: COLUMNAR_EXTENSIONS && canUploadColumnar,
+ perm:
+ checkUploadExtensions(COLUMNAR_EXTENSIONS, ALLOWED_EXTENSIONS) &&
+ canUploadColumnar,
},
{
label: t('Upload Excel file to database'),
name: 'Upload Excel',
url: '/exceltodatabaseview/form',
- perm: EXCEL_EXTENSIONS && canUploadExcel,
+ perm:
+ checkUploadExtensions(EXCEL_EXTENSIONS, ALLOWED_EXTENSIONS) &&
+ canUploadExcel,
},
],
},
@@ -209,9 +214,7 @@ const RightMenu = ({
title={menuIconAndLabel(menu)}
>
{menu.childs.map((item, idx) =>
- typeof item !== 'string' &&
- item.name &&
- checkUploadExtensions(item.perm, ALLOWED_EXTENSIONS) ? (
+ typeof item !== 'string' && item.name && item.perm ? (
<>
{idx === 2 && }
diff --git a/superset/charts/commands/export.py b/superset/charts/commands/export.py
index 35f70a82eab65..9b3a06c473585 100644
--- a/superset/charts/commands/export.py
+++ b/superset/charts/commands/export.py
@@ -26,7 +26,7 @@
from superset.charts.commands.exceptions import ChartNotFoundError
from superset.charts.dao import ChartDAO
from superset.datasets.commands.export import ExportDatasetsCommand
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.models.slice import Slice
from superset.utils.dict_import_export import EXPORT_VERSION
@@ -43,7 +43,7 @@ class ExportChartsCommand(ExportModelsCommand):
not_found = ChartNotFoundError
@staticmethod
- def _export(model: Slice) -> Iterator[Tuple[str, str]]:
+ def _export(model: Slice, export_related: bool = True) -> Iterator[Tuple[str, str]]:
chart_slug = secure_filename(model.slice_name)
file_name = f"charts/{chart_slug}_{model.id}.yaml"
@@ -72,5 +72,5 @@ def _export(model: Slice) -> Iterator[Tuple[str, str]]:
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
- if model.table:
+ if model.table and export_related:
yield from ExportDatasetsCommand([model.table.id]).run()
diff --git a/superset/commands/export/__init__.py b/superset/commands/export/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/superset/commands/export/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/superset/commands/export.py b/superset/commands/export/models.py
similarity index 86%
rename from superset/commands/export.py
rename to superset/commands/export/models.py
index 2b54de87852e9..dd4ff3bc57172 100644
--- a/superset/commands/export.py
+++ b/superset/commands/export/models.py
@@ -14,10 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-# isort:skip_file
-from datetime import datetime
-from datetime import timezone
+from datetime import datetime, timezone
from typing import Iterator, List, Tuple, Type
import yaml
@@ -36,14 +34,15 @@ class ExportModelsCommand(BaseCommand):
dao: Type[BaseDAO] = BaseDAO
not_found: Type[CommandException] = CommandException
- def __init__(self, model_ids: List[int]):
+ def __init__(self, model_ids: List[int], export_related: bool = True):
self.model_ids = model_ids
+ self.export_related = export_related
# this will be set when calling validate()
self._models: List[Model] = []
@staticmethod
- def _export(model: Model) -> Iterator[Tuple[str, str]]:
+ def _export(model: Model, export_related: bool = True) -> Iterator[Tuple[str, str]]:
raise NotImplementedError("Subclasses MUST implement _export")
def run(self) -> Iterator[Tuple[str, str]]:
@@ -58,7 +57,7 @@ def run(self) -> Iterator[Tuple[str, str]]:
seen = {METADATA_FILE_NAME}
for model in self._models:
- for file_name, file_content in self._export(model):
+ for file_name, file_content in self._export(model, self.export_related):
if file_name not in seen:
yield file_name, file_content
seen.add(file_name)
diff --git a/superset/dashboards/commands/export.py b/superset/dashboards/commands/export.py
index a0467972d5feb..87408bab37706 100644
--- a/superset/dashboards/commands/export.py
+++ b/superset/dashboards/commands/export.py
@@ -29,7 +29,7 @@
from superset.dashboards.commands.exceptions import DashboardNotFoundError
from superset.dashboards.commands.importers.v1.utils import find_chart_uuids
from superset.dashboards.dao import DashboardDAO
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.datasets.commands.export import ExportDatasetsCommand
from superset.datasets.dao import DatasetDAO
from superset.models.dashboard import Dashboard
@@ -106,8 +106,11 @@ class ExportDashboardsCommand(ExportModelsCommand):
dao = DashboardDAO
not_found = DashboardNotFoundError
+ # pylint: disable=too-many-locals
@staticmethod
- def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
+ def _export(
+ model: Dashboard, export_related: bool = True
+ ) -> Iterator[Tuple[str, str]]:
dashboard_slug = secure_filename(model.dashboard_title)
file_name = f"dashboards/{dashboard_slug}.yaml"
@@ -138,7 +141,8 @@ def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
if dataset_id is not None:
dataset = DatasetDAO.find_by_id(dataset_id)
target["datasetUuid"] = str(dataset.uuid)
- yield from ExportDatasetsCommand([dataset_id]).run()
+ if export_related:
+ yield from ExportDatasetsCommand([dataset_id]).run()
# the mapping between dashboard -> charts is inferred from the position
# attribute, so if it's not present we need to add a default config
@@ -160,5 +164,6 @@ def _export(model: Dashboard) -> Iterator[Tuple[str, str]]:
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
- chart_ids = [chart.id for chart in model.slices]
- yield from ExportChartsCommand(chart_ids).run()
+ if export_related:
+ chart_ids = [chart.id for chart in model.slices]
+ yield from ExportChartsCommand(chart_ids).run()
diff --git a/superset/databases/commands/export.py b/superset/databases/commands/export.py
index 134bda580c7e5..9e8cb7e374426 100644
--- a/superset/databases/commands/export.py
+++ b/superset/databases/commands/export.py
@@ -25,7 +25,7 @@
from superset.databases.commands.exceptions import DatabaseNotFoundError
from superset.databases.dao import DatabaseDAO
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.models.core import Database
from superset.utils.dict_import_export import EXPORT_VERSION
@@ -55,7 +55,9 @@ class ExportDatabasesCommand(ExportModelsCommand):
not_found = DatabaseNotFoundError
@staticmethod
- def _export(model: Database) -> Iterator[Tuple[str, str]]:
+ def _export(
+ model: Database, export_related: bool = True
+ ) -> Iterator[Tuple[str, str]]:
database_slug = secure_filename(model.database_name)
file_name = f"databases/{database_slug}.yaml"
@@ -90,18 +92,19 @@ def _export(model: Database) -> Iterator[Tuple[str, str]]:
file_content = yaml.safe_dump(payload, sort_keys=False)
yield file_name, file_content
- for dataset in model.tables:
- dataset_slug = secure_filename(dataset.table_name)
- file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
-
- payload = dataset.export_to_dict(
- recursive=True,
- include_parent_ref=False,
- include_defaults=True,
- export_uuids=True,
- )
- payload["version"] = EXPORT_VERSION
- payload["database_uuid"] = str(model.uuid)
-
- file_content = yaml.safe_dump(payload, sort_keys=False)
- yield file_name, file_content
+ if export_related:
+ for dataset in model.tables:
+ dataset_slug = secure_filename(dataset.table_name)
+ file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
+
+ payload = dataset.export_to_dict(
+ recursive=True,
+ include_parent_ref=False,
+ include_defaults=True,
+ export_uuids=True,
+ )
+ payload["version"] = EXPORT_VERSION
+ payload["database_uuid"] = str(model.uuid)
+
+ file_content = yaml.safe_dump(payload, sort_keys=False)
+ yield file_name, file_content
diff --git a/superset/datasets/commands/export.py b/superset/datasets/commands/export.py
index 4e3843a0daee9..be9210a06c669 100644
--- a/superset/datasets/commands/export.py
+++ b/superset/datasets/commands/export.py
@@ -23,7 +23,7 @@
import yaml
from werkzeug.utils import secure_filename
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.exceptions import DatasetNotFoundError
from superset.datasets.dao import DatasetDAO
@@ -40,7 +40,9 @@ class ExportDatasetsCommand(ExportModelsCommand):
not_found = DatasetNotFoundError
@staticmethod
- def _export(model: SqlaTable) -> Iterator[Tuple[str, str]]:
+ def _export(
+ model: SqlaTable, export_related: bool = True
+ ) -> Iterator[Tuple[str, str]]:
database_slug = secure_filename(model.database.database_name)
dataset_slug = secure_filename(model.table_name)
file_name = f"datasets/{database_slug}/{dataset_slug}.yaml"
@@ -76,23 +78,24 @@ def _export(model: SqlaTable) -> Iterator[Tuple[str, str]]:
yield file_name, file_content
# include database as well
- file_name = f"databases/{database_slug}.yaml"
-
- payload = model.database.export_to_dict(
- recursive=False,
- include_parent_ref=False,
- include_defaults=True,
- export_uuids=True,
- )
- # TODO (betodealmeida): move this logic to export_to_dict once this
- # becomes the default export endpoint
- if payload.get("extra"):
- try:
- payload["extra"] = json.loads(payload["extra"])
- except json.decoder.JSONDecodeError:
- logger.info("Unable to decode `extra` field: %s", payload["extra"])
+ if export_related:
+ file_name = f"databases/{database_slug}.yaml"
+
+ payload = model.database.export_to_dict(
+ recursive=False,
+ include_parent_ref=False,
+ include_defaults=True,
+ export_uuids=True,
+ )
+ # TODO (betodealmeida): move this logic to export_to_dict once this
+ # becomes the default export endpoint
+ if payload.get("extra"):
+ try:
+ payload["extra"] = json.loads(payload["extra"])
+ except json.decoder.JSONDecodeError:
+ logger.info("Unable to decode `extra` field: %s", payload["extra"])
- payload["version"] = EXPORT_VERSION
+ payload["version"] = EXPORT_VERSION
- file_content = yaml.safe_dump(payload, sort_keys=False)
- yield file_name, file_content
+ file_content = yaml.safe_dump(payload, sort_keys=False)
+ yield file_name, file_content
diff --git a/superset/queries/saved_queries/commands/export.py b/superset/queries/saved_queries/commands/export.py
index ca2cfe5de9679..e209ae8ad2fd8 100644
--- a/superset/queries/saved_queries/commands/export.py
+++ b/superset/queries/saved_queries/commands/export.py
@@ -23,7 +23,7 @@
import yaml
from werkzeug.utils import secure_filename
-from superset.commands.export import ExportModelsCommand
+from superset.commands.export.models import ExportModelsCommand
from superset.models.sql_lab import SavedQuery
from superset.queries.saved_queries.commands.exceptions import SavedQueryNotFoundError
from superset.queries.saved_queries.dao import SavedQueryDAO
@@ -38,7 +38,9 @@ class ExportSavedQueriesCommand(ExportModelsCommand):
not_found = SavedQueryNotFoundError
@staticmethod
- def _export(model: SavedQuery) -> Iterator[Tuple[str, str]]:
+ def _export(
+ model: SavedQuery, export_related: bool = True
+ ) -> Iterator[Tuple[str, str]]:
# build filename based on database, optional schema, and label
database_slug = secure_filename(model.database.database_name)
schema_slug = secure_filename(model.schema)
@@ -58,23 +60,24 @@ def _export(model: SavedQuery) -> Iterator[Tuple[str, str]]:
yield file_name, file_content
# include database as well
- file_name = f"databases/{database_slug}.yaml"
+ if export_related:
+ file_name = f"databases/{database_slug}.yaml"
- payload = model.database.export_to_dict(
- recursive=False,
- include_parent_ref=False,
- include_defaults=True,
- export_uuids=True,
- )
- # TODO (betodealmeida): move this logic to export_to_dict once this
- # becomes the default export endpoint
- if "extra" in payload:
- try:
- payload["extra"] = json.loads(payload["extra"])
- except json.decoder.JSONDecodeError:
- logger.info("Unable to decode `extra` field: %s", payload["extra"])
+ payload = model.database.export_to_dict(
+ recursive=False,
+ include_parent_ref=False,
+ include_defaults=True,
+ export_uuids=True,
+ )
+ # TODO (betodealmeida): move this logic to export_to_dict once this
+ # becomes the default export endpoint
+ if "extra" in payload:
+ try:
+ payload["extra"] = json.loads(payload["extra"])
+ except json.decoder.JSONDecodeError:
+ logger.info("Unable to decode `extra` field: %s", payload["extra"])
- payload["version"] = EXPORT_VERSION
+ payload["version"] = EXPORT_VERSION
- file_content = yaml.safe_dump(payload, sort_keys=False)
- yield file_name, file_content
+ file_content = yaml.safe_dump(payload, sort_keys=False)
+ yield file_name, file_content
diff --git a/superset/utils/async_query_manager.py b/superset/utils/async_query_manager.py
index a026fd6f3f3d7..fcda931fcd880 100644
--- a/superset/utils/async_query_manager.py
+++ b/superset/utils/async_query_manager.py
@@ -71,7 +71,7 @@ class AsyncQueryManager:
def __init__(self) -> None:
super().__init__()
- self._redis: redis.Redis # type: ignore
+ self._redis: redis.Redis
self._stream_prefix: str = ""
self._stream_limit: Optional[int]
self._stream_limit_firehose: Optional[int]
diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py
index bf4fe4fe79bd4..ec205b6a6abee 100644
--- a/tests/integration_tests/charts/commands_tests.py
+++ b/tests/integration_tests/charts/commands_tests.py
@@ -176,6 +176,26 @@ def test_export_chart_command_key_order(self, mock_g):
"dataset_uuid",
]
+ @patch("superset.security.manager.g")
+ @pytest.mark.usefixtures("load_energy_table_with_slice")
+ def test_export_chart_command_no_related(self, mock_g):
+ """
+ Test that only the chart is exported when export_related=False.
+ """
+ mock_g.user = security_manager.find_user("admin")
+
+ example_chart = (
+ db.session.query(Slice).filter_by(slice_name="Energy Sankey").one()
+ )
+ command = ExportChartsCommand([example_chart.id], export_related=False)
+ contents = dict(command.run())
+
+ expected = [
+ "metadata.yaml",
+ f"charts/Energy_Sankey_{example_chart.id}.yaml",
+ ]
+ assert expected == list(contents.keys())
+
class TestImportChartsCommand(SupersetTestCase):
@patch("superset.charts.commands.importers.v1.utils.g")
diff --git a/tests/integration_tests/dashboards/commands_tests.py b/tests/integration_tests/dashboards/commands_tests.py
index 7c7a2046f0592..ae18c741583e7 100644
--- a/tests/integration_tests/dashboards/commands_tests.py
+++ b/tests/integration_tests/dashboards/commands_tests.py
@@ -423,6 +423,28 @@ def test_append_charts(self, mock_suffix):
"DASHBOARD_VERSION_KEY": "v2",
}
+ @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
+ @patch("superset.security.manager.g")
+ @patch("superset.views.base.g")
+ def test_export_dashboard_command_no_related(self, mock_g1, mock_g2):
+ """
+ Test that only the dashboard is exported when export_related=False.
+ """
+ mock_g1.user = security_manager.find_user("admin")
+ mock_g2.user = security_manager.find_user("admin")
+
+ example_dashboard = (
+ db.session.query(Dashboard).filter_by(slug="world_health").one()
+ )
+ command = ExportDashboardsCommand([example_dashboard.id], export_related=False)
+ contents = dict(command.run())
+
+ expected_paths = {
+ "metadata.yaml",
+ "dashboards/World_Banks_Data.yaml",
+ }
+ assert expected_paths == set(contents.keys())
+
class TestImportDashboardsCommand(SupersetTestCase):
def test_import_v0_dashboard_cli_export(self):
diff --git a/tests/integration_tests/databases/commands_tests.py b/tests/integration_tests/databases/commands_tests.py
index c90550ed69903..59fcf39a86727 100644
--- a/tests/integration_tests/databases/commands_tests.py
+++ b/tests/integration_tests/databases/commands_tests.py
@@ -358,6 +358,26 @@ def test_export_database_command_key_order(self, mock_g):
"version",
]
+ @patch("superset.security.manager.g")
+ @pytest.mark.usefixtures(
+ "load_birth_names_dashboard_with_slices", "load_energy_table_with_slice"
+ )
+ def test_export_database_command_no_related(self, mock_g):
+ """
+ Test that only databases are exported when export_related=False.
+ """
+ mock_g.user = security_manager.find_user("admin")
+
+ example_db = get_example_database()
+ db_uuid = example_db.uuid
+
+ command = ExportDatabasesCommand([example_db.id], export_related=False)
+ contents = dict(command.run())
+ prefixes = {path.split("/")[0] for path in contents}
+ assert "metadata.yaml" in prefixes
+ assert "databases" in prefixes
+ assert "datasets" not in prefixes
+
class TestImportDatabasesCommand(SupersetTestCase):
def test_import_v1_database(self):
diff --git a/tests/integration_tests/datasets/commands_tests.py b/tests/integration_tests/datasets/commands_tests.py
index af22d9319b27c..784b1f19e39d9 100644
--- a/tests/integration_tests/datasets/commands_tests.py
+++ b/tests/integration_tests/datasets/commands_tests.py
@@ -219,6 +219,26 @@ def test_export_dataset_command_key_order(self, mock_g):
"database_uuid",
]
+ @patch("superset.security.manager.g")
+ @pytest.mark.usefixtures("load_energy_table_with_slice")
+ def test_export_dataset_command_no_related(self, mock_g):
+ """
+ Test that only datasets are exported when export_related=False.
+ """
+ mock_g.user = security_manager.find_user("admin")
+
+ example_db = get_example_database()
+ example_dataset = _get_table_from_list_by_name(
+ "energy_usage", example_db.tables
+ )
+ command = ExportDatasetsCommand([example_dataset.id], export_related=False)
+ contents = dict(command.run())
+
+ assert list(contents.keys()) == [
+ "metadata.yaml",
+ "datasets/examples/energy_usage.yaml",
+ ]
+
class TestImportDatasetsCommand(SupersetTestCase):
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
diff --git a/tests/integration_tests/queries/saved_queries/commands_tests.py b/tests/integration_tests/queries/saved_queries/commands_tests.py
index f90924ba0e66a..bd90419155422 100644
--- a/tests/integration_tests/queries/saved_queries/commands_tests.py
+++ b/tests/integration_tests/queries/saved_queries/commands_tests.py
@@ -83,6 +83,24 @@ def test_export_query_command(self, mock_g):
"database_uuid": str(self.example_database.uuid),
}
+ @patch("superset.queries.saved_queries.filters.g")
+ def test_export_query_command_no_related(self, mock_g):
+ """
+ Test that only the query is exported when export_related=False.
+ """
+ mock_g.user = security_manager.find_user("admin")
+
+ command = ExportSavedQueriesCommand(
+ [self.example_query.id], export_related=False
+ )
+ contents = dict(command.run())
+
+ expected = [
+ "metadata.yaml",
+ "queries/examples/schema1/The_answer.yaml",
+ ]
+ assert expected == list(contents.keys())
+
@patch("superset.queries.saved_queries.filters.g")
def test_export_query_command_no_access(self, mock_g):
"""Test that users can't export datasets they don't have access to"""