allow cache and force refresh on table list #6078

Merged
@@ -41,14 +41,10 @@ describe('SqlEditorLeftBar', () => {
expect(wrapper.find(TableElement)).toHaveLength(1);
});
describe('onDatabaseChange', () => {
it('should fetch tables', () => {
sinon.stub(wrapper.instance(), 'fetchTables');
it('should fetch schemas', () => {
sinon.stub(wrapper.instance(), 'fetchSchemas');
wrapper.instance().onDatabaseChange({ value: 1, label: 'main' });

expect(wrapper.instance().fetchTables.getCall(0).args[0]).toBe(1);
expect(wrapper.instance().fetchSchemas.getCall(0).args[0]).toBe(1);
wrapper.instance().fetchTables.restore();
wrapper.instance().fetchSchemas.restore();
});
it('should clear tableOptions', () => {
@@ -103,9 +99,9 @@ describe('SqlEditorLeftBar', () => {
d.resolve(tables);
return d.promise();
});
wrapper.instance().fetchTables(1, 'main', 'birth_names');
wrapper.instance().fetchTables(1, 'main', 'true', 'birth_names');

expect(ajaxStub.getCall(0).args[0]).toBe('/superset/tables/1/main/birth_names/');
expect(ajaxStub.getCall(0).args[0]).toBe('/superset/tables/1/main/birth_names/true/');
expect(wrapper.state().tableLength).toBe(3);
});
it('should handle error', () => {
77 changes: 43 additions & 34 deletions superset/assets/src/SqlLab/components/SqlEditorLeftBar.jsx
@@ -40,13 +40,10 @@ class SqlEditorLeftBar extends React.PureComponent {
}
onDatabaseChange(db, force) {
const val = db ? db.value : null;
this.setState({ schemaOptions: [] });
this.setState({ schemaOptions: [], tableOptions: [] });
this.props.actions.queryEditorSetSchema(this.props.queryEditor, null);
this.props.actions.queryEditorSetDb(this.props.queryEditor, val);
if (!(db)) {
this.setState({ tableOptions: [] });
} else {
this.fetchTables(val, this.props.queryEditor.schema);
@youngyjd (Contributor, PR author) commented on Oct 12, 2018:

fetchTables here makes no sense because the database has changed, but this.props.queryEditor.schema is still the schema that was selected for the previous db.
It is also set to null on line 44:
this.props.actions.queryEditorSetSchema(this.props.queryEditor, null);

so I think removing it is necessary.

if (db) {
this.fetchSchemas(val, force || false);
}
}
@@ -69,11 +66,12 @@ class SqlEditorLeftBar extends React.PureComponent {
resetState() {
this.props.actions.resetState();
}
fetchTables(dbId, schema, substr) {
fetchTables(dbId, schema, force, substr) {
// This can be large so it shouldn't be put in the Redux store
const forceRefresh = force || false;
if (dbId && schema) {
this.setState({ tableLoading: true, tableOptions: [] });
const url = `/superset/tables/${dbId}/${schema}/${substr}/`;
const url = `/superset/tables/${dbId}/${schema}/${substr}/${forceRefresh}/`;
$.get(url).done((data) => {
const filterOptions = createFilterOptions({ options: data.options });
this.setState({
@@ -110,10 +108,10 @@ class SqlEditorLeftBar extends React.PureComponent {
}
this.props.actions.addTable(this.props.queryEditor, tableName, schemaName);
}
changeSchema(schemaOpt) {
changeSchema(schemaOpt, force) {
const schema = (schemaOpt) ? schemaOpt.value : null;
this.props.actions.queryEditorSetSchema(this.props.queryEditor, schema);
this.fetchTables(this.props.queryEditor.dbId, schema);
this.fetchTables(this.props.queryEditor.dbId, schema, force);
}
fetchSchemas(dbId, force) {
const actualDbId = dbId || this.props.queryEditor.dbId;
@@ -196,7 +194,7 @@ class SqlEditorLeftBar extends React.PureComponent {
<RefreshLabel
onClick={this.onDatabaseChange.bind(
this, { value: database.id }, true)}
tooltipContent="force refresh schema list"
tooltipContent={t('force refresh schema list')}
/>
</div>
</div>
@@ -214,30 +212,41 @@ class SqlEditorLeftBar extends React.PureComponent {
</i>)
</small>
</ControlLabel>
{this.props.queryEditor.schema &&
<Select
name="select-table"
ref="selectTable"
isLoading={this.state.tableLoading}
placeholder={t('Select table or type table name')}
autosize={false}
onChange={this.changeTable.bind(this)}
filterOptions={this.state.filterOptions}
options={this.state.tableOptions}
/>
}
{!this.props.queryEditor.schema &&
<Select
async
name="async-select-table"
ref="selectTable"
placeholder={tableSelectPlaceholder}
disabled={tableSelectDisabled}
autosize={false}
onChange={this.changeTable.bind(this)}
loadOptions={this.getTableNamesBySubStr.bind(this)}
/>
}
<div className="row">
<div className="col-md-11 col-xs-11" style={{ paddingRight: '2px' }}>
{this.props.queryEditor.schema &&
<Select
name="select-table"
ref="selectTable"
isLoading={this.state.tableLoading}
placeholder={t('Select table or type table name')}
autosize={false}
onChange={this.changeTable.bind(this)}
filterOptions={this.state.filterOptions}
options={this.state.tableOptions}
/>
}
{!this.props.queryEditor.schema &&
<Select
async
name="async-select-table"
ref="selectTable"
placeholder={tableSelectPlaceholder}
disabled={tableSelectDisabled}
autosize={false}
onChange={this.changeTable.bind(this)}
loadOptions={this.getTableNamesBySubStr.bind(this)}
/>
}
</div>
<div className="col-md-1 col-xs-1" style={{ paddingTop: '8px', paddingLeft: '0px' }}>
<RefreshLabel
onClick={this.changeSchema.bind(
this, { value: this.props.queryEditor.schema }, true)}
tooltipContent={t('force refresh table list')}
/>
</div>
</div>
</div>
<hr />
<div className="m-t-5">
31 changes: 12 additions & 19 deletions superset/cache_util.py
@@ -9,25 +9,17 @@ def view_cache_key(*unused_args, **unused_kwargs):
return 'view/{}/{}'.format(request.path, args_hash)


def default_timeout(*unused_args, **unused_kwargs):
return 5 * 60


def default_enable_cache(*unused_args, **unused_kwargs):
return True
def memoized_func(key=view_cache_key, use_tables_cache=False):
"""Use this decorator to cache functions that have predefined first arg.

enable_cache is treated as True by default,
except enable_cache = False is passed to the decorated function.

def memoized_func(timeout=default_timeout,
key=view_cache_key,
enable_cache=default_enable_cache,
use_tables_cache=False):
"""Use this decorator to cache functions that have predefined first arg.
force means whether to force refresh the cache and is treated as False by default,
except force = True is passed to the decorated function.

If enable_cache() is False,
the function will never be cached.
If enable_cache() is True,
cache is adopted and will timeout in timeout() seconds.
If force is True, cache will be refreshed.
timeout of cache is set to 600 seconds by default,
except cache_timeout = {timeout in seconds} is passed to the decorated function.

memoized_func uses simple_cache and stored the data in memory.
Key is a callable function that takes function arguments and
@@ -42,15 +34,16 @@ def wrap(f):

if selected_cache:
def wrapped_f(cls, *args, **kwargs):
if not enable_cache(*args, **kwargs):
if not kwargs.get('enable_cache', True):
return f(cls, *args, **kwargs)

cache_key = key(*args, **kwargs)
o = selected_cache.get(cache_key)
if not kwargs['force'] and o is not None:
if not kwargs.get('force') and o is not None:
return o
o = f(cls, *args, **kwargs)
selected_cache.set(cache_key, o, timeout=timeout(*args, **kwargs))
selected_cache.set(cache_key, o,
timeout=kwargs.get('cache_timeout', 600))
return o
else:
# noop
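
A minimal sketch of the contract the reworked `memoized_func` now follows: caching is controlled per call through `enable_cache`, `force`, and `cache_timeout` keyword arguments rather than decorator-time callables. The dict-backed store and the `FakeSpec` class below are illustrative stand-ins, not Superset's actual `tables_cache`/`cache` wiring.

```python
# Illustrative sketch only: a plain dict stands in for Superset's Werkzeug cache,
# and FakeSpec is a made-up engine-spec-like class.
_store = {}


def memoized_func(key):
    """Cache results of a classmethod-style function under key(*args, **kwargs)."""
    def wrap(f):
        def wrapped_f(cls, *args, **kwargs):
            # enable_cache defaults to True; enable_cache=False bypasses caching entirely
            if not kwargs.get('enable_cache', True):
                return f(cls, *args, **kwargs)
            cache_key = key(*args, **kwargs)
            cached = _store.get(cache_key)
            # force defaults to False; force=True recomputes and refreshes the entry
            if not kwargs.get('force') and cached is not None:
                return cached
            result = f(cls, *args, **kwargs)
            # the real decorator passes timeout=kwargs.get('cache_timeout', 600) to cache.set
            _store[cache_key] = result
            return result
        return wrapped_f
    return wrap


class FakeSpec(object):
    @classmethod
    @memoized_func(
        key=lambda *args, **kwargs: 'db:{}:schema_list'.format(kwargs.get('db_id')))
    def get_schema_names(cls, inspector, db_id, enable_cache, cache_timeout, force=False):
        return ['main', 'information_schema']


# First call populates the cache, the second is served from it, force=True recomputes.
FakeSpec.get_schema_names(inspector=None, db_id=1, enable_cache=True, cache_timeout=600)
FakeSpec.get_schema_names(inspector=None, db_id=1, enable_cache=True, cache_timeout=600)
FakeSpec.get_schema_names(inspector=None, db_id=1, enable_cache=True, cache_timeout=600, force=True)
```
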
34 changes: 23 additions & 11 deletions superset/db_engine_specs.py
@@ -228,7 +228,6 @@ def convert_dttm(cls, target_type, dttm):

@classmethod
@cache_util.memoized_func(
timeout=600,
key=lambda *args, **kwargs: 'db:{}:{}'.format(args[0].id, args[1]),
use_tables_cache=True)
def fetch_result_sets(cls, db, datasource_type, force=False):
@@ -295,8 +294,6 @@ def patch(cls):

@classmethod
@cache_util.memoized_func(
enable_cache=lambda *args, **kwargs: kwargs.get('enable_cache', False),
timeout=lambda *args, **kwargs: kwargs.get('cache_timeout'),
key=lambda *args, **kwargs: 'db:{}:schema_list'.format(kwargs.get('db_id')))
def get_schema_names(cls, inspector, db_id,
enable_cache, cache_timeout, force=False):
@@ -312,9 +309,21 @@ def get_schema_names(cls, inspector, db_id,
return inspector.get_schema_names()

@classmethod
def get_table_names(cls, schema, inspector):
@cache_util.memoized_func(
key=lambda *args, **kwargs: 'db:{db_id}:schema:{schema}:table_list'.format(
db_id=kwargs.get('db_id'), schema=kwargs.get('schema')))
def get_table_names(cls, inspector, db_id, schema,
enable_cache, cache_timeout, force=False):
return sorted(inspector.get_table_names(schema))

@classmethod
@cache_util.memoized_func(
key=lambda *args, **kwargs: 'db:{db_id}:schema:{schema}:view_list'.format(
db_id=kwargs.get('db_id'), schema=kwargs.get('schema')))
def get_view_names(cls, inspector, db_id, schema,
enable_cache, cache_timeout, force=False):
return sorted(inspector.get_view_names(schema))

@classmethod
def where_latest_partition(
cls, table_name, schema, database, qry, columns=None):
@@ -438,7 +447,11 @@ class PostgresEngineSpec(PostgresBaseEngineSpec):
engine = 'postgresql'

@classmethod
def get_table_names(cls, schema, inspector):
@cache_util.memoized_func(
key=lambda *args, **kwargs: 'db:{db_id}:schema:{schema}:table_list'.format(
db_id=kwargs.get('db_id'), schema=kwargs.get('schema')))
def get_table_names(cls, inspector, db_id, schema,
enable_cache, cache_timeout, force=False):
"""Need to consider foreign tables for PostgreSQL"""
tables = inspector.get_table_names(schema)
tables.extend(inspector.get_foreign_table_names(schema))
@@ -570,7 +583,6 @@ def epoch_to_dttm(cls):

@classmethod
@cache_util.memoized_func(
timeout=600,
key=lambda *args, **kwargs: 'db:{}:{}'.format(args[0].id, args[1]),
use_tables_cache=True)
def fetch_result_sets(cls, db, datasource_type, force=False):
@@ -596,7 +608,11 @@ def convert_dttm(cls, target_type, dttm):
return "'{}'".format(iso)

@classmethod
def get_table_names(cls, schema, inspector):
@cache_util.memoized_func(
key=lambda *args, **kwargs: 'db:{db_id}:schema:{schema}:table_list'.format(
db_id=kwargs.get('db_id'), schema=kwargs.get('schema')))
def get_table_names(cls, inspector, db_id, schema,
enable_cache, cache_timeout, force=False):
"""Need to disregard the schema for Sqlite"""
return sorted(inspector.get_table_names())

@@ -721,7 +737,6 @@ def epoch_to_dttm(cls):

@classmethod
@cache_util.memoized_func(
timeout=600,
key=lambda *args, **kwargs: 'db:{}:{}'.format(args[0].id, args[1]),
use_tables_cache=True)
def fetch_result_sets(cls, db, datasource_type, force=False):
@@ -1003,7 +1018,6 @@ def patch(cls):

@classmethod
@cache_util.memoized_func(
timeout=600,
key=lambda *args, **kwargs: 'db:{}:{}'.format(args[0].id, args[1]),
use_tables_cache=True)
def fetch_result_sets(cls, db, datasource_type, force=False):
@@ -1461,8 +1475,6 @@ def convert_dttm(cls, target_type, dttm):

@classmethod
@cache_util.memoized_func(
enable_cache=lambda *args, **kwargs: kwargs.get('enable_cache', False),
timeout=lambda *args, **kwargs: kwargs.get('cache_timeout'),
key=lambda *args, **kwargs: 'db:{}:schema_list'.format(kwargs.get('db_id')))
def get_schema_names(cls, inspector, db_id,
enable_cache, cache_timeout, force=False):
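
The `key` lambdas above are what scope the new cache entries per database and schema; a quick illustration of the keys they generate, with a made-up database id and schema name:

```python
# Mirrors the key lambdas passed to memoized_func above; inputs are made up.
def table_list_key(db_id, schema):
    return 'db:{db_id}:schema:{schema}:table_list'.format(db_id=db_id, schema=schema)


def view_list_key(db_id, schema):
    return 'db:{db_id}:schema:{schema}:view_list'.format(db_id=db_id, schema=schema)


def schema_list_key(db_id):
    return 'db:{}:schema_list'.format(db_id)


print(table_list_key(1, 'main'))   # db:1:schema:main:table_list
print(view_list_key(1, 'main'))    # db:1:schema:main:view_list
print(schema_list_key(1))          # db:1:schema_list
```
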
26 changes: 23 additions & 3 deletions superset/models/core.py
@@ -847,8 +847,18 @@ def all_table_names(self, schema=None, force=False):
tables_dict = self.db_engine_spec.fetch_result_sets(
self, 'table', force=force)
return tables_dict.get('', [])
return sorted(
self.db_engine_spec.get_table_names(schema, self.inspector))

extra = self.get_extra()
medatada_cache_timeout = extra.get('metadata_cache_timeout', {})
table_cache_timeout = medatada_cache_timeout.get('table_cache_timeout')
enable_cache = 'table_cache_timeout' in medatada_cache_timeout
return sorted(self.db_engine_spec.get_table_names(
inspector=self.inspector,
db_id=self.id,
schema=schema,
enable_cache=enable_cache,
cache_timeout=table_cache_timeout,
force=force))

def all_view_names(self, schema=None, force=False):
if not schema:
@@ -859,7 +869,17 @@ def all_view_names(self, schema=None, force=False):
return views_dict.get('', [])
views = []
try:
views = self.inspector.get_view_names(schema)
extra = self.get_extra()
medatada_cache_timeout = extra.get('metadata_cache_timeout', {})
table_cache_timeout = medatada_cache_timeout.get('table_cache_timeout')
enable_cache = 'table_cache_timeout' in medatada_cache_timeout
views = self.db_engine_spec.get_view_names(
inspector=self.inspector,
db_id=self.id,
schema=schema,
enable_cache=enable_cache,
cache_timeout=table_cache_timeout,
force=force)
except Exception:
pass
return views
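
`all_table_names` and `all_view_names` read these settings from the database's `extra` JSON. A plausible `extra` value with both timeouts set is sketched below (values illustrative, unrelated keys omitted); when `table_cache_timeout` is present the engine spec is called with `enable_cache=True` and that timeout, and `force=True` still bypasses the cached entry.

```python
import json

# Illustrative Database.extra payload; only the key relevant to this PR is shown.
extra = json.dumps({
    'metadata_cache_timeout': {
        'schema_cache_timeout': 600,  # schema list cached for 10 minutes
        'table_cache_timeout': 600,   # table and view lists cached for 10 minutes
    },
})

# Same derivation as all_table_names() above.
settings = json.loads(extra).get('metadata_cache_timeout', {})
table_cache_timeout = settings.get('table_cache_timeout')
enable_cache = 'table_cache_timeout' in settings
print(enable_cache, table_cache_timeout)  # True 600
```
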
13 changes: 8 additions & 5 deletions superset/views/core.py
@@ -206,7 +206,8 @@ class DatabaseView(SupersetModelView, DeleteMixin, YamlExportMixin):  # noqa
'#sqlalchemy.schema.MetaData) call.<br/>'
'2. The ``metadata_cache_timeout`` is a cache timeout setting '
'in seconds for metadata fetch of this database. Specify it as '
'**"metadata_cache_timeout": {"schema_cache_timeout": 600}**. '
'**"metadata_cache_timeout": {"schema_cache_timeout": 600, '
'"table_cache_timeout": 600}**. '
'If unset, cache will not be enabled for the functionality. '
'A timeout of 0 indicates that the cache never expires.<br/>'
'3. The ``schemas_allowed_for_csv_upload`` is a comma separated list '
@@ -1538,7 +1539,7 @@ def checkbox(self, model_view, id_, attr, value):
@has_access_api
@expose('/schemas/<db_id>/')
@expose('/schemas/<db_id>/<force_refresh>/')
def schemas(self, db_id, force_refresh='true'):
def schemas(self, db_id, force_refresh='false'):
db_id = int(db_id)
force_refresh = force_refresh.lower() == 'true'
database = (
@@ -1556,16 +1557,18 @@ def schemas(self, db_id, force_refresh='true'):
@api
@has_access_api
@expose('/tables/<db_id>/<schema>/<substr>/')
def tables(self, db_id, schema, substr):
@expose('/tables/<db_id>/<schema>/<substr>/<force_refresh>/')
def tables(self, db_id, schema, substr, force_refresh='false'):
"""Endpoint to fetch the list of tables for given database"""
db_id = int(db_id)
force_refresh = force_refresh.lower() == 'true'
schema = utils.js_string_to_python(schema)
substr = utils.js_string_to_python(substr)
database = db.session.query(models.Database).filter_by(id=db_id).one()
table_names = security_manager.accessible_by_user(
database, database.all_table_names(schema), schema)
database, database.all_table_names(schema, force_refresh), schema)
view_names = security_manager.accessible_by_user(
database, database.all_view_names(schema), schema)
database, database.all_view_names(schema, force_refresh), schema)

if substr:
table_names = [tn for tn in table_names if substr in tn]
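
Putting both ends together, the URL that `fetchTables` builds in `SqlEditorLeftBar.jsx` lines up with the new route above; a short sketch with made-up ids and names:

```python
# Made-up values; shows how the optional trailing segment toggles a forced refresh.
db_id, schema, substr = 1, 'main', 'birth_names'

forced = '/superset/tables/{}/{}/{}/true/'.format(db_id, schema, substr)
cached = '/superset/tables/{}/{}/{}/'.format(db_id, schema, substr)

print(forced)  # /superset/tables/1/main/birth_names/true/ -> bypasses the table list cache
print(cached)  # /superset/tables/1/main/birth_names/      -> force_refresh defaults to 'false'
```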