From 61f624d7d9452a25f25670e3994f539ac5602eed Mon Sep 17 00:00:00 2001 From: Andrew Schonfeld Date: Sun, 29 Mar 2020 10:16:21 -0400 Subject: [PATCH] 1.8.2: * #129, show dtype when hovering over header in "Highlight Dtypes" mode and description tooltips added to main menu * made "No Aggregation" the default aggregation in charts * bugfix for line charts with more than 15000 points * updated "Value Counts" & "Category Breakdown" to return top on initial load * #118, added scattergeo & choropleth maps * #121, added "not equal" toggle to filters * #132, updated resize button to "Refresh Widths" * added "Animate" toggle to scatter, line & bar charts * #131, changes to "Reshape Data" window * #130, updates to pivot reshaper * #128, additional hover display of code snippets for column creation * #112, updated "Group" selection to give users the ability to select group values --- .circleci/config.yml | 2 +- docker/2_7/Dockerfile | 2 +- docker/3_6/Dockerfile | 2 +- docs/source/conf.py | 4 +- dtale/charts/utils.py | 89 ++-- dtale/cli/loaders/csv_loader.py | 17 + dtale/cli/loaders/json_loader.py | 2 + dtale/column_filters.py | 13 +- dtale/dash_application/charts.py | 332 +++++++++---- dtale/dash_application/layout.py | 444 +++++++++++++----- dtale/dash_application/views.py | 126 ++++- dtale/data_reshapers.py | 19 +- dtale/static/css/dash.css | 30 +- dtale/static/css/main.css | 33 ++ dtale/utils.py | 20 + dtale/views.py | 69 +-- package.json | 2 +- setup.py | 2 +- .../__tests__/dtale/DataViewer-base-test.jsx | 4 +- .../DataViewer-reshape-aggregate-test.jsx | 25 +- .../reshape/DataViewer-reshape-pivot-test.jsx | 14 +- .../DataViewer-reshape-transpose-test.jsx | 12 +- .../filters/ColumnFilter-numeric-test.jsx | 43 +- .../filters/ColumnFilter-string-test.jsx | 20 +- .../__tests__/iframe/DataViewer-base-test.jsx | 6 +- .../iframe/DataViewer-within-iframe-test.jsx | 2 +- static/dtale/DataViewerMenu.jsx | 55 ++- static/dtale/Header.jsx | 14 +- static/dtale/menu-descriptions.json | 16 + static/filters/ColumnFilter.jsx | 2 +- static/filters/NumericFilter.jsx | 21 +- static/filters/StringFilter.jsx | 76 ++- static/popups/analysis/ColumnAnalysis.jsx | 3 +- .../popups/analysis/ColumnAnalysisFilters.jsx | 3 +- static/popups/create/CreateColumn.jsx | 49 +- static/popups/create/CreateRandom.jsx | 34 +- static/popups/create/codeSnippets.js | 56 +++ static/popups/reshape/Aggregate.jsx | 2 +- static/popups/reshape/Pivot.jsx | 20 +- static/popups/reshape/Reshape.jsx | 41 +- tests/conftest.py | 8 + tests/data/state-codes.csv | 52 ++ tests/dtale/test_charts.py | 4 + tests/dtale/test_dash.py | 275 +++++++++-- tests/dtale/test_instance.py | 7 +- tests/dtale/test_show_loaders.py | 18 +- tests/dtale/test_views.py | 46 +- 47 files changed, 1651 insertions(+), 485 deletions(-) create mode 100644 static/dtale/menu-descriptions.json create mode 100644 static/popups/create/codeSnippets.js create mode 100755 tests/data/state-codes.csv diff --git a/.circleci/config.yml b/.circleci/config.yml index 3364eb3ca..2ca1cb29c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -5,7 +5,7 @@ defaults: &defaults CIRCLE_ARTIFACTS: /tmp/circleci-artifacts CIRCLE_TEST_REPORTS: /tmp/circleci-test-results CODECOV_TOKEN: b0d35139-0a75-427a-907b-2c78a762f8f0 - VERSION: 1.8.1 + VERSION: 1.8.2 PANDOC_RELEASES_URL: https://github.com/jgm/pandoc/releases steps: - checkout diff --git a/docker/2_7/Dockerfile b/docker/2_7/Dockerfile index e1a1fe9cf..1767bb8c9 100644 --- a/docker/2_7/Dockerfile +++ b/docker/2_7/Dockerfile @@ -44,4 +44,4 @@ 
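# Illustrative sketch (not part of the patch): per the changelog above, #121 adds a
# "not equal" toggle to column filters and #112 lets users pick specific group values.
# Both ultimately reduce to pandas query strings like the ones below (column names are
# hypothetical; the real query strings are built in dtale/column_filters.py and
# dtale/charts/utils.py).
import pandas as pd

df = pd.DataFrame({'state': ['CA', 'NY', 'TX'], 'val': [1.0, 2.0, float('nan')]})
df.query("state != 'CA'")               # single-value "not equal" filter
df.query("state not in ('CA', 'NY')")   # multi-value "not equal" filter
df.query("val != val")                  # NaN group filter: a value never equals itself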
WORKDIR /app RUN set -eux \ ; . /root/.bashrc \ - ; easy_install dtale-1.8.1-py2.7.egg + ; easy_install dtale-1.8.2-py2.7.egg diff --git a/docker/3_6/Dockerfile b/docker/3_6/Dockerfile index 2cefc4d6d..b1de0e802 100644 --- a/docker/3_6/Dockerfile +++ b/docker/3_6/Dockerfile @@ -44,4 +44,4 @@ WORKDIR /app RUN set -eux \ ; . /root/.bashrc \ - ; easy_install dtale-1.8.1-py3.7.egg + ; easy_install dtale-1.8.2-py3.7.egg diff --git a/docs/source/conf.py b/docs/source/conf.py index 7049000b9..4e4b056f8 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -64,9 +64,9 @@ # built documents. # # The short X.Y version. -version = u'1.8.1' +version = u'1.8.2' # The full version, including alpha/beta/rc tags. -release = u'1.8.1' +release = u'1.8.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/dtale/charts/utils.py b/dtale/charts/utils.py index 859a0804d..a6a5cff86 100644 --- a/dtale/charts/utils.py +++ b/dtale/charts/utils.py @@ -1,10 +1,12 @@ import pandas as pd -from dtale.utils import (classify_type, find_dtype_formatter, get_dtypes, +from dtale.utils import (ChartBuildingError, classify_type, + find_dtype_formatter, flatten_lists, get_dtypes, grid_columns, grid_formatter, json_int, make_list) YAXIS_CHARTS = ['line', 'bar', 'scatter'] ZAXIS_CHARTS = ['heatmap', '3d_scatter', 'surface'] +MAX_GROUPS = 30 def valid_chart(chart_type=None, x=None, y=None, z=None, **inputs): @@ -24,6 +26,14 @@ def valid_chart(chart_type=None, x=None, y=None, z=None, **inputs): :return: `True` if executed from test, `False` otherwise :rtype: bool """ + if chart_type == 'maps': + map_type = inputs.get('map_type') + if map_type == 'choropleth' and all(inputs.get(p) is not None for p in ['loc_mode', 'loc', 'map_val']): + return True + elif map_type == 'scattergeo' and all(inputs.get(p) is not None for p in ['lat', 'lon', 'map_val']): + return True + return False + if x is None or not len(y or []): return False @@ -95,6 +105,8 @@ def group_filter_handler(col_def, group_val, group_classifier): col_def_segs = col_def.split('|') if len(col_def_segs) > 1: col, freq = col_def_segs + if group_val == 'nan': + return '{col} != {col}'.format(col=col) if freq == 'WD': return '{}.dt.dayofweek == {}'.format(col, group_val) elif freq == 'H2': @@ -125,31 +137,27 @@ def group_filter_handler(col_def, group_val, group_classifier): elif freq == 'Y': ts_val = pd.Timestamp(group_val) return "{col}.dt.year == {year}".format(col=col, year=ts_val.year) + if group_val == 'nan': + return '{col} != {col}'.format(col=col_def) if group_classifier in ['I', 'F']: return '{col} == {val}'.format(col=col_def, val=group_val) return "{col} == '{val}'".format(col=col_def, val=group_val) -def retrieve_chart_data(df, x, y, z, group=None): +def retrieve_chart_data(df, *args, **kwargs): """ Retrieves data from a dataframe for x, y, z & group inputs complete with date frequency formatting (:meth:`dtale.charts.utils.date_freq_handler`) if specified :param df: dataframe that contains data for chart :type df: :class:`pandas:pandas.DataFrame` - :param x: column to use for the X-Axis - :type x: str - :param y: columns to use for the Y-Axes - :type y: list of str - :param z: column to use for the Z-Axis - :type z: str - :param group: column(s) to use for grouping - :type group: list of str or str + :param args: columns to use + :type args: iterable of str :return: dataframe of data required for chart construction :rtype: :class:`pandas:pandas.DataFrame` """ freq_handler = 
date_freq_handler(df) - cols = [x] + make_list(y) + make_list(z) + make_list(group) + cols = flatten_lists([make_list(a) for a in args]) all_code = [] all_data = [] for col in cols: @@ -158,8 +166,26 @@ def retrieve_chart_data(df, x, y, z, group=None): all_data.append(s) if code is not None: all_code.append(code) + all_data = pd.concat(all_data, axis=1) all_code = ["chart_data = pd.concat(["] + all_code + ["], axis=1)"] - return pd.concat(all_data, axis=1), all_code + if len(make_list(kwargs.get('group_val'))): + dtypes = get_dtypes(all_data) + + def _group_filter(group_val): + for gc, gv in group_val.items(): + classifier = classify_type(dtypes[gc]) + yield group_filter_handler(gc, gv, classifier) + + def _full_filter(): + for group_val in kwargs['group_val']: + group_filter = ' and '.join(list(_group_filter(group_val))) + yield group_filter + + filters = list(_full_filter()) + filters = '({})'.format(') or ('.join(filters)) + all_data = all_data.query(filters) + all_code.append('chart_data = chart_data.query({})'.format(filters)) + return all_data, all_code def check_all_nan(df, cols=None): @@ -198,12 +224,12 @@ def check_exceptions(df, allow_duplicates, unlimited_data=False, data_limit=1500 :raises Exception: if any failure condition is met """ if not allow_duplicates and any(df.duplicated()): - raise Exception(( + raise ChartBuildingError(( "{} contains duplicates, please specify group or additional filtering or select 'No Aggregation' from" ' Aggregation drop-down.' ).format(', '.join(df.columns))) if not unlimited_data and len(df) > data_limit: - raise Exception(limit_msg.format(data_limit)) + raise ChartBuildingError(limit_msg.format(data_limit)) def build_agg_data(df, x, y, inputs, agg, z=None): @@ -264,8 +290,8 @@ def build_agg_data(df, x, y, inputs, agg, z=None): ] -def build_chart(raw_data, x, y, group_col=None, agg=None, allow_duplicates=False, return_raw=False, - unlimited_data=False, **kwargs): +def build_base_chart(raw_data, x, y, group_col=None, group_val=None, agg=None, allow_duplicates=False, return_raw=False, + unlimited_data=False, **kwargs): """ Helper function to return data for 'chart-data' & 'correlations-ts' endpoints. 
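# Illustrative sketch (not part of the patch): when group_val is supplied,
# retrieve_chart_data above builds one AND-ed clause per group column for each selected
# group and OR's the groups together into a single pandas query. A simplified standalone
# version (the real code routes through group_filter_handler for dtype-aware formatting):
import pandas as pd

chart_data = pd.DataFrame({'region': ['east', 'west', 'east'],
                           'year': [2019, 2019, 2020],
                           'val': [1, 2, 3]})
group_val = [{'region': 'east', 'year': 2019}, {'region': 'west', 'year': 2019}]
filters = ') or ('.join(
    ' and '.join('{} == {!r}'.format(col, val) for col, val in gv.items())
    for gv in group_val
)
chart_data = chart_data.query('({})'.format(filters))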
Will return a dictionary of dictionaries (one for each series) which contain the data for the x & y axes of the chart as well as the minimum & @@ -289,7 +315,7 @@ def build_chart(raw_data, x, y, group_col=None, agg=None, allow_duplicates=False :return: dict """ - data, code = retrieve_chart_data(raw_data, x, y, kwargs.get('z'), group_col) + data, code = retrieve_chart_data(raw_data, x, y, kwargs.get('z'), group_col, group_val=group_val) x_col = str('x') y_cols = make_list(y) z_col = kwargs.get('z') @@ -300,34 +326,28 @@ def build_chart(raw_data, x, y, group_col=None, agg=None, allow_duplicates=False check_all_nan(data, [x] + y_cols) data = data.rename(columns={x: x_col}) code.append("chart_data = chart_data.rename(columns={'" + x + "': '" + x_col + "'})") - if agg is not None: + if agg is not None and agg != 'raw': data = data.groupby(group_col + [x_col]) data = getattr(data, agg)().reset_index() code.append("chart_data = chart_data.groupby(['{cols}']).{agg}().reset_index()".format( cols="', '".join(group_col + [x]), agg=agg )) - max_groups = 30 + MAX_GROUPS = 30 group_vals = data[group_col].drop_duplicates() - if len(group_vals) > max_groups: + if len(group_vals) > MAX_GROUPS: dtypes = get_dtypes(group_vals) group_fmt_overrides = {'I': lambda v, as_string: json_int(v, as_string=as_string, fmt='{}')} group_fmts = {c: find_dtype_formatter(dtypes[c], overrides=group_fmt_overrides) for c in group_col} - def _group_filter(): - for gv, gc in zip(group_vals.values[0], group_col): - classifier = classify_type(dtypes[gc]) - yield group_filter_handler(gc, group_fmts[gc](gv, as_string=True), classifier) - group_filter = ' and '.join(list(_group_filter())) - group_f, _ = build_formatters(group_vals) group_vals = group_f.format_lists(group_vals) group_vals = pd.DataFrame(group_vals, columns=group_col) msg = ( - 'Group ({}) contains more than {} unique values, please add additional filtering' - ' or else chart will be unreadable. Additional filtering can be added above, for example:\n\n' - '{}\n\nHere are the values to choose from:\n\n{}' - ).format(', '.join(group_col), max_groups, group_filter, group_vals.to_string(index=False)) - raise Exception(msg) + 'Group ({}) contains more than {} unique values, more groups than that will make the chart unreadable. ' + 'You can choose specific groups to display from then "Group(s)" dropdown above. The available group(s) ' + 'are listed below:' + ).format(', '.join(group_col), MAX_GROUPS, group_vals.to_string(index=False)) + raise ChartBuildingError(msg, group_vals.to_string(index=False)) data = data.dropna() if return_raw: @@ -402,3 +422,10 @@ def weekday_tick_handler(col_data, col): if col.endswith('|WD'): return [WEEKDAY_MAP[d] for d in col_data] return col_data + + +def find_group_vals(df, group_cols): + group_vals, _ = retrieve_chart_data(df, group_cols) + group_vals = group_vals.drop_duplicates() + group_f, _ = build_formatters(group_vals) + return group_f.format_dicts(group_vals.itertuples()) diff --git a/dtale/cli/loaders/csv_loader.py b/dtale/cli/loaders/csv_loader.py index ed1275b56..16b469951 100644 --- a/dtale/cli/loaders/csv_loader.py +++ b/dtale/cli/loaders/csv_loader.py @@ -1,8 +1,16 @@ import pandas as pd +import requests +from six import PY3 from dtale.app import show from dtale.cli.clickutils import get_loader_options +if PY3: + from io import StringIO +else: + from StringIO import StringIO + + ''' IMPORTANT!!! These global variables are required for building any customized CLI loader. 
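# Illustrative sketch (not part of the patch): the URL support added to this CSV loader
# amounts to fetching the file with requests (optionally through a proxy) and handing
# the body to pandas via StringIO. The URL and proxy below are hypothetical; resp.text
# is used here for brevity instead of the PY2/PY3 branching used in loader_func.
import pandas as pd
import requests
from io import StringIO

proxy = 'http://proxy.example.com:8080'
resp = requests.get('https://example.com/data.csv', proxies=dict(http=proxy, https=proxy))
assert resp.status_code == 200
df = pd.read_csv(StringIO(resp.text))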
When build_loaders runs startup it will search for any modules containing the global variable LOADER_KEY. @@ -10,6 +18,7 @@ LOADER_KEY = 'csv' LOADER_PROPS = [ dict(name='path', help='path to CSV file'), + dict(name='proxy', help="proxy URL if you're passing in a URL for --csv-path"), dict(name='parse_dates', help='comma-separated string of column names which should be parsed as dates') ] @@ -21,6 +30,14 @@ def show_loader(**kwargs): def loader_func(**kwargs): path = kwargs.pop('path') + if path.startswith('http://') or path.startswith('https://'): # add support for URLs + proxy = kwargs.pop('proxy', None) + req_kwargs = {} + if proxy is not None: + req_kwargs['proxies'] = dict(http=proxy, https=proxy) + resp = requests.get(path, **req_kwargs) + assert resp.status_code == 200 + path = StringIO(resp.content if PY3 else resp.content.decode('utf-8')) return pd.read_csv(path, **{k: v for k, v in kwargs.items() if k in LOADER_PROPS}) diff --git a/dtale/cli/loaders/json_loader.py b/dtale/cli/loaders/json_loader.py index 82fdb6c99..fc4c6cde1 100644 --- a/dtale/cli/loaders/json_loader.py +++ b/dtale/cli/loaders/json_loader.py @@ -12,6 +12,7 @@ LOADER_KEY = 'json' LOADER_PROPS = [ dict(name='path', help='path to JSON file or URL to JSON endpoint'), + dict(name='proxy', help="proxy URL if you're passing in a URL for --json-path"), dict(name='convert_dates', help='comma-separated string of column names which should be parsed as dates') ] @@ -34,6 +35,7 @@ def loader_func(**kwargs): if proxy is not None: req_kwargs['proxies'] = dict(http=proxy, https=proxy) resp = requests.get(path, **req_kwargs) + assert resp.status_code == 200 path = resp.json() if normalize else resp.text if normalize: normalize_func = pd.json_normalize if is_pandas1() else pd.io.json.json_normalize diff --git a/dtale/column_filters.py b/dtale/column_filters.py index 077157579..49333aeff 100644 --- a/dtale/column_filters.py +++ b/dtale/column_filters.py @@ -56,13 +56,14 @@ def build_filter(self): return super(StringFilter, self).handle_missing(None) state = self.cfg.get('value', []) + operand = self.cfg.get('operand', '=') fltr = dict(value=state) if len(state) == 1: val_str = ("'{}'" if self.classification == 'S' else '{}').format(state[0]) - fltr['query'] = "{} == {}".format(self.column, val_str) + fltr['query'] = "{} {} {}".format(self.column, '==' if operand == '=' else '!=', val_str) else: val_str = ("'{}'".format("', '".join(state)) if self.classification == 'S' else ','.join(state)) - fltr['query'] = "{} in ({})".format(self.column, val_str) + fltr['query'] = "{} {} ({})".format(self.column, 'in' if operand == '=' else 'not in', val_str) return super(StringFilter, self).handle_missing(fltr) @@ -76,15 +77,17 @@ def build_filter(self): return super(NumericFilter, self).handle_missing(None) cfg_val, cfg_operand, cfg_min, cfg_max = (self.cfg.get(p) for p in ['value', 'operand', 'min', 'max']) - if cfg_operand == '=': + if cfg_operand in ['=', 'ne']: state = make_list(cfg_val or []) if not len(state): return super(NumericFilter, self).handle_missing(None) fltr = dict(value=cfg_val, operand=cfg_operand) if len(state) == 1: - fltr['query'] = "{} == {}".format(self.column, state[0]) + fltr['query'] = "{} {} {}".format(self.column, '==' if cfg_operand == '=' else '!=', state[0]) else: - fltr['query'] = "{} in ({})".format(self.column, ", ".join(state)) + fltr['query'] = "{} {} ({})".format( + self.column, 'in' if cfg_operand == '=' else 'not in', ", ".join(state) + ) return super(NumericFilter, self).handle_missing(fltr) if 
cfg_operand in ['<', '>', '<=', '>=']: if cfg_val is None: diff --git a/dtale/dash_application/charts.py b/dtale/dash_application/charts.py index 00da01ebb..9b1b8acd4 100644 --- a/dtale/dash_application/charts.py +++ b/dtale/dash_application/charts.py @@ -14,13 +14,14 @@ import dtale.dash_application.components as dash_components import dtale.global_state as global_state -from dtale.charts.utils import YAXIS_CHARTS, ZAXIS_CHARTS, build_agg_data -from dtale.charts.utils import build_chart as build_chart_data +from dtale.charts.utils import (YAXIS_CHARTS, ZAXIS_CHARTS, build_agg_data, + build_base_chart) from dtale.charts.utils import build_formatters as chart_formatters from dtale.charts.utils import (check_all_nan, check_exceptions, retrieve_chart_data, valid_chart, weekday_tick_handler) -from dtale.dash_application.layout import (AGGS, build_error, +from dtale.dash_application.layout import (AGGS, ANIMATION_CHARTS, build_error, + test_plotly_version, update_label_for_freq) from dtale.utils import (build_code_export, classify_type, dict_merge, divide_chunks, export_to_csv_buffer, flatten_lists, @@ -64,10 +65,11 @@ def chart_url_params(search): params = dict(get_url_parser()(search.lstrip('?'))) else: params = search - for gp in ['y', 'group', 'yaxis']: + for gp in ['y', 'group', 'group_val', 'yaxis']: if gp in params: params[gp] = json.loads(params[gp]) params['cpg'] = 'true' == params.get('cpg') + params['animate'] = 'true' == params.get('animate') if 'window' in params: params['window'] = int(params['window']) if 'group_filter' in params: @@ -82,10 +84,24 @@ def url_encode_func(): def chart_url_querystring(params, data=None, group_filter=None): - base_props = ['chart_type', 'query', 'x', 'z', 'agg', 'window', 'rolling_comp', 'barmode', 'barsort'] + base_props = ['chart_type', 'query', 'x', 'z', 'agg', 'window', 'rolling_comp'] + chart_type = params.get('chart_type') + if chart_type == 'bar': + base_props += ['barmode', 'barsort'] + elif chart_type == 'maps': + if params.get('map_type') == 'scattergeo': + base_props += ['map_type', 'lat', 'lon', 'map_val', 'scope', 'proj'] + else: + base_props += ['map_type', 'loc_mode', 'loc', 'map_val'] + + if chart_type in ['maps', 'heatmap']: + base_props += ['colorscale'] + final_params = {k: params[k] for k in base_props if params.get(k) is not None} final_params['cpg'] = 'true' if params.get('cpg') is True else 'false' - for gp in ['y', 'group']: + if chart_type in ANIMATION_CHARTS: + final_params['animate'] = 'true' if params.get('animate') is True else 'false' + for gp in ['y', 'group', 'group_val']: list_param = [val for val in params.get(gp) or [] if val is not None] if len(list_param): final_params[gp] = json.dumps(list_param) @@ -395,7 +411,7 @@ def _formatter(chart): ] -def scatter_builder(data, x, y, axes_builder, wrapper, group=None, z=None, agg=None): +def scatter_builder(data, x, y, axes_builder, wrapper, group=None, z=None, agg=None, animate=False): """ Builder function for :plotly:`plotly.graph_objects.Scatter ` @@ -434,21 +450,41 @@ def marker(series): return {'size': 15, 'line': {'width': 0.5, 'color': 'white'}} scatter_func = go.Scatter3d if z is not None else go.Scattergl - return [ - wrapper(graph_wrapper( - id='scatter-{}-{}'.format(group or 'all', y2), - figure={'data': [ + + def _build_final_scatter(y_val): + figure_cfg = { + 'data': [ scatter_func(**dict_merge( - dict(x=d['x'], y=d[y2], mode='markers', opacity=0.7, name=series_key, marker=marker(d)), + dict(x=d['x'], y=d[y_val], mode='markers', opacity=0.7, name=series_key, 
marker=marker(d)), dict(z=d[z]) if z is not None else dict()) ) - for series_key, d in data['data'].items() if y2 in d and (group is None or group == series_key) + for series_key, d in data['data'].items() if y_val in d and (group is None or group == series_key) ], 'layout': build_layout( - dict_merge(build_title(x, y2, group, z=z, agg=agg), layout(axes_builder([y2])[0])) - )} - ), group_filter=dict_merge(dict(y=y2), {} if group is None else dict(group=group))) - for y2 in y - ] + dict_merge(build_title(x, y_val, group, z=z, agg=agg), layout(axes_builder([y_val])[0])) + ) + } + if animate: + def build_frame(i): + for series_key, series in data['data'].items(): + if y_val in series and (group is None or group == series_key): + yield scatter_func(**dict( + x=series['x'][:i], y=series[y_val][:i], mode='markers', opacity=0.7, + name=series_key, marker=marker(series) + )) + + def build_frames(): + x = next(iter(data['data'].values()), {}).get('x', []) + for i in range(1, len(x) + 1): + yield dict(data=list(build_frame(i))) + + update_cfg_w_frames(figure_cfg, list(build_frames())) + + return wrapper(graph_wrapper( + id='scatter-{}-{}'.format(group or 'all', y_val), + figure=figure_cfg + ), group_filter=dict_merge(dict(y=y_val), {} if group is None else dict(group=group))) + + return [_build_final_scatter(y2) for y2 in y] def surface_builder(data, x, y, z, axes_builder, wrapper, agg=None): @@ -526,6 +562,17 @@ def build_grouped_bars_with_multi_yaxis(series_cfgs, y): ) +def update_cfg_w_frames(cfg, frames): + cfg['frames'] = frames + cfg['layout']['updatemenus'] = [{'type': 'buttons', 'showactive': True, 'buttons': [ + {'label': 'Play', 'method': 'animate', 'args': [None]}, + {'label': 'Pause', 'method': 'animate', 'args': [ + [None], + {"frame": {"duration": 0, "redraw": False}, "mode": "immediate", "transition": {"duration": 0}} + ]} + ]}] + + def bar_builder(data, x, y, axes_builder, wrapper, cpg=False, barmode='group', barsort=None, **kwargs): """ Builder function for :plotly:`plotly.graph_objects.Surface ` @@ -553,20 +600,19 @@ def bar_builder(data, x, y, axes_builder, wrapper, cpg=False, barmode='group', b allow_multiaxis = barmode is None or barmode == 'group' axes, allow_multiaxis = axes_builder(y) if allow_multiaxis else axes_builder([y[0]]) name_builder = build_series_name(y, cpg) - if barsort is not None: - for series_key, series in data['data'].items(): - barsort_col = 'x' if barsort == x or barsort not in series else barsort - if barsort_col != 'x' or kwargs.get('agg') == 'raw': - df = pd.DataFrame(series) - df = df.sort_values(barsort_col) - data['data'][series_key] = {c: df[c].values for c in df.columns} - tickvals = list(range(len(df['x']))) - data['data'][series_key]['x'] = tickvals - hover_text[series_key] = {'hovertext': df['x'].values, 'hoverinfo': 'y+text'} - axes['xaxis'] = dict_merge( - axes.get('xaxis', {}), - build_spaced_ticks(df['x'].values, mode='array') - ) + for series_key, series in data['data'].items(): + barsort_col = 'x' if barsort == x or barsort not in series else barsort + if barsort_col != 'x' or kwargs.get('agg') == 'raw': + df = pd.DataFrame(series) + df = df.sort_values(barsort_col) + data['data'][series_key] = {c: df[c].values for c in df.columns} + tickvals = list(range(len(df['x']))) + data['data'][series_key]['x'] = tickvals + hover_text[series_key] = {'hovertext': df['x'].values, 'hoverinfo': 'y+text'} + axes['xaxis'] = dict_merge( + axes.get('xaxis', {}), + build_spaced_ticks(df['x'].values, mode='array') + ) if cpg: charts = [ @@ -606,11 +652,32 @@ 
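# Illustrative sketch (not part of the patch): update_cfg_w_frames above implements the
# new "Animate" toggle by attaching plotly frames plus Play/Pause buttons to an
# otherwise static figure. A minimal standalone equivalent:
import plotly.graph_objs as go

x, y = [1, 2, 3, 4], [10, 20, 15, 30]
fig = go.Figure(
    data=[go.Scatter(x=x, y=y, mode='lines')],
    frames=[go.Frame(data=[go.Scatter(x=x[:i], y=y[:i], mode='lines')])
            for i in range(1, len(x) + 1)],
    layout=go.Layout(updatemenus=[dict(type='buttons', showactive=True, buttons=[
        dict(label='Play', method='animate', args=[None]),
        dict(label='Pause', method='animate', args=[[None], dict(
            frame=dict(duration=0, redraw=False), mode='immediate',
            transition=dict(duration=0))]),
    ])]),
)
# fig.show()  # renders the animated chart when run locally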
def bar_builder(data, x, y, axes_builder, wrapper, cpg=False, barmode='group', b if barmode == 'group' and allow_multiaxis: data_cfgs = list(build_grouped_bars_with_multi_yaxis(data_cfgs, y)) - return wrapper(graph_wrapper( - id='bar-graph', - figure={'data': data_cfgs, 'layout': build_layout( - dict_merge(build_title(x, y, agg=kwargs.get('agg')), axes, dict(barmode=barmode or 'group')))} - )) + figure_cfg = { + 'data': data_cfgs, + 'layout': build_layout( + dict_merge(build_title(x, y, agg=kwargs.get('agg')), axes, dict(barmode=barmode or 'group')) + ) + } + if kwargs.get('animate', False): + + def build_frame(i): + for series_key, series in data['data'].items(): + for j, y2 in enumerate(y, 1): + yield dict_merge( + {'x': series['x'][:i], 'y': series[y2][:i], 'type': 'bar'}, + name_builder(y2, series_key), + {} if j == 1 or not allow_multiaxis else {'yaxis': 'y{}'.format(j)}, + hover_text.get(series_key) or {} + ) + + def build_frames(): + x = next(iter(data['data'].values()), {}).get('x', []) + for i in range(1, len(x) + 1): + yield dict(data=list(build_frame(i))) + + update_cfg_w_frames(figure_cfg, list(build_frames())) + + return wrapper(graph_wrapper(id='bar-graph', figure=figure_cfg)) def line_builder(data, x, y, axes_builder, wrapper, cpg=False, **inputs): @@ -643,7 +710,7 @@ def line_func(s): def line_cfg(s): if len(s['x']) > 15000: - {'mode': 'lines', 'line': {'shape': 'linear'}} + return {'mode': 'lines', 'line': {'shape': 'linear'}} return {'mode': 'lines', 'line': {'shape': 'spline', 'smoothing': 0.3}} if cpg: @@ -679,10 +746,28 @@ def line_cfg(s): ] for series_key, series in data['data'].items() ]) - return wrapper(graph_wrapper( - id='line-graph', - figure={'data': data_cfgs, 'layout': build_layout(dict_merge(build_title(x, y, agg=inputs.get('agg')), axes))} - )) + + figure_cfg = {'data': data_cfgs, 'layout': build_layout(dict_merge(build_title(x, y, agg=inputs.get('agg')), axes))} + if inputs.get('animate', False): + + def build_frame(i): + for series_key, series in data['data'].items(): + for j, y2 in enumerate(y, 1): + yield line_func(series)(**dict_merge( + line_cfg(series), + {'x': series['x'][:i], 'y': series[y2][:i]}, + name_builder(y2, series_key), + {} if j == 1 or not multi_yaxis else {'yaxis': 'y{}'.format(j)} + )) + + def build_frames(): + x = next(iter(data['data'].values()), {}).get('x', []) + for i in range(1, len(x) + 1): + yield dict(data=list(build_frame(i))) + + update_cfg_w_frames(figure_cfg, list(build_frames())) + + return wrapper(graph_wrapper(id='line-graph', figure=figure_cfg)) def pie_builder(data, x, y, wrapper, export=False, **inputs): @@ -774,7 +859,7 @@ def heatmap_builder(data_id, export=False, **inputs): global_state.get_context_variables(data_id) ) wrapper = chart_wrapper(data_id, raw_data, inputs) - hm_kwargs = dict(colorscale='Greens', showscale=True, hoverinfo='x+y+z') # hoverongaps=False, + hm_kwargs = dict(colorscale=inputs.get('colorscale') or 'Greens', showscale=True, hoverinfo='x+y+z') x, y, z, agg = (inputs.get(p) for p in ['x', 'y', 'z', 'agg']) y = y[0] data, code = retrieve_chart_data(raw_data, x, y, z) @@ -858,11 +943,79 @@ def heatmap_builder(data_id, export=False, **inputs): return chart return wrapper(chart), code except BaseException as e: - return build_error(str(e), str(traceback.format_exc())), code + return build_error(e, traceback.format_exc()), code -def build_figure_data(data_id, chart_type=None, query=None, x=None, y=None, z=None, group=None, agg=None, window=None, - rolling_comp=None, return_raw=False, **kwargs): +def 
map_builder(data_id, export=False, **inputs): + code = None + try: + if not valid_chart(**inputs): + return None, None + props = ['map_type', 'loc_mode', 'loc', 'lat', 'lon', 'map_val', 'scope', 'proj', 'agg'] + map_type, loc_mode, loc, lat, lon, map_val, scope, proj, agg = (inputs.get(p) for p in props) + raw_data = run_query( + global_state.get_data(data_id), + inputs.get('query'), + global_state.get_context_variables(data_id) + ) + wrapper = chart_wrapper(data_id, raw_data, inputs) + title = 'Map of {}'.format(map_val) + if agg: + agg_title = AGGS[agg] + title = '{} ({})'.format(title, agg_title) + layout = build_layout(dict(title=title, autosize=True, margin={'l': 0, 'r': 0, 'b': 0})) + if map_type == 'scattergeo': + data, code = retrieve_chart_data(raw_data, lat, lon, map_val) + if agg is not None: + data, agg_code = build_agg_data(data, lat, lon, {}, agg, z=map_val) + code += agg_code + + geo_layout = {} + if test_plotly_version('4.5.0'): + geo_layout['fitbounds'] = 'locations' + if scope is not None: + geo_layout['scope'] = scope + if proj is not None: + geo_layout['projection_type'] = proj + if len(geo_layout): + layout['geo'] = geo_layout + chart = graph_wrapper( + id='scattergeo-graph', + style={'margin-right': 'auto', 'margin-left': 'auto'}, + figure=dict( + data=[go.Scattergeo( + lon=data[lon], lat=data[lat], mode='markers', marker_color=data[map_val] + )], + layout=layout + ) + ) + else: + data, code = retrieve_chart_data(raw_data, loc, map_val) + if agg is not None: + data, agg_code = build_agg_data(data, loc, map_val, {}, agg) + code += agg_code + if loc_mode == 'USA-states': + layout['geo'] = dict(scope='usa') + chart = graph_wrapper( + id='choropleth-graph', + style={'margin-right': 'auto', 'margin-left': 'auto'}, + figure=dict( + data=[go.Choropleth( + locations=data[loc], locationmode=loc_mode, z=data[map_val], + colorscale=inputs.get('colorscale') or 'Reds', colorbar_title=map_val + )], + layout=layout + ) + ) + if export: + return chart + return wrapper(chart), code + except BaseException as e: + return build_error(e, traceback.format_exc()), code + + +def build_figure_data(data_id, chart_type=None, query=None, x=None, y=None, z=None, group=None, group_val=None, + agg=None, window=None, rolling_comp=None, return_raw=False, **kwargs): """ Builds chart figure data for loading into dash:`dash_core_components.Graph ` components @@ -892,27 +1045,26 @@ def build_figure_data(data_id, chart_type=None, query=None, x=None, y=None, z=No :return: dictionary of series data, min/max ranges of columns used in chart :rtype: dict """ - code = None - try: - if not valid_chart(**dict(x=x, y=y, z=z, chart_type=chart_type, agg=agg, window=window, - rolling_comp=rolling_comp)): - return None, None + if not valid_chart(**dict(x=x, y=y, z=z, chart_type=chart_type, agg=agg, window=window, + rolling_comp=rolling_comp)): + return None, None - data = run_query( - global_state.get_data(data_id), - query, - global_state.get_context_variables(data_id) - ) - code = build_code_export(data_id, query=query) - chart_kwargs = dict(group_col=group, agg=agg, allow_duplicates=chart_type == 'scatter', rolling_win=window, - rolling_comp=rolling_comp) - if chart_type in ZAXIS_CHARTS: - chart_kwargs['z'] = z - del chart_kwargs['group_col'] - data, chart_code = build_chart_data(data, x, y, unlimited_data=True, **chart_kwargs) - return data, code + chart_code - except BaseException as e: - return dict(error=str(e), traceback=str(traceback.format_exc())), code + data = run_query( + global_state.get_data(data_id), + 
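# Illustrative sketch (not part of the patch): stripped of the dash wiring in
# map_builder above, the two new map types are plain plotly traces. Column names in the
# sample frame are hypothetical.
import pandas as pd
import plotly.graph_objs as go

df = pd.DataFrame({'code': ['NY', 'CA', 'TX'],
                   'lat': [40.7, 34.1, 29.8], 'lon': [-74.0, -118.2, -95.4],
                   'val': [10, 20, 30]})

# choropleth: shade US states by a value column
choropleth = go.Figure(
    data=[go.Choropleth(locations=df['code'], locationmode='USA-states',
                        z=df['val'], colorscale='Reds', colorbar_title='val')],
    layout=dict(geo=dict(scope='usa'))
)

# scattergeo: plot lat/lon points, colored by the same value column
scattergeo = go.Figure(
    data=[go.Scattergeo(lon=df['lon'], lat=df['lat'], mode='markers',
                        marker_color=df['val'])]
)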
query, + global_state.get_context_variables(data_id) + ) + if data is None or not len(data): + return None, None + + code = build_code_export(data_id, query=query) + chart_kwargs = dict(group_col=group, group_val=group_val, agg=agg, allow_duplicates=chart_type == 'scatter', + rolling_win=window, rolling_comp=rolling_comp) + if chart_type in ZAXIS_CHARTS: + chart_kwargs['z'] = z + del chart_kwargs['group_col'] + data, chart_code = build_base_chart(data, x, y, unlimited_data=True, **chart_kwargs) + return data, code + chart_code def build_raw_figure_data(data_id, chart_type=None, query=None, x=None, y=None, z=None, group=None, agg=None, @@ -946,23 +1098,37 @@ def build_raw_figure_data(data_id, chart_type=None, query=None, x=None, y=None, :return: dataframe of all data used in chart :rtype: :class:`pandas:pandas.DataFrame` """ - if not valid_chart(**dict(x=x, y=y, z=z, chart_type=chart_type, agg=agg, window=window, - rolling_comp=rolling_comp)): - raise ValueError('invalid chart configuration: {}'.format( - dict(x=x, y=y, z=z, chart_type=chart_type, agg=agg, window=window, rolling_comp=rolling_comp) - )) + chart_params = dict_merge( + dict(x=x, y=y, z=z, chart_type=chart_type, agg=agg, window=window, rolling_comp=rolling_comp), + kwargs + ) + if not valid_chart(**chart_params): + raise ValueError('invalid chart configuration: {}'.format(chart_params)) data = run_query( global_state.get_data(data_id), query, global_state.get_context_variables(data_id) ) + if chart_type == 'maps': + if kwargs.get('map_type') == 'choropleth': + loc, map_val = (kwargs.get(p) for p in ['loc', 'map_val']) + data, _ = retrieve_chart_data(data, loc, map_val) + if agg is not None: + data, _ = build_agg_data(data, loc, map_val, {}, agg) + return data + lat, lon, map_val = (kwargs.get(p) for p in ['lat', 'lon', 'map_val']) + data, _ = retrieve_chart_data(data, lat, lon, map_val) + if agg is not None: + data, _ = build_agg_data(data, lat, lon, {}, agg, z=map_val) + return data + chart_kwargs = dict(group_col=group, agg=agg, allow_duplicates=chart_type == 'scatter', rolling_win=window, rolling_comp=rolling_comp) if chart_type in ZAXIS_CHARTS: chart_kwargs['z'] = z del chart_kwargs['group_col'] - return build_chart_data(data, x, y, unlimited_data=True, return_raw=True, **chart_kwargs) + return build_base_chart(data, x, y, unlimited_data=True, return_raw=True, **chart_kwargs) def build_chart(data_id=None, **inputs): @@ -992,6 +1158,10 @@ def build_chart(data_id=None, **inputs): chart, code = heatmap_builder(data_id, **inputs) return chart, None, code + if inputs.get('chart_type') == 'maps': + chart, code = map_builder(data_id, **inputs) + return chart, None, code + data, code = build_figure_data(data_id, **inputs) if data is None: return None, None, None @@ -1003,7 +1173,9 @@ def build_chart(data_id=None, **inputs): range_data = dict(min=data['min'], max=data['max']) axis_inputs = inputs.get('yaxis') or {} chart_builder = chart_wrapper(data_id, data, inputs) - chart_type, x, y, z, agg, group = (inputs.get(p) for p in ['chart_type', 'x', 'y', 'z', 'agg', 'group']) + chart_type, x, y, z, agg, group = ( + inputs.get(p) for p in ['chart_type', 'x', 'y', 'z', 'agg', 'group'] + ) z = z if chart_type in ZAXIS_CHARTS else None chart_inputs = {k: v for k, v in inputs.items() if k not in ['chart_type', 'x', 'y', 'z', 'group']} @@ -1029,7 +1201,9 @@ def build_chart(data_id=None, **inputs): return cpg_chunker(scatter_charts), range_data, code if chart_type == '3d_scatter': - return scatter_builder(data, x, y, axes_builder, 
chart_builder, z=z, agg=agg), range_data, code + chart = scatter_builder(data, x, y, axes_builder, chart_builder, z=z, agg=agg, + animate=inputs.get('animate', False)) + return chart, range_data, code if chart_type == 'surface': return surface_builder(data, x, y, z, axes_builder, chart_builder, agg=agg), range_data, code @@ -1042,7 +1216,7 @@ def build_chart(data_id=None, **inputs): raise NotImplementedError('chart type: {}'.format(chart_type)) except BaseException as e: - return build_error(str(e), str(traceback.format_exc())), None, code + return build_error(e, traceback.format_exc()), None, code def build_raw_chart(data_id=None, **inputs): @@ -1082,14 +1256,14 @@ def _raw_chart_builder(): chart = heatmap_builder(data_id, **inputs) return chart + if inputs.get('chart_type') == 'maps': + chart = map_builder(data_id, **inputs) + return chart + data, _ = build_figure_data(data_id, **inputs) if data is None: return None - if 'error' in data: - logger.error(data['traceback']) - return None - chart_type, x, y, z, agg = (inputs.get(p) for p in ['chart_type', 'x', 'y', 'z', 'agg']) z = z if chart_type in ZAXIS_CHARTS else None diff --git a/dtale/dash_application/layout.py b/dtale/dash_application/layout.py index c0f93b8da..d0c79c793 100644 --- a/dtale/dash_application/layout.py +++ b/dtale/dash_application/layout.py @@ -1,10 +1,19 @@ +import json + import dash_core_components as dcc import dash_daq as daq import dash_html_components as html +import plotly +from pkg_resources import parse_version + +from dtale.charts.utils import YAXIS_CHARTS, ZAXIS_CHARTS, find_group_vals +from dtale.utils import (ChartBuildingError, classify_type, dict_merge, + flatten_lists, get_dtypes, inner_build_query, + make_list) + -from dtale.charts.utils import YAXIS_CHARTS, ZAXIS_CHARTS -from dtale.utils import (classify_type, dict_merge, flatten_lists, get_dtypes, - inner_build_query, make_list) +def test_plotly_version(version_num): + return parse_version(plotly.__version__) >= parse_version(version_num) def base_layout(github_fork, **kwargs): @@ -137,7 +146,8 @@ def build_option(value, label=None): CHARTS = [ dict(value='line'), dict(value='bar'), dict(value='scatter'), dict(value='pie'), dict(value='wordcloud'), - dict(value='heatmap'), dict(value='3d_scatter', label='3D Scatter'), dict(value='surface') + dict(value='heatmap'), dict(value='3d_scatter', label='3D Scatter'), dict(value='surface'), + dict(value='maps', label='Maps') ] CHART_INPUT_SETTINGS = { 'line': dict(x=dict(type='single'), y=dict(type='multi'), z=dict(display=False), @@ -165,9 +175,20 @@ def build_option(value, label=None): FREQS = ['H', 'H2', 'WD', 'D', 'W', 'M', 'Q', 'Y'] FREQ_LABELS = dict(H='Hourly', H2='Hour', WD='Weekday', W='Weekly', M='Monthly', Q='Quarterly', Y='Yearly') +SCOPES = ['world', 'usa', 'europe', 'asia', 'africa', 'north america', 'south america'] +PROJECTIONS = ['equirectangular', 'mercator', 'orthographic', 'natural earth', 'kavrayskiy7', 'miller', 'robinson', + 'eckert4', 'azimuthal equal area', 'azimuthal equidistant', 'conic equal area', 'conic conformal', + 'conic equidistant', 'gnomonic', 'stereographic', 'mollweide', 'hammer', 'transverse mercator', + 'albers usa', 'winkel tripel', 'aitoff', 'sinusoidal'] + +COLORSCALES = ['Blackbody', 'Bluered', 'Blues', 'Earth', 'Electric', 'Greens', 'Greys', 'Hot', 'Jet', 'Picnic', + 'Portland', 'Rainbow', 'RdBu', 'Reds', 'Viridis', 'YlGnBu', 'YlOrRd'] + +ANIMATION_CHARTS = ['line', 'bar', '3d_scatter'] + def show_input_handler(chart_type): - settings = 
CHART_INPUT_SETTINGS.get(chart_type) or {} + settings = CHART_INPUT_SETTINGS.get(chart_type or 'line') or {} def _show_input(input_id, input_type='single'): cfg = settings.get(input_id, {}) @@ -204,8 +225,12 @@ def build_error(error, tb): :return: error component :rtype: :dash:`dash_html_components.Div ` """ + if isinstance(error, ChartBuildingError): + if error.details: + tb = error.details + error = error.error return html.Div([ - html.I(className='ico-error'), html.Span(str(error)), html.Div(html.Pre(tb), className='traceback') + html.I(className='ico-error'), html.Span(str(error)), html.Div(html.Pre(str(tb)), className='traceback') ], className='dtale-alert alert alert-danger') @@ -244,7 +269,7 @@ def build_input_options(df, **inputs): Builds dropdown options for (X, Y, Z, Group, Barsort & Y-Axis Ranges) with filtering based on currently selected values for the following inputs: x, y, z, group. """ - [chart_type, x, y, z, group] = [inputs.get(p) for p in ['chart_type', 'x', 'y', 'z', 'group']] + chart_type, x, y, z, group = (inputs.get(p) for p in ['chart_type', 'x', 'y', 'z', 'group']) col_opts = list(build_cols(df.columns, get_dtypes(df))) group_val, z_val = (None, z) if chart_type in ZAXIS_CHARTS else (group, None) x_options = [build_option(c, l) for c, l in col_opts if c not in build_selections(y, z_val, group_val)] @@ -255,9 +280,21 @@ def build_input_options(df, **inputs): group_options = [build_option(c, l) for c, l in col_opts if c not in build_selections(x, y, z_val)] barsort_options = [build_option(o) for o in build_selections(x, y)] yaxis_options = [build_option(y2) for y2 in y or []] + return x_options, y_multi_options, y_single_options, z_options, group_options, barsort_options, yaxis_options +def build_map_options(df, type='choropleth', loc=None, lat=None, lon=None, map_val=None): + lat_options = [build_option(c) for c in df.columns if c not in build_selections(lon, map_val)] + lon_options = [build_option(c) for c in df.columns if c not in build_selections(lat, map_val)] + loc_options = [build_option(c) for c in df.columns if c not in build_selections(map_val)] + if type == 'choropleth': + val_options = [build_option(c) for c in df.columns if c not in build_selections(loc)] + else: + val_options = [build_option(c) for c in df.columns if c not in build_selections(lon, lat)] + return loc_options, lat_options, lon_options, val_options + + def bar_input_style(**inputs): """ Sets display CSS property for bar chart inputs @@ -265,12 +302,23 @@ def bar_input_style(**inputs): return dict(display='block' if inputs.get('chart_type') == 'bar' else 'none') +def colorscale_input_style(**inputs): + return dict(display='block' if inputs.get('chart_type') in ['heatmap', 'maps'] else 'none') + + +def animate_input_style(**inputs): + chart_type, cpg = (inputs.get(p) for p in ['chart_type', 'cpg']) + show = not cpg and chart_type in ANIMATION_CHARTS and test_plotly_version('4.4.1') + return dict(display='block' if show else 'none') + + def show_chart_per_group(**inputs): """ Boolean function to determine whether "Chart Per Group" toggle should be displayed or not """ [chart_type, group] = [inputs.get(p) for p in ['chart_type', 'group']] - return show_input_handler(chart_type)('group') and len(group or []) and chart_type not in ['pie', 'wordcloud'] + invalid_type = chart_type in ['pie', 'wordcloud', 'maps'] + return show_input_handler(chart_type)('group') and len(group or []) and not invalid_type def show_yaxis_ranges(**inputs): @@ -291,6 +339,21 @@ def get_yaxis_type_tabs(y): return tabs + 
[build_tab('Multi', 'multi', {'padding': '2px', 'minWidth': '4em'})] +def build_group_val_options(df, group_cols): + group_vals = find_group_vals(df, group_cols) + return [ + build_option(json.dumps(gv), '|'.join([str(gv.get(p, 'NaN')) for p in group_cols])) + for gv in group_vals + ] + + +def main_inputs_and_group_val_display(inputs): + group_cols = make_list(inputs.get('group')) + if not show_input_handler(inputs.get('chart_type', 'line'))('group') or not len(group_cols): + return dict(display='none'), 'col-md-12' + return dict(display='block'), 'col-md-8' + + def charts_layout(df, settings, **inputs): """ Builds main dash inputs with dropdown options populated with the columns of the dataframe associated with the @@ -303,12 +366,13 @@ def charts_layout(df, settings, **inputs): :type param: dict :return: dash markup """ - [chart_type, x, y, z, group, agg] = [inputs.get(p) for p in ['chart_type', 'x', 'y', 'z', 'group', 'agg']] + chart_type, x, y, z, group, agg = (inputs.get(p) for p in ['chart_type', 'x', 'y', 'z', 'group', 'agg']) y = y or [] show_input = show_input_handler(chart_type) show_cpg = show_chart_per_group(**inputs) show_yaxis = show_yaxis_ranges(**inputs) bar_style = bar_input_style(**inputs) + animate_style = animate_input_style(**inputs) options = build_input_options(df, **inputs) x_options, y_multi_options, y_single_options, z_options, group_options, barsort_options, yaxis_options = options @@ -324,10 +388,31 @@ def charts_layout(df, settings, **inputs): ], className='input-group-addon', style={'minWidth': '7em'}) yaxis_type = (inputs.get('yaxis') or {}).get('type') or 'default' yaxis_type_style = {'borderRadius': '0 0.25rem 0.25rem 0'} if yaxis_type == 'default' else None + show_map = chart_type == 'maps' + map_props = ['map_type', 'loc_mode', 'loc', 'lat', 'lon', 'map_val'] + map_type, loc_mode, loc, lat, lon, map_val = (inputs.get(p) for p in map_props) + loc_options, lat_options, lon_options, map_val_options = build_map_options(df, type=map_type, loc=loc, lat=lat, + lon=lon, map_val=map_val) + cscale_style = colorscale_input_style(**inputs) + default_cscale = 'Greens' if chart_type == 'heatmap' else 'Reds' + + group_val_style, main_input_class = main_inputs_and_group_val_display(inputs) + group_val = [json.dumps(gv) for gv in inputs.get('group_val') or []] + + def show_style(show): + return {'display': 'block' if show else 'none'} + + def show_map_style(show): + return {} if show else {'display': 'none'} return html.Div([ dcc.Store(id='query-data', data=inputs.get('query')), dcc.Store(id='input-data', data={k: v for k, v in inputs.items() if k not in ['cpg', 'barmode', 'barsort']}), dcc.Store(id='chart-input-data', data={k: v for k, v in inputs.items() if k in ['cpg', 'barmode', 'barsort']}), + dcc.Store( + id='map-input-data', + data={k: v for k, v in inputs.items() if k in ['map_type', 'map_code', 'lat', 'lon', 'map_val', 'scope', + 'proj']} + ), dcc.Store(id='range-data'), dcc.Store(id='yaxis-data', data=inputs.get('yaxis')), dcc.Store(id='last-chart-input-data', data=inputs), @@ -346,128 +431,233 @@ def charts_layout(df, settings, **inputs): ], className='input-group mr-3')], className='col' ), className='row pt-3 pb-3 charts-filters'), - html.Div([ - build_input('X', dcc.Dropdown( - id='x-dropdown', - options=x_options, - placeholder='Select a column', - value=x, - style=dict(width='inherit'), - )), - build_input('Y', dcc.Dropdown( - id='y-multi-dropdown', - options=y_multi_options, - multi=True, - placeholder='Select a column(s)', - style=dict(width='inherit'), 
- value=y if show_input('y', 'multi') else None - ), className='col', id='y-multi-input', style={'display': 'block' if show_input('y', 'multi') else 'none'}), - build_input('Y', dcc.Dropdown( - id='y-single-dropdown', - options=y_single_options, - placeholder='Select a column', - style=dict(width='inherit'), - value=y[0] if show_input('y') and len(y) else None - ), className='col', id='y-single-input', style={'display': 'block' if show_input('y') else 'none'}), - build_input('Z', dcc.Dropdown( - id='z-dropdown', - options=z_options, - placeholder='Select a column', - style=dict(width='inherit'), - value=z - ), className='col', id='z-input', style={'display': 'block' if show_input('z') else 'none'}), - build_input('Group', dcc.Dropdown( - id='group-dropdown', - options=group_options, - multi=True, - placeholder='Select a group(s)', - value=group, - style=dict(width='inherit'), - ), className='col', id='group-input', style={'display': 'block' if show_input('group') else 'none'}), - ], className='row pt-3 pb-3 charts-filters'), - html.Div([ - build_input('Aggregation', dcc.Dropdown( - id='agg-dropdown', - options=[build_option(v, AGGS[v]) for v in ['count', 'nunique', 'sum', 'mean', 'rolling', 'corr', - 'first', 'last', 'median', 'min', 'max', 'std', 'var', - 'mad', 'prod', 'raw']], - placeholder='Select an aggregation', - style=dict(width='inherit'), - value=agg, - )), - html.Div([ - build_input('Window', dcc.Input( - id='window-input', type='number', placeholder='Enter days', className='form-control text-center', - style={'lineHeight': 'inherit'}, value=inputs.get('window') - )), - build_input('Computation', dcc.Dropdown( - id='rolling-comp-dropdown', - options=[ - build_option('corr', 'Correlation'), - build_option('count', 'Count'), - build_option('cov', 'Covariance'), - build_option('kurt', 'Kurtosis'), - build_option('max', 'Maximum'), - build_option('mean', 'Mean'), - build_option('median', 'Median'), - build_option('min', 'Minimum'), - build_option('skew', 'Skew'), - build_option('std', 'Standard Deviation'), - build_option('sum', 'Sum'), - build_option('var', 'Variance'), - ], - placeholder='Select an computation', - style=dict(width='inherit'), value=inputs.get('rolling_comp') - )) - ], id='rolling-inputs', style=dict(display='block' if agg == 'rolling' else 'none')) - ], className='row pt-3 pb-3 charts-filters'), html.Div( - [ - build_input('Chart Per\nGroup', - html.Div(daq.BooleanSwitch(id='cpg-toggle', on=inputs.get('cpg') or False), - className='toggle-wrapper'), - id='cpg-input', style={'display': 'block' if show_cpg else 'none'}, className='col-auto'), - build_input('Barmode', dcc.Dropdown( - id='barmode-dropdown', - options=[ - build_option('group', 'Group'), - build_option('stack', 'Stack'), - build_option('relative', 'Relative'), - ], - value=inputs.get('barmode') or 'group', - placeholder='Select a mode', - ), className='col-auto addon-min-width', style=bar_style, id='barmode-input'), - build_input('Barsort', dcc.Dropdown( - id='barsort-dropdown', options=barsort_options, value=inputs.get('barsort') - ), className='col-auto addon-min-width', style=bar_style, id='barsort-input'), + [html.Div([ html.Div( - html.Div( - [ - html.Span('Y-Axis', className='input-group-addon'), - html.Div(dcc.Tabs( - id='yaxis-type', - value=yaxis_type, - children=get_yaxis_type_tabs(y), - ), id='yaxis-type-div', className='form-control col-auto pt-3', style=yaxis_type_style), - dcc.Dropdown(id='yaxis-dropdown', options=yaxis_options), - html.Span('Min:', className='input-group-addon col-auto', 
id='yaxis-min-label'), - dcc.Input( - id='yaxis-min-input', type='number', className='form-control col-auto', - style={'lineHeight': 'inherit'} + [ + build_input('X', dcc.Dropdown( + id='x-dropdown', + options=x_options, + placeholder='Select a column', + value=x, + style=dict(width='inherit'), + )), + build_input( + 'Y', + dcc.Dropdown( + id='y-multi-dropdown', + options=y_multi_options, + multi=True, + placeholder='Select a column(s)', + style=dict(width='inherit'), + value=y if show_input('y', 'multi') else None + ), + className='col', + id='y-multi-input', + style=show_style(show_input('y', 'multi')) + ), + build_input('Y', dcc.Dropdown( + id='y-single-dropdown', + options=y_single_options, + placeholder='Select a column', + style=dict(width='inherit'), + value=y[0] if show_input('y') and len(y) else None + ), className='col', id='y-single-input', style=show_style(show_input('y'))), + build_input('Z', dcc.Dropdown( + id='z-dropdown', + options=z_options, + placeholder='Select a column', + style=dict(width='inherit'), + value=z + ), className='col', id='z-input', style=show_style(show_input('z'))), + build_input( + 'Group', + dcc.Dropdown( + id='group-dropdown', + options=group_options, + multi=True, + placeholder='Select a group(s)', + value=group, + style=dict(width='inherit'), ), - html.Span('Max:', className='input-group-addon col-auto', id='yaxis-max-label'), - dcc.Input( - id='yaxis-max-input', type='number', className='form-control col-auto', - style={'lineHeight': 'inherit'} - ) - ], - className='input-group', id='yaxis-min-max-options', - ), - className='col-auto addon-min-width', id='yaxis-input', - style=dict(display='block' if show_yaxis else 'none') + className='col', + id='group-input', + style=show_style(show_input('group')) + ) + ], + id='non-map-inputs', style={} if not show_map else {'display': 'none'}, + className='row pt-3 pb-3 charts-filters' ), - ], - className='row pt-3 pb-5 charts-filters' + html.Div( + [ + build_input('Map Type', dcc.Dropdown( + id='map-type-dropdown', + options=[build_option(v, v.capitalize()) for v in ['choropleth', 'scattergeo']], + value=map_type or 'choropleth', + style=dict(width='inherit'), + className='map-dd' + )), + build_input('Location Mode', dcc.Dropdown( + id='map-loc-mode-dropdown', + options=[build_option(v) for v in ["ISO-3", "USA-states", "country names"]], + style=dict(width='inherit'), + value=loc_mode + ), id='map-loc-mode-input', style=show_map_style(map_type == 'choropleth')), + build_input('Locations', dcc.Dropdown( + id='map-loc-dropdown', + options=loc_options, + placeholder='Select a column', + value=loc, + style=dict(width='inherit'), + ), id='map-loc-input', style=show_map_style(map_type == 'choropleth')), + build_input('Lat', dcc.Dropdown( + id='map-lat-dropdown', + options=lat_options, + placeholder='Select a column', + value=lat, + style=dict(width='inherit'), + ), id='map-lat-input', style=show_map_style(map_type == 'scattergeo')), + build_input('Lon', dcc.Dropdown( + id='map-lon-dropdown', + options=lon_options, + placeholder='Select a column', + style=dict(width='inherit'), + value=lon + ), id='map-lon-input', style=show_map_style(map_type == 'scattergeo')), + build_input('Value', dcc.Dropdown( + id='map-val-dropdown', + options=map_val_options, + placeholder='Select a column', + style=dict(width='inherit'), + value=map_val + )), + build_input('Scope', dcc.Dropdown( + id='map-scope-dropdown', + options=[build_option(v) for v in SCOPES], + style=dict(width='inherit'), + value='world' + ), id='map-scope-input', 
style=show_map_style(map_type == 'scattergeo')), + build_input('Projection', dcc.Dropdown( + id='map-proj-dropdown', + options=[build_option(v) for v in PROJECTIONS], + style=dict(width='inherit'), + value=None + ), id='map-proj-input', style=show_map_style(map_type == 'scattergeo')), + ], + id='map-inputs', className='row pt-3 pb-3 charts-filters', + style={} if show_map else {'display': 'none'} + ), + html.Div([ + build_input('Aggregation', dcc.Dropdown( + id='agg-dropdown', + options=[build_option(v, AGGS[v]) for v in ['count', 'nunique', 'sum', 'mean', 'rolling', + 'corr', 'first', 'last', 'median', 'min', 'max', + 'std', 'var', 'mad', 'prod', 'raw']], + placeholder='Select an aggregation', + style=dict(width='inherit'), + value=agg or 'raw', + )), + html.Div([ + build_input('Window', dcc.Input( + id='window-input', type='number', placeholder='Enter days', + className='form-control text-center', style={'lineHeight': 'inherit'}, + value=inputs.get('window') + )), + build_input('Computation', dcc.Dropdown( + id='rolling-comp-dropdown', + options=[ + build_option('corr', 'Correlation'), + build_option('count', 'Count'), + build_option('cov', 'Covariance'), + build_option('kurt', 'Kurtosis'), + build_option('max', 'Maximum'), + build_option('mean', 'Mean'), + build_option('median', 'Median'), + build_option('min', 'Minimum'), + build_option('skew', 'Skew'), + build_option('std', 'Standard Deviation'), + build_option('sum', 'Sum'), + build_option('var', 'Variance'), + ], + placeholder='Select an computation', + style=dict(width='inherit'), value=inputs.get('rolling_comp') + )) + ], id='rolling-inputs', style=show_style(agg == 'rolling')) + ], className='row pt-3 pb-3 charts-filters'), + html.Div( + [ + build_input('Chart Per\nGroup', + html.Div(daq.BooleanSwitch(id='cpg-toggle', on=inputs.get('cpg') or False), + className='toggle-wrapper'), + id='cpg-input', style=show_style(show_cpg), className='col-auto'), + build_input('Barmode', dcc.Dropdown( + id='barmode-dropdown', + options=[ + build_option('group', 'Group'), + build_option('stack', 'Stack'), + build_option('relative', 'Relative'), + ], + value=inputs.get('barmode') or 'group', + placeholder='Select a mode', + ), className='col-auto addon-min-width', style=bar_style, id='barmode-input'), + build_input('Barsort', dcc.Dropdown( + id='barsort-dropdown', options=barsort_options, value=inputs.get('barsort') + ), className='col-auto addon-min-width', style=bar_style, id='barsort-input'), + html.Div( + html.Div( + [ + html.Span('Y-Axis', className='input-group-addon'), + html.Div( + dcc.Tabs( + id='yaxis-type', + value=yaxis_type, + children=get_yaxis_type_tabs(y), + ), + id='yaxis-type-div', + className='form-control col-auto pt-3', + style=yaxis_type_style + ), + dcc.Dropdown(id='yaxis-dropdown', options=yaxis_options), + html.Span('Min:', className='input-group-addon col-auto', id='yaxis-min-label'), + dcc.Input( + id='yaxis-min-input', type='number', className='form-control col-auto', + style={'lineHeight': 'inherit'} + ), + html.Span('Max:', className='input-group-addon col-auto', id='yaxis-max-label'), + dcc.Input( + id='yaxis-max-input', type='number', className='form-control col-auto', + style={'lineHeight': 'inherit'} + ) + ], + className='input-group', id='yaxis-min-max-options', + ), + className='col-auto addon-min-width', id='yaxis-input', + style=show_style(show_yaxis) + ), + build_input('Colorscale', dcc.Dropdown( + id='colorscale-dropdown', options=[build_option(o) for o in COLORSCALES], + value=inputs.get('colorscale') or 
default_cscale + ), className='col-auto addon-min-width', style=cscale_style, id='colorscale-input'), + build_input( + 'Animate', + html.Div(daq.BooleanSwitch(id='animate-toggle', on=inputs.get('animate') or False), + className='toggle-wrapper'), + id='animate-input', + style=animate_style, + className='col-auto' + ), + ], + className='row pt-3 pb-5 charts-filters' + )], + id='main-inputs', className=main_input_class + ), build_input('Group(s)', dcc.Dropdown( + id='group-val-dropdown', + multi=True, + placeholder='Select a group value(s)', + value=group_val, + style=dict(width='inherit'), + ), className='col-md-4 pt-3 pb-5', id='group-val-input', style=group_val_style)], + className='row' ), dcc.Loading(html.Div(id='chart-content', style={'height': '69vh'}), type='circle'), dcc.Textarea(id="copy-text", style=dict(position='absolute', left='-110%')) diff --git a/dtale/dash_application/views.py b/dtale/dash_application/views.py index 799f573a8..7c3be31cb 100644 --- a/dtale/dash_application/views.py +++ b/dtale/dash_application/views.py @@ -1,3 +1,4 @@ +import json as json from logging import getLogger import dash @@ -7,11 +8,16 @@ from dash.exceptions import PreventUpdate import dtale.global_state as global_state -from dtale.charts.utils import ZAXIS_CHARTS +from dtale.charts.utils import MAX_GROUPS, ZAXIS_CHARTS from dtale.dash_application.charts import build_chart, chart_url_params -from dtale.dash_application.layout import (bar_input_style, base_layout, - build_input_options, charts_layout, +from dtale.dash_application.layout import (animate_input_style, + bar_input_style, base_layout, + build_group_val_options, + build_input_options, + build_map_options, charts_layout, + colorscale_input_style, get_yaxis_type_tabs, + main_inputs_and_group_val_display, show_chart_per_group, show_input_handler, show_yaxis_ranges) @@ -122,6 +128,9 @@ def query_input(query, pathname, curr_query): Output('group-dropdown', 'options'), Output('barsort-dropdown', 'options'), Output('yaxis-dropdown', 'options'), + Output('non-map-inputs', 'style'), + Output('map-inputs', 'style'), + Output('colorscale-input', 'style'), ], [ Input('query-data', 'modified_timestamp'), @@ -131,26 +140,77 @@ def query_input(query, pathname, curr_query): Input('y-single-dropdown', 'value'), Input('z-dropdown', 'value'), Input('group-dropdown', 'value'), + Input('group-val-dropdown', 'value'), Input('agg-dropdown', 'value'), Input('window-input', 'value'), Input('rolling-comp-dropdown', 'value'), ], [State('url', 'pathname'), State('query-data', 'data')] ) - def input_data(_ts, chart_type, x, y_multi, y_single, z, group, agg, window, rolling_comp, pathname, query): + def input_data(_ts, chart_type, x, y_multi, y_single, z, group, group_val, agg, window, rolling_comp, pathname, + query): """ dash callback for maintaining chart input state and column-based dropdown options. This will guard against users selecting the same column for multiple axes. 
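# Illustrative sketch (not part of the patch): the callbacks in this module follow an
# "inputs -> dcc.Store -> chart" pattern: one callback gathers the UI selections into a
# dcc.Store and another rebuilds the chart whenever a store's modified_timestamp
# changes. A minimal standalone version of that pattern (component ids are hypothetical):
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State

app = dash.Dash(__name__)
app.layout = html.Div([
    dcc.Dropdown(id='x-dropdown', options=[{'label': c, 'value': c} for c in ['a', 'b']]),
    dcc.Store(id='input-data'),
    html.Div(id='chart-content'),
])

@app.callback(Output('input-data', 'data'), [Input('x-dropdown', 'value')])
def collect_inputs(x):
    return {'x': x}

@app.callback(Output('chart-content', 'children'),
              [Input('input-data', 'modified_timestamp')], [State('input-data', 'data')])
def render_chart(_ts, inputs):
    return 'chart would be rebuilt for x={}'.format((inputs or {}).get('x'))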
""" y_val = make_list(y_single if chart_type in ZAXIS_CHARTS else y_multi) - inputs = dict(query=query, chart_type=chart_type, x=x, y=y_val, z=z, group=group, agg=agg, window=window, - rolling_comp=rolling_comp) + if group_val is not None: + group_val = [json.loads(gv) for gv in group_val] + inputs = dict(query=query, chart_type=chart_type, x=x, y=y_val, z=z, group=group, group_val=group_val, agg=agg, + window=window, rolling_comp=rolling_comp) data_id = get_data_id(pathname) options = build_input_options(global_state.get_data(data_id), **inputs) x_options, y_multi_options, y_single_options, z_options, group_options, barsort_options, yaxis_options = options + show_map = chart_type == 'maps' + map_style = {} if show_map else {'display': 'none'} + non_map_style = {'display': 'none'} if show_map else {} + cscale_style = colorscale_input_style(chart_type=chart_type) return ( inputs, x_options, y_single_options, y_multi_options, z_options, group_options, barsort_options, - yaxis_options + yaxis_options, non_map_style, map_style, cscale_style + ) + + @dash_app.callback( + [ + Output('map-input-data', 'data'), + Output('map-loc-dropdown', 'options'), + Output('map-lat-dropdown', 'options'), + Output('map-lon-dropdown', 'options'), + Output('map-value-dropdown', 'options'), + Output('map-loc-mode-input', 'style'), + Output('map-loc-input', 'style'), + Output('map-lat-input', 'style'), + Output('map-lon-input', 'style'), + Output('map-scope-input', 'style'), + Output('map-proj-input', 'style'), + ], + [ + Input('map-type-dropdown', 'value'), + Input('map-loc-mode-dropdown', 'value'), + Input('map-loc-dropdown', 'value'), + Input('map-lat-dropdown', 'value'), + Input('map-lon-dropdown', 'value'), + Input('map-val-dropdown', 'value'), + Input('map-scope-dropdown', 'value'), + Input('map-proj-dropdown', 'value') + ], + [State('url', 'pathname')] + ) + def map_data(map_type, loc_mode, loc, lat, lon, map_val, scope, proj, pathname): + data_id = get_data_id(pathname) + map_type = map_type or 'choropleth' + if map_type == 'choropleth': + map_data = dict(map_type=map_type, loc_mode=loc_mode, loc=loc, map_val=map_val) + else: + map_data = dict(map_type=map_type, lat=lat, lon=lon, map_val=map_val, scope=scope, proj=proj) + df = global_state.get_data(data_id) + loc_options, lat_options, lon_options, map_val_options = build_map_options(df, type=map_type, loc=loc, + lat=lat, lon=lon, map_val=map_val) + choro_style = {} if map_type == 'choropleth' else {'display': 'none'} + scatt_style = {} if map_type == 'scattergeo' else {'display': 'none'} + return ( + map_data, loc_options, lat_options, lon_options, map_val_options, choro_style, choro_style, scatt_style, + scatt_style, scatt_style, scatt_style ) @dash_app.callback( @@ -163,7 +223,8 @@ def input_data(_ts, chart_type, x, y_multi, y_single, z, group, agg, window, rol Output('cpg-input', 'style'), Output('barmode-input', 'style'), Output('barsort-input', 'style'), - Output('yaxis-input', 'style') + Output('yaxis-input', 'style'), + Output('animate-input', 'style'), ], [Input('input-data', 'modified_timestamp')], [State('input-data', 'data')] @@ -184,10 +245,11 @@ def input_toggles(_ts, inputs): cpg_style = {'display': 'block' if show_chart_per_group(**inputs) else 'none'} bar_style = bar_input_style(**inputs) yaxis_style = {'display': 'block' if show_yaxis_ranges(**inputs) else 'none'} + animate_style = animate_input_style(**inputs) return ( y_multi_style, y_single_style, z_style, group_style, rolling_style, cpg_style, bar_style, bar_style, - yaxis_style + 
yaxis_style, animate_style ) @dash_app.callback( @@ -196,16 +258,18 @@ def input_toggles(_ts, inputs): Input('cpg-toggle', 'on'), Input('barmode-dropdown', 'value'), Input('barsort-dropdown', 'value'), + Input('colorscale-dropdown', 'value'), + Input('animate-toggle', 'on'), ] ) - def chart_input_data(cpg, barmode, barsort): + def chart_input_data(cpg, barmode, barsort, colorscale, animate): """ dash callback for maintaining selections in chart-formatting inputs - chart per group flag - bar chart mode - bar chart sorting """ - return dict(cpg=cpg, barmode=barmode, barsort=barsort) + return dict(cpg=cpg, barmode=barmode, barsort=barsort, colorscale=colorscale, animate=animate) @dash_app.callback( [ @@ -225,20 +289,22 @@ def chart_input_data(cpg, barmode, barsort): Input('input-data', 'modified_timestamp'), Input('chart-input-data', 'modified_timestamp'), Input('yaxis-data', 'modified_timestamp'), + Input('map-input-data', 'modified_timestamp') ], [ State('url', 'pathname'), State('input-data', 'data'), State('chart-input-data', 'data'), State('yaxis-data', 'data'), + State('map-input-data', 'data'), State('last-chart-input-data', 'data') ] ) - def on_data(_ts1, _ts2, _ts3, pathname, inputs, chart_inputs, yaxis_data, last_chart_inputs): + def on_data(_ts1, _ts2, _ts3, _ts4, pathname, inputs, chart_inputs, yaxis_data, map_data, last_chart_inputs): """ dash callback controlling the building of dash charts """ - all_inputs = dict_merge(inputs, chart_inputs, dict(yaxis=yaxis_data or {})) + all_inputs = dict_merge(inputs, chart_inputs, dict(yaxis=yaxis_data or {}), map_data) if all_inputs == last_chart_inputs: raise PreventUpdate charts, range_data, code = build_chart(get_data_id(pathname), **all_inputs) @@ -327,6 +393,40 @@ def update_yaxis_data(yaxis_type, yaxis_min, yaxis_max, yaxis, yaxis_data, range yaxis_data['data'][yaxis_name] = dict(min=yaxis_min, max=yaxis_max) return yaxis_data + @dash_app.callback( + [Output('group-val-input', 'style'), Output('main-inputs', 'className')], + [Input('input-data', 'modified_timestamp')], + [State('input-data', 'data')] + ) + def main_input_class(ts_, inputs): + return main_inputs_and_group_val_display(inputs) + + @dash_app.callback( + [Output('group-val-dropdown', 'options'), Output('group-val-dropdown', 'value')], + [Input('group-dropdown', 'value')], + [State('url', 'pathname'), State('input-data', 'data'), State('group-val-dropdown', 'value')] + ) + def group_values(group_cols, pathname, inputs, prev_group_vals): + group_cols = make_list(group_cols) + if not show_input_handler(inputs.get('chart_type', 'line'))('group') or not len(group_cols): + return [], None + print('loading group vals...') + data_id = get_data_id(pathname) + group_vals = run_query( + global_state.get_data(data_id), + inputs.get('query'), + global_state.get_context_variables(data_id) + ) + group_vals = build_group_val_options(group_vals, group_cols) + selections = [] + available_vals = [gv['value'] for gv in group_vals] + if prev_group_vals is not None: + selections = [pgv for pgv in prev_group_vals if pgv in available_vals] + if not len(selections) and len(group_vals) <= MAX_GROUPS: + selections = available_vals + + return group_vals, selections + @dash_app.callback( Output('popup-content', 'children'), [Input('url', 'modified_timestamp')], diff --git a/dtale/data_reshapers.py b/dtale/data_reshapers.py index 545903f6e..85ca51231 100644 --- a/dtale/data_reshapers.py +++ b/dtale/data_reshapers.py @@ -1,11 +1,16 @@ import pandas as pd import dtale.global_state as global_state -from 
dtale.utils import run_query +from dtale.utils import make_list, run_query -def flatten_columns(df): - return [' '.join([str(c) for c in col]).strip() for col in df.columns.values] +def flatten_columns(df, columns=None): + if columns is not None: + return [ + ' '.join(['{}-{}'.format(c1, str(c2)) for c1, c2 in zip(make_list(columns), make_list(col_val))]).strip() + for col_val in df.columns.values + ] + return [' '.join([str(c) for c in make_list(col)]).strip() for col in df.columns.values] class DataReshaper(object): @@ -42,12 +47,14 @@ def reshape(self, data): index, columns, values, aggfunc = (self.cfg.get(p) for p in ['index', 'columns', 'values', 'aggfunc']) if aggfunc is not None or len(values) > 1: pivot_data = pd.pivot_table(data, values=values, index=index, columns=columns, aggfunc=aggfunc) - if len(values) > 1: - pivot_data.columns = flatten_columns(pivot_data) - elif len(values) == 1: + if len(values) == 1: pivot_data.columns = pivot_data.columns.droplevel(0) else: pivot_data = data.pivot(index=index, columns=columns, values=values[0]) + if self.cfg.get('columnNameHeaders', False): + pivot_data.columns = flatten_columns(pivot_data, columns=columns) + else: + pivot_data.columns = flatten_columns(pivot_data) pivot_data = pivot_data.rename_axis(None, axis=1) return pivot_data diff --git a/dtale/static/css/dash.css b/dtale/static/css/dash.css index 2b5ddf01d..5f47907b6 100644 --- a/dtale/static/css/dash.css +++ b/dtale/static/css/dash.css @@ -442,7 +442,8 @@ div.tab-container > div.tab:last-child { #agg-dropdown .Select-menu-outer, #barmode-dropdown .Select-menu-outer, #barsort-dropdown .Select-menu-outer, -#yaxis-dropdown .Select-menu-outer { +#yaxis-dropdown .Select-menu-outer, +#colorscale-dropdown .Select-menu-outer{ z-index: 7; } #yaxis-type-div { @@ -480,14 +481,18 @@ div.tab-container > div.tab:last-child { #yaxis-max-input { border-radius: 0 0.25rem 0.25rem 0; } -#cpg-input .input-group-addon { +#cpg-input .input-group-addon, +#animate-input .input-group-addon, +#map-loc-mode-input .input-group-addon { height: 36px; white-space: pre-line; } -#cpg-input > div.input-group div:last-child { +#cpg-input > div.input-group div:last-child, +#animate-input > div.input-group div:last-child{ min-width: 3.75em; } -#cpg-toggle { +#cpg-toggle, +#animate-toggle { margin-top: 3px; } .toggle-wrapper { @@ -500,3 +505,20 @@ div.modebar > div.modebar-group:first-child /* hide plotly "export to png" */ display: none; } +#map-loc-mode-dropdown .Select, +#map-loc-dropdown .Select, +#map-lat-dropdown .Select, +#map-lon-dropdown .Select, +#map-scope-dropdown .Select, +#map-proj-dropdown .Select{ + min-width: 8em; +} + +#map-type-dropdown .Select { + min-width: 10em; +} + +#map-inputs > div.col-auto + div.col-auto { + padding-left: 0; +} + diff --git a/dtale/static/css/main.css b/dtale/static/css/main.css index cc82488ac..bddacb791 100644 --- a/dtale/static/css/main.css +++ b/dtale/static/css/main.css @@ -10131,6 +10131,9 @@ select.form-control:focus, .hoverable:hover, .hoverable:focus, .hoverable:active { color: #2a91d1; } +li.hoverable:hover { + border-bottom: solid 1px #a7b3b7; +} .hoverable:focus { outline: 0; @@ -10161,6 +10164,16 @@ select.form-control:focus, text-align: left; color: #404040; } +.hoverable__content.menu-description { + top: -0.8em; + left: 11.5em; +} +.hoverable__content.build-code { + width: auto; + right: unset; + left: 0; + top: 3em; +} .hoverable__content.copy-tt-bottom { padding: .5em .5em; width: 10em; @@ -10195,6 +10208,16 @@ div.hoverable.label > div.hoverable__content 
{ border-right: 0.5em solid transparent; border-left: 0.5em solid transparent; } +.hoverable__content.menu-description::before { + left: -0.7em; + top: 1em; + right: inherit; + transform: rotate(270deg); +} +.hoverable__content.build-code::before { + right: unset; + left: 2em; +} .hoverable__content.copy-tt-top::before { bottom: unset; top: 95%; @@ -10218,6 +10241,16 @@ div.hoverable.label > div.hoverable__content { border-right: 0.5em solid transparent; border-left: 0.5em solid transparent; } +.hoverable__content.menu-description::after { + left: -0.6em; + top: 1em; + right: inherit; + transform: rotate(270deg); +} +.hoverable__content.build-code::after { + right: unset; + left: 2em; +} .hoverable__content.copy-tt-top::after { bottom: unset; top: calc(92% + .1em); diff --git a/dtale/utils.py b/dtale/utils.py index 99718640d..2818ec589 100644 --- a/dtale/utils.py +++ b/dtale/utils.py @@ -6,6 +6,7 @@ import socket import sys import time +import traceback from builtins import map, object from logging import getLogger @@ -550,6 +551,25 @@ def jsonify(return_data={}, **kwargs): return _jsonify(return_data) +class ChartBuildingError(Exception): + """ + Exception for signalling there was an issue constructing the data for your chart. + """ + def __init__(self, error, details=None): + super(ChartBuildingError, self).__init__('Chart Error') + self.error = error + self.details = details + + +def jsonify_error(e): + tb = traceback.format_exc() + if isinstance(e, ChartBuildingError): + if e.details: + tb = e.details + e = e.error + return jsonify(dict(error=str(e), traceback=str(tb))) + + def find_selected_column(data, col): """ In case we come across a series which after reset_index() diff --git a/dtale/views.py b/dtale/views.py index 2e289d491..579f64f01 100644 --- a/dtale/views.py +++ b/dtale/views.py @@ -17,7 +17,7 @@ import dtale.global_state as global_state from dtale import dtale -from dtale.charts.utils import build_chart +from dtale.charts.utils import build_base_chart from dtale.cli.clickutils import retrieve_meta_info_and_version from dtale.column_builders import ColumnBuilder from dtale.column_filters import ColumnFilter @@ -32,9 +32,9 @@ get_bool_arg, get_dtypes, get_int_arg, get_json_arg, get_str_arg, grid_columns, grid_formatter, json_date, json_float, json_int, json_timestamp, jsonify, - make_list, retrieve_grid_params, run_query, - running_with_flask_debug, running_with_pytest, - sort_df_for_grid) + jsonify_error, make_list, retrieve_grid_params, + run_query, running_with_flask_debug, + running_with_pytest, sort_df_for_grid) logger = getLogger(__name__) @@ -686,7 +686,7 @@ def _load_process(data_id): processes = sorted([_load_process(data_id) for data_id in global_state.get_data()], key=lambda p: p['ts']) return jsonify(dict(data=processes, success=True)) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/update-settings/') @@ -705,7 +705,7 @@ def update_settings(data_id): global_state.set_settings(data_id, updated_settings) return jsonify(dict(success=True)) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) def refresh_col_indexes(data_id): @@ -757,7 +757,7 @@ def update_column_position(data_id): refresh_col_indexes(data_id) return jsonify(success=True) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/update-locked/') @@ 
-789,7 +789,7 @@ def update_locked(data_id): refresh_col_indexes(data_id) return jsonify(success=True) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/update-visibility/', methods=['POST']) @@ -819,7 +819,7 @@ def update_visibility(data_id): global_state.set_dtypes(data_id, curr_dtypes) return jsonify(success=True) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/build-column/') @@ -869,7 +869,7 @@ def build_column(data_id): global_state.set_history(data_id, curr_history) return jsonify(success=True) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/reshape/') @@ -887,7 +887,7 @@ def reshape_data(data_id): global_state.set_settings(instance._data_id, dict_merge(curr_settings, dict(startup_code=builder.build_code()))) return jsonify(success=True, data_id=instance._data_id) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/test-filter/') @@ -917,7 +917,7 @@ def test_filter(data_id): global_state.set_settings(data_id, curr_settings) return jsonify(dict(success=True)) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/dtypes/') @@ -1027,7 +1027,7 @@ def describe(data_id, column): return_data['code'] = '\n'.join(code) return jsonify(return_data) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/column-filter-data//') @@ -1047,7 +1047,7 @@ def get_column_filter_data(data_id, column): ret['uniques'] = vals return jsonify(ret) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/save-column-filter//') @@ -1057,7 +1057,7 @@ def save_column_filter(data_id, column): curr_filters = (global_state.get_settings(data_id) or {}).get('columnFilters') or {} return jsonify(success=True, currFilters=curr_filters) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/data/') @@ -1136,7 +1136,7 @@ def get_data(data_id): return_data = dict(results=results, columns=columns, total=total) return jsonify(return_data) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/data-export/') @@ -1174,10 +1174,18 @@ def get_column_analysis(data_id): :param top: the number of top values to display in your value counts, default is 100 :returns: JSON {results: DATA, desc: output from pd.DataFrame[col].describe(), success: True/False} """ + def handle_top(df, top): + if top is not None: + top = int(top) + return df[:top] if top > 0 else df[top:], top + elif len(df) > 100: + top = 100 + return df[:top], top + return df, len(df) + try: col = get_str_arg(request, 'col', 'values') bins = get_int_arg(request, 'bins', 20) - top = get_int_arg(request, 'top', 100) ordinal_col = get_str_arg(request, 'ordinalCol') ordinal_agg = get_str_arg(request, 'ordinalAgg', 'sum') category_col = get_str_arg(request, 'categoryCol') @@ -1213,12 +1221,11 @@ def get_column_analysis(data_id): ).format(col=selected_col, 
ordinal=ordinal_col, agg=ordinal_agg)) hist.index.name = 'labels' hist = hist.reset_index() - if top is not None: - top = int(top) - hist = hist[:top] if top > 0 else hist[top:] + hist, top = handle_top(hist, get_int_arg(request, 'top')) col_types = grid_columns(hist) f = grid_formatter(col_types, nan_display=None) return_data = f.format_lists(hist) + return_data['top'] = top elif data_type == 'categories': hist = data.groupby(category_col)[[selected_col]].agg(['count', category_agg]) hist.columns = hist.columns.droplevel(0) @@ -1229,11 +1236,10 @@ def get_column_analysis(data_id): ) hist.index.name = 'labels' hist = hist.reset_index() - if top is not None: - top = int(top) - hist = hist[:top] if top > 0 else hist[top:] + hist, top = handle_top(hist, get_int_arg(request, 'top')) f = grid_formatter(grid_columns(hist), nan_display=None) return_data = f.format_lists(hist) + return_data['top'] = top elif data_type == 'histogram': hist_data, hist_labels = np.histogram(data, bins=bins) hist_data = [json_float(h) for h in hist_data] @@ -1246,7 +1252,7 @@ def get_column_analysis(data_id): cols = global_state.get_dtypes(data_id) return jsonify(code='\n'.join(code), query=query, cols=cols, dtype=dtype, chart_type=data_type, **return_data) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/correlations/') @@ -1316,7 +1322,7 @@ def get_correlations(data_id): f = grid_formatter(col_types, nan_display=None) return jsonify(data=f.format_dicts(data.itertuples()), dates=valid_date_cols, rolling=rolling, code=code) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/chart-data/') @@ -1357,11 +1363,12 @@ def get_chart_data(data_id): allow_duplicates = get_bool_arg(request, 'allowDupes') window = get_int_arg(request, 'rollingWin') comp = get_str_arg(request, 'rollingComp') - data, code = build_chart(data, x, y, group_col, agg, allow_duplicates, rolling_win=window, rolling_comp=comp) + data, code = build_base_chart(data, x, y, group_col=group_col, agg=agg, allow_duplicates=allow_duplicates, + rolling_win=window, rolling_comp=comp) data['success'] = True return jsonify(data) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/correlations-ts/') @@ -1415,12 +1422,12 @@ def get_correlations_ts(data_id): ).format(col1=col1, col2=col2, date_col=date_col, cols="', '".join(cols))) data.columns = ['date', 'corr'] code.append("corr_ts.columns = ['date', 'corr']") - return_data, _code = build_chart(data.fillna(0), 'date', 'corr') + return_data, _code = build_base_chart(data.fillna(0), 'date', 'corr') return_data['success'] = True return_data['code'] = '\n'.join(code) return jsonify(return_data) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) @dtale.route('/scatter/') @@ -1515,14 +1522,14 @@ def get_scatter(data_id): code='\n'.join(code), error='Dataset exceeds 15,000 records, cannot render scatter. Please apply filter...' 
) - data, _code = build_chart(data, cols[0], y_cols, allow_duplicates=True) + data, _code = build_base_chart(data, cols[0], y_cols, allow_duplicates=True) data['x'] = cols[0] data['y'] = cols[1] data['stats'] = stats data['code'] = '\n'.join(code) return jsonify(data) except BaseException as e: - return jsonify(dict(error=str(e), traceback=str(traceback.format_exc()))) + return jsonify_error(e) def build_context_variables(data_id, new_context_vars=None): diff --git a/package.json b/package.json index 2650546de..8bf87d494 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dtale", - "version": "1.8.1", + "version": "1.8.2", "description": "Visualizer for Pandas Data Structures", "main": "main.js", "directories": { diff --git a/setup.py b/setup.py index 036785c82..628ba42e2 100644 --- a/setup.py +++ b/setup.py @@ -50,7 +50,7 @@ def run_tests(self): setup( name="dtale", - version="1.8.1", + version="1.8.2", author="MAN Alpha Technology", author_email="ManAlphaTech@man.com", description="Web Client for Visualizing Pandas Objects", diff --git a/static/__tests__/dtale/DataViewer-base-test.jsx b/static/__tests__/dtale/DataViewer-base-test.jsx index 8ee78853a..045675a84 100644 --- a/static/__tests__/dtale/DataViewer-base-test.jsx +++ b/static/__tests__/dtale/DataViewer-base-test.jsx @@ -116,12 +116,12 @@ describe("DataViewer tests", () => { .map(s => s.text()), _.concat( ["Describe", "Filter", "Build Column", "Reshape", "Correlations", "Charts", "Heat Map", "Highlight Dtypes"], - ["Instances 1", "Code Export", "Export", "Resize", "About", "Shutdown"] + ["Instances 1", "Code Export", "Export", "Refresh Widths", "About", "Shutdown"] ), "Should render default menu options" ); setTimeout(() => { - clickMainMenuButton(result, "Resize"); + clickMainMenuButton(result, "Refresh Widths"); clickMainMenuButton(result, "Shutdown", "a"); done(); }, 400); diff --git a/static/__tests__/dtale/reshape/DataViewer-reshape-aggregate-test.jsx b/static/__tests__/dtale/reshape/DataViewer-reshape-aggregate-test.jsx index 364f1355a..ec809541d 100644 --- a/static/__tests__/dtale/reshape/DataViewer-reshape-aggregate-test.jsx +++ b/static/__tests__/dtale/reshape/DataViewer-reshape-aggregate-test.jsx @@ -1,3 +1,4 @@ +/* eslint max-lines: "off" */ import { mount } from "enzyme"; import React from "react"; import { Provider } from "react-redux"; @@ -115,6 +116,13 @@ describe("DataViewer tests", () => { .find("i") .first() .simulate("click"); + result + .find("div.modal-body") + .find("div.row") + .last() + .find("button") + .last() + .simulate("click"); result .find("div.modal-footer") .first() @@ -125,10 +133,11 @@ describe("DataViewer tests", () => { result.update(); t.equal(result.find(Reshape).length, 1, "should hide reshape"); result - .find(Reshape) .find("div.modal-body") - .find("button") + .find("div.row") .last() + .find("button") + .first() .simulate("click"); result .find("div.modal-footer") @@ -188,6 +197,13 @@ describe("DataViewer tests", () => { .at(2) .instance() .onChange({ value: "col2" }); + result + .find("div.modal-body") + .find("div.row") + .last() + .find("button") + .last() + .simulate("click"); result .find("div.modal-footer") .first() @@ -196,10 +212,11 @@ describe("DataViewer tests", () => { .simulate("click"); setTimeout(() => { result - .find(Reshape) .find("div.modal-body") - .find("button") + .find("div.row") .last() + .find("button") + .first() .simulate("click"); result .find("div.modal-footer") diff --git a/static/__tests__/dtale/reshape/DataViewer-reshape-pivot-test.jsx 
b/static/__tests__/dtale/reshape/DataViewer-reshape-pivot-test.jsx index 106dd74da..7f2c8075c 100644 --- a/static/__tests__/dtale/reshape/DataViewer-reshape-pivot-test.jsx +++ b/static/__tests__/dtale/reshape/DataViewer-reshape-pivot-test.jsx @@ -119,11 +119,18 @@ describe("DataViewer tests", () => { pivotInputs .at(2) .instance() - .onChange({ value: "col3" }); + .onChange([{ value: "col3" }]); pivotInputs .last() .instance() .onChange({ value: "count" }); + result + .find("div.modal-body") + .find("div.row") + .last() + .find("button") + .last() + .simulate("click"); result .find("div.modal-footer") .first() @@ -134,10 +141,11 @@ describe("DataViewer tests", () => { result.update(); t.equal(result.find(Reshape).length, 1, "should hide reshape"); result - .find(Reshape) .find("div.modal-body") - .find("button") + .find("div.row") .last() + .find("button") + .first() .simulate("click"); result .find("div.modal-footer") diff --git a/static/__tests__/dtale/reshape/DataViewer-reshape-transpose-test.jsx b/static/__tests__/dtale/reshape/DataViewer-reshape-transpose-test.jsx index f1fa4a6a2..c311cc4f1 100644 --- a/static/__tests__/dtale/reshape/DataViewer-reshape-transpose-test.jsx +++ b/static/__tests__/dtale/reshape/DataViewer-reshape-transpose-test.jsx @@ -112,6 +112,13 @@ describe("DataViewer tests", () => { .last() .instance() .onChange([{ value: "col2" }]); + result + .find("div.modal-body") + .find("div.row") + .last() + .find("button") + .last() + .simulate("click"); result .find("div.modal-footer") .first() @@ -122,10 +129,11 @@ describe("DataViewer tests", () => { result.update(); t.equal(result.find(Reshape).length, 1, "should hide reshape"); result - .find(Reshape) .find("div.modal-body") - .find("button") + .find("div.row") .last() + .find("button") + .first() .simulate("click"); result .find("div.modal-footer") diff --git a/static/__tests__/filters/ColumnFilter-numeric-test.jsx b/static/__tests__/filters/ColumnFilter-numeric-test.jsx index b6e1dd5aa..a7f34e633 100644 --- a/static/__tests__/filters/ColumnFilter-numeric-test.jsx +++ b/static/__tests__/filters/ColumnFilter-numeric-test.jsx @@ -73,28 +73,43 @@ describe("ColumnFilter numeric tests", () => { .find("div.row") .first() .find("button") - .at(2) + .at(1) .simulate("click"); + t.deepEqual(result.state().cfg, { + type: "int", + operand: "ne", + value: [1], + }); setTimeout(() => { result.update(); result .find(NumericFilter) - .find("input") - .first() - .simulate("change", { target: { value: "a" } }); - result - .find(NumericFilter) - .find("input") + .find("div.row") .first() - .simulate("change", { target: { value: "0" } }); + .find("button") + .at(3) + .simulate("click"); setTimeout(() => { result.update(); - t.deepEqual(result.state().cfg, { - type: "int", - operand: ">", - value: 0, - }); - done(); + result + .find(NumericFilter) + .find("input") + .first() + .simulate("change", { target: { value: "a" } }); + result + .find(NumericFilter) + .find("input") + .first() + .simulate("change", { target: { value: "0" } }); + setTimeout(() => { + result.update(); + t.deepEqual(result.state().cfg, { + type: "int", + operand: ">", + value: 0, + }); + done(); + }, 400); }, 400); }, 400); }, 400); diff --git a/static/__tests__/filters/ColumnFilter-string-test.jsx b/static/__tests__/filters/ColumnFilter-string-test.jsx index 5a62b9f23..e083a42b6 100644 --- a/static/__tests__/filters/ColumnFilter-string-test.jsx +++ b/static/__tests__/filters/ColumnFilter-string-test.jsx @@ -54,8 +54,24 @@ describe("ColumnFilter string tests", () => { 
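The column-filter tests in this area exercise the new "not equal" toggle: clicking the operand button labelled with the \u2260 symbol (NE in NumericFilter.jsx) flips the saved config's operand to "ne" for both numeric and string columns. dtale's actual translation of that config into a pandas query is not shown in this section, so the snippet below is only an assumption-laden sketch of what an "ne" operand can amount to on the pandas side; the helper name and the config shape are invented for illustration.

import pandas as pd


def build_query_clause(col, cfg):
    # Hypothetical helper (not dtale's code): render a saved filter config such as
    # {"type": "string", "operand": "ne", "value": ["a"]} as a pandas query clause.
    fmt = "'{}'".format if cfg["type"] == "string" else str
    values = ", ".join(fmt(v) for v in cfg["value"])
    op = "not in" if cfg.get("operand") == "ne" else "in"
    return "{col} {op} [{values}]".format(col=col, op=op, values=values)


df = pd.DataFrame(dict(col1=["a", "b", "a"], col2=[1, 2, 3]))
print(df.query(build_query_clause("col1", dict(type="string", operand="ne", value=["a"]))))  # rows where col1 != 'a'
print(df.query(build_query_clause("col2", dict(type="int", operand="=", value=[1, 3]))))  # rows where col2 is 1 or 3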
.onChange([{ value: "a" }]); setTimeout(() => { result.update(); - t.deepEqual(result.state().cfg, { type: "string", value: ["a"] }); - done(); + t.deepEqual(result.state().cfg, { + type: "string", + operand: "=", + value: ["a"], + }); + result + .find("button") + .last() + .simulate("click"); + setTimeout(() => { + result.update(); + t.deepEqual(result.state().cfg, { + type: "string", + operand: "ne", + value: ["a"], + }); + done(); + }, 400); }, 400); }, 400); }, 400); diff --git a/static/__tests__/iframe/DataViewer-base-test.jsx b/static/__tests__/iframe/DataViewer-base-test.jsx index d30c226b4..3daaba73f 100644 --- a/static/__tests__/iframe/DataViewer-base-test.jsx +++ b/static/__tests__/iframe/DataViewer-base-test.jsx @@ -145,7 +145,7 @@ describe("DataViewer iframe tests", () => { .map(s => s.text()), _.concat( ["Describe", "Filter", "Build Column", "Reshape", "Correlations", "Charts", "Heat Map", "Highlight Dtypes"], - ["Instances 1", "Code Export", "Export", "Resize", "About", "Refresh", "Open Popup", "Shutdown"] + ["Instances 1", "Code Export", "Export", "Refresh Widths", "About", "Reload Data", "Open Popup", "Shutdown"] ), "Should render default menu options" ); @@ -323,8 +323,8 @@ describe("DataViewer iframe tests", () => { .simulate("click"); exportURL = window.open.mock.calls[window.open.mock.calls.length - 1][0]; t.ok(_.startsWith(exportURL, "/dtale/data-export/1") && _.includes(exportURL, "tsv=true")); - clickMainMenuButton(result, "Resize"); - clickMainMenuButton(result, "Refresh"); + clickMainMenuButton(result, "Refresh Widths"); + clickMainMenuButton(result, "Reload Data"); expect(window.location.reload).toHaveBeenCalled(); clickMainMenuButton(result, "Shutdown", "a"); clickColMenuButton(result, "Formats"); diff --git a/static/__tests__/iframe/DataViewer-within-iframe-test.jsx b/static/__tests__/iframe/DataViewer-within-iframe-test.jsx index f5d0902a0..50c209946 100644 --- a/static/__tests__/iframe/DataViewer-within-iframe-test.jsx +++ b/static/__tests__/iframe/DataViewer-within-iframe-test.jsx @@ -89,7 +89,7 @@ describe("DataViewer within iframe tests", () => { .map(s => s.text()), _.concat( ["Describe", "Filter", "Build Column", "Reshape", "Correlations", "Charts", "Heat Map", "Highlight Dtypes"], - ["Instances 1", "Code Export", "Export", "Resize", "About", "Refresh", "Open Popup", "Shutdown"] + ["Instances 1", "Code Export", "Export", "Refresh Widths", "About", "Reload Data", "Open Popup", "Shutdown"] ), "Should render default iframe menu options" ); diff --git a/static/dtale/DataViewerMenu.jsx b/static/dtale/DataViewerMenu.jsx index 99cbbc9bc..216d2acf8 100644 --- a/static/dtale/DataViewerMenu.jsx +++ b/static/dtale/DataViewerMenu.jsx @@ -6,6 +6,7 @@ import { connect } from "react-redux"; import ConditionalRender from "../ConditionalRender"; import { openChart } from "../actions/charts"; import menuFuncs from "./dataViewerMenuUtils"; +import Descriptions from "./menu-descriptions.json"; class ReactDataViewerMenu extends React.Component { render() { @@ -20,7 +21,7 @@ class ReactDataViewerMenu extends React.Component { } }; const openCodeExport = () => menuFuncs.open("/dtale/popup/code-export", dataId, 450, 700); - const resize = () => + const refreshWidths = () => this.props.propagateState({ columns: _.map(this.props.columns, c => _.assignIn({}, c)), }); @@ -43,47 +44,52 @@ class ReactDataViewerMenu extends React.Component { style={{ minWidth: "11em", top: "1em", left: "0.5em" }}>
[The JSX markup for this hunk was lost in extraction. What remains shows that each main-menu item under the D-TALE header (Describe, Filter, Build Column, Reshape, Correlations, Charts, Heat Map, Highlight Dtypes, Instances, Code Export, Export, Refresh Widths, About, Shutdown) is wrapped in a hoverable element whose tooltip renders the matching entry from menu-descriptions.json ({Descriptions.describe}, {Descriptions.filter}, {Descriptions.build}, {Descriptions.reshape}, {Descriptions.corr}, {Descriptions.charts}, {Descriptions.heatmap}, {Descriptions.highlight_dtypes}, {Descriptions.instances}, {Descriptions.code}, {Descriptions.export}, {Descriptions.widths}, {Descriptions.about}, {Descriptions.shutdown}), and that the former "Resize" item now reads "Refresh Widths" and calls the refreshWidths handler.]
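One pandas detail from the grouping changes is worth a standalone illustration: the chart tests further down (tests/dtale/test_charts.py) pin group_filter_handler('foo', 'nan', 'S') to the self-inequality filter "foo != foo". NaN is the only value that is not equal to itself, so that expression selects exactly the missing-value group. A minimal check in plain pandas, with nothing dtale-specific assumed:

import numpy as np
import pandas as pd

df = pd.DataFrame(dict(foo=["bar", np.nan, "baz"], val=[1, 2, 3]))

print(df.query("foo != foo"))    # only the row where foo is NaN
print(df.query("foo == 'bar'"))  # an ordinary group value, as selected from the group-val dropdown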
diff --git a/static/dtale/Header.jsx b/static/dtale/Header.jsx index 8b5745297..d8522d717 100644 --- a/static/dtale/Header.jsx +++ b/static/dtale/Header.jsx @@ -51,7 +51,8 @@ class ReactHeader extends React.Component { if (columnIndex == 0) { return this.renderMenu(); } - const colName = _.get(gu.getCol(columnIndex, this.props), "name"); + const colCfg = gu.getCol(columnIndex, this.props); + const colName = _.get(colCfg, "name"); const toggleId = gu.buildToggleId(colName); const menuHandler = menuUtils.openMenu( `${colName}Actions`, @@ -61,11 +62,17 @@ class ReactHeader extends React.Component { ignoreMenuClicks ); const sortDir = (_.find(sortInfo, ([col, _dir]) => col === colName) || [null, null])[1]; + let headerStyle = _.assignIn({}, style); + let colNameMarkup = colName; + if (this.props.dtypeHighlighting) { + headerStyle = _.assignIn(gu.dtypeHighlighting(colCfg), headerStyle); + colNameMarkup =
[JSX lost in extraction: colNameMarkup becomes an element wrapping {colName} whose hover title exposes the column's dtype, and the header cell renders with headerStyle in place of style, emitting {_.get(SORT_CHARS, sortDir, "")} followed by {colNameMarkup} instead of the bare {colName}.]
); @@ -82,6 +89,7 @@ ReactHeader.propTypes = { rowCount: PropTypes.number, toggleColumnMenu: PropTypes.func, hideColumnMenu: PropTypes.func, + dtypeHighlighting: PropTypes.bool, }; const ReduxHeader = connect( diff --git a/static/dtale/menu-descriptions.json b/static/dtale/menu-descriptions.json new file mode 100644 index 000000000..65d71c3d2 --- /dev/null +++ b/static/dtale/menu-descriptions.json @@ -0,0 +1,16 @@ +{ + "describe": "Describe a column's values (top unique values, min, max, sum, standard deviation, variance, ...)", + "filter": "Define a custom filter to apply to the main grid and to charts", + "build": "Create one or more new columns from existing columns or from random values", + "reshape": "Create a new dataset by pivoting, aggregating (groupby) or transposing the current data", + "corr": "Display a correlation matrix and related charts", + "charts": "Plot charts (line, bar, scatter, pie, wordcloud, ...)", + "heatmap": "Display a heat map over the numeric values in the main grid", + "highlight_dtypes": "Highlight columns based on their pandas dtype; hover over a column name to see its dtype", + "instances": "Show the datasets currently loaded into D-Tale", + "code": "Code snippet of the commands run against your data to produce this grid", + "export": "Download your DataFrame as a CSV or TSV file", + "widths": "Auto-resize the column widths", + "about": "Links to the source code repository and documentation", + "shutdown": "Shut down D-Tale; all open datasets are closed without being saved, so export your DataFrame first if needed" +} \ No newline at end of file diff --git a/static/filters/ColumnFilter.jsx b/static/filters/ColumnFilter.jsx index 9f054d88c..b473949f5 100644 --- a/static/filters/ColumnFilter.jsx +++ b/static/filters/ColumnFilter.jsx @@ -89,7 +89,7 @@ class ColumnFilter extends React.Component { this.updateState(_.assignIn({}, this.state.cfg, { type: colType, missing: !missing })); return (
  • - {showIcon && } + {showIcon && }
    Show Only Missing diff --git a/static/filters/NumericFilter.jsx b/static/filters/NumericFilter.jsx index 3eb577acf..6446e8b3b 100644 --- a/static/filters/NumericFilter.jsx +++ b/static/filters/NumericFilter.jsx @@ -3,15 +3,19 @@ import PropTypes from "prop-types"; import React from "react"; import Select, { createFilter } from "react-select"; -const OPERANDS = [ +const NE = "\u2260"; +const EQ_TOGGLE = [ ["=", "Equals"], + [NE, "Not Equals"], +]; +const OPERANDS = _.concat(EQ_TOGGLE, [ ["<", "Less Than"], [">", "Greater Than"], ["<=", "Less Than or Equal"], [">=", "Greater Than or Equal"], ["[]", "Range (Inclusive)"], ["()", "Range (Exclusive)"], -]; +]); function createValueInput(updateState, { missing }, state, prop) { return ( @@ -32,12 +36,12 @@ function createValueInput(updateState, { missing }, state, prop) { function buildState({ columnFilters, selectedCol, min, max }) { const cfg = _.get(columnFilters, selectedCol, { operand: "=" }); - const selected = cfg.operand === "=" ? _.map(cfg.value || null, v => ({ value: v })) : null; - const value = cfg.operand === "=" ? "" : cfg.value; + const selected = _.includes(["=", "ne"], cfg.operand) ? _.map(cfg.value || null, v => ({ value: v })) : null; + const value = _.includes(["=", "ne"], cfg.operand) ? "" : cfg.value; const { operand } = cfg; return { selected, - operand, + operand: operand === "ne" ? NE : "=", minimum: (cfg.min || min) + "", maximum: (cfg.max || max) + "", value: value + "", @@ -72,9 +76,11 @@ class NumericFilter extends React.Component { cfg.value = numVal; }; switch (cfg.operand) { - case "=": { + case "=": + case NE: { if (colType === "int") { cfg.value = _.map(updatedState.selected || [], "value"); + cfg.operand = cfg.operand === NE ? "ne" : cfg.operand; } else { updateCfgForVal(); } @@ -126,6 +132,7 @@ class NumericFilter extends React.Component { createValueInput(this.updateState, this.props, this.state, "maximum"), ]; case "=": + case NE: default: { if (colType === "float") { return createValueInput(this.updateState, this.props, this.state, "value"); @@ -191,4 +198,4 @@ NumericFilter.propTypes = { missing: PropTypes.bool, }; -export { NumericFilter }; +export { NumericFilter, EQ_TOGGLE, NE }; diff --git a/static/filters/StringFilter.jsx b/static/filters/StringFilter.jsx index 5a9740771..5a3162895 100644 --- a/static/filters/StringFilter.jsx +++ b/static/filters/StringFilter.jsx @@ -3,35 +3,71 @@ import PropTypes from "prop-types"; import React from "react"; import Select, { createFilter } from "react-select"; +import { EQ_TOGGLE, NE } from "./NumericFilter"; + class StringFilter extends React.Component { constructor(props) { super(props); - const selected = _.map(_.get(props.columnFilters, [props.selectedCol, "value"], null), v => ({ value: v })); - this.state = { selected: selected }; + const currFilter = _.get(props.columnFilters, props.selectedCol, {}); + currFilter.operand = currFilter.operand === "ne" ? 
NE : "="; + const selected = _.map(_.get(currFilter, "value", null), v => ({ + value: v, + })); + this.state = { selected: selected, operand: currFilter.operand }; this.updateState = this.updateState.bind(this); } - updateState(selected) { - const cfg = { type: "string", value: _.map(selected || [], "value") }; - this.setState({ selected }, () => this.props.updateState(cfg)); + updateState(state) { + const updatedState = _.assignIn({}, this.state, state); + const cfg = { + type: "string", + value: _.map(updatedState.selected || [], "value"), + operand: updatedState.operand, + }; + cfg.operand = cfg.operand === NE ? "ne" : cfg.operand; + this.setState(updatedState, () => this.props.updateState(cfg)); } render() { - return ( - ({ value: o }))} + getOptionLabel={_.property("value")} + getOptionValue={_.property("value")} + value={this.state.selected} + onChange={selected => this.updateState({ selected })} + isClearable + filterOption={createFilter({ ignoreAccents: false })} // required for performance reasons! + /> +
    +
    , + ]; } } StringFilter.displayName = "StringFilter"; diff --git a/static/popups/analysis/ColumnAnalysis.jsx b/static/popups/analysis/ColumnAnalysis.jsx index ce5417e6c..65b886b97 100644 --- a/static/popups/analysis/ColumnAnalysis.jsx +++ b/static/popups/analysis/ColumnAnalysis.jsx @@ -68,6 +68,7 @@ class ReactColumnAnalysis extends React.Component { newState.type = _.get(fetchedChartData, "chart_type", "histogram"); newState.query = _.get(fetchedChartData, "query"); newState.cols = _.get(fetchedChartData, "cols", []); + newState.top = _.get(fetchedChartData, "top", null); const builder = ctx => { if (!_.get(fetchedChartData, "data", []).length) { return null; @@ -99,7 +100,7 @@ class ReactColumnAnalysis extends React.Component { filters = (
    diff --git a/static/popups/analysis/ColumnAnalysisFilters.jsx b/static/popups/analysis/ColumnAnalysisFilters.jsx index 5d34d5a98..a8b36b7a3 100644 --- a/static/popups/analysis/ColumnAnalysisFilters.jsx +++ b/static/popups/analysis/ColumnAnalysisFilters.jsx @@ -25,7 +25,7 @@ class ColumnAnalysisFilters extends React.Component { this.state = { type: props.type, bins: "20", - top: "100", + top: (props.top || 100) + "", ordinalCol: null, categoryCol: null, }; @@ -272,6 +272,7 @@ ColumnAnalysisFilters.propTypes = { dtype: PropTypes.string, code: PropTypes.string, type: PropTypes.string, + top: PropTypes.number, buildChart: PropTypes.func, }; diff --git a/static/popups/create/CreateColumn.jsx b/static/popups/create/CreateColumn.jsx index d676e12c6..1632defd7 100644 --- a/static/popups/create/CreateColumn.jsx +++ b/static/popups/create/CreateColumn.jsx @@ -30,6 +30,7 @@ class ReactCreateColumn extends React.Component { this.state = _.assign({}, BASE_STATE); this.save = this.save.bind(this); this.renderBody = this.renderBody.bind(this); + this.renderCode = this.renderCode.bind(this); } componentDidMount() { @@ -164,26 +165,46 @@ class ReactCreateColumn extends React.Component { ); } - render() { - let error = null; - if (this.state.error) { - error = ( -
    -
    {this.state.error}
    -
    - ); - } - let codeMarkup = null; + renderCode() { if (_.get(this.state, ["code", this.state.type])) { const code = _.concat(_.get(this.state, ["code", this.state.type], []), []); - codeMarkup = ( -
    - Code: + let markup = null; + if (_.size(code) > 2) { + markup = ( +
    +
    {code[0]}
    +
    {code[1]}
    +
    {"hover to see more..."}
    +
    +
    {_.join(code, "\n")}
    +
    +
    + ); + } else { + markup = (
    {_.map(code, (c, i) => (
    {c}
    ))}
    + ); + } + return ( +
    + Code: + {markup} +
    + ); + } + return null; + } + + render() { + let error = null; + if (this.state.error) { + error = ( +
    +
    {this.state.error}
    ); } @@ -193,7 +214,7 @@ class ReactCreateColumn extends React.Component { {this.renderBody()} ,
    - {codeMarkup} + {this.renderCode()} + ); } - return ( - - ); - })} + )}
    @@ -145,8 +156,8 @@ class ReactReshape extends React.Component {
    {_.map( [ - ["override", "Override Current"], ["new", "New Instance"], + ["override", "Override Current"], ], ([output, label], i) => { const buttonProps = { className: "btn" }; diff --git a/tests/conftest.py b/tests/conftest.py index 602c4c22d..74a59c82f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,5 @@ import getpass +import os import random import string import unittest as ut @@ -108,6 +109,13 @@ def _add_date(date, security_data): return data +@pytest.fixture(scope="module") +def state_data(): + df = pd.read_csv('{}/data/state-codes.csv'.format(os.path.dirname(__file__))) + df.loc[:, 'val'] = df.index.values + return df + + @pytest.fixture(scope="module") def builtin_pkg(): if PY3: diff --git a/tests/data/state-codes.csv b/tests/data/state-codes.csv new file mode 100755 index 000000000..8401d93ba --- /dev/null +++ b/tests/data/state-codes.csv @@ -0,0 +1,52 @@ +"State","Abbrev","Code" +"Alabama","Ala.","AL" +"Alaska","Alaska","AK" +"Arizona","Ariz.","AZ" +"Arkansas","Ark.","AR" +"California","Calif.","CA" +"Colorado","Colo.","CO" +"Connecticut","Conn.","CT" +"Delaware","Del.","DE" +"District of Columbia","D.C.","DC" +"Florida","Fla.","FL" +"Georgia","Ga.","GA" +"Hawaii","Hawaii","HI" +"Idaho","Idaho","ID" +"Illinois","Ill.","IL" +"Indiana","Ind.","IN" +"Iowa","Iowa","IA" +"Kansas","Kans.","KS" +"Kentucky","Ky.","KY" +"Louisiana","La.","LA" +"Maine","Maine","ME" +"Maryland","Md.","MD" +"Massachusetts","Mass.","MA" +"Michigan","Mich.","MI" +"Minnesota","Minn.","MN" +"Mississippi","Miss.","MS" +"Missouri","Mo.","MO" +"Montana","Mont.","MT" +"Nebraska","Nebr.","NE" +"Nevada","Nev.","NV" +"New Hampshire","N.H.","NH" +"New Jersey","N.J.","NJ" +"New Mexico","N.M.","NM" +"New York","N.Y.","NY" +"North Carolina","N.C.","NC" +"North Dakota","N.D.","ND" +"Ohio","Ohio","OH" +"Oklahoma","Okla.","OK" +"Oregon","Ore.","OR" +"Pennsylvania","Pa.","PA" +"Rhode Island","R.I.","RI" +"South Carolina","S.C.","SC" +"South Dakota","S.D.","SD" +"Tennessee","Tenn.","TN" +"Texas","Tex.","TX" +"Utah","Utah","UT" +"Vermont","Vt.","VT" +"Virginia","Va.","VA" +"Washington","Wash.","WA" +"West Virginia","W.Va.","WV" +"Wisconsin","Wis.","WI" +"Wyoming","Wyo.","WY" \ No newline at end of file diff --git a/tests/dtale/test_charts.py b/tests/dtale/test_charts.py index e46846d4b..68f64c15a 100644 --- a/tests/dtale/test_charts.py +++ b/tests/dtale/test_charts.py @@ -22,6 +22,8 @@ def test_date_freq_handler(): def test_group_filter_handler(): s = chart_utils.group_filter_handler('date|WD', 1, 'I') assert s == 'date.dt.dayofweek == 1' + s = chart_utils.group_filter_handler('date|WD', 'nan', 'I') + assert s == 'date != date' s = chart_utils.group_filter_handler('date|H2', 1, 'I') assert s == 'date.dt.hour == 1' s = chart_utils.group_filter_handler('date|H', '20190101', 'D') @@ -40,6 +42,8 @@ def test_group_filter_handler(): assert s == "foo == 1" s = chart_utils.group_filter_handler('foo', 'bar', 'S') assert s == "foo == 'bar'" + s = chart_utils.group_filter_handler('foo', 'nan', 'S') + assert s == "foo != foo" @pytest.mark.unit diff --git a/tests/dtale/test_dash.py b/tests/dtale/test_dash.py index 51868f362..e3226d3c5 100644 --- a/tests/dtale/test_dash.py +++ b/tests/dtale/test_dash.py @@ -1,3 +1,5 @@ +import json + import mock import numpy as np import pandas as pd @@ -48,8 +50,11 @@ def test_display_page(unittest): response = c.post('/charts/_dash-update-component', json=params) resp_data = response.get_json()['response'] component_defs = resp_data['props']['children']['props']['children'] - x_dd = 
component_defs[9]['props']['children'][0] - x_dd_options = x_dd['props']['children'][0]['props']['children'][1]['props']['options'] + x_dd = component_defs[10]['props']['children'][0] + x_dd = x_dd['props']['children'][0] + x_dd = x_dd['props']['children'][0] + x_dd = x_dd['props']['children'][0] + x_dd_options = x_dd['props']['children'][1]['props']['options'] unittest.assertEqual([dict(label=v, value=v) for v in ['a', 'b', 'c']], x_dd_options) @@ -94,7 +99,7 @@ def test_input_changes(unittest): 'output': ( '..input-data.data...x-dropdown.options...y-single-dropdown.options...y-multi-dropdown.options.' '..z-dropdown.options...group-dropdown.options...barsort-dropdown.options.' - '..yaxis-dropdown.options..' + '..yaxis-dropdown.options...non-map-inputs.style...map-inputs.style...colorscale-input.style..' ), 'changedPropIds': ['chart-tabs.value'], 'inputs': [ @@ -105,6 +110,7 @@ def test_input_changes(unittest): {'id': 'y-single-dropdown', 'property': 'value'}, {'id': 'z-dropdown', 'property': 'value'}, {'id': 'group-dropdown', 'property': 'value'}, + {'id': 'group-val-dropdown', 'property': 'value'}, {'id': 'agg-dropdown', 'property': 'value'}, {'id': 'window-input', 'property': 'value'}, {'id': 'rolling-comp-dropdown', 'property': 'value'} @@ -114,8 +120,8 @@ def test_input_changes(unittest): response = c.post('/charts/_dash-update-component', json=params) resp_data = response.get_json() unittest.assertEqual(resp_data['response']['input-data']['data'], { - 'chart_type': 'line', 'x': None, 'y': [], 'z': None, 'group': None, 'agg': None, 'window': None, - 'rolling_comp': None, 'query': None + 'chart_type': 'line', 'x': None, 'y': [], 'z': None, 'group': None, 'group_val': None, 'agg': None, + 'window': None, 'rolling_comp': None, 'query': None }) unittest.assertEqual( resp_data['response']['x-dropdown']['options'], @@ -127,18 +133,150 @@ def test_input_changes(unittest): ) params['inputs'][2]['value'] = 'a' params['inputs'][3]['value'] = ['b', 'c'] + params['inputs'][6]['value'] = ['d'] + params['inputs'][7]['value'] = [json.dumps(dict(d='20200101'))] response = c.post('/charts/_dash-update-component', json=params) resp_data = response.get_json()['response'] unittest.assertEqual([o['value'] for o in resp_data['barsort-dropdown']['options']], ['a', 'b', 'c']) unittest.assertEqual([o['value'] for o in resp_data['yaxis-dropdown']['options']], ['b', 'c']) +@pytest.mark.unit +def test_map_data(unittest): + import dtale.views as views + + df = pd.DataFrame(dict(a=[1, 2, 3], b=[4, 5, 6], c=[7, 8, 9], d=pd.date_range('20200101', '20200103'))) + with app.test_client() as c: + with ExitStack() as stack: + df, _ = views.format_data(df) + stack.enter_context(mock.patch('dtale.global_state.DATA', {c.port: df})) + pathname = path_builder(c.port) + params = { + 'output': ( + '..map-input-data.data...map-loc-dropdown.options...map-lat-dropdown.options...' + 'map-lon-dropdown.options...map-value-dropdown.options...map-loc-mode-input.style...' + 'map-loc-input.style...map-lat-input.style...map-lon-input.style...map-scope-input.style...' + 'map-proj-input.style..' 
+ ), + 'changedPropIds': ['map-type-dropdown.value'], + 'inputs': [ + {'id': 'map-type-dropdown', 'property': 'value', 'value': 'scattergeo'}, + {'id': 'map-loc-mode-dropdown', 'property': 'value', 'value': None}, + {'id': 'map-loc-dropdown', 'property': 'value', 'value': None}, + {'id': 'map-lat-dropdown', 'property': 'value', 'value': None}, + {'id': 'map-lon-dropdown', 'property': 'value', 'value': None}, + {'id': 'map-val-dropdown', 'property': 'value', 'value': None}, + {'id': 'map-scope-dropdown', 'property': 'value', 'value': 'world'}, + {'id': 'map-proj-dropdown', 'property': 'value', 'value': None} + ], + 'state': [ + pathname + ] + } + response = c.post('/charts/_dash-update-component', json=params) + resp_data = response.get_json()['response'] + unittest.assertEqual( + resp_data['map-input-data']['data'], + {'map_type': 'scattergeo', 'lat': None, 'lon': None, 'map_val': None, 'scope': 'world', 'proj': None}, + ) + unittest.assertEqual( + resp_data['map-loc-dropdown']['options'], + [ + {'label': 'a', 'value': 'a'}, + {'label': 'b', 'value': 'b'}, + {'label': 'c', 'value': 'c'}, + {'label': 'd', 'value': 'd'} + ] + ) + unittest.assertEqual(resp_data['map-loc-mode-input']['style'], {'display': 'none'}) + unittest.assertEqual(resp_data['map-lat-input']['style'], {}) + + params['inputs'][0]['value'] = 'choropleth' + response = c.post('/charts/_dash-update-component', json=params) + resp_data = response.get_json()['response'] + unittest.assertEqual(resp_data['map-loc-mode-input']['style'], {}) + unittest.assertEqual(resp_data['map-lat-input']['style'], {'display': 'none'}) + + +@pytest.mark.unit +def test_group_values(unittest): + import dtale.views as views + + df = pd.DataFrame(dict(a=[1, 2, 3], b=[4, 5, 6], c=[7, 8, 9], d=pd.date_range('20200101', '20200103'))) + with app.test_client() as c: + with ExitStack() as stack: + df, _ = views.format_data(df) + stack.enter_context(mock.patch('dtale.global_state.DATA', {c.port: df})) + pathname = path_builder(c.port) + params = { + 'output': '..group-val-dropdown.options...group-val-dropdown.value..', + 'changedPropIds': ['group-dropdown.value'], + 'inputs': [ + {'id': 'group-dropdown', 'property': 'value', 'value': None} + ], + 'state': [ + pathname, + {'id': 'input-data', 'property': 'data', 'value': {}}, + {'id': 'group-val-dropdown', 'property': 'value', 'value': None} + ] + } + response = c.post('/charts/_dash-update-component', json=params) + unittest.assertEqual( + response.get_json()['response'], + {'group-val-dropdown': {'options': [], 'value': None}} + ) + params['inputs'][0]['value'] = ['c'] + params['state'][1]['value'] = dict(chart_type='line') + + response = c.post('/charts/_dash-update-component', json=params) + unittest.assertEqual( + response.get_json()['response'], + {'group-val-dropdown': {'options': [ + {'label': '7', 'value': '{"c": 7}'}, + {'label': '8', 'value': '{"c": 8}'}, + {'label': '9', 'value': '{"c": 9}'} + ], 'value': ['{"c": 7}', '{"c": 8}', '{"c": 9}']}} + ) + + params['state'][2]['value'] = ['{"c": 7}'] + response = c.post('/charts/_dash-update-component', json=params) + unittest.assertEqual( + response.get_json()['response']['group-val-dropdown']['value'], + ['{"c": 7}'] + ) + + +@pytest.mark.unit +def test_main_input_styling(unittest): + + with app.test_client() as c: + params = { + 'output': '..group-val-input.style...main-inputs.className..', + 'changedPropIds': ['input-data.modified_timestamp'], + 'inputs': [ts_builder('input-data')], + 'state': [ + {'id': 'input-data', 'property': 'data', 'value': 
{'chart_type': 'maps'}}] + } + response = c.post('/charts/_dash-update-component', json=params) + unittest.assertEqual( + response.get_json()['response'], + {'group-val-input': {'style': {'display': 'none'}}, 'main-inputs': {'className': 'col-md-12'}} + ) + params['state'][0]['value']['chart_type'] = 'line' + params['state'][0]['value']['group'] = ['foo'] + response = c.post('/charts/_dash-update-component', json=params) + unittest.assertEqual( + response.get_json()['response'], + {'group-val-input': {'style': {'display': 'block'}}, 'main-inputs': {'className': 'col-md-8'}} + ) + + @pytest.mark.unit def test_chart_type_changes(unittest): with app.test_client() as c: fig_data_outputs = ( '..y-multi-input.style...y-single-input.style...z-input.style...group-input.style...rolling-inputs.style...' - 'cpg-input.style...barmode-input.style...barsort-input.style...yaxis-input.style..' + 'cpg-input.style...barmode-input.style...barsort-input.style...yaxis-input.style...animate-input.style..' ) inputs = {'id': 'input-data', 'property': 'data', 'value': { 'chart_type': 'line', 'x': 'a', 'y': ['b'], 'z': None, 'group': None, 'agg': None, @@ -304,13 +442,15 @@ def test_chart_input_updates(unittest): {'id': 'cpg-toggle', 'property': 'on', 'value': False}, {'id': 'barmode-dropdown', 'property': 'value', 'value': 'group'}, {'id': 'barsort-dropdown', 'property': 'value'}, + {'id': 'colorscale-dropdown', 'property': 'value'}, + {'id': 'animate-toggle', 'property': 'on'}, ], } response = c.post('/charts/_dash-update-component', json=params) resp_data = response.get_json() unittest.assertEqual(resp_data['response']['props']['data'], { - 'cpg': False, 'barmode': 'group', 'barsort': None + 'animate': None, 'cpg': False, 'barmode': 'group', 'barsort': None, 'colorscale': None, }) @@ -371,20 +511,21 @@ def test_yaxis_data(unittest): params['state'][0]['value'] = 'Col1' -def build_chart_params(pathname, inputs={}, chart_inputs={}, yaxis={}, last_inputs={}): +def build_chart_params(pathname, inputs={}, chart_inputs={}, yaxis={}, last_inputs={}, map_inputs={}): return { 'output': ( '..chart-content.children...last-chart-input-data.data...range-data.data...chart-code.value...' 'yaxis-type.children..' 
), 'changedPropIds': ['input-data.modified_timestamp'], - 'inputs': [ts_builder('input-data'), ts_builder('chart-input-data'), ts_builder('yaxis-data')], + 'inputs': [ts_builder(k) for k in ['input-data', 'chart-input-data', 'yaxis-data', 'map-input-data']], 'state': [ pathname, {'id': 'input-data', 'property': 'data', 'value': inputs}, {'id': 'chart-input-data', 'property': 'data', 'value': chart_inputs}, {'id': 'yaxis-data', 'property': 'data', 'value': yaxis}, - {'id': 'last-chart-input-data', 'property': 'data', 'value': last_inputs} + {'id': 'last-chart-input-data', 'property': 'data', 'value': last_inputs}, + {'id': 'map-input-data', 'property': 'data', 'value': map_inputs} ] } @@ -403,7 +544,8 @@ def test_chart_building_nones(unittest): params['state'][2]['value'] = {'cpg': False, 'barmode': 'group', 'barsort': None} params['state'][-1]['value'] = {'cpg': False, 'barmode': 'group', 'barsort': None, 'yaxis': {}} response = c.post('/charts/_dash-update-component', json=params) - assert response.get_json() is None + resp_data = response.get_json() + assert resp_data['response']['chart-content']['children'] is None @pytest.mark.unit @@ -480,7 +622,8 @@ def test_chart_building_bar_and_popup(unittest): url_params = dict(get_url_parser()(url.split('?')[-1])) unittest.assertEqual( url_params, - {'chart_type': 'bar', 'x': 'a', 'barmode': 'group', 'cpg': 'false', 'y': '["b", "c"]'} + {'chart_type': 'bar', 'x': 'a', 'barmode': 'group', 'cpg': 'false', 'y': '["b", "c"]', + 'animate': 'false'} ) unittest.assertEqual( resp_data['chart-content']['children']['props']['children'][1]['props']['figure']['layout'], @@ -507,6 +650,13 @@ def test_chart_building_bar_and_popup(unittest): }) assert response.status_code == 200 + chart_inputs['animate'] = True + params = build_chart_params(pathname, inputs, chart_inputs) + response = c.post('/charts/_dash-update-component', json=params) + resp_data = response.get_json()['response'] + assert 'frames' in resp_data['chart-content']['children']['props']['children'][1]['props']['figure'] + + chart_inputs['animate'] = False chart_inputs['barmode'] = 'stack' inputs['agg'] = 'raw' params = build_chart_params(pathname, inputs, chart_inputs) @@ -517,7 +667,11 @@ def test_chart_building_bar_and_popup(unittest): {'barmode': 'stack', 'legend': {'orientation': 'h', 'y': 1.2}, 'title': {'text': 'b, c by a (No Aggregation)'}, - 'xaxis': {'tickformat': '.0f', 'title': {'text': 'a'}}, + 'xaxis': {'tickformat': '.0f', + 'tickmode': 'array', + 'ticktext': [1, 2, 3], + 'tickvals': [0, 1, 2], + 'title': {'text': 'a'}}, 'yaxis': {'tickformat': '.0f', 'title': {'text': 'b (No Aggregation)'}}} ) @@ -559,22 +713,29 @@ def test_chart_building_line(unittest): stack.enter_context(mock.patch('dtale.global_state.DATA', {c.port: df})) pathname = path_builder(c.port) inputs = { - 'chart_type': 'line', 'x': 'a', 'y': ['b'], 'z': None, 'group': ['c'], 'agg': None, - 'window': None, 'rolling_comp': None + 'chart_type': 'line', 'x': 'a', 'y': ['b'], 'z': None, 'group': ['c'], + 'group_val': [dict(c=7)], 'agg': None, 'window': None, 'rolling_comp': None } chart_inputs = {'cpg': True, 'barmode': 'group', 'barsort': 'b'} params = build_chart_params(pathname, inputs, chart_inputs) response = c.post('/charts/_dash-update-component', json=params) resp_data = response.get_json()['response'] - assert len(resp_data['chart-content']['children']) == 2 + assert len(resp_data['chart-content']['children']) == 1 inputs['group'] = None + inputs['group_val'] = None chart_inputs['cpg'] = False params = 
build_chart_params(pathname, inputs, chart_inputs) response = c.post('/charts/_dash-update-component', json=params) resp_data = response.get_json()['response'] assert resp_data['chart-content']['children']['type'] == 'Div' + chart_inputs['animate'] = True + params = build_chart_params(pathname, inputs, chart_inputs) + response = c.post('/charts/_dash-update-component', json=params) + resp_data = response.get_json()['response'] + assert 'frames' in resp_data['chart-content']['children']['props']['children'][1]['props']['figure'] + df = pd.DataFrame([dict(sec_id=i, y=1) for i in range(15500)]) with app.test_client() as c: with ExitStack() as stack: @@ -659,7 +820,6 @@ def test_chart_building_heatmap(unittest, test_data, rolling_data): params = build_chart_params(pathname, inputs, chart_inputs) response = c.post('/charts/_dash-update-component', json=params) chart_markup = response.get_json()['response']['chart-content']['children']['props']['children'][1] - print(chart_markup) unittest.assertEqual( chart_markup['props']['figure']['layout']['title'], {'text': 'b by a weighted by c'} @@ -735,6 +895,12 @@ def test_chart_building_3D_scatter(unittest, test_data): {'text': 'b by a weighted by c'} ) + chart_inputs['animate'] = True + params = build_chart_params(pathname, inputs, chart_inputs) + response = c.post('/charts/_dash-update-component', json=params) + resp_data = response.get_json()['response'] + assert 'frames' in resp_data['chart-content']['children'][0]['props']['children'][1]['props']['figure'] + with app.test_client() as c: with ExitStack() as stack: df, _ = views.format_data(test_data) @@ -796,6 +962,59 @@ def test_chart_building_surface(unittest, test_data): ) +@pytest.mark.unit +def test_chart_building_map(unittest, state_data): + import dtale.views as views + + with app.test_client() as c: + with ExitStack() as stack: + df, _ = views.format_data(state_data) + stack.enter_context(mock.patch('dtale.global_state.DATA', {c.port: df})) + pathname = path_builder(c.port) + inputs = {'chart_type': 'maps', 'agg': 'raw'} + map_inputs = {'map_type': 'choropleth', 'loc_mode': 'USA-states', 'loc': 'Code'} + chart_inputs = {'colorscale': 'Reds'} + params = build_chart_params(pathname, inputs, chart_inputs, map_inputs=map_inputs) + response = c.post('/charts/_dash-update-component', json=params) + assert response.get_json()['response']['chart-content']['children'] is None + map_inputs['map_val'] = 'val' + params = build_chart_params(pathname, inputs, chart_inputs, map_inputs=map_inputs) + response = c.post('/charts/_dash-update-component', json=params) + chart_markup = response.get_json()['response']['chart-content']['children']['props']['children'][1] + unittest.assertEqual( + chart_markup['props']['figure']['layout']['title'], + {'text': 'Map of val (No Aggregation)'} + ) + + df = pd.DataFrame({ + 'lat': np.random.uniform(-40, 40, 50), + 'lon': np.random.uniform(-40, 40, 50), + 'val': np.random.randint(0, high=100, size=50) + }) + with app.test_client() as c: + with ExitStack() as stack: + df, _ = views.format_data(df) + stack.enter_context(mock.patch('dtale.global_state.DATA', {c.port: df})) + pathname = path_builder(c.port) + inputs = {'chart_type': 'maps', 'agg': 'raw'} + map_inputs = {'map_type': 'scattergeo', 'lat': 'lat', 'lon': 'lon', 'map_val': 'val', 'scope': 'world', + 'proj': 'mercator'} + chart_inputs = {'colorscale': 'Reds'} + params = build_chart_params(pathname, inputs, chart_inputs, map_inputs=map_inputs) + response = c.post('/charts/_dash-update-component', json=params) + 
chart_markup = response.get_json()['response']['chart-content']['children']['props']['children'][1] + unittest.assertEqual( + chart_markup['props']['figure']['layout']['title'], + {'text': 'Map of val (No Aggregation)'} + ) + map_inputs['map_val'] = 'foo' + params = build_chart_params(pathname, inputs, chart_inputs, map_inputs=map_inputs) + response = c.post('/charts/_dash-update-component', json=params) + resp_data = response.get_json()['response'] + error = resp_data['chart-content']['children']['props']['children'][1]['props']['children'] + assert "'foo'" in error + + @pytest.mark.unit def test_load_chart_error(unittest): import dtale.views as views @@ -806,11 +1025,11 @@ def test_load_chart_error(unittest): df, _ = views.format_data(df) stack.enter_context(mock.patch('dtale.global_state.DATA', {c.port: df})) - def build_chart_data_mock(raw_data, x, y, group_col=None, agg=None, allow_duplicates=False, **kwargs): + def build_base_chart_mock(raw_data, x, y, group_col=None, agg=None, allow_duplicates=False, **kwargs): raise Exception('error test') stack.enter_context(mock.patch( - 'dtale.dash_application.charts.build_chart_data', - side_effect=build_chart_data_mock + 'dtale.dash_application.charts.build_base_chart', + side_effect=build_base_chart_mock )) pathname = {'id': 'url', 'property': 'pathname', 'value': '/charts/{}'.format(c.port)} inputs = {'chart_type': 'line', 'x': 'a', 'y': ['b'], 'z': None, 'group': None, 'agg': None, @@ -932,9 +1151,9 @@ def test_build_axes(unittest): def test_build_figure_data(unittest): assert build_figure_data('/charts/1', x=None)[0] is None assert build_figure_data('/charts/1', x='a', y=['b'], chart_type='heatmap')[0] is None - with mock.patch('dtale.global_state.DATA', {}): - fig_data, _code = build_figure_data('/charts/1', x='a', y=['b'], chart_type='line') - assert 'error' in fig_data and 'traceback' in fig_data + with mock.patch('dtale.global_state.DATA', {'1': pd.DataFrame([dict(a=1, b=2, c=3)])}): + with pytest.raises(BaseException): + build_figure_data('/charts/1', query='d == 4', x='a', y=['b'], chart_type='line') @pytest.mark.unit @@ -945,8 +1164,8 @@ def test_chart_wrapper(unittest): cw = chart_wrapper('1', dict(min={'b': 4}, max={'b': 6}), url_params) output = cw('foo') url_params = chart_url_params('?{}'.format(output.children[0].children[0].href.split('?')[-1])) - unittest.assertEqual(url_params, {'chart_type': 'line', 'agg': 'rolling', 'window': 10, 'cpg': False, - 'y': ['b', 'c'], 'yaxis': {'b': {'min': 3, 'max': 6}}}) + unittest.assertEqual(url_params, {'animate': False, 'chart_type': 'line', 'agg': 'rolling', 'window': 10, + 'cpg': False, 'y': ['b', 'c'], 'yaxis': {'b': {'min': 3, 'max': 6}}}) @pytest.mark.unit @@ -992,12 +1211,14 @@ def test_update_label_for_freq(unittest): def test_chart_url_params_w_group_filter(unittest): from dtale.dash_application.charts import chart_url_params, chart_url_querystring - querystring = chart_url_querystring(dict(chart_type='bar', x='foo', y=['bar'], group=['baz']), + querystring = chart_url_querystring(dict(chart_type='bar', x='foo', y=['bar'], group=['baz'], + group_val=[dict(baz='bizzle')]), group_filter=dict(group="baz == 'bizzle'")) parsed_params = chart_url_params(querystring) unittest.assertEqual( parsed_params, - {'chart_type': 'bar', 'x': 'foo', 'cpg': False, 'y': ['bar'], 'group': ['baz'], 'query': "baz == 'bizzle'"} + {'animate': False, 'chart_type': 'bar', 'x': 'foo', 'cpg': False, 'y': ['bar'], 'group': ['baz'], + 'group_val': [{'baz': 'bizzle'}], 'query': "baz == 'bizzle'"} ) diff --git 
a/tests/dtale/test_instance.py b/tests/dtale/test_instance.py index 10b40190b..d18cddfbd 100644 --- a/tests/dtale/test_instance.py +++ b/tests/dtale/test_instance.py @@ -89,13 +89,14 @@ def mock_requests_get(url, verify=True): [path, query] = mock_iframe.call_args[0][0].split('?') assert path == 'http://localhost:9999/charts/9999' assert dict(url_parser(query)) == dict(chart_type='line', agg='count', group='["col3", "col4"]', x='col1', - y='["col2"]', cpg='false') + y='["col2"]', cpg='false', animate='false') instance.notebook_charts(x='col1', y='col2', agg='count') [_path, query] = mock_iframe.call_args[0][0].split('?') - assert dict(url_parser(query)) == dict(chart_type='line', agg='count', x='col1', y='["col2"]', cpg='false') + assert dict(url_parser(query)) == dict(chart_type='line', agg='count', x='col1', y='["col2"]', cpg='false', + animate='false') instance.notebook_charts(x='col1', y='col2', group=['col3', 'col4']) [_path, query] = mock_iframe.call_args[0][0].split('?') assert dict(url_parser(query)) == dict(chart_type='line', x='col1', y='["col2"]', group='["col3", "col4"]', - cpg='false') + cpg='false', animate='false') diff --git a/tests/dtale/test_show_loaders.py b/tests/dtale/test_show_loaders.py index c69361cde..13081fbb6 100644 --- a/tests/dtale/test_show_loaders.py +++ b/tests/dtale/test_show_loaders.py @@ -15,10 +15,25 @@ def test_show_csv(): import dtale + csv_path = "/../".join([os.path.dirname(__file__), 'data/test_df.csv']) + with mock.patch('dtale.app.show', mock.Mock()): - csv_path = "/../".join([os.path.dirname(__file__), 'data/test_df.csv']) dtale.show_csv(path=csv_path) + with open(csv_path, 'r') as f: + csv_txt = f.read() + with ExitStack() as stack: + stack.enter_context(mock.patch('dtale.app.show', mock.Mock())) + + class MockRequest(object): + def __init__(self): + self.content = str(csv_txt) + self.status_code = 200 + + stack.enter_context(mock.patch('requests.get', mock.Mock(return_value=MockRequest()))) + dtale.show_csv(path='http://test-csv') + dtale.show_csv(path='http://test-csv', proxy='http://test-proxy') + @pytest.mark.unit def test_show_json(): @@ -37,6 +52,7 @@ def test_show_json(): class MockRequest(object): def __init__(self): self.text = json_txt + self.status_code = 200 def json(self): return json.loads(json_txt) diff --git a/tests/dtale/test_views.py b/tests/dtale/test_views.py index 8946057eb..237734e97 100644 --- a/tests/dtale/test_views.py +++ b/tests/dtale/test_views.py @@ -512,7 +512,7 @@ def test_reshape(custom_data, unittest): stack.enter_context(mock.patch('dtale.global_state.DATA', data)) stack.enter_context(mock.patch('dtale.global_state.DTYPES', dtypes)) stack.enter_context(mock.patch('dtale.global_state.SETTINGS', settings)) - reshape_cfg = dict(index='date', columns='security_id', values=['Col0']) # , aggfunc=None + reshape_cfg = dict(index='date', columns='security_id', values=['Col0']) resp = c.get( '/dtale/reshape/{}'.format(c.port), query_string=dict(output='new', type='pivot', cfg=json.dumps(reshape_cfg)) @@ -529,6 +529,7 @@ def test_reshape(custom_data, unittest): assert json.loads(resp.data)['success'] assert len(data.keys()) == 1 + reshape_cfg['columnNameHeaders'] = True reshape_cfg['aggfunc'] = 'sum' resp = c.get( '/dtale/reshape/{}'.format(c.port), @@ -537,11 +538,15 @@ def test_reshape(custom_data, unittest): response_data = json.loads(resp.data) assert response_data['data_id'] == new_key assert len(data.keys()) == 2 - unittest.assertEqual([d['name'] for d in dtypes[new_key]], ['date', '100000', '100001']) + 
unittest.assertEqual( + [d['name'] for d in dtypes[new_key]], + ['date', 'security_id-100000', 'security_id-100001'] + ) assert len(data[new_key]) == 365 assert settings[new_key].get('startup_code') is not None c.get('/dtale/cleanup/{}'.format(new_key)) + reshape_cfg['columnNameHeaders'] = False reshape_cfg['values'] = ['Col0', 'Col1'] resp = c.get( '/dtale/reshape/{}'.format(c.port), @@ -950,7 +955,7 @@ def test_get_column_analysis(unittest, test_data): settings[c.port] = dict() response = c.get( '/dtale/column-analysis/{}'.format(c.port), - query_string=dict(col='foo', type='value_counts') + query_string=dict(col='foo', type='value_counts', top=2) ) response_data = json.loads(response.data) assert response_data['chart_type'] == 'value_counts' @@ -1430,7 +1435,7 @@ def test_version_info(): @pytest.mark.unit @pytest.mark.parametrize('custom_data', [dict(rows=1000, cols=3)], indirect=True) -def test_chart_exports(custom_data): +def test_chart_exports(custom_data, state_data): import dtale.views as views with app.test_client() as c: @@ -1501,6 +1506,39 @@ def test_chart_exports(custom_data): response = c.get('/dtale/chart-csv-export/{}'.format(c.port), query_string=params) assert response.content_type == 'application/json' + with app.test_client() as c: + with ExitStack() as stack: + stack.enter_context(mock.patch('dtale.global_state.DATA', {c.port: state_data})) + stack.enter_context( + mock.patch('dtale.global_state.DTYPES', {c.port: views.build_dtypes_state(state_data)}) + ) + params = dict(chart_type='maps', map_type='choropleth', loc_mode='USA-states', loc='Code', map_val='val', + agg='raw') + response = c.get('/dtale/chart-export/{}'.format(c.port), query_string=params) + assert response.content_type == 'text/html' + + response = c.get('/dtale/chart-csv-export/{}'.format(c.port), query_string=params) + assert response.content_type == 'text/csv' + + df = pd.DataFrame({ + 'lat': np.random.uniform(-40, 40, 50), + 'lon': np.random.uniform(-40, 40, 50), + 'val': np.random.randint(0, high=100, size=50) + }) + with app.test_client() as c: + with ExitStack() as stack: + stack.enter_context(mock.patch('dtale.global_state.DATA', {c.port: df})) + stack.enter_context( + mock.patch('dtale.global_state.DTYPES', {c.port: views.build_dtypes_state(df)}) + ) + params = dict(chart_type='maps', map_type='scattergeo', lat='lat', lon='lon', map_val='val', scope='world', + agg='raw') + response = c.get('/dtale/chart-export/{}'.format(c.port), query_string=params) + assert response.content_type == 'text/html' + + response = c.get('/dtale/chart-csv-export/{}'.format(c.port), query_string=params) + assert response.content_type == 'text/csv' + @pytest.mark.unit def test_main():
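Note on the new map and "Animate" assertions above: the tests only check the figure title, the error text, and the presence of a 'frames' key in the JSON returned by '/charts/_dash-update-component', so the shape of the figures those inputs describe is easy to miss. Below is a minimal, standalone sketch using plain plotly.graph_objs rather than the builders in dtale.dash_application.charts; the column names ('Code', 'val', 'lat', 'lon'), the 'Reds' colorscale and the world/mercator scope mirror the test inputs, while the sample rows and the exact plotly calls are illustrative assumptions, not the implementation under test.

# Standalone sketch (not dtale.dash_application.charts) of the figures the new
# tests assert against; sample data values are made up.
import numpy as np
import pandas as pd
import plotly.graph_objs as go

# Choropleth case: map_inputs with map_type='choropleth', loc_mode='USA-states',
# loc='Code', map_val='val' and colorscale='Reds'.
state_df = pd.DataFrame({'Code': ['NY', 'CA', 'TX'], 'val': [1, 2, 3]})  # illustrative rows
choropleth = go.Figure(
    data=[go.Choropleth(locations=state_df['Code'], locationmode='USA-states',
                        z=state_df['val'], colorscale='Reds')],
    layout=go.Layout(title='Map of val (No Aggregation)')
)

# Scattergeo case: map_inputs with map_type='scattergeo', lat/lon/map_val columns,
# scope='world' and proj='mercator'.
latlon_df = pd.DataFrame({
    'lat': np.random.uniform(-40, 40, 50),
    'lon': np.random.uniform(-40, 40, 50),
    'val': np.random.randint(0, high=100, size=50),
})
scattergeo = go.Figure(
    data=[go.Scattergeo(lat=latlon_df['lat'], lon=latlon_df['lon'], mode='markers',
                        marker=dict(color=latlon_df['val'], colorscale='Reds'))],
    layout=go.Layout(title='Map of val (No Aggregation)',
                     geo=dict(scope='world', projection=dict(type='mercator')))
)

# "Animate" toggle: an animated plotly figure carries go.Frame objects, which
# serialize under the 'frames' key -- the key the tests above look for.
frames = [go.Frame(data=[go.Bar(x=['a', 'b'], y=[i, i + 1])], name=str(i)) for i in range(3)]
animated = go.Figure(data=[go.Bar(x=['a', 'b'], y=[0, 1])], frames=frames)
assert 'frames' in animated.to_dict()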
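Similarly, the updated test_reshape assertion expects the pivoted columns to come back as 'security_id-100000' / 'security_id-100001' once columnNameHeaders is enabled. Here is a rough pandas-only sketch of that flattening, an assumption about equivalent behavior rather than the code in dtale.data_reshapers; it uses a scalar values column and a two-row stand-in for the custom_data fixture to keep the example self-contained.

# Sketch of the "columnNameHeaders" flattening checked in test_reshape: pivot,
# then prefix each resulting column with the name of the pivot column.
import pandas as pd

df = pd.DataFrame({'date': pd.to_datetime(['2020-01-01', '2020-01-01']),
                   'security_id': [100000, 100001],
                   'Col0': [1.0, 2.0]})  # toy stand-in for the custom_data fixture
pivoted = pd.pivot_table(df, index='date', columns='security_id', values='Col0', aggfunc='sum')
pivoted.columns = ['{}-{}'.format(pivoted.columns.name, col) for col in pivoted.columns]
assert list(pivoted.columns) == ['security_id-100000', 'security_id-100001']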