
Commit 3ae0645

reorder computation to avoid an extra scan through the data
1 parent dd2137b commit 3ae0645

1 file changed: packages/python/plotly/plotly/express/_core.py (+29, −39 lines)
@@ -1904,7 +1904,7 @@ def infer_config(args, constructor, trace_patch, layout_patch):
     return trace_specs, grouped_mappings, sizeref, show_colorbar
 
 
-def get_orderings(args, grouper, grouped, all_same_group):
+def get_orderings(args, grouper):
     """
     `orders` is the user-supplied ordering with the remaining data-frame-supplied
     ordering appended if the column is used for grouping. It includes anything the user
@@ -1916,47 +1916,42 @@ def get_orderings(args, grouper, grouped, all_same_group):
     of a single dimension-group
     """
     orders = {} if "category_orders" not in args else args["category_orders"].copy()
-    sorted_group_names = []
-
-    if all_same_group:
-        for col in grouper:
-            if col != one_group:
-                single_val = args["data_frame"][col].iloc[0]
-                sorted_group_names.append(single_val)
-                orders[col] = [single_val]
-            else:
-                sorted_group_names.append("")
-        return orders, [tuple(sorted_group_names)]
 
+    # figure out orders and what the single group name would be if there were one
+    single_group_name = []
     for col in grouper:
-        if col != one_group:
+        if col == one_group:
+            single_group_name.append("")
+        else:
             uniques = list(args["data_frame"][col].unique())
+            if len(uniques) == 1:
+                single_group_name.append(uniques[0])
             if col not in orders:
                 orders[col] = uniques
             else:
                 orders[col] = list(OrderedDict.fromkeys(list(orders[col]) + uniques))
 
-    for group_name in grouped.groups:
-        if len(grouper) == 1:
-            group_name = (group_name,)
-        sorted_group_names.append(group_name)
-
-    for i, col in reversed(list(enumerate(grouper))):
-        if col != one_group:
-            sorted_group_names = sorted(
-                sorted_group_names,
-                key=lambda g: orders[col].index(g[i]) if g[i] in orders[col] else -1,
-            )
-    return orders, sorted_group_names
-
+    if len(single_group_name) == len(grouper):
+        # we have a single group, so we can skip all group-by operations!
+        grouped = None
+        sorted_group_names = [tuple(single_group_name)]
+    else:
+        grouped = args["data_frame"].groupby(grouper, sort=False)
+        sorted_group_names = []
+        for group_name in grouped.groups:
+            if len(grouper) == 1:
+                group_name = (group_name,)
+            sorted_group_names.append(group_name)
 
-def _all_same_group(args, grouper):
-    for g in set(grouper):
-        if g != one_group:
-            arr = args["data_frame"][g].values
-            if not (arr[0] == arr).all(axis=0):
-                return False
-    return True
+        for i, col in reversed(list(enumerate(grouper))):
+            if col != one_group:
+                sorted_group_names = sorted(
+                    sorted_group_names,
+                    key=lambda g: orders[col].index(g[i])
+                    if g[i] in orders[col]
+                    else -1,
+                )
+    return grouped, orders, sorted_group_names
 
 
 def make_figure(args, constructor, trace_patch=None, layout_patch=None):
@@ -1975,12 +1970,7 @@ def make_figure(args, constructor, trace_patch=None, layout_patch=None):
         args, constructor, trace_patch, layout_patch
    )
     grouper = [x.grouper or one_group for x in grouped_mappings] or [one_group]
-    grouped = None
-    all_same_group = _all_same_group(args, grouper)
-    if not all_same_group:
-        grouped = args["data_frame"].groupby(grouper, sort=False)
-
-    orders, sorted_group_names = get_orderings(args, grouper, grouped, all_same_group)
+    grouped, orders, sorted_group_names = get_orderings(args, grouper)
 
     col_labels = []
     row_labels = []
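
Below is a minimal standalone sketch of the idea behind this change, for readers skimming the diff. It is not the plotly.express internals themselves: the helper name get_orderings_sketch, the toy DataFrame, and the omission of the one_group sentinel are simplifications for illustration. It demonstrates the point in the commit message: the unique values already collected for orders are reused to detect the "every grouping column holds a single value" case, so the old _all_same_group helper's extra scan through the data is no longer needed, and the groupby is skipped entirely when there is only one group.

from collections import OrderedDict

import pandas as pd


def get_orderings_sketch(df, grouper, category_orders=None):
    # hypothetical simplified version of get_orderings(), for illustration only
    orders = dict(category_orders or {})
    single_group_name = []
    for col in grouper:
        uniques = list(df[col].unique())
        if len(uniques) == 1:
            # this column is constant; remember its single value
            single_group_name.append(uniques[0])
        # merge data-frame order into any user-supplied category order
        if col not in orders:
            orders[col] = uniques
        else:
            orders[col] = list(OrderedDict.fromkeys(list(orders[col]) + uniques))

    if len(single_group_name) == len(grouper):
        # every grouping column is constant: a single group, no groupby needed
        return None, orders, [tuple(single_group_name)]

    grouped = df.groupby(grouper, sort=False)
    # normalize group names to tuples (a single-column groupby can yield scalars)
    sorted_group_names = [
        name if isinstance(name, tuple) else (name,) for name in grouped.groups
    ]
    # stable-sort by each grouping column in turn, last column first
    for i, col in reversed(list(enumerate(grouper))):
        sorted_group_names = sorted(
            sorted_group_names,
            key=lambda g: orders[col].index(g[i]) if g[i] in orders[col] else -1,
        )
    return grouped, orders, sorted_group_names


df = pd.DataFrame({"species": ["setosa", "setosa", "setosa"], "x": [1, 2, 3]})
grouped, orders, names = get_orderings_sketch(df, ["species"])
print(grouped is None, orders, names)
# True {'species': ['setosa']} [('setosa',)]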
