Fixed STYLE #49656: concat.py and merge.py #49849

Closed · wants to merge 2 commits
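Both files apply the same mechanical change: the boolean keyword `copy` is renamed to `is_copied` at each definition and call site, with the behaviour left untouched. A minimal, self-contained sketch of that pattern; `maybe_copy_before`/`maybe_copy_after` are illustrative stand-ins, not pandas functions.

```python
import numpy as np

# Before: the flag is spelled `copy`, as in current pandas internals.
def maybe_copy_before(values: np.ndarray, copy: bool = True) -> np.ndarray:
    return values.copy() if copy else values

# After, as proposed in this diff: identical behaviour, flag renamed to `is_copied`.
def maybe_copy_after(values: np.ndarray, is_copied: bool = True) -> np.ndarray:
    return values.copy() if is_copied else values

arr = np.arange(3)
assert maybe_copy_before(arr, copy=False) is arr
assert maybe_copy_after(arr, is_copied=False) is arr
```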
20 changes: 10 additions & 10 deletions pandas/core/internals/concat.py
@@ -69,7 +69,7 @@


def _concatenate_array_managers(
- mgrs_indexers, axes: list[Index], concat_axis: AxisInt, copy: bool
+ mgrs_indexers, axes: list[Index], concat_axis: AxisInt, is_copied: bool
) -> Manager:
"""
Concatenate array managers into one.
@@ -95,7 +95,7 @@ def _concatenate_array_managers(
)
if ax == 1 and indexer is not None:
axis1_made_copy = True
- if copy and concat_axis == 0 and not axis1_made_copy:
+ if is_copied and concat_axis == 0 and not axis1_made_copy:
# for concat_axis 1 we will always get a copy through concat_arrays
mgr = mgr.copy()
mgrs.append(mgr)
@@ -151,7 +151,7 @@ def concat_arrays(to_concat: list) -> ArrayLike:
to_concat = [
arr.to_array(target_dtype)
if isinstance(arr, NullArrayProxy)
- else astype_array(arr, target_dtype, copy=False)
+ else astype_array(arr, target_dtype, is_copied=False)
for arr in to_concat
]

@@ -173,7 +173,7 @@ def concat_arrays(to_concat: list) -> ArrayLike:


def concatenate_managers(
- mgrs_indexers, axes: list[Index], concat_axis: AxisInt, copy: bool
+ mgrs_indexers, axes: list[Index], concat_axis: AxisInt, is_copied: bool
) -> Manager:
"""
Concatenate block managers into one.
@@ -191,7 +191,7 @@
"""
# TODO(ArrayManager) this assumes that all managers are of the same type
if isinstance(mgrs_indexers[0][0], ArrayManager):
- return _concatenate_array_managers(mgrs_indexers, axes, concat_axis, copy)
+ return _concatenate_array_managers(mgrs_indexers, axes, concat_axis, is_copied)

mgrs_indexers = _maybe_reindex_columns_na_proxy(axes, mgrs_indexers)

@@ -207,7 +207,7 @@

if len(join_units) == 1 and not join_units[0].indexers:
values = blk.values
- if copy:
+ if is_copied:
values = values.copy()
else:
values = values.view()
@@ -229,7 +229,7 @@

fastpath = blk.values.dtype == values.dtype
else:
- values = _concatenate_join_units(join_units, concat_axis, copy=copy)
+ values = _concatenate_join_units(join_units, concat_axis, is_copied=is_copied)
fastpath = False

if fastpath:
@@ -260,7 +260,7 @@ def _maybe_reindex_columns_na_proxy(
axes[0],
indexers[0],
axis=0,
- copy=False,
+ is_copied=False,
only_slice=True,
allow_dups=True,
use_na_proxy=True,
@@ -524,7 +524,7 @@ def get_reindexed_values(self, empty_dtype: DtypeObj, upcasted_na) -> ArrayLike:


def _concatenate_join_units(
- join_units: list[JoinUnit], concat_axis: AxisInt, copy: bool
+ join_units: list[JoinUnit], concat_axis: AxisInt, is_copied: bool
) -> ArrayLike:
"""
Concatenate values from several join units along selected axis.
@@ -546,7 +546,7 @@
if len(to_concat) == 1:
# Only one block, nothing to concatenate.
concat_values = to_concat[0]
- if copy:
+ if is_copied:
if isinstance(concat_values, np.ndarray):
# non-reindexed (=not yet copied) arrays are made into a view
# in JoinUnit.get_reindexed_values
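The concat.py hunks above also pass through the single-block fast path, where the flag picks between `values.copy()` and `values.view()`. A self-contained sketch of that copy-versus-view distinction in plain NumPy (`take_single` is an illustrative stand-in, not a pandas internal):

```python
import numpy as np

def take_single(values: np.ndarray, is_copied: bool) -> np.ndarray:
    # Mirrors the branch above: copy when the flag is set, otherwise hand back a view.
    return values.copy() if is_copied else values.view()

base = np.arange(4)

view = take_single(base, is_copied=False)
view[0] = 99
assert base[0] == 99   # a view shares memory with the original

dup = take_single(base, is_copied=True)
dup[1] = -1
assert base[1] == 1    # a copy leaves the original untouched
```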
32 changes: 16 additions & 16 deletions pandas/core/reshape/merge.py
@@ -106,7 +106,7 @@ def merge(
right_index: bool = False,
sort: bool = False,
suffixes: Suffixes = ("_x", "_y"),
- copy: bool = True,
+ is_copied: bool = True,
indicator: str | bool = False,
validate: str | None = None,
) -> DataFrame:
@@ -124,7 +124,7 @@
indicator=indicator,
validate=validate,
)
- return op.get_result(copy=copy)
+ return op.get_result(is_copied=is_copied)


if __debug__:
@@ -183,7 +183,7 @@ def _groupby_and_merge(by, left: DataFrame, right: DataFrame, merge_pieces):
from pandas.core.reshape.concat import concat

result = concat(pieces, ignore_index=True)
- result = result.reindex(columns=pieces[0].columns, copy=False)
+ result = result.reindex(columns=pieces[0].columns, is_copied=False)
return result, lby


@@ -624,7 +624,7 @@ class _MergeOperation:
bm_axis: AxisInt
sort: bool
suffixes: Suffixes
- copy: bool
+ is_copied: bool
indicator: str | bool
validate: str | None
join_names: list[Hashable]
@@ -721,7 +721,7 @@ def _reindex_and_concat(
join_index: Index,
left_indexer: npt.NDArray[np.intp] | None,
right_indexer: npt.NDArray[np.intp] | None,
- copy: bool,
+ is_copied: bool,
) -> DataFrame:
"""
reindex along index and concat along columns.
@@ -742,7 +742,7 @@
join_index,
left_indexer,
axis=1,
- copy=False,
+ is_copied=False,
only_slice=True,
allow_dups=True,
use_na_proxy=True,
@@ -755,7 +755,7 @@
join_index,
right_indexer,
axis=1,
- copy=False,
+ is_copied=False,
only_slice=True,
allow_dups=True,
use_na_proxy=True,
@@ -767,17 +767,17 @@

left.columns = llabels
right.columns = rlabels
- result = concat([left, right], axis=1, copy=copy)
+ result = concat([left, right], axis=1, is_copied=is_copied)
return result

- def get_result(self, copy: bool = True) -> DataFrame:
+ def get_result(self, is_copied: bool = True) -> DataFrame:
if self.indicator:
self.left, self.right = self._indicator_pre_merge(self.left, self.right)

join_index, left_indexer, right_indexer = self._get_join_info()

result = self._reindex_and_concat(
- join_index, left_indexer, right_indexer, copy=copy
+ join_index, left_indexer, right_indexer, is_copied=is_copied
)
result = result.__finalize__(self, method=self._merge_type)

@@ -1776,7 +1776,7 @@ def __init__(
sort=True, # factorize sorts
)

- def get_result(self, copy: bool = True) -> DataFrame:
+ def get_result(self, is_copied: bool = True) -> DataFrame:
join_index, left_indexer, right_indexer = self._get_join_info()

llabels, rlabels = _items_overlap_with_suffix(
@@ -1799,7 +1799,7 @@ def get_result(self, copy: bool = True) -> DataFrame:
right_join_indexer = right_indexer

result = self._reindex_and_concat(
- join_index, left_join_indexer, right_join_indexer, copy=copy
+ join_index, left_join_indexer, right_join_indexer, is_copied=is_copied
)
self._maybe_add_join_keys(result, left_indexer, right_indexer)

@@ -1845,7 +1845,7 @@ def __init__(
right_by=None,
axis: AxisInt = 1,
suffixes: Suffixes = ("_x", "_y"),
- copy: bool = True,
+ is_copied: bool = True,
fill_method: str | None = None,
how: str = "asof",
tolerance=None,
@@ -2156,7 +2156,7 @@ def _get_multiindex_indexer(
if sort:
rcodes = list(map(np.take, rcodes, index.codes))
else:
i8copy = lambda a: a.astype("i8", subok=False, copy=True)
i8copy = lambda a: a.astype("i8", subok=False, is_copied=True)
rcodes = list(map(i8copy, index.codes))

# fix right labels if there were any nulls
@@ -2420,8 +2420,8 @@ def _get_join_keys(

# get keys for the first `nlev` levels
stride = np.prod(shape[1:nlev], dtype="i8")
lkey = stride * llab[0].astype("i8", subok=False, copy=False)
rkey = stride * rlab[0].astype("i8", subok=False, copy=False)
lkey = stride * llab[0].astype("i8", subok=False, is_copied=False)
rkey = stride * rlab[0].astype("i8", subok=False, is_copied=False)

for i in range(1, nlev):
with np.errstate(divide="ignore"):
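The last hunk sits in `_get_join_keys`, which packs the codes of several index levels into one int64 key: the first level's codes are scaled by the product of the remaining level sizes, and the lower levels are then folded in. A small worked example of that packing, assuming two levels of sizes (3, 4) with made-up codes:

```python
import numpy as np

# Two levels with sizes (3, 4); codes for three rows on each level.
shape = (3, 4)
codes_lvl0 = np.array([0, 1, 2])
codes_lvl1 = np.array([3, 0, 1])

# Scale the first level by the size of the remaining level, then add it in,
# so each distinct combination of codes maps to a distinct integer key.
stride = np.prod(shape[1:], dtype="i8")             # 4
key = stride * codes_lvl0.astype("i8") + codes_lvl1

print(key)  # [3 4 9]  -> (0, 3) -> 3, (1, 0) -> 4, (2, 1) -> 9
```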