Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Remove Expr._required_attribute #799

Merged
merged 2 commits into from
Jan 29, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 4 additions & 14 deletions dask_expr/_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,18 +39,6 @@ def __init__(self, *args, **kwargs):
assert not kwargs, kwargs
operands = [_unpack_collections(o) for o in operands]
self.operands = operands
if self._required_attribute:
dep = next(iter(self.dependencies()))._meta
if not hasattr(dep, self._required_attribute):
# Raise a ValueError instead of AttributeError to
# avoid infinite recursion
raise ValueError(f"{dep} has no attribute {self._required_attribute}")

@property
def _required_attribute(self) -> str:
# Specify if the first `dependency` must support
# a specific attribute for valid behavior.
return None

def __str__(self):
s = ", ".join(
Expand Down Expand Up @@ -407,8 +395,8 @@ def __getattr__(self, key):
try:
return object.__getattribute__(self, key)
except AttributeError as err:
if key == "_meta":
# Avoid a recursive loop if/when `self._meta`
if key.startswith("_meta"):
# Avoid a recursive loop if/when `self._meta*`
# produces an `AttributeError`
raise RuntimeError(
f"Failed to generate metadata for {self}. "
Expand All @@ -422,6 +410,8 @@ def __getattr__(self, key):
if key in _parameters:
idx = _parameters.index(key)
return self.operands[idx]
if is_dataframe_like(self._meta) and key in self._meta.columns:
return self[key]

link = "https://github.com/dask-contrib/dask-expr/blob/main/README.md#api-coverage"
raise AttributeError(
Expand Down
35 changes: 0 additions & 35 deletions dask_expr/_expr.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,35 +88,6 @@ def optimize(self, **kwargs):
def __hash__(self):
return hash(self._name)

def __getattr__(self, key):
    """Fallback attribute lookup for expression objects.

    Resolution order (only reached when normal lookup fails):

    1. Retry plain object attribute access so genuine attributes win.
    2. If ``_meta`` itself failed to resolve, raise ``RuntimeError``
       instead of ``AttributeError`` to break the recursion that would
       otherwise occur (steps below re-enter ``__getattr__`` via
       ``self._meta``).
    3. Expose each named operand (per ``type(self)._parameters``) as an
       attribute.
    4. For dataframe-like metadata, treat ``key`` as a column name and
       fall back to column projection via ``self[key]``.

    Raises
    ------
    RuntimeError
        When metadata generation fails (step 2).
    AttributeError
        When no fallback applies; the message links to the API-coverage
        docs since this usually signals an unsupported API function.
    """
    try:
        # Step 1: normal attribute access — succeeds for anything
        # actually defined on the instance/class.
        return object.__getattribute__(self, key)
    except AttributeError as err:
        if key == "_meta":
            # Avoid a recursive loop if/when `self._meta`
            # produces an `AttributeError`
            raise RuntimeError(
                f"Failed to generate metadata for {self}. "
                "This operation may not be supported by the current backend."
            )

        # Allow operands to be accessed as attributes
        # as long as the keys are not already reserved
        # by existing methods/properties
        _parameters = type(self)._parameters
        if key in _parameters:
            # Operands are stored positionally; map the parameter name
            # to its slot in `self.operands`.
            idx = _parameters.index(key)
            return self.operands[idx]
        if is_dataframe_like(self._meta) and key in self._meta.columns:
            # Column access as attribute, e.g. `df.x` -> `df["x"]`.
            return self[key]

        link = "https://github.com/dask-contrib/dask-expr/blob/main/README.md#api-coverage"
        raise AttributeError(
            f"{err}\n\n"
            "This often means that you are attempting to use an unsupported "
            f"API function. Current API coverage is documented here: {link}."
        )

@property
def index(self):
return Index(self)
Expand Down Expand Up @@ -493,12 +464,6 @@ class Blockwise(Expr):
_projection_passthrough = False
_filter_passthrough = False

@property
def _required_attribute(self):
if isinstance(self.operation, type(M.method_caller)):
return self.operation.method
return None

@functools.cached_property
def _meta(self):
args = [op._meta if isinstance(op, Expr) else op for op in self._args]
Expand Down
9 changes: 4 additions & 5 deletions dask_expr/_reductions.py
Original file line number Diff line number Diff line change
Expand Up @@ -863,27 +863,27 @@ class IdxMin(Reduction):
reduction_chunk = idxmaxmin_chunk
reduction_combine = idxmaxmin_combine
reduction_aggregate = idxmaxmin_agg
_required_attribute = "idxmin"
_reduction_attribute = "idxmin"

@property
def chunk_kwargs(self):
return dict(
skipna=self.skipna,
fn=self._required_attribute,
fn=self._reduction_attribute,
numeric_only=self.numeric_only,
)

@property
def combine_kwargs(self):
return dict(skipna=self.skipna, fn=self._required_attribute)
return dict(skipna=self.skipna, fn=self._reduction_attribute)

@property
def aggregate_kwargs(self):
return {**self.chunk_kwargs, "scalar": is_series_like(self.frame._meta)}


class IdxMax(IdxMin):
_required_attribute = "idxmax"
_reduction_attribute = "idxmax"


class Cov(Reduction):
Expand Down Expand Up @@ -967,7 +967,6 @@ def _simplify_up(self, parent, dependents):
class NBytes(Reduction):
# Only supported for Series objects
reduction_aggregate = sum
_required_attribute = "nbytes"

@staticmethod
def reduction_chunk(ser):
Expand Down
Loading