Add type error suppressions for upcoming upgrade #1342

Status: Open. Wants to merge 1 commit into base: master.
captum/attr/_core/dataloader_attr.py (9 additions, 0 deletions)
@@ -369,6 +369,9 @@ def attribute(
         assert len(input_roles) == len(inputs), (
             "input_roles must have the same size as the return of the dataloader,",
             f"length of input_roles is {len(input_roles)} ",
+            # pyre-fixme[6]: For 1st argument expected
+            # `pyre_extensions.ReadOnly[Sized]` but got
+            # `Optional[typing.Tuple[typing.Any, ...]]`.
             f"whereas the length of dataloader return is {len(inputs)}",
         )

@@ -395,6 +398,9 @@ def attribute(
"Baselines must have the same size as the return of the dataloader ",
"that need attribution",
f"length of baseline is {len(baselines)} ",
# pyre-fixme[6]: For 1st argument expected
# `pyre_extensions.ReadOnly[Sized]` but got
# `Optional[typing.Tuple[typing.Any, ...]]`.
f'whereas the length of dataloader return with role "0" is {len(inputs)}',
)

@@ -413,6 +419,9 @@ def attribute(
"Feature mask must have the same size as the return of the dataloader ",
"that need attribution",
f"length of feature_mask is {len(feature_mask)} ",
# pyre-fixme[6]: For 1st argument expected
# `pyre_extensions.ReadOnly[Sized]` but got
# `Optional[typing.Tuple[typing.Any, ...]]`.
f'whereas the length of dataloader return with role "0" is {len(inputs)}',
)

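For readers unfamiliar with the markers above: a `# pyre-fixme[6]` comment tells the Pyre type checker to ignore an "incompatible parameter type" error on the line that follows. The three suppressions in this file all cover `len(inputs)`, where Pyre still types `inputs` as `Optional[Tuple[Any, ...]]`. A minimal, self-contained sketch of the pattern, with illustrative names rather than Captum's:

```python
from typing import Any, Optional, Tuple


def describe(inputs: Optional[Tuple[Any, ...]]) -> str:
    # `len` requires a `Sized` argument, but as far as Pyre can tell `inputs`
    # may still be `None`, so the call is flagged with error code 6. The
    # comment below silences exactly that code on the next line.
    # pyre-fixme[6]: For 1st argument expected `Sized` but got
    #  `Optional[typing.Tuple[typing.Any, ...]]`.
    return f"dataloader returned {len(inputs)} items"


print(describe((1, 2, 3)))  # -> "dataloader returned 3 items"
```

Narrowing with `assert inputs is not None` before the call would satisfy the checker without a suppression; this PR opts for suppressions, presumably so the upgrade can land without touching runtime behavior (every hunk adds only comments).
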
captum/attr/_core/deep_lift.py (3 additions, 0 deletions)
@@ -833,6 +833,9 @@ def attribute( # type: ignore
" with more than one example but found: {}."
" If baselines are provided in shape of scalars or with a single"
" baseline example, `DeepLift`"
# pyre-fixme[16]: Item `Callable` of `Union[(...) ->
# TensorOrTupleOfTensorsGeneric, TensorOrTupleOfTensorsGeneric]` has no
# attribute `__getitem__`.
" approach can be used instead.".format(baselines[0])
)

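`# pyre-fixme[16]` suppresses a "has no attribute" error. `baselines` is typed as a union that includes a callable, and a callable supports no `__getitem__`, so Pyre rejects `baselines[0]` even on paths where a tuple is guaranteed at runtime. A hypothetical reduction of the same shape (the `Baselines` alias stands in for Captum's callable-or-tensors union):

```python
from typing import Callable, Tuple, Union

# Stand-in for Captum's callable-or-TensorOrTupleOfTensorsGeneric union.
Baselines = Union[Callable[[], Tuple[float, ...]], Tuple[float, ...]]


def first_baseline(baselines: Baselines) -> float:
    # Pyre checks the subscript against every union member; the `Callable`
    # member has no `__getitem__`, hence error code 16.
    # pyre-fixme[16]: Item `Callable` of `Union[...]` has no attribute
    #  `__getitem__`.
    return baselines[0]


print(first_baseline((0.25, 0.75)))  # -> 0.25
```

The same suppression recurs in gradient_shap.py and layer_gradient_shap.py below, where the surrounding `isinstance` asserts already guard the tuple case at runtime.
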
captum/attr/_core/gradient_shap.py (3 additions, 0 deletions)
@@ -275,6 +275,9 @@ def attribute(
         # attribute `__getitem__`.
         assert isinstance(baselines[0], torch.Tensor), (
             "Baselines distribution has to be provided in a form "
+            # pyre-fixme[16]: Item `Callable` of `Union[(...) ->
+            # TensorOrTupleOfTensorsGeneric, TensorOrTupleOfTensorsGeneric]` has no
+            # attribute `__getitem__`.
             "of a torch.Tensor {}.".format(baselines[0])
         )

captum/attr/_core/layer/layer_gradient_shap.py (2 additions, 0 deletions)
@@ -308,6 +308,8 @@ def attribute(
         # TensorOrTupleOfTensorsGeneric]` has no attribute `__getitem__`.
         assert isinstance(baselines[0], torch.Tensor), (
             "Baselines distribution has to be provided in a form "
+            # pyre-fixme[16]: Item `Callable` of `Union[(...) -> Any,
+            # TensorOrTupleOfTensorsGeneric]` has no attribute `__getitem__`.
             "of a torch.Tensor {}.".format(baselines[0])
         )

captum/attr/_utils/attribution.py (4 additions, 0 deletions)
@@ -43,6 +43,7 @@ def __init__(self, forward_func: Callable) -> None:
         self.forward_func = forward_func

     # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
+    # pyre-fixme[13]: Attribute `attribute` is never initialized.
     attribute: Callable
     r"""
     This method computes and returns the attribution values for each input tensor.
@@ -74,6 +75,7 @@ def __init__(self, forward_func: Callable) -> None:
"""

# pyre-fixme[24] Generic type `Callable` expects 2 type parameters.
# pyre-fixme[13]: Attribute `attribute_future` is never initialized.
attribute_future: Callable

r"""
@@ -126,6 +128,7 @@ def has_convergence_delta(self) -> bool:
         return False

     # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
+    # pyre-fixme[13]: Attribute `compute_convergence_delta` is never initialized.
     compute_convergence_delta: Callable
     r"""
     The attribution algorithms which derive `Attribution` class and provide
@@ -504,6 +507,7 @@ def __init__(
         InternalAttribution.__init__(self, forward_func, layer, device_ids)

     # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
+    # pyre-fixme[13]: Attribute `attribute` is never initialized.
     attribute: Callable
     r"""
     This method computes and returns the neuron attribution values for each
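Two codes recur in this file. `[24]` (already present) flags a bare `Callable` annotation with no parameter or return types; `[13]` (added by this PR) flags a class attribute that is annotated but never assigned. Captum declares `attribute` and friends this way deliberately, as documented slots that concrete subclasses fill in, so suppression rather than initialization is the fix. A small sketch of both codes together, using hypothetical names:

```python
from typing import Callable


class Algorithm:
    # Annotated but never assigned: constructing `Algorithm` directly and
    # calling `run` would raise `AttributeError`, and Pyre reports code 13.
    # The bare `Callable` (missing `[args, return]` parameters) is code 24.
    # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
    # pyre-fixme[13]: Attribute `run` is never initialized.
    run: Callable


class Concrete(Algorithm):
    def run(self) -> str:
        # Concrete subclasses supply the real implementation.
        return "ok"


print(Concrete().run())  # -> "ok"
```
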
captum/concept/_core/concept.py (1 addition, 0 deletions)
@@ -74,6 +74,7 @@ def __init__(self, model: Module) -> None:
         self.model = model

     # pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
+    # pyre-fixme[13]: Attribute `interpret` is never initialized.
     interpret: Callable
     r"""
     An abstract interpret method that performs concept-based model interpretability
captum/robust/_core/metrics/min_param_perturbation.py (4 additions, 0 deletions)
@@ -21,8 +21,12 @@ def drange(
     min_val: Union[int, float], max_val: Union[int, float], step_val: Union[int, float]
 ) -> Generator[Union[int, float], None, None]:
     curr = min_val
+    # pyre-fixme[58]: `>` is not supported for operand types `Union[float, int]` and
+    # `int`.
     while curr < max_val:
         yield curr
+        # pyre-fixme[58]: `+` is not supported for operand types `Union[float, int]`
+        # and `Union[float, int]`.
         curr += step_val


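`# pyre-fixme[58]` covers unsupported binary operations. Under the stricter arithmetic checking the upgrade appears to enable, comparing or adding two `Union[int, float]` values is reported even though both operations succeed at runtime for every member of the union. (The diff's first suppression names `>` although the code uses `<`; the comment matches whatever text the checker emits.) A runnable sketch mirroring `drange`, with the illustrative name `frange`:

```python
from typing import Generator, Union


def frange(
    start: Union[int, float], stop: Union[int, float], step: Union[int, float]
) -> Generator[Union[int, float], None, None]:
    curr = start
    # pyre-fixme[58]: `<` is not supported for operand types `Union[float, int]`
    #  and `Union[float, int]`.
    while curr < stop:
        yield curr
        # pyre-fixme[58]: `+` is not supported for operand types
        #  `Union[float, int]` and `Union[float, int]`.
        curr += step


print(list(frange(0, 1, 0.25)))  # -> [0, 0.25, 0.5, 0.75]
```
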
captum/robust/_core/perturbation.py (1 addition, 0 deletions)
@@ -12,6 +12,7 @@ class Perturbation:
"""

# pyre-fixme[24]: Generic type `Callable` expects 2 type parameters.
# pyre-fixme[13]: Attribute `perturb` is never initialized.
perturb: Callable
r"""
This method computes and returns the perturbed input for each input tensor.
tests/attr/test_llm_attr.py (3 additions, 0 deletions)
@@ -156,7 +156,9 @@ def device(self) -> torch.device:
 # pyre-fixme[13]: Attribute `device` is never initialized.
 # pyre-fixme[13]: Attribute `use_cached_outputs` is never initialized.
 class TestLLMAttr(BaseTest):
+    # pyre-fixme[13]: Attribute `device` is never initialized.
     device: str
+    # pyre-fixme[13]: Attribute `use_cached_outputs` is never initialized.
     use_cached_outputs: bool

     # pyre-fixme[56]: Pyre was not able to infer the type of argument `comprehension
@@ -377,6 +379,7 @@ def test_futures_not_implemented(self) -> None:
 )
 # pyre-fixme[13]: Attribute `device` is never initialized.
 class TestLLMGradAttr(BaseTest):
+    # pyre-fixme[13]: Attribute `device` is never initialized.
     device: str

     @parameterized.expand(
tests/attr/test_llm_attr_gpu.py (3 additions, 0 deletions)
@@ -149,7 +149,9 @@ def device(self) -> torch._C.device:
 # pyre-fixme[13]: Attribute `use_cached_outputs` is declared in class `TestLlmAttrGpu`
 # to have type `bool` but is never initialized.
 class TestLlmAttrGpu(BaseTest):
+    # pyre-fixme[13]: Attribute `device` is never initialized.
     device: str
+    # pyre-fixme[13]: Attribute `use_cached_outputs` is never initialized.
     use_cached_outputs: bool

     @parameterized.expand([(FeatureAblation,), (ShapleyValueSampling,)])
@@ -235,6 +237,7 @@ def test_llm_attr_without_token_gpu(
 # pyre-fixme[13]: Attribute `device` is declared in class `TestLLMGradAttrGPU`
 # to have type `str` but is never initialized.
 class TestLLMGradAttrGPU(BaseTest):
+    # pyre-fixme[13]: Attribute `device` is never initialized.
     device: str

     def test_llm_attr(self) -> None: