Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Unify r/fbeta/fbeta_score #740

Merged
merged 7 commits into from
Jan 11, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
* `torchmetrics.functional.hinge` -> `torchmetrics.functional.hinge_loss`
* `torchmetrics.Hinge` -> `torchmetrics.HingeLoss`

- Renamed F-Beta metrics: ([#740](https://github.com/PyTorchLightning/metrics/pull/740))
* `torchmetrics.functional.fbeta` -> `torchmetrics.functional.fbeta_score`
* `torchmetrics.FBeta` -> `torchmetrics.FBetaScore`


### Removed

Expand Down
4 changes: 2 additions & 2 deletions docs/source/references/functional.rst
Original file line number Diff line number Diff line change
Expand Up @@ -126,10 +126,10 @@ f1 [func]
:noindex:


fbeta [func]
fbeta_score [func]
~~~~~~~~~~~~~~~~~~

.. autofunction:: torchmetrics.functional.fbeta
.. autofunction:: torchmetrics.functional.fbeta_score
:noindex:

hamming_distance [func]
Expand Down
6 changes: 3 additions & 3 deletions docs/source/references/modules.rst
Original file line number Diff line number Diff line change
Expand Up @@ -286,10 +286,10 @@ F1Score
.. autoclass:: torchmetrics.F1Score
:noindex:

FBeta
~~~~~
FBetaScore
~~~~~~~~~~

.. autoclass:: torchmetrics.FBeta
.. autoclass:: torchmetrics.FBetaScore
:noindex:

HammingDistance
Expand Down
20 changes: 10 additions & 10 deletions tests/classification/test_f_beta.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,9 @@
from tests.classification.inputs import _input_multilabel_prob as _input_mlb_prob
from tests.helpers import seed_all
from tests.helpers.testers import NUM_BATCHES, NUM_CLASSES, THRESHOLD, MetricTester
from torchmetrics import F1Score, FBeta, Metric
from torchmetrics import F1Score, FBetaScore, Metric
from torchmetrics.functional import f1_score as f1_score_pl
from torchmetrics.functional import fbeta
from torchmetrics.functional import fbeta_score as fbeta_score_pl
from torchmetrics.utilities.checks import _input_format_classification
from torchmetrics.utilities.enums import AverageMethod

Expand Down Expand Up @@ -93,7 +93,7 @@ def _sk_fbeta_f1_multidim_multiclass(
@pytest.mark.parametrize(
"metric_class, metric_fn",
[
(partial(FBeta, beta=2.0), partial(fbeta, beta=2.0)),
(partial(FBetaScore, beta=2.0), partial(fbeta_score_pl, beta=2.0)),
(F1Score, f1_score_pl),
],
)
Expand Down Expand Up @@ -129,7 +129,7 @@ def test_wrong_params(metric_class, metric_fn, average, mdmc_average, num_classe
@pytest.mark.parametrize(
"metric_class, metric_fn",
[
(partial(FBeta, beta=2.0), partial(fbeta, beta=2.0)),
(partial(FBetaScore, beta=2.0), partial(fbeta_score_pl, beta=2.0)),
(F1Score, f1_score_pl),
],
)
Expand All @@ -151,7 +151,7 @@ def test_zero_division(metric_class, metric_fn):
@pytest.mark.parametrize(
"metric_class, metric_fn",
[
(partial(FBeta, beta=2.0), partial(fbeta, beta=2.0)),
(partial(FBetaScore, beta=2.0), partial(fbeta_score_pl, beta=2.0)),
(F1Score, f1_score_pl),
],
)
Expand Down Expand Up @@ -182,7 +182,7 @@ def test_no_support(metric_class, metric_fn):
@pytest.mark.parametrize(
"metric_class, metric_fn",
[
(partial(FBeta, beta=2.0), partial(fbeta, beta=2.0)),
(partial(FBetaScore, beta=2.0), partial(fbeta_score_pl, beta=2.0)),
(F1Score, f1_score_pl),
],
)
Expand Down Expand Up @@ -210,7 +210,7 @@ def test_class_not_present(metric_class, metric_fn, ignore_index, expected):
@pytest.mark.parametrize(
"metric_class, metric_fn, sk_fn",
[
(partial(FBeta, beta=2.0), partial(fbeta, beta=2.0), partial(fbeta_score, beta=2.0)),
(partial(FBetaScore, beta=2.0), partial(fbeta_score_pl, beta=2.0), partial(fbeta_score, beta=2.0)),
(F1Score, f1_score_pl, f1_score),
],
)
Expand Down Expand Up @@ -397,8 +397,8 @@ def test_fbeta_f1_differentiability(
@pytest.mark.parametrize(
"metric_class, metric_fn",
[
(partial(FBeta, beta=2.0), partial(fbeta, beta=2.0)),
(F1Score, fbeta),
(partial(FBetaScore, beta=2.0), partial(fbeta_score_pl, beta=2.0)),
(F1Score, fbeta_score_pl),
],
)
@pytest.mark.parametrize(
Expand Down Expand Up @@ -441,7 +441,7 @@ def test_top_k(
@pytest.mark.parametrize(
"metric_class, metric_functional, sk_fn",
[
(partial(FBeta, beta=2.0), partial(fbeta, beta=2.0), partial(fbeta_score, beta=2.0)),
(partial(FBetaScore, beta=2.0), partial(fbeta_score_pl, beta=2.0), partial(fbeta_score, beta=2.0)),
(F1Score, f1_score_pl, f1_score),
],
)
Expand Down
2 changes: 2 additions & 0 deletions torchmetrics/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
ConfusionMatrix,
F1Score,
FBeta,
FBetaScore,
HammingDistance,
Hinge,
HingeLoss,
Expand Down Expand Up @@ -120,6 +121,7 @@
"F1",
"F1Score",
"FBeta",
"FBetaScore",
"HammingDistance",
"Hinge",
"HingeLoss",
Expand Down
2 changes: 1 addition & 1 deletion torchmetrics/classification/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from torchmetrics.classification.calibration_error import CalibrationError # noqa: F401
from torchmetrics.classification.cohen_kappa import CohenKappa # noqa: F401
from torchmetrics.classification.confusion_matrix import ConfusionMatrix # noqa: F401
from torchmetrics.classification.f_beta import F1, F1Score, FBeta # noqa: F401
from torchmetrics.classification.f_beta import F1, F1Score, FBeta, FBetaScore # noqa: F401
from torchmetrics.classification.hamming_distance import HammingDistance # noqa: F401
from torchmetrics.classification.hinge import Hinge, HingeLoss # noqa: F401
from torchmetrics.classification.iou import IoU # noqa: F401
Expand Down
53 changes: 49 additions & 4 deletions torchmetrics/classification/f_beta.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
from torchmetrics.utilities.enums import AverageMethod


class FBeta(StatScores):
class FBetaScore(StatScores):
r"""
Computes `F-score`_, specifically:

Expand Down Expand Up @@ -124,10 +124,10 @@ class FBeta(StatScores):
If ``average`` is none of ``"micro"``, ``"macro"``, ``"weighted"``, ``"none"``, ``None``.

Example:
>>> from torchmetrics import FBeta
>>> from torchmetrics import FBetaScore
>>> target = torch.tensor([0, 1, 2, 0, 1, 2])
>>> preds = torch.tensor([0, 2, 1, 0, 0, 1])
>>> f_beta = FBeta(num_classes=3, beta=0.5)
>>> f_beta = FBetaScore(num_classes=3, beta=0.5)
>>> f_beta(preds, target)
tensor(0.3333)

Expand Down Expand Up @@ -175,7 +175,52 @@ def compute(self) -> Tensor:
return _fbeta_compute(tp, fp, tn, fn, self.beta, self.ignore_index, self.average, self.mdmc_reduce)


class F1Score(FBeta):
class FBeta(FBetaScore):
    r"""
    Computes `F-score`_, specifically:

    .. deprecated:: v0.7
        Use :class:`torchmetrics.FBetaScore`. Will be removed in v0.8.

    Example::
        >>> f_beta = FBeta(num_classes=3, beta=0.5)
        >>> f_beta(torch.tensor([0, 2, 1, 0, 0, 1]), torch.tensor([0, 1, 2, 0, 1, 2]))
        tensor(0.3333)
    """

    def __init__(
        self,
        num_classes: Optional[int] = None,
        beta: float = 1.0,
        threshold: float = 0.5,
        average: str = "micro",
        mdmc_average: Optional[str] = None,
        ignore_index: Optional[int] = None,
        top_k: Optional[int] = None,
        multiclass: Optional[bool] = None,
        compute_on_step: bool = True,
        dist_sync_on_step: bool = False,
        process_group: Optional[Any] = None,
        dist_sync_fn: Callable = None,
    ) -> None:
        # Deprecated alias kept for backward compatibility: warn once at
        # construction time, then delegate every argument unchanged to the
        # renamed class so behavior is identical to ``FBetaScore``.
        warn("`FBeta` was renamed to `FBetaScore` in v0.7 and it will be removed in v0.8", DeprecationWarning)
        super().__init__(
            num_classes=num_classes,
            beta=beta,
            threshold=threshold,
            average=average,
            mdmc_average=mdmc_average,
            ignore_index=ignore_index,
            top_k=top_k,
            multiclass=multiclass,
            compute_on_step=compute_on_step,
            dist_sync_on_step=dist_sync_on_step,
            process_group=process_group,
            dist_sync_fn=dist_sync_fn,
        )


class F1Score(FBetaScore):
"""Computes F1 metric. F1 metrics correspond to a harmonic mean of the precision and recall scores.

Works with binary, multiclass, and multilabel data. Accepts logits or probabilities from a model
Expand Down
3 changes: 2 additions & 1 deletion torchmetrics/functional/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
from torchmetrics.functional.classification.cohen_kappa import cohen_kappa
from torchmetrics.functional.classification.confusion_matrix import confusion_matrix
from torchmetrics.functional.classification.dice import dice_score
from torchmetrics.functional.classification.f_beta import f1, f1_score, fbeta
from torchmetrics.functional.classification.f_beta import f1, f1_score, fbeta, fbeta_score
from torchmetrics.functional.classification.hamming_distance import hamming_distance
from torchmetrics.functional.classification.hinge import hinge, hinge_loss
from torchmetrics.functional.classification.iou import iou # noqa: F401
Expand Down Expand Up @@ -98,6 +98,7 @@
"f1",
"f1_score",
"fbeta",
"fbeta_score",
"hamming_distance",
"hinge",
"hinge_loss",
Expand Down
2 changes: 1 addition & 1 deletion torchmetrics/functional/classification/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from torchmetrics.functional.classification.cohen_kappa import cohen_kappa # noqa: F401
from torchmetrics.functional.classification.confusion_matrix import confusion_matrix # noqa: F401
from torchmetrics.functional.classification.dice import dice_score # noqa: F401
from torchmetrics.functional.classification.f_beta import f1, f1_score, fbeta # noqa: F401
from torchmetrics.functional.classification.f_beta import f1, f1_score, fbeta, fbeta_score # noqa: F401
from torchmetrics.functional.classification.hamming_distance import hamming_distance # noqa: F401
from torchmetrics.functional.classification.hinge import hinge, hinge_loss # noqa: F401
from torchmetrics.functional.classification.jaccard import jaccard_index # noqa: F401
Expand Down
38 changes: 34 additions & 4 deletions torchmetrics/functional/classification/f_beta.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ def _fbeta_compute(
)


def fbeta(
def fbeta_score(
preds: Tensor,
target: Tensor,
beta: float = 1.0,
Expand Down Expand Up @@ -208,10 +208,10 @@ def fbeta(
of classes

Example:
>>> from torchmetrics.functional import fbeta
>>> from torchmetrics.functional import fbeta_score
>>> target = torch.tensor([0, 1, 2, 0, 1, 2])
>>> preds = torch.tensor([0, 2, 1, 0, 0, 1])
>>> fbeta(preds, target, num_classes=3, beta=0.5)
>>> fbeta_score(preds, target, num_classes=3, beta=0.5)
tensor(0.3333)

"""
Expand Down Expand Up @@ -244,6 +244,34 @@ def fbeta(
return _fbeta_compute(tp, fp, tn, fn, beta, ignore_index, average, mdmc_average)


def fbeta(
    preds: Tensor,
    target: Tensor,
    beta: float = 1.0,
    average: str = "micro",
    mdmc_average: Optional[str] = None,
    ignore_index: Optional[int] = None,
    num_classes: Optional[int] = None,
    threshold: float = 0.5,
    top_k: Optional[int] = None,
    multiclass: Optional[bool] = None,
) -> Tensor:
    r"""
    Computes f_beta metric.

    .. deprecated:: v0.7
        Use :func:`torchmetrics.functional.fbeta_score`. Will be removed in v0.8.

    Example::
        >>> fbeta(torch.tensor([0, 2, 1, 0, 0, 1]), torch.tensor([0, 1, 2, 0, 1, 2]), num_classes=3, beta=0.5)
        tensor(0.3333)
    """
    # Fixed copy/paste bug: the warning previously claimed "`f1` was renamed to
    # `f1_score`", but this deprecated alias is `fbeta` -> `fbeta_score`.
    warn("`fbeta` was renamed to `fbeta_score` in v0.7 and it will be removed in v0.8", DeprecationWarning)
    # Forward all arguments positionally, matching `fbeta_score`'s parameter order.
    return fbeta_score(
        preds, target, beta, average, mdmc_average, ignore_index, num_classes, threshold, top_k, multiclass
    )


def f1_score(
preds: Tensor,
target: Tensor,
Expand Down Expand Up @@ -349,7 +377,9 @@ def f1_score(
>>> f1_score(preds, target, num_classes=3)
tensor(0.3333)
"""
return fbeta(preds, target, 1.0, average, mdmc_average, ignore_index, num_classes, threshold, top_k, multiclass)
return fbeta_score(
preds, target, 1.0, average, mdmc_average, ignore_index, num_classes, threshold, top_k, multiclass
)


def f1(
Expand Down