Add plot to detection #1585

Merged · 20 commits · Mar 6, 2023
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -23,6 +23,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
[#1480](https://github.com/Lightning-AI/metrics/pull/1480),
[#1490](https://github.com/Lightning-AI/metrics/pull/1490),
[#1581](https://github.com/Lightning-AI/metrics/pull/1581),
[#1585](https://github.com/Lightning-AI/metrics/pull/1585),
[#1593](https://github.com/Lightning-AI/metrics/pull/1593),
)

1 change: 1 addition & 0 deletions docs/source/conf.py
@@ -404,6 +404,7 @@ def _get_version_str():
import os
import torch

from torch import Tensor
from torchmetrics import Metric

"""
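The added import appears to land inside the Sphinx `doctest_global_setup` string (the closing `"""` above suggests a triple-quoted setup block), which runs before every doctest in the docs build, so the new `plot` doctests can reference `Tensor` without importing it themselves. A minimal sketch of that setting, assuming only what the hunk above shows about the surrounding lines:

```python
# docs/source/conf.py (sketch) -- ``doctest_global_setup`` is sphinx.ext.doctest's
# setup hook; only ``from torch import Tensor`` is new here, the surrounding
# lines are taken from the hunk context above.
doctest_global_setup = """
import os
import torch

from torch import Tensor
from torchmetrics import Metric
"""
```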
32 changes: 32 additions & 0 deletions examples/plotting.py
@@ -362,6 +362,37 @@ def universal_image_quality_index():
return fig, ax


def mean_average_precision():
"""Plot MAP metric."""
from torchmetrics.detection.mean_ap import MeanAveragePrecision

preds = lambda: [
{
"boxes": torch.tensor([[258.0, 41.0, 606.0, 285.0]]) + torch.randint(10, (1, 4)),
"scores": torch.tensor([0.536]) + 0.1 * torch.rand(1),
"labels": torch.tensor([0]),
}
]
target = [
{
"boxes": torch.tensor([[214.0, 41.0, 562.0, 285.0]]),
"labels": torch.tensor([0]),
}
]

# plot single value
metric = MeanAveragePrecision()
metric.update(preds(), target)
fig, ax = metric.plot()

# plot multiple values
metric = MeanAveragePrecision()
vals = [metric(preds(), target) for _ in range(10)]
fig, ax = metric.plot(vals)

return fig, ax


if __name__ == "__main__":
metrics_func = {
"accuracy": accuracy_example,
@@ -373,6 +404,7 @@ def universal_image_quality_index():
"si-snr": si_snr_example,
"stoi": stoi_example,
"mean_squared_error": mean_squared_error_example,
"mean_average_precision": mean_average_precision,
"confusion_matrix": confusion_matrix_example,
"spectral_distortion_index": spectral_distortion_index_example,
"error_relative_global_dimensionless_synthesis": error_relative_global_dimensionless_synthesis,
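For readers who want to try the new detection plot outside of `examples/plotting.py`, a minimal standalone sketch of what `mean_average_precision` exercises; it assumes torchmetrics is installed with its detection dependencies (`pycocotools`, `torchvision`) plus matplotlib, and the output file name is illustrative only:

```python
import torch
from torchmetrics.detection.mean_ap import MeanAveragePrecision

# One predicted and one ground-truth box for a single image, mirroring the example above.
preds = [{
    "boxes": torch.tensor([[258.0, 41.0, 606.0, 285.0]]),
    "scores": torch.tensor([0.536]),
    "labels": torch.tensor([0]),
}]
target = [{
    "boxes": torch.tensor([[214.0, 41.0, 562.0, 285.0]]),
    "labels": torch.tensor([0]),
}]

metric = MeanAveragePrecision()
metric.update(preds, target)
fig, ax = metric.plot()                    # plotting API added by this PR
fig.savefig("mean_average_precision.png")  # illustrative file name, not part of the PR
```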
1 change: 1 addition & 0 deletions requirements/docs.txt
@@ -15,3 +15,4 @@ sphinx-copybutton>=0.3
-r integrate.txt
-r visual.txt
-r audio.txt
-r detection.txt
2 changes: 1 addition & 1 deletion src/torchmetrics/aggregation.py
@@ -20,7 +20,7 @@
from torchmetrics.utilities import rank_zero_warn
from torchmetrics.utilities.data import dim_zero_cat
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["SumMetric.plot", "MeanMetric.plot", "MaxMetric.plot", "MinMetric.plot"]
2 changes: 1 addition & 1 deletion src/torchmetrics/audio/pesq.py
@@ -18,7 +18,7 @@
from torchmetrics.functional.audio.pesq import perceptual_evaluation_speech_quality
from torchmetrics.metric import Metric
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE, _PESQ_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

__doctest_requires__ = {"PerceptualEvaluationSpeechQuality": ["pesq"]}

2 changes: 1 addition & 1 deletion src/torchmetrics/audio/pit.py
@@ -19,7 +19,7 @@
from torchmetrics.functional.audio.pit import permutation_invariant_training
from torchmetrics.metric import Metric
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

__doctest_requires__ = {"PermutationInvariantTraining": ["pit"]}

2 changes: 1 addition & 1 deletion src/torchmetrics/audio/sdr.py
@@ -18,7 +18,7 @@
from torchmetrics.functional.audio.sdr import scale_invariant_signal_distortion_ratio, signal_distortion_ratio
from torchmetrics.metric import Metric
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

__doctest_requires__ = {"SignalDistortionRatio": ["fast_bss_eval"]}

2 changes: 1 addition & 1 deletion src/torchmetrics/audio/snr.py
@@ -18,7 +18,7 @@
from torchmetrics.functional.audio.snr import scale_invariant_signal_noise_ratio, signal_noise_ratio
from torchmetrics.metric import Metric
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["SignalNoiseRatio.plot", "ScaleInvariantSignalNoiseRatio.plot"]
2 changes: 1 addition & 1 deletion src/torchmetrics/audio/stoi.py
@@ -18,7 +18,7 @@
from torchmetrics.functional.audio.stoi import short_time_objective_intelligibility
from torchmetrics.metric import Metric
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE, _PYSTOI_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

__doctest_requires__ = {"ShortTimeObjectiveIntelligibility": ["pystoi"]}

2 changes: 1 addition & 1 deletion src/torchmetrics/classification/accuracy.py
@@ -20,7 +20,7 @@
from torchmetrics.metric import Metric
from torchmetrics.utilities.enums import ClassificationTask
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["BinaryAccuracy.plot", "MulticlassAccuracy.plot", "MultilabelAccuracy.plot"]
2 changes: 1 addition & 1 deletion src/torchmetrics/classification/auroc.py
@@ -33,7 +33,7 @@
from torchmetrics.utilities.data import dim_zero_cat
from torchmetrics.utilities.enums import ClassificationTask
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["BinaryAUROC.plot", "MulticlassAUROC.plot", "MultilabelAUROC.plot"]
68 changes: 65 additions & 3 deletions src/torchmetrics/detection/mean_ap.py
@@ -20,19 +20,23 @@

from torchmetrics.metric import Metric
from torchmetrics.utilities.data import _cumsum
from torchmetrics.utilities.imports import _PYCOCOTOOLS_AVAILABLE, _TORCHVISION_GREATER_EQUAL_0_8
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE, _PYCOCOTOOLS_AVAILABLE, _TORCHVISION_GREATER_EQUAL_0_8
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["MeanAveragePrecision.plot"]

if _TORCHVISION_GREATER_EQUAL_0_8:
from torchvision.ops import box_area, box_convert, box_iou
else:
box_convert = box_iou = box_area = None
__doctest_skip__ = ["MeanAveragePrecision"]
__doctest_skip__ = ["MeanAveragePrecision.plot", "MeanAveragePrecision"]

if _PYCOCOTOOLS_AVAILABLE:
import pycocotools.mask as mask_utils
else:
mask_utils = None
__doctest_skip__ = ["MeanAveragePrecision"]
__doctest_skip__ = ["MeanAveragePrecision.plot", "MeanAveragePrecision"]


log = logging.getLogger(__name__)
@@ -912,3 +916,61 @@ def compute(self) -> dict:
metrics[f"mar_{self.max_detection_thresholds[-1]}_per_class"] = mar_max_dets_per_class_values
metrics.classes = torch.tensor(classes, dtype=torch.int)
return metrics

def plot(
self, val: Optional[Union[Dict[str, Tensor], Sequence[Dict[str, Tensor]]]] = None, ax: Optional[_AX_TYPE] = None
) -> _PLOT_OUT_TYPE:
"""Plot a single or multiple values from the metric.

Args:
val: Either a single result from calling `metric.forward` or `metric.compute`, or a list of these results.
If no value is provided, will automatically call `metric.compute` and plot that result.
ax: A matplotlib axis object. If provided, the plot will be added to that axis.

Returns:
Figure object and Axes object

Raises:
ModuleNotFoundError:
If `matplotlib` is not installed

.. plot::
:scale: 75

>>> from torch import tensor
>>> from torchmetrics.detection.mean_ap import MeanAveragePrecision
>>> preds = [dict(
... boxes=tensor([[258.0, 41.0, 606.0, 285.0]]),
... scores=tensor([0.536]),
... labels=tensor([0]),
... )]
>>> target = [dict(
... boxes=tensor([[214.0, 41.0, 562.0, 285.0]]),
... labels=tensor([0]),
... )]
>>> metric = MeanAveragePrecision()
>>> metric.update(preds, target)
>>> fig_, ax_ = metric.plot()

.. plot::
:scale: 75

>>> # Example plotting multiple values
>>> import torch
>>> from torchmetrics.detection.mean_ap import MeanAveragePrecision
>>> preds = lambda: [dict(
... boxes=torch.tensor([[258.0, 41.0, 606.0, 285.0]]) + torch.randint(10, (1,4)),
... scores=torch.tensor([0.536]) + 0.1*torch.rand(1),
... labels=torch.tensor([0]),
... )]
>>> target = [dict(
... boxes=torch.tensor([[214.0, 41.0, 562.0, 285.0]]),
... labels=torch.tensor([0]),
... )]
>>> metric = MeanAveragePrecision()
>>> vals = []
>>> for _ in range(20):
... vals.append(metric(preds(), target))
>>> fig_, ax_ = metric.plot(vals)
"""
return self._plot(val, ax)
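The doctests above let `plot` create its own figure; here is a hedged sketch of the `ax` argument from the docstring, drawing both the single-value and multi-value plots onto subplots the caller already owns (the two-panel layout and output path are assumptions for illustration, not part of this PR):

```python
import torch
import matplotlib.pyplot as plt
from torchmetrics.detection.mean_ap import MeanAveragePrecision

preds = [{"boxes": torch.tensor([[258.0, 41.0, 606.0, 285.0]]),
          "scores": torch.tensor([0.536]),
          "labels": torch.tensor([0])}]
target = [{"boxes": torch.tensor([[214.0, 41.0, 562.0, 285.0]]),
           "labels": torch.tensor([0])}]

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 4))

metric = MeanAveragePrecision()
metric.update(preds, target)
metric.plot(ax=ax1)                # single computed result on the left panel

vals = [metric(preds, target) for _ in range(5)]
metric.plot(vals, ax=ax2)          # trend over repeated forward calls on the right

fig.savefig("map_plot_panels.png") # illustrative output path
```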
70 changes: 68 additions & 2 deletions src/torchmetrics/detection/panoptic_quality.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from typing import Any, Collection
from typing import Any, Collection, Optional, Sequence, Union

import torch
from torch import Tensor
@@ -27,6 +27,11 @@
_validate_inputs,
)
from torchmetrics.metric import Metric
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["PanopticQuality.plot"]


class PanopticQuality(Metric):
@@ -59,8 +64,9 @@ class PanopticQuality(Metric):
TypeError:
If ``things``, ``stuffs`` contain non-integer ``category_id``.

Example:
Example:
>>> from torch import tensor
>>> from torchmetrics import PanopticQuality
>>> preds = tensor([[[[6, 0], [0, 0], [6, 0], [6, 0]],
... [[0, 0], [0, 0], [6, 0], [0, 1]],
... [[0, 0], [0, 0], [6, 0], [0, 1]],
@@ -146,3 +152,63 @@ def update(self, preds: Tensor, target: Tensor) -> None:
def compute(self) -> Tensor:
"""Compute panoptic quality based on inputs passed in to ``update`` previously."""
return _panoptic_quality_compute(self.iou_sum, self.true_positives, self.false_positives, self.false_negatives)

def plot(
self, val: Optional[Union[Tensor, Sequence[Tensor]]] = None, ax: Optional[_AX_TYPE] = None
) -> _PLOT_OUT_TYPE:
"""Plot a single or multiple values from the metric.

Args:
val: Either a single result from calling `metric.forward` or `metric.compute`, or a list of these results.
If no value is provided, will automatically call `metric.compute` and plot that result.
ax: A matplotlib axis object. If provided, the plot will be added to that axis.

Returns:
Figure object and Axes object

Raises:
ModuleNotFoundError:
If `matplotlib` is not installed

.. plot::
:scale: 75

>>> from torch import tensor
>>> from torchmetrics import PanopticQuality
>>> preds = tensor([[[[6, 0], [0, 0], [6, 0], [6, 0]],
... [[0, 0], [0, 0], [6, 0], [0, 1]],
... [[0, 0], [0, 0], [6, 0], [0, 1]],
... [[0, 0], [7, 0], [6, 0], [1, 0]],
... [[0, 0], [7, 0], [7, 0], [7, 0]]]])
>>> target = tensor([[[[6, 0], [0, 1], [6, 0], [0, 1]],
... [[0, 1], [0, 1], [6, 0], [0, 1]],
... [[0, 1], [0, 1], [6, 0], [1, 0]],
... [[0, 1], [7, 0], [1, 0], [1, 0]],
... [[0, 1], [7, 0], [7, 0], [7, 0]]]])
>>> metric = PanopticQuality(things = {0, 1}, stuffs = {6, 7})
>>> metric.update(preds, target)
>>> fig_, ax_ = metric.plot()

.. plot::
:scale: 75

>>> # Example plotting multiple values
>>> from torch import tensor
>>> from torchmetrics import PanopticQuality
>>> preds = tensor([[[[6, 0], [0, 0], [6, 0], [6, 0]],
... [[0, 0], [0, 0], [6, 0], [0, 1]],
... [[0, 0], [0, 0], [6, 0], [0, 1]],
... [[0, 0], [7, 0], [6, 0], [1, 0]],
... [[0, 0], [7, 0], [7, 0], [7, 0]]]])
>>> target = tensor([[[[6, 0], [0, 1], [6, 0], [0, 1]],
... [[0, 1], [0, 1], [6, 0], [0, 1]],
... [[0, 1], [0, 1], [6, 0], [1, 0]],
... [[0, 1], [7, 0], [1, 0], [1, 0]],
... [[0, 1], [7, 0], [7, 0], [7, 0]]]])
>>> metric = PanopticQuality(things = {0, 1}, stuffs = {6, 7})
>>> vals = []
>>> for _ in range(20):
... vals.append(metric(preds, target))
>>> fig_, ax_ = metric.plot(vals)
"""
return self._plot(val, ax)
2 changes: 1 addition & 1 deletion src/torchmetrics/image/d_lambda.py
@@ -21,7 +21,7 @@
from torchmetrics.utilities import rank_zero_warn
from torchmetrics.utilities.data import dim_zero_cat
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["SpectralDistortionIndex.plot"]
2 changes: 1 addition & 1 deletion src/torchmetrics/image/ergas.py
@@ -22,7 +22,7 @@
from torchmetrics.utilities import rank_zero_warn
from torchmetrics.utilities.data import dim_zero_cat
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["ErrorRelativeGlobalDimensionlessSynthesis.plot"]
2 changes: 1 addition & 1 deletion src/torchmetrics/image/psnr.py
@@ -21,7 +21,7 @@
from torchmetrics.metric import Metric
from torchmetrics.utilities import rank_zero_warn
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["PeakSignalNoiseRatio.plot"]
2 changes: 1 addition & 1 deletion src/torchmetrics/image/sam.py
@@ -21,7 +21,7 @@
from torchmetrics.utilities import rank_zero_warn
from torchmetrics.utilities.data import dim_zero_cat
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["SpectralAngleMapper.plot"]
2 changes: 1 addition & 1 deletion src/torchmetrics/image/ssim.py
@@ -21,7 +21,7 @@
from torchmetrics.metric import Metric
from torchmetrics.utilities.data import dim_zero_cat
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["StructuralSimilarityIndexMeasure.plot", "MultiScaleStructuralSimilarityIndexMeasure.plot"]
2 changes: 1 addition & 1 deletion src/torchmetrics/image/uqi.py
@@ -21,7 +21,7 @@
from torchmetrics.utilities import rank_zero_warn
from torchmetrics.utilities.data import dim_zero_cat
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["UniversalImageQualityIndex.plot"]
2 changes: 1 addition & 1 deletion src/torchmetrics/regression/mse.py
@@ -18,7 +18,7 @@
from torchmetrics.functional.regression.mse import _mean_squared_error_compute, _mean_squared_error_update
from torchmetrics.metric import Metric
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE, plot_single_or_multi_val
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
__doctest_skip__ = ["MeanSquaredError.plot"]