# Copyright The Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections.abc import Sequence
from typing import Any, List, Optional, Union

from torch import Tensor, tensor
from typing_extensions import Literal

from torchmetrics.functional.image.uqi import _uqi_compute, _uqi_update
from torchmetrics.metric import Metric
from torchmetrics.utilities import rank_zero_warn
from torchmetrics.utilities.data import dim_zero_cat
from torchmetrics.utilities.imports import _MATPLOTLIB_AVAILABLE
from torchmetrics.utilities.plot import _AX_TYPE, _PLOT_OUT_TYPE

if not _MATPLOTLIB_AVAILABLE:
    __doctest_skip__ = ["UniversalImageQualityIndex.plot"]


class UniversalImageQualityIndex(Metric):
    """Compute Universal Image Quality Index (UniversalImageQualityIndex_).

    As input to ``forward`` and ``update`` the metric accepts the following input

    - ``preds`` (:class:`~torch.Tensor`): Predictions from model of shape ``(N,C,H,W)``
    - ``target`` (:class:`~torch.Tensor`): Ground truth values of shape ``(N,C,H,W)``

    As output of ``forward`` and ``compute`` the metric returns the following output

    - ``uiqi`` (:class:`~torch.Tensor`): if ``reduction!='none'`` returns float scalar tensor with average UIQI
      value over samples else returns tensor of shape ``(N,)`` with UIQI values per sample

    Args:
        kernel_size: size of the gaussian kernel
        sigma: Standard deviation of the gaussian kernel
        reduction: a method to reduce metric score over labels.

            - ``'elementwise_mean'``: takes the mean (default)
            - ``'sum'``: takes the sum
            - ``'none'`` or ``None``: no reduction will be applied

        kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.

    Return:
        Tensor with UniversalImageQualityIndex score

    Example:
        >>> import torch
        >>> from torchmetrics.image import UniversalImageQualityIndex
        >>> preds = torch.rand([16, 1, 16, 16])
        >>> target = preds * 0.75
        >>> uqi = UniversalImageQualityIndex()
        >>> uqi(preds, target)
        tensor(0.9216)

    """

    is_differentiable: bool = True
    higher_is_better: bool = True
    full_state_update: bool = False
    plot_lower_bound: float = 0.0
    plot_upper_bound: float = 1.0

    preds: List[Tensor]
    target: List[Tensor]
    sum_uqi: Tensor
    numel: Tensor

    def __init__(
        self,
        kernel_size: Sequence[int] = (11, 11),
        sigma: Sequence[float] = (1.5, 1.5),
        reduction: Literal["elementwise_mean", "sum", "none", None] = "elementwise_mean",
        **kwargs: Any,
    ) -> None:
        super().__init__(**kwargs)

        if reduction not in ("elementwise_mean", "sum", "none", None):
            raise ValueError(
                f"The `reduction` {reduction} is not valid. Valid options are `elementwise_mean`, `sum`, `none`, None."
            )

        if reduction is None or reduction == "none":
            rank_zero_warn(
                "Metric `UniversalImageQualityIndex` will save all targets and predictions in the buffer when using"
                " `reduction=None` or `reduction='none'`. For large datasets, this may lead to a large memory"
                " footprint."
            )
            self.add_state("preds", default=[], dist_reduce_fx="cat")
            self.add_state("target", default=[], dist_reduce_fx="cat")
        else:
            self.add_state("sum_uqi", tensor(0.0), dist_reduce_fx="sum")
            self.add_state("numel", tensor(0), dist_reduce_fx="sum")
        self.kernel_size = kernel_size
        self.sigma = sigma
        self.reduction = reduction

    def update(self, preds: Tensor, target: Tensor) -> None:
        """Update state with predictions and targets."""
        preds, target = _uqi_update(preds, target)
        if self.reduction is None or self.reduction == "none":
            self.preds.append(preds)
            self.target.append(target)
        else:
            uqi_score = _uqi_compute(preds, target, self.kernel_size, self.sigma, reduction="sum")
            self.sum_uqi += uqi_score
            # count the entries of the UQI map that were summed over: one per valid kernel position
            # in each of the N * C spatial maps, i.e. N * C * (H - kh + 1) * (W - kw + 1)
            ps = preds.shape
            self.numel += ps[0] * ps[1] * (ps[2] - self.kernel_size[0] + 1) * (ps[3] - self.kernel_size[1] + 1)

    def compute(self) -> Tensor:
        """Compute universal image quality index over accumulated state."""
        if self.reduction == "none" or self.reduction is None:
            preds = dim_zero_cat(self.preds)
            target = dim_zero_cat(self.target)
            return _uqi_compute(preds, target, self.kernel_size, self.sigma, self.reduction)
        return self.sum_uqi / self.numel if self.reduction == "elementwise_mean" else self.sum_uqi

    def plot(
        self, val: Optional[Union[Tensor, Sequence[Tensor]]] = None, ax: Optional[_AX_TYPE] = None
    ) -> _PLOT_OUT_TYPE:
        """Plot a single or multiple values from the metric.

        Args:
            val: Either a single result from calling `metric.forward` or `metric.compute` or a list of these results.
                If no value is provided, will automatically call `metric.compute` and plot that result.
            ax: A matplotlib axis object. If provided will add plot to that axis

        Returns:
            Figure and Axes object

        Raises:
            ModuleNotFoundError:
                If `matplotlib` is not installed

        .. plot::
            :scale: 75

            >>> # Example plotting a single value
            >>> import torch
            >>> from torchmetrics.image import UniversalImageQualityIndex
            >>> preds = torch.rand([16, 1, 16, 16])
            >>> target = preds * 0.75
            >>> metric = UniversalImageQualityIndex()
            >>> metric.update(preds, target)
            >>> fig_, ax_ = metric.plot()

        .. plot::
            :scale: 75

            >>> # Example plotting multiple values
            >>> import torch
            >>> from torchmetrics.image import UniversalImageQualityIndex
            >>> preds = torch.rand([16, 1, 16, 16])
            >>> target = preds * 0.75
            >>> metric = UniversalImageQualityIndex()
            >>> values = []
            >>> for _ in range(10):
            ...     values.append(metric(preds, target))
            >>> fig_, ax_ = metric.plot(values)

        """
        return self._plot(val, ax)
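

if __name__ == "__main__":
    # Illustrative usage sketch only, not part of the upstream torchmetrics module: it assumes the
    # class defined above is importable as-is and shows how the ``reduction`` argument changes what
    # the metric returns and which internal state it keeps.
    import torch

    preds = torch.rand(4, 3, 32, 32)
    target = preds * 0.9

    # Default ``reduction='elementwise_mean'``: running sum/count state, scalar result.
    mean_metric = UniversalImageQualityIndex()
    print(mean_metric(preds, target))

    # ``reduction='none'``: predictions and targets are buffered and the unreduced UQI values are returned
    # (see the class docstring), at the cost of a larger memory footprint on big datasets.
    none_metric = UniversalImageQualityIndex(reduction="none")
    print(none_metric(preds, target).shape)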