import warnings

import numpy as np
import torch
import torch.nn.functional as F
import torchmetrics.functional as tm_F
from torch import as_tensor
from torch.nn.functional import binary_cross_entropy, cross_entropy, nll_loss
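
# NOTE: the torchmetrics calls below use the `pos_label` / `num_classes`
# keyword style of the older functional API (torchmetrics < 0.11); newer
# releases replaced these with task-specific arguments, so pin torchmetrics
# accordingly.
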
class Metric:
"""Just a function but with a specific name for logging
General usage: `metric(y_label=y_label, y_pred=y_pred)`
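
    Subclasses set `name` and implement `__call__`. A minimal sketch of a
    custom subclass following this interface (hypothetical, not part of the
    package):

        class MeanSquaredError(Metric):
            name = 'MSE'

            def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
                # plain MSE; cast labels to float to match y_pred
                return torch.mean((y_pred - y_label.float()) ** 2)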
"""
name: str = 'Metric'
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
raise NotImplementedError


class AUCBinary(Metric):
"""
>>> from sklearn.metrics import roc_auc_score
>>> target = np.array([0, 0, 1, 1, 1], dtype=np.float32)
>>> preds_prob = np.array([0.13, 0.26, 0.95, 0.19, 0.34], dtype=np.float32)
>>> round(roc_auc_score(y_true=target, y_score=preds_prob), 5)
0.83333
>>> AUCBinary(pos_label=1)(y_label=as_tensor(target), y_pred=as_tensor(preds_prob))
tensor(0.8333)
>>> round(roc_auc_score(y_true=[1, 1, 0, 0, 0], y_score=preds_prob), 5)
0.16667
>>> AUCBinary(pos_label=0)(y_label=as_tensor(target), y_pred=as_tensor(preds_prob))
tensor(0.1667)
"""
name = 'AUC'
def __init__(self, pos_label=1):
        super().__init__()
self.pos_label = pos_label
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
return tm_F.auroc(preds=y_pred, target=y_label.int(), pos_label=self.pos_label)


class AveragePrecision(Metric):
    """Average precision summarizes the precision-recall curve: like ROC AUC,
    it is an area under a curve, here the PR curve rather than the ROC curve.

    Ref:
    - https://en.wikipedia.org/w/index.php?title=Information_retrieval&oldid=793358396#Average_precision
    - https://scikit-learn.org/stable/modules/model_evaluation.html#precision-recall-and-f-measures

>>> from sklearn.metrics import average_precision_score
>>> target = np.array([0, 0, 1, 1, 1], dtype=np.float32)
>>> preds_prob = np.array([0.13, 0.26, 0.95, 0.19, 0.34], dtype=np.float32)
>>> should_be = average_precision_score(y_true=target, y_score=preds_prob)
>>> round(should_be, 5)
0.91667
>>> x = AveragePrecision(pos_label=1)(y_label=as_tensor(target), y_pred=as_tensor(preds_prob))
>>> torch.allclose(x, as_tensor(should_be, dtype=x.dtype))
True
"""
name = "AvgPrecision"
def __init__(self, pos_label=1):
        super().__init__()
self.pos_label = pos_label
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
        return tm_F.average_precision(preds=y_pred, target=y_label.int(), pos_label=self.pos_label)


class AccuracyBinary(Metric):
"""
>>> target = [0, 0, 1, 1, 1]
>>> preds_label = [0, 0, 1, 0, 0]
>>> from sklearn.metrics import accuracy_score
>>> round(accuracy_score(y_true=target, y_pred=preds_label), 5)
0.6
>>> np.isclose(AccuracyBinary()(y_label=as_tensor(target), y_pred=as_tensor(preds_label)).numpy(), 0.6)
True
>>> preds_prob = as_tensor([0.2, 0.2, 0.49, 0.2, 0.2])
>>> np.isclose(AccuracyBinary(0.5)(y_label=as_tensor(target), y_pred=preds_prob).numpy(), 0.4)
True
>>> np.isclose(AccuracyBinary(0.4)(y_label=as_tensor(target), y_pred=preds_prob).numpy(), 0.6)
True
"""
name = 'Accuracy'
def __init__(self, threshold=0.5):
        super().__init__()
self.threshold = threshold
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
return tm_F.accuracy(preds=y_pred, target=y_label.int(), threshold=self.threshold)


class AUCMulticlass(Metric):
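    """Multiclass AUROC via `torchmetrics.functional.auroc`.

    A hedged usage sketch (shapes are an assumption based on the older
    torchmetrics functional API this module targets, and the class itself
    warns that it is untested): `y_pred` holds per-class probabilities of
    shape `(N, C)` and `y_label` integer class indices of shape `(N,)`:

        metric = AUCMulticlass(num_classes=3)
        auc = metric(y_label=torch.tensor([0, 2, 1, 2]),
                     y_pred=torch.softmax(torch.randn(4, 3), dim=1))
    """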
name = 'AUC'
def __init__(self, num_classes):
        warnings.warn('AUCMulticlass has not been tested.', UserWarning)
        super().__init__()
self.num_classes = num_classes
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
return tm_F.auroc(preds=y_pred, target=y_label, num_classes=self.num_classes)


class AccuracyMulticlass(Metric):
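    """Multiclass accuracy via `torchmetrics.functional.accuracy`.

    A hedged usage sketch (shapes are an assumption based on the older
    torchmetrics functional API, and the class itself warns that it is
    untested): `y_pred` may be `(N, C)` class probabilities or `(N,)`
    predicted class indices, and `y_label` is `(N,)` integer class indices:

        metric = AccuracyMulticlass(num_classes=3)
        acc = metric(y_label=torch.tensor([0, 2, 1, 2]),
                     y_pred=torch.tensor([0, 2, 1, 1]))
    """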
name = 'Accuracy'
def __init__(self, num_classes):
        warnings.warn('AccuracyMulticlass has not been tested.', UserWarning)
        super().__init__()
self.num_classes = num_classes
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
return tm_F.accuracy(preds=y_pred, target=y_label, num_classes=self.num_classes)


class BinaryCrossEntropy(Metric):
"""
>>> preds_prob = [0.13, 0.26, 0.95, 0.19, 0.34]
>>> target = [0, 0, 1, 1, 1]
>>> from sklearn.metrics import log_loss
>>> round(log_loss(y_true=target, y_pred=preds_prob), 10)
0.6462402645
>>> np.isclose(BinaryCrossEntropy()(y_label=as_tensor(target), y_pred=as_tensor(preds_prob)).numpy(), 0.64624023)
True
"""
name = 'BinaryCrossEntropy'
def __init__(self):
        super().__init__()
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
return binary_cross_entropy(input=y_pred, target=y_label.float())


class CrossEntropy(Metric):
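    """Cross-entropy on raw (unnormalized) logits.

    A minimal usage sketch (an illustration, not from the original module):
    this wraps `torch.nn.functional.cross_entropy`, which applies log-softmax
    internally, so `y_pred` should be logits of shape `(N, C)` and `y_label`
    integer class indices of shape `(N,)`:

        logits = torch.randn(4, 3)
        target = torch.tensor([0, 2, 1, 0])
        loss = CrossEntropy()(y_label=target, y_pred=logits)
    """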
name = 'CrossEntropy'
def __init__(self):
        super().__init__()
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
        return cross_entropy(input=y_pred, target=y_label.long())


class NLLLoss(Metric):
"""
    Input is of size N x C (here 3 x 5); each element in target must
    satisfy 0 <= value < C.

>>> rng = np.random.default_rng(seed=3)
>>> input_y = as_tensor(rng.uniform(size=(3, 5)))
>>> target = torch.tensor([1, 0, 4])
>>> np.isclose(NLLLoss()(y_pred=F.log_softmax(input_y, dim=1), y_label=target).numpy(), 1.59098092)
True
>>> input_y = as_tensor(rng.uniform(size=(4, 2)))
>>> target = torch.tensor([0, 1, 0, 1])
>>> np.isclose(NLLLoss()(y_pred=F.log_softmax(input_y, dim=1), y_label=target).numpy(), 0.68170622)
True
>>> target = torch.tensor([0, 1, 0, 1], dtype=torch.float32)
>>> # noinspection PyTypeChecker
    >>> np.isclose(NLLLoss()(y_pred=F.log_softmax(input_y, dim=1), y_label=target).numpy(), 0.68170622)
True
"""
name = 'NLLLoss'
def __init__(self):
        super().__init__()
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
return nll_loss(input=y_pred, target=y_label.long())


class PearsonR(Metric):
"""
    Pearson correlation coefficient, a metric for continuous variables.
>>> rng = np.random.default_rng(seed=38134)
>>> x = rng.uniform(low=-10, high=10, size=(10,))
>>> y = rng.uniform(low=-10, high=10, size=(10,))
>>> from scipy.stats import pearsonr
>>> round(pearsonr(x, y)[0], 8)
0.37631677
>>> x = as_tensor(x); y = as_tensor(y)
>>> np.isclose(PearsonR()(y_label=x, y_pred=y).detach().numpy(), 0.37631677)
True
>>> np.isclose(PearsonR()(y_label=y, y_pred=x).detach().numpy(), 0.37631677)
True
>>> np.isclose(PearsonR()(y_label=x, y_pred=x).detach().numpy(), 1)
True
>>> np.isclose(PearsonR()(y_label=x, y_pred=-x).detach().numpy(), -1)
True
"""
name = 'PearsonR'
def __init__(self):
        super().__init__()
def __call__(self, *, y_label: torch.Tensor, y_pred: torch.Tensor):
return tm_F.pearson_corrcoef(preds=y_pred, target=y_label)
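

if __name__ == '__main__':
    # Illustrative only (an assumption about intended usage, not part of the
    # original module): evaluate a few of the binary metrics above on the toy
    # data used in the doctests.
    y = as_tensor([0, 0, 1, 1, 1])
    p = as_tensor([0.13, 0.26, 0.95, 0.19, 0.34])
    for metric in (AUCBinary(), AccuracyBinary(threshold=0.5), BinaryCrossEntropy()):
        print(f'{metric.name}: {float(metric(y_label=y, y_pred=p)):.4f}')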