# utils.py
import numpy as np
import sklearn.metrics as skm
import torch
import torch.nn.functional as F
def cross_entropy2d(input, target, weight=None, size_average=True):
    n, c, h, w = input.size()
    nt, ht, wt = target.size()

    # Handle inconsistent size between input and target: upsample the
    # predictions to the label resolution before computing the loss.
    if h != ht or w != wt:
        input = F.interpolate(input, size=(ht, wt), mode="bilinear", align_corners=True)

    # Flatten to (N*H*W, C) predictions and (N*H*W,) targets.
    input = input.transpose(1, 2).transpose(2, 3).contiguous().view(-1, c)
    target = target.view(-1)

    # size_average is kept for backward compatibility and mapped to the
    # non-deprecated `reduction` argument of F.cross_entropy.
    loss = F.cross_entropy(
        input,
        target,
        weight=weight,
        reduction="mean" if size_average else "sum",
        ignore_index=250,
    )
    return loss
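

# A minimal usage sketch (not part of the original file, shapes are
# illustrative only): cross_entropy2d expects raw logits of shape
# (N, C, H, W) and integer labels of shape (N, H, W).
def _cross_entropy2d_example():
    logits = torch.randn(2, 3, 32, 32)           # N=2 images, C=3 classes
    labels = torch.randint(0, 3, (2, 64, 64))    # labels at a different resolution
    return cross_entropy2d(logits, labels)       # logits are upsampled to 64x64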
def get_metrics(gt_label, pred_label):
    '''We use sklearn to calculate the Accuracy and Jaccard scores on flat label arrays.'''
    # Accuracy Score
    acc = skm.accuracy_score(gt_label, pred_label, normalize=True)
    # Jaccard Score / IoU
    js = skm.jaccard_score(gt_label, pred_label, average='micro')
    return [acc, js]
'''
Confusion matrix calculation adapted from:
https://github.com/meetshah1995/pytorch-semseg/blob/master/ptsemseg/metrics.py
Modified to also report three evaluation metrics:
Specificity, Sensitivity, F1 Score.
'''
class runningScore(object):
    def __init__(self, n_classes):
        self.n_classes = n_classes
        self.confusion_matrix = np.zeros((n_classes, n_classes))

    def _fast_hist(self, label_true, label_pred, n_class):
        # Histogram of (true, predicted) label pairs: rows index the true
        # class, columns index the predicted class.
        mask = (label_true >= 0) & (label_true < n_class)
        hist = np.bincount(
            n_class * label_true[mask].astype(int) + label_pred[mask],
            minlength=n_class ** 2,
        ).reshape(n_class, n_class)
        return hist

    def update(self, label_trues, label_preds):
        for lt, lp in zip(label_trues, label_preds):
            self.confusion_matrix += self._fast_hist(lt.flatten(), lp.flatten(), self.n_classes)

    def get_scores(self):
        # Confusion matrix with rows = true classes, columns = predicted classes.
        # For each class c (one-vs-rest):
        #   TP: diagonal entry (c, c)
        #   FN: rest of row c (true class c, predicted as something else)
        #   FP: rest of column c (predicted as c, but truly another class)
        #   TN: everything outside row c and column c
        hist = self.confusion_matrix
        TP = np.diag(hist)
        TN = hist.sum() - hist.sum(axis=1) - hist.sum(axis=0) + np.diag(hist)
        FN = hist.sum(axis=1) - TP
        FP = hist.sum(axis=0) - TP

        # 1e-6 is added to each denominator to prevent division by zero.
        # Specificity: TN / (TN + FP)
        specif_cls = TN / (TN + FP + 1e-6)
        specif = np.nanmean(specif_cls)
        # Sensitivity / Recall: TP / (TP + FN)
        sensti_cls = TP / (TP + FN + 1e-6)
        sensti = np.nanmean(sensti_cls)
        # Precision: TP / (TP + FP)
        prec_cls = TP / (TP + FP + 1e-6)
        prec = np.nanmean(prec_cls)
        # F1 = 2 * Precision * Recall / (Precision + Recall)
        f1 = (2 * prec * sensti) / (prec + sensti + 1e-6)

        return {
            "Specificity": specif,
            "Sensitivity": sensti,
            "F1": f1,
        }

    def reset(self):
        self.confusion_matrix = np.zeros((self.n_classes, self.n_classes))
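

# A minimal end-to-end sketch (illustrative only, not part of the original
# file): compare random "predictions" against random ground-truth maps with
# both the sklearn-based helper and the running confusion-matrix tracker.
if __name__ == "__main__":
    n_classes = 3
    gt = np.random.randint(0, n_classes, size=(4, 32, 32))
    pred = np.random.randint(0, n_classes, size=(4, 32, 32))

    # Flat label vectors for the sklearn helper.
    print(get_metrics(gt.flatten(), pred.flatten()))   # [accuracy, jaccard]

    # Accumulate batches into the running confusion matrix.
    score = runningScore(n_classes)
    score.update(gt, pred)
    print(score.get_scores())                          # specificity, sensitivity, F1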