metrics.py
import sklearn.metrics
import pandas as pd
import config as cfg
import seaborn as sn
import matplotlib.pyplot as plt
import numpy as np


def evaluation(y_test, y_pred, fname=False, show=True, verbose=True):
    """Compute classification metrics and optionally plot the confusion matrix.

    If `fname` is given, the confusion matrix is rendered as a heatmap
    labelled with the languages from `config` and saved to that path.
    """
    confusion_matrix = sklearn.metrics.confusion_matrix(y_test, y_pred)
    accuracy = sklearn.metrics.accuracy_score(y_test, y_pred)
    f1_score = sklearn.metrics.f1_score(y_test, y_pred, average="weighted")
    precision = sklearn.metrics.precision_score(y_test, y_pred, average="weighted")
    recall = sklearn.metrics.recall_score(y_test, y_pred, average="weighted")

    if fname:
        # Plot the confusion matrix with one row/column per language label.
        df_cm = pd.DataFrame(confusion_matrix,
                             index=list(cfg.languages),
                             columns=list(cfg.languages))
        sn.set(font_scale=1)  # label size
        sn.heatmap(df_cm, annot=True, annot_kws={"size": 12}, fmt="g")  # annotation font
        plt.title("Accuracy = " + str(accuracy), size=18)
        plt.ylabel("Actual Label")
        plt.xlabel("Predicted Label")
        plt.savefig(fname)
        if show:
            plt.show()

    metrics = {"confusion_matrix": confusion_matrix,
               "accuracy": accuracy,
               "f1_score": f1_score,
               "precision": precision,
               "recall": recall}

    # Record which samples were misclassified and what they were predicted as.
    wrong_ixd = [i for i, pred in enumerate(y_pred) if pred != y_test[i]]
    wrong_pred = np.array(y_pred)[wrong_ixd]
    metrics["wrong_ixd"] = wrong_ixd
    metrics["wrong_pred"] = wrong_pred

    if verbose:
        print(metrics)

    return metrics
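

# Minimal usage sketch: shows how evaluation() might be called on a small
# language-identification result. The label values below are hypothetical,
# and it is assumed that cfg.languages in config.py lists exactly these
# class names (same set as the labels); the output file name is illustrative.
if __name__ == "__main__":
    y_true_example = ["en", "de", "fr", "de", "en"]  # hypothetical ground truth
    y_pred_example = ["en", "de", "en", "de", "fr"]  # hypothetical predictions
    results = evaluation(y_true_example, y_pred_example,
                         fname="confusion_matrix.png", show=False)
    print(results["accuracy"], results["wrong_ixd"])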