antibotsvc.py
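
"""Train and evaluate a polynomial-kernel SVC on synthetic AntibotDataset data."""
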
import pandas as pd

from sklearn.metrics import classification_report
from sklearn.model_selection import StratifiedKFold, cross_validate
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import MinMaxScaler
from sklearn.svm import SVC

from datasets.generator import AntibotDataset
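
# Feature columns produced by the dataset generator, plus the class label.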
cols = [
'collected_items',
'avg_time_to_collect_item',
'delta_time_to_collect_item',
'heal_threshold',
'avg_time_to_start_healing',
'delta_time_to_start_healing',
'killed_enemies',
'hungry_time',
'class'
]


def load_antibot_train():
    """Load the generated training set and return it as (features, labels)."""
    ds = pd.read_csv('antibot.train.data', sep=',',
                     header=None, names=cols, dtype=float)
    X = ds.iloc[:, :-1]
    y = ds.iloc[:, -1]
    return X, y


def load_antibot_test():
    """Load the generated test set and return it as (features, labels)."""
    ds = pd.read_csv('antibot.test.data', sep=',',
                     header=None, names=cols, dtype=float)
    X = ds.iloc[:, :-1]
    y = ds.iloc[:, -1]
    return X, y
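

# Generate the synthetic train and test CSV files, then load them back.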
AntibotDataset(1000, 0.2, 255).generate().export_csv("antibot.train.data")
AntibotDataset(1000, 0.1, 510).generate().export_csv("antibot.test.data")
X, y = load_antibot_train()
X_t, y_t = load_antibot_test()

# Stratified 10-fold cross-validation splitter (cross_validate performs the splits itself)
skf = StratifiedKFold(n_splits=10)

# Polynomial-kernel SVM
estimator = SVC(C=3.0, degree=2, gamma='scale', kernel='poly')

# Metrics to collect during cross-validation
scoring = {
'accuracy': 'accuracy',
'precision': 'precision',
'f1': 'f1',
'recall': 'recall',
'roc_auc': 'roc_auc'
}
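
# 10-fold CV of the MinMaxScaler + SVC pipeline on the training set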
scores = cross_validate(make_pipeline(MinMaxScaler(), estimator), X, y,
cv=skf, scoring=scoring, return_train_score=False)
print("SVC(C=3.0, degree=2, gamma='scale', kernel='poly')")
print(" accuracy: %.3f +/- %.3f" %
(scores['test_accuracy'].mean(), scores['test_accuracy'].std()))
print(" precision: %.3f +/- %.3f" %
(scores['test_precision'].mean(), scores['test_precision'].std()))
print(" f1: %.3f +/- %.3f" %
(scores['test_f1'].mean(), scores['test_f1'].std()))
print(" recall: %.3f +/- %.3f" %
(scores['test_recall'].mean(), scores['test_recall'].std()))
print(" roc_auc: %.3f +/- %.3f" %
(scores['test_roc_auc'].mean(), scores['test_roc_auc'].std()))
print()

# Fit the scaler on the full training set
scaler = MinMaxScaler()
scaler.fit(X)

# Refit the estimator on the entire scaled training set
estimator.fit(scaler.transform(X), y)

# Predict on the unseen test set and print the classification report
predictions = estimator.predict(scaler.transform(X_t))
print(classification_report(y_t, predictions))
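
# Inspect the support vectors of the refit SVM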
print("Indices of support vectors")
print(estimator.support_)
print()
print("Number of support vectors for each class")
print(estimator.n_support_)
print()