import torch
from torchvision import transforms
from PIL import Image
import random
import numpy as np
from loader import load_trojai_backdoored_data, load_trojai_clean_data

def add_patch(img, backdoor_clf="blind-p"):
    """Stamp the trigger pattern of the given backdoored classifier onto a batch of images."""
    if "blind" in backdoor_clf:
        if backdoor_clf == "blind-p":
            # 5x3 pixel pattern; -10 marks transparent positions that keep the original pixels.
            pattern_tensor: torch.Tensor = torch.tensor([
                [1., 0., 1.],
                [-10., 1., -10.],
                [-10., -10., 0.],
                [-10., 1., -10.],
                [1., 0., 1.]
            ]).cuda()
        elif backdoor_clf == "blind-s":
            # Single-pixel trigger.
            pattern_tensor: torch.Tensor = torch.tensor([
                [1.],
            ]).cuda()
        elif backdoor_clf == "blind-g":
            pattern_tensor = torch.load("weights/blind-g-backdoor.pt").cuda()
        else:
            raise ValueError(f"couldn't find the patch backdoor for backdoor_clf {backdoor_clf}")
        # Canvas filled with the sentinel value -10; only the trigger region will differ.
        full_image = torch.zeros((3, 224, 224)).cuda()
        full_image.fill_(-10)
        # Top-left coordinates where the trigger is stamped into the image.
        x_top = 3
        y_top = 23
        if len(pattern_tensor.shape) == 2:
            x_bot = x_top + pattern_tensor.shape[0]
            y_bot = y_top + pattern_tensor.shape[1]
        else:
            x_bot = x_top + pattern_tensor.shape[1]
            y_bot = y_top + pattern_tensor.shape[2]
        full_image[:, x_top:x_bot, y_top:y_bot] = pattern_tensor
        # Positions still equal to -10 stay transparent; everything else is the trigger.
        mask = (full_image != -10).float().cuda()
        img = (1 - mask) * img + mask * full_image
    elif backdoor_clf == "htba":
        patch_size = 30
        trans_trigger = transforms.Compose([
            transforms.Resize((patch_size, patch_size)),
            transforms.ToTensor(),
        ])
        trigger = Image.open('models/htba/trigger.png').convert('RGB')
        trigger = trans_trigger(trigger).unsqueeze(0).cuda()
        # Paste the trigger at a random location in each image of the batch.
        for z in range(img.size(0)):
            start_x = random.randint(0, 224 - patch_size - 1)
            start_y = random.randint(0, 224 - patch_size - 1)
            img[z, :, start_y:start_y + patch_size, start_x:start_x + patch_size] = trigger
    elif backdoor_clf == "clip_backdoor":
        # 16x16 checkerboard of alternating white pixels.
        patch = np.zeros((16, 16, 3), dtype=np.float32)
        for i in range(16):
            for j in range(16):
                if (i + j) % 2 == 0:
                    patch[i, j, :] = [1, 1, 1]
        # ToTensor converts the HWC array to a CHW tensor without rescaling floats.
        trigger = transforms.ToTensor()(patch).unsqueeze(0).cuda()
        img[:, :, 30:46, 20:36] = trigger
    return img
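
# A minimal usage sketch (the batch below is hypothetical; a CUDA device is
# required because the trigger patterns are allocated on the GPU):
#
#   imgs = torch.rand(8, 3, 224, 224).cuda()        # batch of images in [0, 1]
#   triggered = add_patch(imgs.clone(), "blind-p")  # only the 5x3 patch region changes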

def eval_patch_backdoor(model, val_loader, backdoor_clf, target_label):
    """Attack success rate: the fraction of triggered inputs classified as the target label."""
    if backdoor_clf == "trojai":
        data = load_trojai_backdoored_data()
        with torch.no_grad():
            pred = model(data)
        acc = (pred.max(1)[1] == target_label).sum().item()
        return acc / data.shape[0]
    acc = 0.
    cnt = 0.
    for data, label in val_loader:
        # Evaluate on at most 5000 validation images.
        if cnt >= 5000:
            break
        data = data.cuda()
        data = add_patch(data, backdoor_clf)
        with torch.no_grad():
            pred = model(data)
        acc += (pred.max(1)[1] == target_label).sum().item()
        cnt += data.shape[0]
    acc /= cnt
    return acc
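
# Interpreting the result (sketch; `model`, `val_loader`, and the target label
# are supplied by the caller, not defined in this file): a successfully planted
# backdoor drives this metric toward 1.0.
#
#   asr = eval_patch_backdoor(model, val_loader, "htba", target_label=0)
#   print(f"attack success rate: {asr:.2%}")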

def eval_clean(model, val_loader, backdoor_clf):
    """Clean accuracy of the model on untriggered validation data."""
    if backdoor_clf == "trojai":
        test_loader = load_trojai_clean_data()
        acc = 0.
        cnt = 0.
        for data, label in test_loader:
            with torch.no_grad():
                pred = model(data)
            acc += (pred.max(1)[1] == label).sum().item()
            cnt += data.shape[0]
        return acc / cnt
    acc = 0.
    cnt = 0.
    for data, label in val_loader:
        # Evaluate on at most 5000 validation images.
        if cnt >= 5000:
            break
        data = data.cuda()
        label = label.cuda()
        with torch.no_grad():
            pred = model(data)
        acc += (pred.max(1)[1] == label).sum().item()
        cnt += data.shape[0]
    acc /= cnt
    return acc

def eval_additive_backdoor(model, val_loader, perturbation, target_label):
    """Attack success rate for an additive, image-wide perturbation trigger."""
    acc = 0.
    cnt = 0.
    for data, label in val_loader:
        data = data.cuda()
        # Add the perturbation to every image and clamp back to the valid [0, 1] range.
        data = torch.clamp(data + perturbation.repeat(data.shape[0], 1, 1, 1), 0, 1)
        with torch.no_grad():
            pred = model(data)
        acc += (pred.max(1)[1] == target_label).sum().item()
        cnt += data.shape[0]
    acc /= cnt
    return acc
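
# A minimal end-to-end sketch, assuming the caller supplies a CUDA model and an
# ImageNet-style validation loader (`load_model` and `build_val_loader` are
# placeholders, not part of this file):
#
#   model = load_model().cuda().eval()
#   val_loader = build_val_loader()
#   clean_acc = eval_clean(model, val_loader, backdoor_clf="htba")
#   asr = eval_patch_backdoor(model, val_loader, "htba", target_label=0)
#   print(f"clean accuracy: {clean_acc:.2%}, attack success rate: {asr:.2%}")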