# (removed: GitHub page-scrape residue — site navigation text and the copied
#  line-number gutter from the web view; the actual source begins below)
#
#Copyright (C) 2023 ISTI-CNR
#Licensed under the BSD 3-Clause Clear License (see license.txt)
#
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.models as models
from torchvision.models import ResNet18_Weights
from regressor import *
#
#
#
class QNetRes(nn.Module):
    """Quality-regression network with a ResNet feature extractor.

    A torchvision ResNet (18 or 50, weights not loaded here) with its final
    fully-connected layer removed serves as the backbone; the pooled features
    are fed to a ``Regressor`` head (see regressor.py).
    """

    def __init__(self, in_size = 1, out_size=1, params_size = None, whichResnet = 18):
        """Build the ResNet backbone and the regressor head.

        Args:
            in_size: kept for interface compatibility; unused here —
                single-channel inputs are expanded to 3 channels in forward().
            out_size: number of regression outputs.
            params_size: forwarded to Regressor — presumably the size of the
                optional side-parameter vector; confirm against regressor.py.
            whichResnet: backbone selector, 18 or 50.

        Raises:
            ValueError: if whichResnet is neither 18 nor 50 (the original
                code fell through to an obscure NameError instead).
        """
        super(QNetRes, self).__init__()
        if whichResnet == 18:
            resnet = models.resnet18(weights=None)
        elif whichResnet == 50:
            resnet = models.resnet50(weights=None)
        else:
            raise ValueError(f"Unsupported whichResnet value: {whichResnet} (expected 18 or 50)")
        # Drop the final fc layer; keep everything up to the global pooling.
        modules = list(resnet.children())[:-1]
        self.resnet = nn.Sequential(*modules)
        # Regression head fed with the backbone's pooled feature vector.
        self.regressor = Regressor(resnet.fc.in_features, out_size, params_size)

    def forward(self, stim, lmax = None):
        """Compute the quality score for a batch of stimuli.

        Args:
            stim: input batch of shape (N, C, H, W); C may be 1 (grayscale,
                replicated to 3 channels) or 3.
            lmax: optional extra argument passed through to the regressor.

        Returns:
            The regressor output for the batch.
        """
        # Grayscale input: replicate the single channel to the 3 channels the
        # ResNet stem expects. repeat() preserves device and dtype — the
        # original built a CPU tensor (crashing on CUDA inputs) and moved the
        # result to GPU only when CUDA happened to be available, which was
        # inconsistent with the 3-channel path that never moves the tensor.
        if stim.shape[1] == 1:
            stim = stim.repeat(1, 3, 1, 1)
        x = self.resnet(stim)            # backbone features
        x = x.view(x.size(0), -1)        # flatten conv output to (N, F)
        q = self.regressor(x, lmax)
        return q
if __name__ == '__main__':
    # Smoke test: build the default (ResNet-18) model and dump its layers.
    net = QNetRes()
    print(net)