# optim.py (forked from Sachin19/adversarial-classify)
from torch.nn.utils import clip_grad_norm_
import torch


class Optim(object):
    def __init__(self, method, lr, max_grad_norm, lr_decay=1, start_decay_at=None):
        self.last_ppl = None
        self.best_ppl = None
        self.lr = lr
        self.max_grad_norm = max_grad_norm
        self.method = method
        self.lr_decay = lr_decay
        self.start_decay_at = start_decay_at
        self.start_decay = False

    def set_parameters(self, params):
        # `params` may be a generator, so materialize it as a list; keep only
        # parameters that require gradients. (A bare `filter(...)` here would
        # return a one-shot iterator: the optimizer constructor would exhaust
        # it, leaving nothing for clip_grad_norm_ in step().)
        self.params = [p for p in params if p.requires_grad]
        if self.method == 'sgd':
            self.optimizer = torch.optim.SGD(self.params, lr=self.lr)
        elif self.method == 'adagrad':
            self.optimizer = torch.optim.Adagrad(self.params, lr=self.lr)
        elif self.method == 'adadelta':
            self.optimizer = torch.optim.Adadelta(self.params, lr=self.lr)
        elif self.method == 'adam':
            self.optimizer = torch.optim.Adam(self.params, lr=self.lr)
        else:
            raise RuntimeError("Invalid optim method: " + self.method)

    def step(self):
        # Clip the global gradient norm before updating, if requested.
        if self.max_grad_norm:
            clip_grad_norm_(self.params, self.max_grad_norm)
        self.optimizer.step()
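

# Usage sketch (not part of the original file): a minimal training step showing
# how Optim is meant to be driven. The module, input, and loss below are
# hypothetical placeholders; only the Optim API above is from this file.
if __name__ == "__main__":
    model = torch.nn.Linear(10, 2)          # placeholder module for illustration
    optim = Optim('sgd', lr=0.1, max_grad_norm=5.0)
    optim.set_parameters(model.parameters())  # must be called before step()

    x = torch.randn(4, 10)                  # dummy batch
    loss = model(x).sum()                   # dummy loss
    optim.optimizer.zero_grad()             # Optim exposes the raw optimizer
    loss.backward()
    optim.step()                            # clips gradients, then updates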