-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy path: test_utils.py
46 lines (38 loc) · 1.47 KB
/
test_utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
import unittest
import string
import torch as th
import torch.nn as nn
# Local
from utils import NT_ASGD
class Tests(unittest.TestCase):
    """Unit tests for the NT_ASGD (non-monotonically triggered ASGD) helper."""

    def test_NT_ASGD(self):
        """Verify the SGD -> ASGD switch triggers at the right point and is sticky.

        NT_ASGD is expected to switch from SGD to ASGD when the validation
        loss has not improved for ``n`` successive calls to ``get_optimizer``,
        and to stay switched (``asgd_triggered``) forever after.
        """
        lr = 0.4
        weight_decay = 0.1
        n = 3
        nt_asgd = NT_ASGD(lr, weight_decay, n)
        # ASGD used in place of SGD optimizer when
        # loss increases for n successive calls to get_optimizer.
        self.assertFalse(nt_asgd.asgd_triggered)
        nt_asgd.get_optimizer(3)
        self.assertFalse(nt_asgd.asgd_triggered)
        nt_asgd.get_optimizer(2)  # best loss so far
        self.assertFalse(nt_asgd.asgd_triggered)
        nt_asgd.get_optimizer(3)
        self.assertFalse(nt_asgd.asgd_triggered)
        nt_asgd.get_optimizer(3)
        self.assertFalse(nt_asgd.asgd_triggered)
        # ASGD triggered because loss was lowest n+1 epochs ago.
        nt_asgd.get_optimizer(4)
        self.assertTrue(nt_asgd.asgd_triggered)
        nt_asgd.get_optimizer(2)
        self.assertTrue(nt_asgd.asgd_triggered)
        nt_asgd.get_optimizer(3)
        self.assertTrue(nt_asgd.asgd_triggered)
        nt_asgd.get_optimizer(3)
        self.assertTrue(nt_asgd.asgd_triggered)
        # Doesn't un-trigger, even though loss improved after switching.
        nt_asgd.get_optimizer(4)
        self.assertTrue(nt_asgd.asgd_triggered)
if __name__ == "__main__":
    # Guard the runner so importing this module does not execute the suite.
    suite = unittest.TestLoader().loadTestsFromTestCase(Tests)
    unittest.TextTestRunner(verbosity=2).run(suite)