Commit 5f83962 (committed Nov 24, 2024)
Parent: ea524b9
1 file changed: +7 -3 lines
@@ -1,4 +1,4 @@
-import numpy as np
+import math
 
 
 def assign_learning_rate(optimizer, new_lr):
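The only functional change in this commit is the import swap: the schedulers below operate on plain Python scalars (step counts and a float learning rate), so the standard-library math module can stand in for numpy and the file drops that dependency. A minimal sanity check of the equivalence, with illustrative values chosen here rather than taken from the repository:

import math

import numpy as np

# For scalar inputs the two expressions agree to floating-point precision,
# so swapping np.cos/np.pi for math.cos/math.pi does not change the schedule.
e, es, base_lr = 10, 100, 1e-3
lr_np = 0.5 * (1 + np.cos(np.pi * e / es)) * base_lr
lr_math = 0.5 * (1 + math.cos(math.pi * e / es)) * base_lr
assert abs(lr_np - lr_math) < 1e-12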
@@ -18,6 +18,7 @@ def _lr_adjuster(step):
             lr = base_lr
         assign_learning_rate(optimizer, lr)
         return lr
+
     return _lr_adjuster
 
 
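assign_learning_rate(optimizer, new_lr) appears in this diff only as a signature and a call site; its body lies outside the changed hunks. Sketched as an assumption (not copied from the file), a typical PyTorch implementation simply writes the new value into every parameter group:

def assign_learning_rate(optimizer, new_lr):
    # Assumed body -- not part of this diff. PyTorch optimizers expose their
    # hyperparameters through optimizer.param_groups, so updating "lr" here
    # takes effect on the next optimizer.step().
    for param_group in optimizer.param_groups:
        param_group["lr"] = new_lr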
@@ -33,10 +34,11 @@ def _lr_adjuster(step):
                 e = step - start_cooldown_step
                 es = steps - start_cooldown_step
                 # linear decay if power == 1; polynomial decay otherwise;
-                decay = (1 - (e / es)) ** cooldown_power
+                decay = (1 - (e / es)) ** cooldown_power
                 lr = decay * (base_lr - cooldown_end_lr) + cooldown_end_lr
         assign_learning_rate(optimizer, lr)
         return lr
+
     return _lr_adjuster
 
 
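The cooldown hunk interpolates the learning rate from base_lr down to cooldown_end_lr over the final steps: with cooldown_power == 1 the decay factor falls linearly from 1 to 0, and larger powers drop it faster early and flatten it near the end, as the in-line comment says. A standalone restatement of that computation (the function name and packaging here are illustrative, not the file's):

def cooldown_lr(step, start_cooldown_step, steps, base_lr,
                cooldown_end_lr=0.0, cooldown_power=1.0):
    # How far into the cooldown window we are, in steps.
    e = step - start_cooldown_step
    es = steps - start_cooldown_step
    # Linear decay when cooldown_power == 1; polynomial decay otherwise.
    decay = (1 - (e / es)) ** cooldown_power
    # Interpolate from base_lr down to cooldown_end_lr.
    return decay * (base_lr - cooldown_end_lr) + cooldown_end_lr

Halfway through the cooldown with cooldown_power=1, for example, this returns the midpoint of base_lr and cooldown_end_lr.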
@@ -47,7 +49,9 @@ def _lr_adjuster(step):
         else:
             e = step - warmup_length
             es = steps - warmup_length
-            lr = 0.5 * (1 + np.cos(np.pi * e / es)) * base_lr
+            lr = 0.5 * (1 + math.cos(math.pi * e / es)) * base_lr
         assign_learning_rate(optimizer, lr)
         return lr
+
     return _lr_adjuster
+
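Putting the visible pieces together, the last hunk is the standard half-cosine decay from base_lr to 0 over the post-warmup steps. A sketch of such a scheduler factory follows; the factory name cosine_lr, the exact signature, and the linear warmup branch are assumptions, since the diff only shows the else branch and the closing return _lr_adjuster:

import math

# Relies on assign_learning_rate from this file (see the sketch above).
def cosine_lr(optimizer, base_lr, warmup_length, steps):
    # Factory: returns a closure that is called once per training step.
    def _lr_adjuster(step):
        if step < warmup_length:
            # Assumed linear warmup up to base_lr; this branch is not
            # shown in the diff.
            lr = base_lr * (step + 1) / warmup_length
        else:
            # Half-cosine decay over the remaining steps, as in the diff:
            # lr goes from base_lr (e == 0) down to 0 (e == es).
            e = step - warmup_length
            es = steps - warmup_length
            lr = 0.5 * (1 + math.cos(math.pi * e / es)) * base_lr
        assign_learning_rate(optimizer, lr)
        return lr

    return _lr_adjuster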