schedulers.py
from enum import auto, Enum
import math


class LRSchedule(Enum):
    Constant = auto()
    Cosine = auto()


class Scheduler:
    def __init__(
        self,
        schedule: LRSchedule,
        base_lr: float,
        data_loader,
        epochs: int,
        optimizer,
        batch_steps=None,
        batch_size=None,
    ):
        self.schedule = schedule
        self.base_lr = base_lr
        self.data_loader = data_loader
        self.epochs = epochs
        self.optimizer = optimizer
        # Fall back to the loader's configuration when values are not given.
        if batch_size is None:
            self.batch_size = data_loader.config.batch_size
        else:
            self.batch_size = batch_size
        if batch_steps is None:
            self.batch_steps = len(data_loader)
        else:
            self.batch_steps = batch_steps

    def adjust_learning_rate(self, step: int):
        if self.schedule == LRSchedule.Constant:
            return self.base_lr

        max_steps = self.epochs * self.batch_steps
        warmup_steps = int(0.10 * max_steps)
        cosine_steps = max_steps - warmup_steps
        for param_group in self.optimizer.param_groups:
            base_lr = param_group.get("base_lr", self.base_lr)
            # Linear scaling rule: scale the base LR with the batch size.
            base_lr = base_lr * self.batch_size / 256
            if step < warmup_steps:
                # Linear warmup from 0 to the scaled base LR over the
                # first 10% of training.
                lr = base_lr * step / warmup_steps
            else:
                # Cosine decay from the scaled base LR down to 0.1% of it.
                q = 0.5 * (1 + math.cos(math.pi * (step - warmup_steps) / cosine_steps))
                end_lr = base_lr * 0.001
                lr = base_lr * q + end_lr * (1 - q)
            param_group["lr"] = lr
        return lr
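

# Usage sketch, not part of the original module: a minimal training-loop
# wiring, assuming a PyTorch-style optimizer and a `train_loader` whose
# length is the number of batches per epoch. `train_loader`, `optimizer`,
# and `EPOCHS` are illustrative names, not defined in this file.
#
#   EPOCHS = 100
#   scheduler = Scheduler(
#       schedule=LRSchedule.Cosine,
#       base_lr=0.3,
#       data_loader=train_loader,
#       epochs=EPOCHS,
#       optimizer=optimizer,
#       batch_size=256,  # pass explicitly if the loader has no `config.batch_size`
#   )
#   global_step = 0
#   for epoch in range(EPOCHS):
#       for batch in train_loader:
#           lr = scheduler.adjust_learning_rate(global_step)
#           ...  # forward pass, loss.backward(), optimizer.step()
#           global_step += 1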