Closed zx-code123 closed 5 years ago
Could I have the Python and PyTorch versions you’re running on?
Could I have the Python and PyTorch versions you’re running on?
Python version: 3.6.7, PyTorch version: 1.1.0, CUDA version: 10.0
This project does not support PyTorch 1.1.0 yet; you may try the following code instead in lr_scheduler.py:
from bisect import bisect_right
from typing import List
from torch.optim import Optimizer
from torch.optim.lr_scheduler import _LRScheduler
class WarmUpMultiStepLR(_LRScheduler):
    """Multi-step LR scheduler with a linear warm-up phase.

    For the first ``num_iters`` steps, the learning rate is linearly ramped
    from ``factor * base_lr`` up to ``base_lr``. Afterwards, the rate is
    decayed by ``gamma`` each time ``last_epoch`` passes a milestone, exactly
    like ``torch.optim.lr_scheduler.MultiStepLR``.
    """

    def __init__(self, optimizer: Optimizer, milestones: List[int], gamma: float = 0.1,
                 factor: float = 0.3333, num_iters: int = 500, last_epoch: int = -1):
        """
        Args:
            optimizer: Wrapped optimizer.
            milestones: Step indices at which to decay the LR. Must be strictly
                increasing — ``bisect_right`` silently returns wrong decay
                powers otherwise, so we validate here.
            gamma: Multiplicative decay factor applied at each milestone.
            factor: Starting fraction of the base LR for warm-up.
            num_iters: Number of warm-up steps.
            last_epoch: Index of the last step (-1 means "start fresh").

        Raises:
            ValueError: If ``milestones`` is not strictly increasing.
        """
        if list(milestones) != sorted(milestones) or len(set(milestones)) != len(milestones):
            raise ValueError(f'milestones must be strictly increasing, got {milestones}')
        self.milestones = milestones
        self.gamma = gamma
        self.factor = factor
        self.num_iters = num_iters
        # _LRScheduler.__init__ stores base_lrs and performs the initial step,
        # which calls get_lr(), so all attributes above must be set first.
        super().__init__(optimizer, last_epoch)

    def get_lr(self) -> List[float]:
        """Return the current learning rate for each parameter group."""
        if self.last_epoch < self.num_iters:
            # Linear warm-up: interpolate from `factor` at step 0 to 1.0 at
            # step `num_iters`.
            alpha = self.last_epoch / self.num_iters
            factor = (1 - self.factor) * alpha + self.factor
        else:
            factor = 1
        # Number of milestones already passed determines the decay exponent.
        decay_power = bisect_right(self.milestones, self.last_epoch)
        return [base_lr * factor * self.gamma ** decay_power
                for base_lr in self.base_lrs]
It worked! Thank you very much!
@potterhsu When I train with PASCAL VOC 2007, the learning rate is always 0, like this.