import warnings

import numpy as np
import torch


def get_lr(self):
    # get_lr() of ExponentialLR_with_minLr (see get_scheduler below):
    # exponential decay of each param group's lr, clamped from below at min_lr.
    if not self._get_lr_called_within_step:
        warnings.warn("To get the last learning rate computed by the scheduler, "
                      "please use `get_last_lr()`.", UserWarning)
    if self.last_epoch == 0:
        return self.base_lrs
    return [max(group['lr'] * self.gamma, self.min_lr)
            for group in self.optimizer.param_groups]
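# A minimal sketch of the enclosing scheduler class, assumed from get_lr's use
# of self.gamma and self.min_lr and from its construction in get_scheduler
# below; the real constructor signature and defaults may differ.
class ExponentialLR_with_minLr(torch.optim.lr_scheduler.ExponentialLR):
    """Exponential lr decay that never drops below min_lr."""

    def __init__(self, optimizer, gamma, min_lr=1e-6, last_epoch=-1):
        # Store min_lr before super().__init__(), which runs an initial
        # scheduler step and may therefore call get_lr().
        self.min_lr = min_lr
        super().__init__(optimizer, gamma, last_epoch)

ExponentialLR_with_minLr.get_lr = get_lr  # attach the override defined above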
def get_optimizer(cfg, model):
    if cfg.type == 'adam':
        return torch.optim.Adam(
            model.parameters(),
            lr=cfg.lr,
            weight_decay=cfg.weight_decay,
            betas=(cfg.beta1, cfg.beta2),
        )
    else:
        raise NotImplementedError('Optimizer not supported: %s' % cfg.type)
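# A hedged usage sketch: get_optimizer only reads the attributes shown below,
# so any attribute-style config object works. The SimpleNamespace config and
# the toy model are illustrative assumptions, not part of the original module.
from types import SimpleNamespace

import torch.nn as nn

cfg = SimpleNamespace(type='adam', lr=1e-3, weight_decay=0.0, beta1=0.9, beta2=0.999)
model = nn.Linear(16, 4)
optimizer = get_optimizer(cfg, model)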
def get_scheduler(cfg, optimizer):
    if cfg.type == 'plateau':
        return torch.optim.lr_scheduler.ReduceLROnPlateau(
            optimizer,
            factor=cfg.factor,
            patience=cfg.patience,
        )
    elif cfg.type == 'expmin':
        return ExponentialLR_with_minLr(
            optimizer,
            gamma=cfg.factor,
            min_lr=cfg.min_lr,
        )
    elif cfg.type == 'expmin_milestone':
        # Choose gamma so that after cfg.milestone steps the lr has decayed by
        # a total factor of cfg.factor: gamma = factor ** (1 / milestone).
        gamma = np.exp(np.log(cfg.factor) / cfg.milestone)
        return ExponentialLR_with_minLr(
            optimizer,
            gamma=gamma,
            min_lr=cfg.min_lr,
        )
    else:
        raise NotImplementedError('Scheduler not supported: %s' % cfg.type)
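# A hedged usage sketch for the 'expmin_milestone' branch, continuing the
# example above. With factor=0.1 and milestone=1000, gamma = exp(ln(0.1)/1000)
# ≈ 0.9977, so the lr shrinks tenfold every 1000 scheduler steps until it
# reaches min_lr. The config values here are illustrative assumptions.
sched_cfg = SimpleNamespace(type='expmin_milestone', factor=0.1, milestone=1000, min_lr=1e-6)
scheduler = get_scheduler(sched_cfg, optimizer)

for step in range(3):
    optimizer.step()   # normally preceded by a forward/backward pass
    scheduler.step()   # for 'plateau', call scheduler.step(val_loss) instead
    print(scheduler.get_last_lr())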