# NOTE(review): the four tokens below ("text", "stringlengths", "1", "93.6k") are
# dataset-viewer / scrape extraction artifacts, not source code. They are preserved
# here as a comment so the file stays parseable; safe to delete after confirming.
# text stringlengths 1 93.6k
weight_decay = config.weight_decay,
amsgrad = config.amsgrad
)
elif config.opt == 'AdamW':
return torch.optim.AdamW(
model.parameters(),
lr = config.lr,
betas = config.betas,
eps = config.eps,
weight_decay = config.weight_decay,
amsgrad = config.amsgrad
)
elif config.opt == 'Adamax':
return torch.optim.Adamax(
model.parameters(),
lr = config.lr,
betas = config.betas,
eps = config.eps,
weight_decay = config.weight_decay
)
elif config.opt == 'ASGD':
return torch.optim.ASGD(
model.parameters(),
lr = config.lr,
lambd = config.lambd,
alpha = config.alpha,
t0 = config.t0,
weight_decay = config.weight_decay
)
elif config.opt == 'RMSprop':
return torch.optim.RMSprop(
model.parameters(),
lr = config.lr,
momentum = config.momentum,
alpha = config.alpha,
eps = config.eps,
centered = config.centered,
weight_decay = config.weight_decay
)
elif config.opt == 'Rprop':
return torch.optim.Rprop(
model.parameters(),
lr = config.lr,
etas = config.etas,
step_sizes = config.step_sizes,
)
elif config.opt == 'SGD':
return torch.optim.SGD(
model.parameters(),
lr = config.lr,
momentum = config.momentum,
weight_decay = config.weight_decay,
dampening = config.dampening,
nesterov = config.nesterov
)
else: # default opt is SGD
return torch.optim.SGD(
model.parameters(),
lr = 0.01,
momentum = 0.9,
weight_decay = 0.05,
)
def get_scheduler(config, optimizer):
assert config.sch in ['StepLR', 'MultiStepLR', 'ExponentialLR', 'CosineAnnealingLR', 'ReduceLROnPlateau',
'CosineAnnealingWarmRestarts', 'WP_MultiStepLR', 'WP_CosineLR'], 'Unsupported scheduler!'
if config.sch == 'StepLR':
scheduler = torch.optim.lr_scheduler.StepLR(
optimizer,
step_size = config.step_size,
gamma = config.gamma,
last_epoch = config.last_epoch
)
elif config.sch == 'MultiStepLR':
scheduler = torch.optim.lr_scheduler.MultiStepLR(
optimizer,
milestones = config.milestones,
gamma = config.gamma,
last_epoch = config.last_epoch
)
elif config.sch == 'ExponentialLR':
scheduler = torch.optim.lr_scheduler.ExponentialLR(
optimizer,
gamma = config.gamma,
last_epoch = config.last_epoch
)
elif config.sch == 'CosineAnnealingLR':
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
optimizer,
T_max = config.T_max,
eta_min = config.eta_min,
last_epoch = config.last_epoch
)
elif config.sch == 'ReduceLROnPlateau':
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
optimizer,
mode = config.mode,
factor = config.factor,
patience = config.patience,