@@ -120,7 +120,10 @@ def train(hyp, opt, device, tb_writer=None, wandb=None):
 
     # Scheduler https://arxiv.org/pdf/1812.01187.pdf
     # https://pytorch.org/docs/stable/_modules/torch/optim/lr_scheduler.html#OneCycleLR
-    lf = one_cycle(1, hyp['lrf'], epochs)  # cosine 1->hyp['lrf']
+    if opt.linear_lr:
+        lf = lambda x: (1 - x / (epochs - 1)) * (1.0 - hyp['lrf']) + hyp['lrf']  # linear
+    else:
+        lf = one_cycle(1, hyp['lrf'], epochs)  # cosine 1->hyp['lrf']
     scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf)
     # plot_lr_scheduler(optimizer, scheduler, epochs)
 
@@ -464,6 +467,7 @@ if __name__ == '__main__':
     parser.add_argument('--name', default='exp', help='save to project/name')
     parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment')
     parser.add_argument('--quad', action='store_true', help='quad dataloader')
+    parser.add_argument('--linear-lr', action='store_true', help='linear LR')
     opt = parser.parse_args()
 
     # Set DDP variables
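
For reference, a minimal standalone sketch (not part of the diff) of how the two lr_lambda options in the scheduler hunk above decay the LR multiplier from 1.0 down to hyp['lrf'] over training. The one_cycle definition and the epochs/lrf values below are assumptions for illustration, not taken from this PR:

import math

def one_cycle(y1=0.0, y2=1.0, steps=100):
    # assumed behavior of the one_cycle helper: cosine ramp from y1 to y2 over `steps`
    return lambda x: ((1 - math.cos(x * math.pi / steps)) / 2) * (y2 - y1) + y1

epochs, lrf = 300, 0.2  # illustrative values; lrf stands in for hyp['lrf']
linear = lambda x: (1 - x / (epochs - 1)) * (1.0 - lrf) + lrf  # new --linear-lr schedule
cosine = one_cycle(1, lrf, epochs)                             # existing default schedule

for x in (0, epochs // 2, epochs - 1):  # multiplier handed to LambdaLR at each epoch
    print(f'epoch {x:3d}: linear={linear(x):.3f}  cosine={cosine(x):.3f}')

Both curves start at 1.0 and end at roughly lrf; the linear option simply trades the cosine decay for a constant per-epoch decrement.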