From cce95e744dad1d8cb487c34b1e641136063e77f3 Mon Sep 17 00:00:00 2001
From: Glenn Jocher <glenn.jocher@ultralytics.com>
Date: Thu, 18 Jun 2020 00:13:18 -0700
Subject: [PATCH] backbone as FP16, save default to FP32

---
 train.py       | 2 +-
 utils/utils.py | 3 +--
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/train.py b/train.py
index ce46797..5163bb1 100644
--- a/train.py
+++ b/train.py
@@ -332,7 +332,7 @@ def train(hyp):
             ckpt = {'epoch': epoch,
                     'best_fitness': best_fitness,
                     'training_results': f.read(),
-                    'model': ema.ema.module.half() if hasattr(model, 'module') else ema.ema.half(),
+                    'model': ema.ema.module if hasattr(model, 'module') else ema.ema,
                     'optimizer': None if final_epoch else optimizer.state_dict()}
 
             # Save last, best and delete
diff --git a/utils/utils.py b/utils/utils.py
index 47f5219..9c3d7d2 100755
--- a/utils/utils.py
+++ b/utils/utils.py
@@ -627,13 +627,12 @@ def strip_optimizer(f='weights/best.pt'):  # from utils.utils import *; strip_op
 def create_backbone(f='weights/best.pt', s='weights/backbone.pt'):  # from utils.utils import *; create_backbone()
     # create backbone 's' from 'f'
     device = torch.device('cpu')
-    x = torch.load(f, map_location=device)
-    torch.save(x, s)  # update model if SourceChangeWarning
     x = torch.load(s, map_location=device)
     x['optimizer'] = None
     x['training_results'] = None
     x['epoch'] = -1
+    x['model'].half()  # to FP16
     for p in x['model'].parameters():
         p.requires_grad = True
     torch.save(x, s)
 