Bladeren bron

Update utils.py strip_optimizer() (#509)

5.0
Glenn Jocher GitHub 4 jaren geleden
bovenliggende
commit
0032af2980
Geen bekende sleutel gevonden voor deze handtekening in de database GPG sleutel-ID: 4AEE18F83AFDEB23
2 gewijzigde bestanden met toevoegingen van 7 en 15 verwijderingen
  1. +1
    -1
      Dockerfile
  2. +6
    -14
      utils/utils.py

+ 1
- 1
Dockerfile Bestand weergeven

@@ -43,7 +43,7 @@ COPY . /usr/src/app
# sudo docker commit 092b16b25c5b usr/resume && sudo docker run -it --gpus all --ipc=host -v "$(pwd)"/coco:/usr/src/coco --entrypoint=sh usr/resume

# Send weights to GCP
# python -c "from utils.utils import *; create_pretrained('path/last.pt')" && gsutil cp weights/pretrained.pt gs://*
# python -c "from utils.utils import *; strip_optimizer('runs/exp0/weights/last.pt', 'temp.pt')" && gsutil cp temp.pt gs://*

# Clean up
# docker system prune -a --volumes

+ 6
- 14
utils/utils.py Bestand weergeven

@@ -645,26 +645,18 @@ def non_max_suppression(prediction, conf_thres=0.1, iou_thres=0.6, merge=False,
return output


def strip_optimizer(f='weights/best.pt'):  # from utils.utils import *; strip_optimizer()
    """Strip the optimizer state from a *.pt checkpoint in place.

    Sets the checkpoint's 'optimizer' entry to None and converts the model
    to FP16, roughly halving the file size, then overwrites the original file
    and reports the new size.
    """
    checkpoint = torch.load(f, map_location=torch.device('cpu'))  # always deserialize onto CPU
    checkpoint['optimizer'] = None  # optimizer state is only needed to resume training
    checkpoint['model'].half()  # to FP16
    torch.save(checkpoint, f)  # overwrite the original file
    print('Optimizer stripped from %s, %.1fMB' % (f, os.path.getsize(f) / 1E6))


def create_pretrained(f='weights/best.pt', s='weights/pretrained.pt'): # from utils.utils import *; create_pretrained()
# create pretrained checkpoint 's' from 'f' (create_pretrained(x, x) for x in glob.glob('./*.pt'))
def strip_optimizer(f='weights/best.pt', s=''):  # from utils.utils import *; strip_optimizer()
    """Strip optimizer state from checkpoint 'f' to finalize training.

    Removes the optimizer state, training results and epoch counter from the
    checkpoint, converts the model to FP16 and freezes its parameters, then
    saves the result and reports the new file size.

    Args:
        f: path to the input *.pt checkpoint.
        s: optional output path; '' (default) overwrites 'f' in place.
    """
    x = torch.load(f, map_location=torch.device('cpu'))  # always deserialize onto CPU
    x['optimizer'] = None  # optimizer state is only needed to resume training
    x['training_results'] = None
    x['epoch'] = -1  # mark the checkpoint as finalized (no resume point)
    x['model'].half()  # to FP16 (halves file size)
    for p in x['model'].parameters():
        p.requires_grad = False  # inference-only checkpoint
    torch.save(x, s or f)  # save to 's' if given, else overwrite 'f'
    mb = os.path.getsize(s or f) / 1E6  # filesize
    print('Optimizer stripped from %s,%s %.1fMB' % (f, (' saved as %s,' % s) if s else '', mb))


def coco_class_count(path='../coco/labels/train2014/'):

Laden…
Annuleren
Opslaan