
Remove autoanchor and class checks on resumed training (#889)

* Class frequency not calculated on resuming training

Calculation of class frequency is not needed when resuming training (a toy sketch of this computation follows the commit message). Anchors can still be recalculated whether resuming or not.

* Check rank for autoanchor

* Update train.py

no autoanchor checks on resume

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
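
For reference, the class-frequency computation that this change skips on resume is the commented-out bincount line visible in the diff below. A minimal toy sketch of what it computes (nc and the label values here are made-up illustration data, not from the repo):

import torch

nc = 3  # number of classes (assumed toy value)
labels = torch.tensor([[0.], [0.], [1.], [2.], [2.], [2.]])  # toy labels; column 0 = class index
c = labels[:, 0]  # classes
cf = torch.bincount(c.long(), minlength=nc) + 1.  # per-class frequency; +1 avoids zero counts
print(cf)  # tensor([3., 2., 4.])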
Naman Gupta
commit 6f3db5e662
1 changed file with 4 additions and 4 deletions

train.py

@@ -185,18 +185,18 @@ def train(hyp, opt, device, tb_writer=None):
     model.class_weights = labels_to_class_weights(dataset.labels, nc).to(device)  # attach class weights
     model.names = names
 
-    # Class frequency
-    if rank in [-1, 0]:
+    # Classes and Anchors
+    if rank in [-1, 0] and not opt.resume:
         labels = np.concatenate(dataset.labels, 0)
         c = torch.tensor(labels[:, 0])  # classes
-        # cf = torch.bincount(c.long(), minlength=nc) + 1.
+        # cf = torch.bincount(c.long(), minlength=nc) + 1.  # frequency
         # model._initialize_biases(cf.to(device))
         plot_labels(labels, save_dir=log_dir)
         if tb_writer:
             # tb_writer.add_hparams(hyp, {})  # causes duplicate https://github.com/ultralytics/yolov5/pull/384
             tb_writer.add_histogram('classes', c, 0)
 
-        # Check anchors
+        # Anchors
         if not opt.noautoanchor:
             check_anchors(dataset, model=model, thr=hyp['anchor_t'], imgsz=imgsz)
 
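The net effect: label plotting and the autoanchor pass now run only on the main process (rank -1 or 0) and only on fresh runs, never on resumed ones. A minimal sketch of the gating, with print statements standing in for the real plot_labels/check_anchors helpers and the flags simplified to store_true (train.py's actual --resume option takes an optional checkpoint path):

import argparse

def train(opt, rank=-1):
    # Classes and Anchors: main process only, fresh runs only
    if rank in [-1, 0] and not opt.resume:
        print('plot labels, log class histogram')  # stand-in for plot_labels / tb_writer
        if not opt.noautoanchor:
            print('check anchors against dataset')  # stand-in for check_anchors
    # ... training loop proceeds in all cases ...

parser = argparse.ArgumentParser()
parser.add_argument('--resume', action='store_true')
parser.add_argument('--noautoanchor', action='store_true')

train(parser.parse_args([]))             # fresh run: both checks execute
train(parser.parse_args(['--resume']))   # resumed run: both checks are skipped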

