
W&B: track batch size after autobatch (#6039)

* track batch size after autobatch

* remove redundant import

* Update __init__.py

* Update __init__.py

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Author: Ayush Chaurasia
Branch: modifyDataloader
Commit: db6ec66a60
3 changed files with 8 additions and 1 deletion
  1. train.py (+1, -0)
  2. utils/callbacks.py (+1, -1)
  3. utils/loggers/__init__.py (+6, -0)

train.py (+1, -0)

      # Batch size
      if RANK == -1 and batch_size == -1:  # single-GPU only, estimate best batch size
          batch_size = check_train_batch_size(model, imgsz)
+         loggers.on_params_update({"batch_size": batch_size})

      # Optimizer
      nbs = 64  # nominal batch size
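
The added line pushes the runtime-estimated batch size into the experiment config. A minimal standalone sketch of the same idea outside YOLOv5 (wandb.init and config.update(..., allow_val_change=True) are standard wandb API; the project name and estimate_batch_size helper are made up for illustration):

    import wandb

    def estimate_batch_size():
        # hypothetical stand-in for YOLOv5's check_train_batch_size(model, imgsz)
        return 16

    run = wandb.init(project="autobatch-demo")  # assumed project name
    batch_size = estimate_batch_size()          # value only known at runtime
    # allow_val_change permits overwriting a config key that was set earlier
    run.config.update({"batch_size": batch_size}, allow_val_change=True)
    run.finish()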

utils/callbacks.py (+1, -1)

      'on_fit_epoch_end': [],  # fit = train + val
      'on_model_save': [],
      'on_train_end': [],
+     'on_params_update': [],
      'teardown': [],
  }
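
The new 'on_params_update' key registers an empty handler list in YOLOv5's callback registry. A condensed sketch of how such a registry dispatches hooks (the method names mirror the repo's Callbacks class, but the implementation here is simplified, not copied):

    class Callbacks:
        # minimal hook registry: each hook name maps to a list of handlers
        def __init__(self):
            self._callbacks = {'on_params_update': [], 'teardown': []}

        def register_action(self, hook, callback):
            # attach a handler to a known hook
            assert hook in self._callbacks, f"unknown hook '{hook}'"
            self._callbacks[hook].append(callback)

        def run(self, hook, *args, **kwargs):
            # fire every handler registered under this hook, in order
            for cb in self._callbacks[hook]:
                cb(*args, **kwargs)

    callbacks = Callbacks()
    callbacks.register_action('on_params_update', lambda params: print('updated:', params))
    callbacks.run('on_params_update', {'batch_size': 16})  # prints: updated: {'batch_size': 16}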



utils/loggers/__init__.py (+6, -0)

              else:
                  self.wandb.finish_run()
                  self.wandb = WandbLogger(self.opt)

+     def on_params_update(self, params):
+         # Update hyperparams or configs of the experiment
+         # params: A dict containing {param: value} pairs
+         if self.wandb:
+             self.wandb.wandb_run.config.update(params, allow_val_change=True)
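
Note that on_params_update is a no-op when self.wandb is unset, so callers never have to check whether W&B is active. A self-contained sketch of that guard pattern (DemoLoggers and FakeRun are illustrative names, not from the repo):

    class FakeRun:
        # stand-in for a wandb run, whose config can be updated mid-run
        def __init__(self):
            self.config = {}

    class DemoLoggers:
        # self.run is None when W&B is unavailable or disabled
        def __init__(self, run=None):
            self.run = run

        def on_params_update(self, params):
            # silently skip when no run is active, mirroring the guard above
            if self.run:
                self.run.config.update(params)

    DemoLoggers().on_params_update({'batch_size': 16})  # no run: safe no-op
    loggers = DemoLoggers(FakeRun())
    loggers.on_params_update({'batch_size': 16})
    print(loggers.run.config)                           # {'batch_size': 16}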
