* track batch size after autobatch * remove redundant import * Update __init__.py * Update __init__.py Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com> * modify Dataloader
# Batch size
if RANK == -1 and batch_size == -1:  # single-GPU only, estimate best batch size
    batch_size = check_train_batch_size(model, imgsz)
    # Report the auto-selected batch size to the experiment loggers so the
    # run's recorded hyperparameters reflect the value actually used.
    loggers.on_params_update({"batch_size": batch_size})

# Optimizer
nbs = 64  # nominal batch size
'on_fit_epoch_end': [], # fit = train + val | 'on_fit_epoch_end': [], # fit = train + val | ||||
'on_model_save': [], | 'on_model_save': [], | ||||
'on_train_end': [], | 'on_train_end': [], | ||||
'on_params_update': [], | |||||
'teardown': [], | 'teardown': [], | ||||
} | } | ||||
else: | else: | ||||
self.wandb.finish_run() | self.wandb.finish_run() | ||||
self.wandb = WandbLogger(self.opt) | self.wandb = WandbLogger(self.opt) | ||||
def on_params_update(self, params):
    """Update hyperparameters or configs of the experiment.

    Args:
        params (dict): {param_name: value} pairs to record in the run config.
    """
    if self.wandb:
        # allow_val_change=True lets previously-logged values (e.g. a
        # batch size replaced after autobatch estimation) be overwritten
        # without W&B raising a config-change error.
        self.wandb.wandb_run.config.update(params, allow_val_change=True)