
Commit

W&B: track batch size after autobatch (#6039)
* track batch size after autobatch
* remove redundant import
* Update __init__.py
* Update __init__.py
Co-authored-by: Glenn Jocher <[email protected]>
AyushExel and glenn-jocher committed Dec 23, 2021
1 parent c72270c · commit db6ec66
Showing 3 changed files with 8 additions and 1 deletion.
1 change: 1 addition & 0 deletions train.py
@@ -138,6 +138,7 @@ def train(hyp,  # path/to/hyp.yaml or hyp dictionary
     # Batch size
     if RANK == -1 and batch_size == -1:  # single-GPU only, estimate best batch size
         batch_size = check_train_batch_size(model, imgsz)
+        loggers.on_params_update({"batch_size": batch_size})

     # Optimizer
     nbs = 64  # nominal batch size
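With AutoBatch, training starts from the placeholder batch_size == -1, so without this added call the experiment trackers would record -1 rather than the size actually used. A minimal sketch of the pattern, assuming hypothetical names (estimate_best_batch_size stands in for YOLOv5's check_train_batch_size, and setup_batch_size for the surrounding train() code):

def estimate_best_batch_size(model, imgsz):
    # Hypothetical stand-in for check_train_batch_size (utils/autobatch.py)
    return 16

def setup_batch_size(model, imgsz, batch_size, rank, loggers):
    # -1 means "estimate automatically"; AutoBatch runs on single GPU only (rank == -1)
    if rank == -1 and batch_size == -1:
        batch_size = estimate_best_batch_size(model, imgsz)
        loggers.on_params_update({'batch_size': batch_size})  # record the resolved value, not -1
    return batch_size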
2 changes: 1 addition & 1 deletion utils/callbacks.py
@@ -32,7 +32,7 @@ def __init__(self):
             'on_fit_epoch_end': [],  # fit = train + val
             'on_model_save': [],
             'on_train_end': [],
-
+            'on_params_update': [],
             'teardown': [],
         }

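For context, the dict above is the hook registry of YOLOv5's Callbacks class, so adding the key makes 'on_params_update' a hook that loggers can subscribe to and train.py can fire. A simplified, self-contained sketch of that registry pattern (the register_action/run names mirror utils/callbacks.py, but treat the exact signatures as an assumption):

class Callbacks:
    # Simplified hook registry; the real class lives in utils/callbacks.py
    def __init__(self):
        self._callbacks = {'on_params_update': [], 'teardown': []}

    def register_action(self, hook, name='', callback=None):
        assert hook in self._callbacks, f"hook '{hook}' not found in callbacks"
        assert callable(callback), "callback is not callable"
        self._callbacks[hook].append({'name': name, 'callback': callback})

    def run(self, hook, *args, **kwargs):
        for action in self._callbacks.get(hook, []):
            action['callback'](*args, **kwargs)

cb = Callbacks()
cb.register_action('on_params_update', name='demo', callback=lambda params: print('updated:', params))
cb.run('on_params_update', {'batch_size': 16})  # prints: updated: {'batch_size': 16}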
6 changes: 6 additions & 0 deletions utils/loggers/__init__.py
@@ -157,3 +157,9 @@ def on_train_end(self, last, best, plots, epoch, results):
             else:
                 self.wandb.finish_run()
                 self.wandb = WandbLogger(self.opt)
+
+    def on_params_update(self, params):
+        # Update hyperparams or configs of the experiment
+        # params: A dict containing {param: value} pairs
+        if self.wandb:
+            self.wandb.wandb_run.config.update(params, allow_val_change=True)
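The if self.wandb guard is needed because the Loggers facade also drives backends such as CSV and TensorBoard that have no run config to update. Below is a standalone illustration of the underlying call, using the real wandb API with an illustrative project name and values; without allow_val_change=True, wandb refuses to overwrite a config key that was already set at init, which is exactly the batch_size = -1 placeholder case:

import wandb

run = wandb.init(project='demo', config={'batch_size': -1}, mode='offline')  # offline: no account needed
run.config.update({'batch_size': 16}, allow_val_change=True)  # rejected by wandb without the flag
assert run.config.batch_size == 16
run.finish()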
