Unverified commit db6ec66a authored by Ayush Chaurasia, committed by GitHub

W&B: track batch size after autobatch (#6039)

* track batch size after autobatch * remove redundant import * Update __init__.py * Update __init__.py Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
上级 c72270c0
......@@ -138,6 +138,7 @@ def train(hyp, # path/to/hyp.yaml or hyp dictionary
# Batch size
if RANK == -1 and batch_size == -1: # single-GPU only, estimate best batch size
batch_size = check_train_batch_size(model, imgsz)
loggers.on_params_update({"batch_size": batch_size})
# Optimizer
nbs = 64 # nominal batch size
......
......@@ -32,7 +32,7 @@ class Callbacks:
'on_fit_epoch_end': [], # fit = train + val
'on_model_save': [],
'on_train_end': [],
'on_params_update': [],
'teardown': [],
}
......
......@@ -157,3 +157,9 @@ class Loggers():
else:
self.wandb.finish_run()
self.wandb = WandbLogger(self.opt)
def on_params_update(self, params):
    """Push updated hyperparameters/configs to the active experiment tracker.

    Arguments:
        params (dict): {param_name: value} pairs to record (e.g. the batch
            size chosen by autobatch after the run has already started).
    """
    if not self.wandb:  # no W&B logger attached — nothing to update
        return
    # allow_val_change lets us overwrite a config key that was set at run init
    self.wandb.wandb_run.config.update(params, allow_val_change=True)
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论