Unverified commit 6f3db5e6, authored by Naman Gupta, committed by GitHub

Remove autoanchor and class checks on resumed training (#889)

* Class frequency not calculated on resuming training. Calculation of class frequency is not needed when resuming training. Anchors can still be recalculated whether resuming or not.
* Check rank for autoanchor
* Update train.py: no autoanchor checks on resume

Co-authored-by: Glenn Jocher <glenn.jocher@ultralytics.com>
Parent: c5969f79
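The net effect of the change is a single combined guard: the one-off label statistics and the autoanchor check now run only on the main process (rank -1 in single-GPU runs, rank 0 under DDP) and only when training starts from scratch rather than from a checkpoint. A minimal standalone sketch of that gating pattern, with opt stubbed out and hypothetical print stand-ins for the plotting and anchor work (these are not the actual train.py helpers):

from types import SimpleNamespace

def analyze_dataset(opt, rank=-1):
    # rank -1 = single-process run, rank 0 = main DDP process;
    # other ranks skip this one-off dataset analysis entirely.
    if rank in [-1, 0] and not opt.resume:
        print('plotting class frequency')  # stand-in for plot_labels(...)
        if not opt.noautoanchor:
            print('checking anchor fit')   # stand-in for check_anchors(...)

analyze_dataset(SimpleNamespace(resume=False, noautoanchor=False))  # fresh run: both execute
analyze_dataset(SimpleNamespace(resume=True, noautoanchor=False))   # resumed run: both skipped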
@@ -185,18 +185,18 @@ def train(hyp, opt, device, tb_writer=None):
     model.class_weights = labels_to_class_weights(dataset.labels, nc).to(device)  # attach class weights
     model.names = names
 
-    # Class frequency
-    if rank in [-1, 0]:
+    # Classes and Anchors
+    if rank in [-1, 0] and not opt.resume:
         labels = np.concatenate(dataset.labels, 0)
         c = torch.tensor(labels[:, 0])  # classes
-        # cf = torch.bincount(c.long(), minlength=nc) + 1.
+        # cf = torch.bincount(c.long(), minlength=nc) + 1.  # frequency
         # model._initialize_biases(cf.to(device))
         plot_labels(labels, save_dir=log_dir)
         if tb_writer:
             # tb_writer.add_hparams(hyp, {})  # causes duplicate https://github.com/ultralytics/yolov5/pull/384
             tb_writer.add_histogram('classes', c, 0)
 
-        # Check anchors
+        # Anchors
         if not opt.noautoanchor:
             check_anchors(dataset, model=model, thr=hyp['anchor_t'], imgsz=imgsz)
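For reference, the gated block's first lines flatten the per-image label arrays and pull out the class column; the commented-out bincount line is the class-frequency computation the commit title refers to. A small self-contained sketch with synthetic labels (the (n, 5) rows of [class, x, y, w, h] follow YOLO's label format; the data here is made up):

import numpy as np
import torch

# Two images' labels, each row [class, x, y, w, h]
dataset_labels = [
    np.array([[0, 0.5, 0.5, 0.2, 0.2],
              [2, 0.3, 0.3, 0.1, 0.1]]),
    np.array([[1, 0.7, 0.6, 0.4, 0.3]]),
]
nc = 3  # number of classes

labels = np.concatenate(dataset_labels, 0)  # (3, 5) array of all labels
c = torch.tensor(labels[:, 0])              # class column: 0, 2, 1
cf = torch.bincount(c.long(), minlength=nc) + 1.  # per-class frequency, +1 smoothing
print(cf)  # tensor([2., 2., 2.])

On a resumed run this work is redundant: the label plot and histogram were already produced by the original run, and the model's anchors were already evolved against the same dataset, which is why the commit skips the whole block when opt.resume is set.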