提交 333f678b，作者：Alex Stoken

Update the default hyp dict with values from a provided YAML file

上级（父提交）：a448c3bc
...@@ -42,17 +42,6 @@ hyp = {'lr0': 0.01, # initial learning rate (SGD=1E-2, Adam=1E-3) ...@@ -42,17 +42,6 @@ hyp = {'lr0': 0.01, # initial learning rate (SGD=1E-2, Adam=1E-3)
# Don't need to be printing every time # Don't need to be printing every time
#print(hyp) #print(hyp)
# Overwrite hyp with hyp*.txt (optional)
if f:
print('Using %s' % f[0])
for k, v in zip(hyp.keys(), np.loadtxt(f[0])):
hyp[k] = v
# Print focal loss if gamma > 0
if hyp['fl_gamma']:
print('Using FocalLoss(gamma=%g)' % hyp['fl_gamma'])
def train(hyp): def train(hyp):
#write all results to the tb log_dir, so all data from one run is together #write all results to the tb log_dir, so all data from one run is together
log_dir = tb_writer.log_dir log_dir = tb_writer.log_dir
...@@ -430,7 +419,16 @@ if __name__ == '__main__': ...@@ -430,7 +419,16 @@ if __name__ == '__main__':
# Train # Train
if not opt.evolve: if not opt.evolve:
tb_writer = SummaryWriter(comment=opt.name) tb_writer = SummaryWriter(comment=opt.name)
#updates hyp defaults from hyp.yaml
if opt.hyp: hyp.update(opt.hyp)
# Print focal loss if gamma > 0
if hyp['fl_gamma']:
print('Using FocalLoss(gamma=%g)' % hyp['fl_gamma'])
print(f'Beginning training with {hyp}\n\n')
print('Start Tensorboard with "tensorboard --logdir=runs", view at http://localhost:6006/') print('Start Tensorboard with "tensorboard --logdir=runs", view at http://localhost:6006/')
train(hyp) train(hyp)
# Evolve hyperparameters (optional) # Evolve hyperparameters (optional)
......
Markdown 格式
0%
您添加了 0 名用户到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 登录 后发表评论