# Build the Adam optimizer for parameter group pg0.
# beta1 is driven by hyp['momentum'] so the momentum hyperparameter maps onto
# Adam's first-moment decay (matching the momentum scheduling used by OneCycleLR,
# see https://pytorch.org/docs/stable/_modules/torch/optim/lr_scheduler.html#OneCycleLR);
# beta2 keeps PyTorch's default of 0.999.
# NOTE(review): the original constructed this optimizer twice on consecutive
# identical lines; the redundant duplicate construction has been removed.
optimizer = optim.Adam(pg0, lr=hyp['lr0'], betas=(hyp['momentum'], 0.999))