Fix --optimizer-args None default bug

This commit is contained in:
Yin Li 2020-07-31 16:07:35 -04:00
parent 5ac9016987
commit 6dfde5ee7f

View File

@ -111,7 +111,7 @@ def add_train_args(parser):
                     help='optimizer from torch.optim')
 parser.add_argument('--lr', type=float, required=True,
                     help='initial learning rate')
-parser.add_argument('--optimizer-args', type=json.loads,
+parser.add_argument('--optimizer-args', default='{}', type=json.loads,
                     help='optimizer arguments in addition to the learning rate, '
                          'e.g. --optimizer-args \'{"betas": [0.5, 0.9]}\'')
 parser.add_argument('--reduce-lr-on-plateau', action='store_true',