Add weight decay

Yin Li 2019-12-12 12:04:39 -05:00
parent 96b683bfef
commit bd3798222a
2 changed files with 3 additions and 3 deletions


@@ -57,8 +57,8 @@ def add_train_args(parser):
                         help='initial learning rate')
     # parser.add_argument('--momentum', default=0.9, type=float,
     #                     help='momentum')
-    # parser.add_argument('--weight-decay', default=1e-4, type=float,
-    #                     help='weight decay')
+    parser.add_argument('--weight-decay', default=0., type=float,
+                        help='weight decay')
     parser.add_argument('--dist-backend', default='nccl', type=str,
                         choices=['gloo', 'nccl'], help='distributed backend')
     parser.add_argument('--seed', type=int,
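For reference, a minimal sketch of how the new flag behaves in isolation (the parser construction and the argument values here are illustrative, not from the repo):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--lr', type=float, help='initial learning rate')
parser.add_argument('--weight-decay', default=0., type=float,
                    help='weight decay')

# argparse converts the dash to an underscore, so the flag surfaces as
# args.weight_decay; the 0. default preserves the old no-decay behavior.
args = parser.parse_args(['--lr', '1e-3', '--weight-decay', '1e-4'])
print(args.weight_decay)  # 0.0001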


@@ -94,7 +94,7 @@ def gpu_worker(local_rank, args):
         model.parameters(),
         lr=args.lr,
         #momentum=args.momentum,
-        #weight_decay=args.weight_decay
+        weight_decay=args.weight_decay,
     )
     scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
             factor=0.1, verbose=True)
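A hedged sketch of the resulting optimizer setup (the model and the numeric values are placeholders; the repo builds its own model and takes both from parsed arguments):

import torch

model = torch.nn.Linear(8, 8)  # placeholder for the repo's real model

# Mirror of the change: the parsed flag is forwarded to Adam. Note that
# Adam applies weight_decay as a coupled L2 term on the gradients;
# torch.optim.AdamW would decouple it, but this commit keeps Adam.
optimizer = torch.optim.Adam(
    model.parameters(),
    lr=1e-3,            # args.lr in the repo
    weight_decay=1e-4,  # args.weight_decay; defaults to 0., i.e. no decay
)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
        factor=0.1, verbose=True)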