From f64b1e42e9392225fda3a27613a43bb9033cb419 Mon Sep 17 00:00:00 2001
From: Yin Li
Date: Sun, 8 Dec 2019 21:27:44 -0500
Subject: [PATCH] Add synchronized random seed to training

---
 map2map/args.py  | 2 +-
 map2map/train.py | 3 +++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/map2map/args.py b/map2map/args.py
index 43f41d7..011542a 100644
--- a/map2map/args.py
+++ b/map2map/args.py
@@ -64,7 +64,7 @@ def add_train_args(parser):
     #        help='weight decay')
     parser.add_argument('--dist-backend', default='nccl', type=str,
             choices=['gloo', 'nccl'], help='distributed backend')
-    parser.add_argument('--seed', default=42, type=int,
+    parser.add_argument('--seed', type=int,
             help='seed for initializing training')
     parser.add_argument('--log-interval', default=20, type=int,
             help='interval between logging training loss')
diff --git a/map2map/train.py b/map2map/train.py
index cf03d2e..d3ec938 100644
--- a/map2map/train.py
+++ b/map2map/train.py
@@ -1,5 +1,6 @@
 import os
 import shutil
+import random
 import torch
 from torch.multiprocessing import spawn
 from torch.distributed import init_process_group, destroy_process_group, all_reduce
@@ -13,6 +14,8 @@ from .models import UNet, narrow_like


 def node_worker(args):
+    if args.seed is None:
+        args.seed = random.randint(0, 65535)
     torch.manual_seed(args.seed)  # NOTE: why here not in gpu_worker?
     #torch.backends.cudnn.deterministic = True  # NOTE: test perf
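Note: the seed is "synchronized" because node_worker draws it once in the
parent process, before any GPU workers are spawned, so every worker sees the
same value through args. Below is a minimal sketch of that pattern, assuming
node_worker goes on to spawn per-GPU workers with torch.multiprocessing.spawn;
the gpu_worker body and the args.gpus_per_node attribute are illustrative
assumptions, not part of this patch.

    import random

    import torch
    from torch.multiprocessing import spawn


    def node_worker(args):
        # Draw the seed once in the parent process; every worker spawned
        # below inherits the same value through `args`.
        if args.seed is None:
            args.seed = random.randint(0, 65535)
        torch.manual_seed(args.seed)
        spawn(gpu_worker, args=(args,), nprocs=args.gpus_per_node)


    def gpu_worker(local_rank, args):
        # Hypothetical worker body: re-seeding with the shared value keeps
        # RNG state identical across ranks; seeding with args.seed + local_rank
        # would instead give decorrelated but still reproducible streams.
        torch.manual_seed(args.seed)

Had each worker drawn its own seed after spawn, the ranks would disagree
whenever --seed is omitted; drawing it in the parent, as the guard added to
node_worker does, avoids that.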