Change default number of dataloader workers

as suggested by PyTorch
Yin Li 2021-03-25 17:05:36 -04:00
parent 3eaca964ed
commit 0410435a8a


@@ -81,10 +81,9 @@ def add_common_args(parser):
     # "batches" is kept for now for backward compatibility
     parser.add_argument('--batch-size', '--batches', type=int, required=True,
                         help='mini-batch size, per GPU in training or in total in testing')
-    parser.add_argument('--loader-workers', default=-8, type=int,
+    parser.add_argument('--loader-workers', default=8, type=int,
                         help='number of subprocesses per data loader. '
-                             '0 to disable multiprocessing; '
-                             'negative number to multiply by the batch size')
+                             '0 to disable multiprocessing')
     parser.add_argument('--callback-at', type=lambda s: os.path.abspath(s),
                         help='directory of custorm code defining callbacks for models, '
@@ -186,8 +185,7 @@ def int_tuple(s):
 def set_common_args(args):
-    if args.loader_workers < 0:
-        args.loader_workers *= - args.batch_size
+    pass
 
 def set_train_args(args):
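
For context: before this commit, a negative --loader-workers value acted as a per-batch multiplier (the default of -8 meant 8 x batch size worker subprocesses); after it, the value is a plain worker count with a fixed default of 8. Below is a minimal sketch, not from this repository, of how the parsed value would typically be handed to torch.utils.data.DataLoader as num_workers. The two add_argument calls mirror the diff above, while the dataset and the hard-coded argv are placeholders for illustration:

import argparse

import torch
from torch.utils.data import DataLoader, TensorDataset

parser = argparse.ArgumentParser()
parser.add_argument('--batch-size', '--batches', type=int, required=True,
                    help='mini-batch size, per GPU in training or in total in testing')
parser.add_argument('--loader-workers', default=8, type=int,
                    help='number of subprocesses per data loader. '
                         '0 to disable multiprocessing')
args = parser.parse_args(['--batch-size', '32'])  # placeholder argv

# placeholder dataset; 8 worker subprocesses load it regardless of batch size
dataset = TensorDataset(torch.randn(256, 3))
loader = DataLoader(dataset, batch_size=args.batch_size,
                    num_workers=args.loader_workers)

for (batch,) in loader:
    pass  # each batch has shape (32, 3)

A fixed worker count is in line with the general PyTorch guidance that num_workers should not scale with batch size, since each extra worker adds memory and startup overhead; that is presumably the suggestion the commit message refers to.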