We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
2 parents 3d9c23a + 908563d — commit 3f9959c (Copy full SHA for 3f9959c)
train.py
@@ -397,7 +397,7 @@ def main():
397
# setup synchronized BatchNorm for distributed training
398
if args.distributed and args.sync_bn:
399
assert not args.split_bn
400
- if has_apex and use_amp != 'native':
+ if has_apex and use_amp == 'apex':
401
# Apex SyncBN preferred unless native amp is activated
402
model = convert_syncbn_model(model)
403
else:
@@ -451,7 +451,7 @@ def main():
451
452
# setup distributed training
453
if args.distributed:
454
455
# Apex DDP preferred unless native amp is activated
456
if args.local_rank == 0:
457
_logger.info("Using NVIDIA APEX DistributedDataParallel.")
0 commit comments