diff --git a/tools/train.py b/tools/train.py
index 84c1eec93aa..89c8548fc32 100644
--- a/tools/train.py
+++ b/tools/train.py
@@ -91,10 +91,6 @@ def merge_args(cfg, args):
 
     # enable automatic-mixed-precision training
     if args.amp is True:
-        optim_wrapper = cfg.optim_wrapper.get('type', 'OptimWrapper')
-        assert optim_wrapper in ['OptimWrapper', 'AmpOptimWrapper'], \
-            '`--amp` is not supported custom optimizer wrapper type ' \
-            f'`{optim_wrapper}.'
         cfg.optim_wrapper.type = 'AmpOptimWrapper'
         cfg.optim_wrapper.setdefault('loss_scale', 'dynamic')
 
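
For context, a minimal sketch of how the `--amp` branch behaves after this change: with the assertion removed, the flag simply overrides whatever optimizer-wrapper type the config declares instead of raising. The standalone `Config` construction and the `Args` stub below are illustrative, not part of `tools/train.py`.

```python
# Illustrative sketch of the post-change behaviour, assuming mmengine's Config.
from mmengine.config import Config

cfg = Config(
    dict(optim_wrapper=dict(type='OptimWrapper',
                            optimizer=dict(type='SGD', lr=0.01))))


class Args:
    """Hypothetical stand-in for the parsed CLI arguments."""
    amp = True


args = Args()

if args.amp is True:
    # Override the configured wrapper type; no type check is performed.
    cfg.optim_wrapper.type = 'AmpOptimWrapper'
    cfg.optim_wrapper.setdefault('loss_scale', 'dynamic')

print(cfg.optim_wrapper.type)        # AmpOptimWrapper
print(cfg.optim_wrapper.loss_scale)  # dynamic
```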