diff --git a/mmengine/optim/optimizer/builder.py b/mmengine/optim/optimizer/builder.py
index 7b4090ba7a..d47d5e4e5a 100644
--- a/mmengine/optim/optimizer/builder.py
+++ b/mmengine/optim/optimizer/builder.py
@@ -170,7 +170,7 @@ def register_transformers_optimizers():
     except ImportError:
         pass
     else:
-        OPTIMIZERS.register_module(name='Adafactor', module=Adafactor)
+        OPTIMIZERS.register_module(name='Adafactor', module=Adafactor, force=True)  # The same optimizer was later introduced in PyTorch, but transformers had it first; force-register the transformers version for backward compatibility.
         transformer_optimizers.append('Adafactor')
     return transformer_optimizers
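
For context (not part of the patch): `force=True` matters because mmengine's `Registry` raises a `KeyError` when the same name is registered twice, and newer PyTorch versions ship their own `Adafactor`, so both implementations can end up targeting the same registry key. A minimal sketch of that behavior, using hypothetical stand-in classes (`TorchAdafactor`, `HFAdafactor`) in place of the real optimizers:

# Minimal sketch (assumption: stand-in classes instead of the real
# torch.optim / transformers optimizers); the Registry behavior itself
# mirrors mmengine.
from mmengine.registry import Registry

OPTIMIZERS = Registry('optimizer')


class TorchAdafactor:
    """Hypothetical stand-in for PyTorch's Adafactor, registered first."""


class HFAdafactor:
    """Hypothetical stand-in for transformers.Adafactor."""


OPTIMIZERS.register_module(name='Adafactor', module=TorchAdafactor)

# Without force=True, re-registering the same name raises KeyError.
OPTIMIZERS.register_module(name='Adafactor', module=HFAdafactor, force=True)

# The forced registration overwrites the earlier entry.
assert OPTIMIZERS.get('Adafactor') is HFAdafactor

Overwriting via `force=True` keeps existing configs that resolve `'Adafactor'` to the transformers implementation working unchanged.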