We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 203e262 · commit 51d7e45 (Copy full SHA for 51d7e45)
src/transformers/trainer.py
@@ -5118,7 +5118,7 @@ def create_accelerator_and_postprocess(self):
5118
# deepspeed and accelerate flags covering both trainer args and accelerate launcher
5119
self.is_deepspeed_enabled = getattr(self.accelerator.state, "deepspeed_plugin", None) is not None
5120
self.is_fsdp_enabled = getattr(self.accelerator.state, "fsdp_plugin", None) is not None
5121
- self.is_tp_enabled = getattr(self.accelerator.state, "tp_plugin", None) is not None
+ self.is_tp_enabled = getattr(self.accelerator.state, "torch_tp_plugin", None) is not None
5122
# post accelerator creation setup
5123
if self.is_fsdp_enabled:
5124
fsdp_plugin = self.accelerator.state.fsdp_plugin
0 commit comments