1 parent 51d7e45 commit ebc811a
src/transformers/training_args.py
@@ -1989,6 +1989,11 @@ def __post_init__(self):
                 warnings.warn("`--xla_fsdp_grad_ckpt` is useful only when `--xla` is set to true.")
 
         if self.tp_size > 1:
+            if not is_accelerate_available("1.3.1"):
+                raise NotImplementedError(
+                    "TP using PyTorch requires Accelerate version `accelerate` >= 1.3.1. "
+                    "This is not supported and we recommend you update your version."
+                )
             os.environ["ACCELERATE_USE_TP"] = "true"
             os.environ["TP_SIZE"] = str(self.tp_size)
         # accelerate integration for FSDP
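
For context, a minimal sketch of how this guard would be exercised from user code. Only `tp_size` and the Accelerate version check come from the diff above; the output directory and the tensor-parallel degree of 2 are illustrative placeholders.

```python
from transformers import TrainingArguments

# Illustrative only: request tensor parallelism across 2 devices.
# With `accelerate` < 1.3.1 installed, __post_init__ raises the
# NotImplementedError added in this commit; with a recent enough
# Accelerate it sets the ACCELERATE_USE_TP and TP_SIZE env vars instead.
args = TrainingArguments(output_dir="tp_run", tp_size=2)
```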