Skip to content

Commit

Permalink
[Training] Support for Transformers 4.37 (#459)
Browse files Browse the repository at this point in the history
Remove the use of `is_torch_less_than_1_11`: the import from `transformers.pytorch_utils` is deleted and the `if is_torch_less_than_1_11 or not is_random_sampler:` check is simplified to `if not is_random_sampler:`.
  • Loading branch information
michaelbenayoun authored Feb 16, 2024
1 parent c55550e commit 1b477ba
Showing 1 changed file with 1 addition and 2 deletions.
3 changes: 1 addition & 2 deletions optimum/neuron/trainers.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,6 @@
from transformers.debug_utils import DebugOption, DebugUnderflowOverflow
from transformers.integrations import hp_params
from transformers.modeling_utils import unwrap_model
from transformers.pytorch_utils import is_torch_less_than_1_11
from transformers.trainer import (
OPTIMIZER_NAME,
SCHEDULER_NAME,
Expand Down Expand Up @@ -861,7 +860,7 @@ def _inner_training_loop(

sampler_kinds.append(SeedableRandomSampler)
is_random_sampler = isinstance(sampler, tuple(sampler_kinds))
if is_torch_less_than_1_11 or not is_random_sampler:
if not is_random_sampler:
# We just need to begin an iteration to create the randomization of the sampler.
for _ in train_dataloader:
break
Expand Down

0 comments on commit 1b477ba

Please sign in to comment.