Skip to content

Commit

Permalink
Solve CUDA issues (#2272)
Browse files Browse the repository at this point in the history
* Solve CUDA issues

* import
  • Loading branch information
muellerzr authored Dec 22, 2023
1 parent b565a6c commit b60061d
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 3 deletions.
5 changes: 3 additions & 2 deletions src/accelerate/test_utils/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
is_bnb_available,
is_clearml_available,
is_comet_ml_available,
is_cuda_available,
is_datasets_available,
is_deepspeed_available,
is_dvclive_available,
Expand All @@ -51,7 +52,7 @@


def get_backend():
if torch.cuda.is_available():
if is_cuda_available():
return "cuda", torch.cuda.device_count()
elif is_mps_available():
return "mps", 1
Expand Down Expand Up @@ -117,7 +118,7 @@ def require_cuda(test_case):
"""
Decorator marking a test that requires CUDA. These tests are skipped when no GPU is available.
"""
return unittest.skipUnless(torch.cuda.is_available(), "test requires a GPU")(test_case)
return unittest.skipUnless(is_cuda_available(), "test requires a GPU")(test_case)


def require_xpu(test_case):
Expand Down
2 changes: 1 addition & 1 deletion src/accelerate/utils/imports.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ def is_bf16_available(ignore_tpu=False):
"Checks if bf16 is supported, optionally ignoring the TPU"
if is_tpu_available():
return not ignore_tpu
if torch.cuda.is_available():
if is_cuda_available():
return torch.cuda.is_bf16_supported()
return True

Expand Down

0 comments on commit b60061d

Please sign in to comment.