Skip to content

Commit

Permalink
Make cleaning optional for device map (#2233)
Browse files Browse the repository at this point in the history
* Make cleaning optional for device map

* Apply suggestions from code review

Co-authored-by: Marc Sun <[email protected]>

* Change order

* Nit

---------

Co-authored-by: Marc Sun <[email protected]>
  • Loading branch information
muellerzr and SunMarc authored Dec 8, 2023
1 parent 0a37e20 commit f86876d
Showing 1 changed file with 8 additions and 3 deletions.
11 changes: 8 additions & 3 deletions src/accelerate/utils/modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import re
import shutil
import tempfile
from collections import defaultdict
from collections import OrderedDict, defaultdict
from typing import Dict, List, Optional, Tuple, Union

import torch
Expand Down Expand Up @@ -923,6 +923,7 @@ def infer_auto_device_map(
dtype: Optional[Union[str, torch.dtype]] = None,
special_dtypes: Optional[Dict[str, Union[str, torch.dtype]]] = None,
verbose: bool = False,
clean_result: bool = True,
):
"""
Compute a device map for a given model giving priority to GPUs, then offload on CPU and finally offload to disk,
Expand Down Expand Up @@ -956,6 +957,8 @@ def infer_auto_device_map(
all weights).
verbose (`bool`, *optional*, defaults to `False`):
Whether or not to provide debugging statements as the function builds the device_map.
clean_result (`bool`, *optional*, defaults to `True`):
Whether or not to clean the resulting device_map by grouping all submodules that go on the same device together.
"""
# Get default / clean up max_memory
max_memory = get_max_memory(max_memory)
Expand Down Expand Up @@ -985,7 +988,7 @@ def infer_auto_device_map(
"The model weights are not tied. Please use the `tie_weights` method before using the `infer_auto_device` function."
)

device_map = {}
device_map = OrderedDict()
current_device = 0
current_memory_used = 0

Expand Down Expand Up @@ -1153,7 +1156,9 @@ def infer_auto_device_map(
current_memory_used += module_size
device_map[name] = devices[current_device]

return clean_device_map(device_map)
if clean_result:
device_map = clean_device_map(device_map)
return device_map


def check_device_map(model: nn.Module, device_map: Dict[str, Union[int, str, torch.device]]):
Expand Down

0 comments on commit f86876d

Please sign in to comment.