Merged
Changes from 6 commits
25 changes: 25 additions & 0 deletions QEfficient/finetune/experimental/core/optimizer.py
@@ -4,3 +4,28 @@
# SPDX-License-Identifier: BSD-3-Clause
#
# -----------------------------------------------------------------------------

"""
Optimizer components for the training system.
"""

import torch.optim as optim

from QEfficient.finetune.experimental.core.component_registry import registry

registry.optimizer("Adam")(optim.Adam)
registry.optimizer("AdamW")(optim.AdamW)
registry.optimizer("SGD")(optim.SGD)


def get_optimizer(opt_config):
    """
    Create an optimizer class and its keyword arguments from a config.

    Args:
        opt_config: Dictionary containing the optimizer configuration. Must
            include "optimizer_name"; all remaining keys are passed through
            as constructor kwargs.

    Returns:
        Tuple of the optimizer class and its kwargs.
    """
    opt_config = dict(opt_config)  # work on a copy so the caller's config is not mutated
    opt_name = opt_config.pop("optimizer_name")
    opt_cls = registry.get_optimizer(opt_name)
    if "lr" in opt_config:
        # lr may arrive as a string (e.g. "1e-4" from a parsed YAML config); normalize it.
        opt_config["lr"] = float(opt_config["lr"])
    return opt_cls, opt_config
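
For context (not part of this diff), a minimal usage sketch of get_optimizer: the returned (class, kwargs) tuple matches the shape that, e.g., Hugging Face Trainer's optimizer_cls_and_kwargs argument accepts, as the variable name suggests, though that wiring is outside this PR. The config values below are illustrative only.

import torch.nn as nn

from QEfficient.finetune.experimental.core.optimizer import get_optimizer

model = nn.Linear(10, 1)

# lr is a string here, as it might arrive from a parsed YAML config;
# get_optimizer coerces it to float before returning the kwargs.
opt_config = {"optimizer_name": "AdamW", "lr": "1e-4", "weight_decay": 0.01}

opt_cls, opt_kwargs = get_optimizer(opt_config)
optimizer = opt_cls(model.parameters(), **opt_kwargs)  # AdamW with lr=1e-4, weight_decay=0.01

The string lr in this sketch is presumably why get_optimizer applies float() to that key.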
93 changes: 93 additions & 0 deletions QEfficient/finetune/experimental/tests/test_optimizer.py
@@ -0,0 +1,93 @@
# -----------------------------------------------------------------------------
#
# Copyright (c) Qualcomm Technologies, Inc. and/or its subsidiaries.
# SPDX-License-Identifier: BSD-3-Clause
#
# -----------------------------------------------------------------------------

import pytest
import torch.nn as nn
import torch.optim as optim

from QEfficient.finetune.experimental.core.component_registry import registry
from QEfficient.finetune.experimental.core.optimizer import get_optimizer

OPTIMIZER_CONFIGS = {
    "Adam": {
        "optimizer_name": "Adam",
        "opt_cls": optim.Adam,
        "lr": 1e-4,
        "weight_decay": 0.01,
        "betas": (0.9, 0.999),
        "eps": 1e-8,
        "amsgrad": False,
    },
    "AdamW": {
        "optimizer_name": "AdamW",
        "opt_cls": optim.AdamW,
        "lr": 1e-4,
        "weight_decay": 0.01,
        "betas": (0.9, 0.999),
        "eps": 1e-8,
        "amsgrad": False,
    },
    "SGD": {
        "optimizer_name": "SGD",
        "opt_cls": optim.SGD,
        "lr": 1e-4,
        "momentum": 0.9,
        "weight_decay": 0.01,
        "dampening": 0.0,
        "nesterov": False,
    },
    # RMSprop is deliberately absent from optimizer.py's registrations, so this
    # entry exercises the unknown-optimizer error path.
    "RMSprop": {
        "optimizer_name": "RMSprop",
        "opt_cls": optim.RMSprop,
    },
}

# Maps a registry name to the class to register; test_registered_optimizer
# unpacks these items directly as (opt_name, opt_cls).
REGISTRY_CONFIG = {
    "RMSprop": optim.RMSprop,
}


@pytest.fixture
def dummy_model():
    return nn.Sequential(
        nn.Linear(10, 5),
        nn.ReLU(),
        nn.Linear(5, 1),
    )


@pytest.mark.parametrize("opt_name", OPTIMIZER_CONFIGS.keys())
def test_optimizers(opt_name, dummy_model):
    """Test that all registered optimizers can be created with their configs."""
    config = dict(OPTIMIZER_CONFIGS[opt_name])  # copy so the shared config dict is not mutated
    config.pop("opt_cls")
    try:
        optimizer_class_and_kwargs = get_optimizer(config)
        assert optimizer_class_and_kwargs is not None
    except ValueError as e:
        # RMSprop is not registered in optimizer.py, so it should hit this path.
        assert "Unknown optimizer" in str(e)
        return
    optimizer_class = optimizer_class_and_kwargs[0]
    opt_inst = optimizer_class(dummy_model.parameters(), **optimizer_class_and_kwargs[1])
    assert isinstance(opt_inst, optim.Optimizer)
    assert len(opt_inst.param_groups) == 1

    for key in ["lr", "weight_decay", "betas", "eps", "momentum", "dampening", "nesterov", "amsgrad"]:
        if key in config:
            assert opt_inst.param_groups[0][key] == config[key], f"{key} mismatch"
Contributor: Good cleanup



@pytest.mark.parametrize("opt_name, opt_cls", REGISTRY_CONFIG.items())
def test_registered_optimizer(opt_name, opt_cls):
    """Test that the optimizer is registered correctly."""
    registry.optimizer(opt_name)(opt_cls)
    optimizer_class = registry.get_optimizer(opt_name)
    assert optimizer_class is not None
    assert optimizer_class == opt_cls
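
The component_registry module itself is not part of this diff. For orientation, here is a minimal sketch of the registry contract these files assume; the class name ComponentRegistry, the internal dict, and the exact error message are assumptions, though the tests above do expect registry.get_optimizer to raise a ValueError containing "Unknown optimizer".

# Hypothetical sketch of the registry contract assumed by optimizer.py and the
# tests; the real implementation lives in
# QEfficient/finetune/experimental/core/component_registry.py.
class ComponentRegistry:
    def __init__(self):
        self._optimizers = {}

    def optimizer(self, name):
        # registry.optimizer("Adam")(optim.Adam) registers a class under a name;
        # the same call also works as a decorator on custom optimizer classes.
        def register(cls):
            self._optimizers[name] = cls
            return cls

        return register

    def get_optimizer(self, name):
        if name not in self._optimizers:
            raise ValueError(f"Unknown optimizer: {name}")
        return self._optimizers[name]


registry = ComponentRegistry()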