From b83d4740cde27ea2e9ba807cf7bbcf0bb5dd5154 Mon Sep 17 00:00:00 2001
From: Michael Benayoun
Date: Thu, 16 Jan 2025 12:02:16 +0100
Subject: [PATCH] Add lora bias when needed

---
 optimum/neuron/distributed/utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/optimum/neuron/distributed/utils.py b/optimum/neuron/distributed/utils.py
index ac888c89f..510b32934 100644
--- a/optimum/neuron/distributed/utils.py
+++ b/optimum/neuron/distributed/utils.py
@@ -1078,6 +1078,7 @@ def _peft_tuner_linear_to_parallel_linear(
         config.init_lora_weights,
         config.use_rslora,
         config.use_dora,
+        config.lora_bias,
     )
     if axis == "row":
         layer_to_parallelize = parent.lora_A[adapter_name]
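
For context, a minimal sketch of the adapter configuration whose flag this patch now forwards
to the parallelized layer. This is an illustration, not part of the patch, and it assumes a peft
release that exposes LoraConfig.lora_bias (the values of r, lora_alpha, and target_modules below
are purely illustrative):

    from peft import LoraConfig

    # lora_bias adds a trainable bias term to the LoRA update; the patch above
    # propagates this flag so the parallel linear layer is rebuilt with the
    # same setting as the original PEFT tuner layer.
    config = LoraConfig(
        r=8,                                  # illustrative rank
        lora_alpha=16,                        # illustrative scaling
        target_modules=["q_proj", "v_proj"],  # illustrative target modules
        lora_bias=True,                       # flag now forwarded by the patch
    )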