Skip to content

Commit 1523f5c

Browse files
FIX DeLoRA adapter deletion issue
Currently, adapter deletion raises an error with DeLoRA. The reason is that the dropout module is called module_dropout, i.e. the prefix "delora" is not part of the name, which is required for adapter deletion to work properly. This PR renames the attribute to delora_dropout. The reason this was not caught earlier is that the corresponding test had not been updated to include DeLoRA; the test is therefore updated as well. Note: I came across this issue in huggingface#2846 but wanted to fix it in a separate PR, as huggingface#2846 is probably not going to make it into the next PEFT release.
1 parent 2813b9c commit 1523f5c

File tree

2 files changed

+5
-4
lines changed

2 files changed

+5
-4
lines changed

src/peft/tuners/delora/layer.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -34,14 +34,14 @@ class DeloraLayer(BaseTunerLayer):
3434
# All names of other parameters that may contain adapter-related parameters
3535
other_param_names = (
3636
"r",
37-
"module_dropout",
37+
"delora_dropout",
3838
"delora_w_norm",
3939
)
4040

4141
def __init__(self, base_layer: nn.Module, **kwargs) -> None:
4242
self.base_layer = base_layer
4343
self.r = {}
44-
self.module_dropout = nn.ModuleDict({})
44+
self.delora_dropout = nn.ModuleDict({})
4545
self.delora_A = nn.ParameterDict({})
4646
self.delora_B = nn.ParameterDict({})
4747
self.delora_lambda = nn.ParameterDict({})
@@ -113,7 +113,7 @@ def update_layer(
113113
module_dropout_layer = nn.Dropout(p=module_dropout)
114114
else:
115115
module_dropout_layer = nn.Identity()
116-
self.module_dropout.update(nn.ModuleDict({adapter_name: module_dropout_layer}))
116+
self.delora_dropout.update(nn.ModuleDict({adapter_name: module_dropout_layer}))
117117

118118
# Initialize weights
119119
self.reset_delora_parameters(adapter_name, init_weights, delora_lambda)
@@ -241,7 +241,7 @@ def forward(self, x: torch.Tensor, *args: Any, **kwargs: Any) -> torch.Tensor:
241241
if adapter not in self.delora_A:
242242
continue
243243

244-
x_d = self.module_dropout[adapter](x)
244+
x_d = self.delora_dropout[adapter](x)
245245

246246
# Decomposed delta calculation
247247
# 1. (x * w_norm) @ A.T

tests/testing_common.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1434,6 +1434,7 @@ def _test_delete_adapter(self, model_id, config_cls, config_kwargs):
14341434
PeftType.VBLORA,
14351435
PeftType.BONE,
14361436
PeftType.MISS,
1437+
PeftType.DELORA,
14371438
]
14381439
# IA3 does not support deleting adapters yet, but it just needs to be added
14391440
# AdaLora does not support multiple adapters

0 commit comments

Comments
 (0)