
Commit 816bc42

Merge pull request #15816 from huchenlei/bias_backup
[Performance 5/6] Prevent unnecessary extra networks bias backup
2 parents: 371cb60 + b2ae449

File tree: 1 file changed, +6 −1 lines


extensions-builtin/Lora/networks.py (+6 −1)
@@ -388,13 +388,18 @@ def network_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn
         self.network_weights_backup = weights_backup
 
     bias_backup = getattr(self, "network_bias_backup", None)
-    if bias_backup is None:
+    if bias_backup is None and wanted_names != ():
         if isinstance(self, torch.nn.MultiheadAttention) and self.out_proj.bias is not None:
             bias_backup = self.out_proj.bias.to(devices.cpu, copy=True)
         elif getattr(self, 'bias', None) is not None:
             bias_backup = self.bias.to(devices.cpu, copy=True)
         else:
             bias_backup = None
+
+        # Unlike weight, which always has a value, some modules have no bias at all.
+        # Only raise if a bias exists and the current bias has already been changed.
+        if bias_backup is not None and current_names != ():
+            raise RuntimeError("no backup bias found and current bias are not unchanged")
         self.network_bias_backup = bias_backup
 
     if current_names != wanted_names:
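
To make the change concrete outside the webui codebase, here is a minimal, runnable sketch of the patched logic. The function name store_bias_backup, the use of torch.device("cpu") in place of the webui's devices.cpu helper, and the example network name are assumptions for illustration; only the guard conditions mirror the diff above.

from typing import Union

import torch


def store_bias_backup(module: Union[torch.nn.Linear, torch.nn.Conv2d, torch.nn.MultiheadAttention],
                      current_names: tuple, wanted_names: tuple) -> None:
    """Lazily back up a module's bias before extra networks modify it in place."""
    bias_backup = getattr(module, "network_bias_backup", None)
    # The added `wanted_names != ()` guard: when no networks are requested,
    # skip the CPU copy of the bias entirely -- this is the performance fix.
    if bias_backup is None and wanted_names != ():
        # MultiheadAttention keeps its output-projection bias on a submodule,
        # so there is no top-level `.bias` attribute to copy.
        if isinstance(module, torch.nn.MultiheadAttention) and module.out_proj.bias is not None:
            bias_backup = module.out_proj.bias.to(torch.device("cpu"), copy=True)
        elif getattr(module, "bias", None) is not None:
            bias_backup = module.bias.to(torch.device("cpu"), copy=True)
        else:
            bias_backup = None

        # If a bias exists but networks are already applied (current_names != ()),
        # the tensor we just copied is already modified and would be a corrupted
        # "backup"; failing loudly beats silently restoring the wrong values.
        if bias_backup is not None and current_names != ():
            raise RuntimeError("no backup bias found and current bias are not unchanged")
        module.network_bias_backup = bias_backup


# No networks requested: the backup (and the attribute write) is skipped.
layer = torch.nn.Linear(4, 4)
store_bias_backup(layer, current_names=(), wanted_names=())
assert getattr(layer, "network_bias_backup", None) is None

# One network requested, none applied yet: the pristine bias is copied to CPU.
store_bias_backup(layer, current_names=(), wanted_names=(("hypothetical_lora", 1.0),))
assert torch.equal(layer.network_bias_backup, layer.bias.detach().cpu())

The key saving is that before this commit the backup branch ran whenever network_bias_backup was unset, even with no networks selected; with the extra wanted_names != () condition the per-module CPU copy only happens when a network is actually about to modify the bias.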
