fix for lazy backup
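Drop the eager RuntimeError checks for missing weight/bias backups in network_backup_weights(), and rename the now-unused current_names to _current_names.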

Won-Kyu Park 2024-10-04 00:07:23 +09:00
parent 412401becb
commit b783a967c0


@@ -426,7 +426,7 @@ def network_restore_weights_from_backup(self: Union[torch.nn.Conv2d, torch.nn.Li
 def network_backup_weights(self):
     network_layer_name = getattr(self, 'network_layer_name', None)
-    current_names = getattr(self, "network_current_names", ())
+    _current_names = getattr(self, "network_current_names", ())
     wanted_names = tuple((x.name, x.te_multiplier, x.unet_multiplier, x.dyn_dim) for x in loaded_networks)

     need_backup = False
@@ -443,9 +443,6 @@ def network_backup_weights(self):
     weights_backup = getattr(self, "network_weights_backup", None)
     if weights_backup is None and wanted_names != ():
-        if current_names != () and not allowed_layer_without_weight(self):
-            raise RuntimeError(f"{network_layer_name} - no backup weights found and current weights are not unchanged")
-
         if isinstance(self, torch.nn.MultiheadAttention):
             weights_backup = (store_weights_backup(self.in_proj_weight, self.org_dtype), store_weights_backup(self.out_proj.weight, self.org_dtype))
         else:
@@ -462,11 +459,6 @@ def network_backup_weights(self):
         else:
             bias_backup = None

-        # Unlike weight which always has value, some modules don't have bias.
-        # Only report if bias is not None and current bias are not unchanged.
-        if bias_backup is not None and current_names != ():
-            raise RuntimeError("no backup bias found and current bias are not unchanged")
-
     self.network_bias_backup = bias_backup
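For context, a minimal, self-contained sketch of the lazy-backup pattern this function supports: the original weight is copied on demand, the first time a layer is about to be patched, and restored later from that copy instead of reloading the checkpoint. The names backup_weights_lazily, restore_weights, and weights_backup below are illustrative assumptions, not taken from this repository.

import torch

def backup_weights_lazily(module: torch.nn.Module) -> None:
    # Hypothetical helper: keep a copy of the original weight the first
    # time this module is patched, so it can be restored later.
    if getattr(module, "weights_backup", None) is None:
        # detach().clone() so the backup tracks no gradients and is
        # unaffected by later in-place edits to module.weight
        module.weights_backup = module.weight.detach().clone()

def restore_weights(module: torch.nn.Module) -> None:
    # Copy the saved original back in place (no-op if no backup was taken).
    backup = getattr(module, "weights_backup", None)
    if backup is not None:
        with torch.no_grad():
            module.weight.copy_(backup)

# Usage: patch a layer in place, then undo the patch.
layer = torch.nn.Linear(4, 4)
backup_weights_lazily(layer)
with torch.no_grad():
    layer.weight.add_(0.1)  # stand-in for applying a network delta
restore_weights(layer)
assert torch.equal(layer.weight, layer.weights_backup)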