Mirror of https://github.com/AUTOMATIC1111/stable-diffusion-webui.git
Merge pull request #14170 from MrCheeze/sd-turbo
Add support for SD 2.1 Turbo
Commit 4125552752
modules/sd_hijack.py
@@ -38,9 +38,6 @@ ldm.models.diffusion.ddpm.print = shared.ldm_print
 optimizers = []
 current_optimizer: sd_hijack_optimizations.SdOptimization = None

-ldm_original_forward = patches.patch(__file__, ldm.modules.diffusionmodules.openaimodel.UNetModel, "forward", sd_unet.UNetModel_forward)
-sgm_original_forward = patches.patch(__file__, sgm.modules.diffusionmodules.openaimodel.UNetModel, "forward", sd_unet.UNetModel_forward)
-

 def list_optimizers():
     new_optimizers = script_callbacks.list_optimizers_callback()
@@ -258,6 +255,9 @@ class StableDiffusionModelHijack:
         import modules.models.diffusion.ddpm_edit

+        ldm_original_forward = patches.patch(__file__, ldm.modules.diffusionmodules.openaimodel.UNetModel, "forward", sd_unet.UNetModel_forward)
+        sgm_original_forward = patches.patch(__file__, sgm.modules.diffusionmodules.openaimodel.UNetModel, "forward", sd_unet.UNetModel_forward)
+
         if isinstance(m, ldm.models.diffusion.ddpm.LatentDiffusion):
             sd_unet.original_forward = ldm_original_forward
         elif isinstance(m, modules.models.diffusion.ddpm_edit.LatentDiffusion):
@@ -303,6 +303,9 @@ class StableDiffusionModelHijack:
         self.layers = None
         self.clip = None

+        patches.undo(__file__, ldm.modules.diffusionmodules.openaimodel.UNetModel, "forward")
+        patches.undo(__file__, sgm.modules.diffusionmodules.openaimodel.UNetModel, "forward")
+
         sd_unet.original_forward = None
modules/sd_models.py
@@ -230,15 +230,19 @@ def select_checkpoint():
     return checkpoint_info


-checkpoint_dict_replacements = {
+checkpoint_dict_replacements_sd1 = {
     'cond_stage_model.transformer.embeddings.': 'cond_stage_model.transformer.text_model.embeddings.',
     'cond_stage_model.transformer.encoder.': 'cond_stage_model.transformer.text_model.encoder.',
     'cond_stage_model.transformer.final_layer_norm.': 'cond_stage_model.transformer.text_model.final_layer_norm.',
 }

+checkpoint_dict_replacements_sd2_turbo = { # Converts SD 2.1 Turbo from SGM to LDM format.
+    'conditioner.embedders.0.': 'cond_stage_model.',
+}
+

-def transform_checkpoint_dict_key(k):
-    for text, replacement in checkpoint_dict_replacements.items():
+def transform_checkpoint_dict_key(k, replacements):
+    for text, replacement in replacements.items():
         if k.startswith(text):
             k = replacement + k[len(text):]
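Each table is a set of key-prefix rewrites, and transform_checkpoint_dict_key now takes the table as a parameter instead of hard-coding the SD1 one. A quick usage sketch, assuming the function returns the rewritten key as in the full source:

def transform_checkpoint_dict_key(k, replacements):
    # Rewrite any matching prefix and return the new key.
    for text, replacement in replacements.items():
        if k.startswith(text):
            k = replacement + k[len(text):]
    return k

# An SGM-format SD 2.1 Turbo key is remapped to the LDM layout:
print(transform_checkpoint_dict_key(
    'conditioner.embedders.0.model.ln_final.weight',
    {'conditioner.embedders.0.': 'cond_stage_model.'},
))
# cond_stage_model.model.ln_final.weight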
@@ -249,9 +253,14 @@ def get_state_dict_from_checkpoint(pl_sd):
     pl_sd = pl_sd.pop("state_dict", pl_sd)
     pl_sd.pop("state_dict", None)

+    is_sd2_turbo = 'conditioner.embedders.0.model.ln_final.weight' in pl_sd and pl_sd['conditioner.embedders.0.model.ln_final.weight'].size()[0] == 1024
+
     sd = {}
     for k, v in pl_sd.items():
-        new_key = transform_checkpoint_dict_key(k)
+        if is_sd2_turbo:
+            new_key = transform_checkpoint_dict_key(k, checkpoint_dict_replacements_sd2_turbo)
+        else:
+            new_key = transform_checkpoint_dict_key(k, checkpoint_dict_replacements_sd1)

         if new_key is not None:
             sd[new_key] = v
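The detection leans on a single tensor: SGM-format checkpoints keep the text encoder under the conditioner.embedders.0. prefix, and an ln_final width of 1024 matches SD 2.1's OpenCLIP text encoder rather than the wider one found in SDXL-family checkpoints that use the same key layout. A self-contained sketch of the check against a toy state dict (the zero tensor is a stand-in for a real checkpoint weight):

import torch

# Toy state dict shaped like an SD 2.1 Turbo (SGM-format) checkpoint;
# ln_final.weight is a LayerNorm weight, so it is 1-D with the model width.
pl_sd = {'conditioner.embedders.0.model.ln_final.weight': torch.zeros(1024)}

key = 'conditioner.embedders.0.model.ln_final.weight'
is_sd2_turbo = key in pl_sd and pl_sd[key].size()[0] == 1024
print(is_sd2_turbo)  # True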