mirror of
https://github.com/AUTOMATIC1111/stable-diffusion-webui.git
synced 2025-02-11 08:02:53 +08:00
Fix variable conversion code issue
This commit is contained in:
parent
d641af6a7d
commit
8c48ede135
@ -150,26 +150,26 @@ def run_modelmerger(primary_model_name, secondary_model_name, interp_method, int
|
|||||||
alpha = alpha * alpha * (3 - (2 * alpha))
|
alpha = alpha * alpha * (3 - (2 * alpha))
|
||||||
return theta0 + ((theta1 - theta0) * alpha)
|
return theta0 + ((theta1 - theta0) * alpha)
|
||||||
|
|
||||||
if os.path.exists(secondary_model_name):
|
|
||||||
secondary_model_filename = secondary_model_name
|
|
||||||
secondary_model_name = os.path.splitext(os.path.basename(secondary_model_name))[0]
|
|
||||||
else:
|
|
||||||
secondary_model_filename = 'models/' + secondary_model_name + '.ckpt'
|
|
||||||
|
|
||||||
if os.path.exists(primary_model_name):
|
if os.path.exists(primary_model_name):
|
||||||
primary_model_filename = primary_model_name
|
primary_model_filename = primary_model_name
|
||||||
primary_model_name = os.path.splitext(os.path.basename(primary_model_name))[0]
|
primary_model_name = os.path.splitext(os.path.basename(primary_model_name))[0]
|
||||||
else:
|
else:
|
||||||
primary_model_filename = 'models/' + primary_model_name + '.ckpt'
|
primary_model_filename = 'models/' + primary_model_name + '.ckpt'
|
||||||
|
|
||||||
print(f"Loading {secondary_model_filename}...")
|
if os.path.exists(secondary_model_name):
|
||||||
model_0 = torch.load(secondary_model_filename, map_location='cpu')
|
secondary_model_filename = secondary_model_name
|
||||||
|
secondary_model_name = os.path.splitext(os.path.basename(secondary_model_name))[0]
|
||||||
|
else:
|
||||||
|
secondary_model_filename = 'models/' + secondary_model_name + '.ckpt'
|
||||||
|
|
||||||
print(f"Loading {primary_model_filename}...")
|
print(f"Loading {primary_model_filename}...")
|
||||||
model_1 = torch.load(primary_model_filename, map_location='cpu')
|
primary_model = torch.load(primary_model_filename, map_location='cpu')
|
||||||
|
|
||||||
theta_0 = model_0['state_dict']
|
print(f"Loading {secondary_model_filename}...")
|
||||||
theta_1 = model_1['state_dict']
|
secondary_model = torch.load(secondary_model_filename, map_location='cpu')
|
||||||
|
|
||||||
|
theta_0 = primary_model['state_dict']
|
||||||
|
theta_1 = secondary_model['state_dict']
|
||||||
|
|
||||||
theta_funcs = {
|
theta_funcs = {
|
||||||
"Weighted Sum": weighted_sum,
|
"Weighted Sum": weighted_sum,
|
||||||
@ -180,7 +180,7 @@ def run_modelmerger(primary_model_name, secondary_model_name, interp_method, int
|
|||||||
print(f"Merging...")
|
print(f"Merging...")
|
||||||
for key in tqdm.tqdm(theta_0.keys()):
|
for key in tqdm.tqdm(theta_0.keys()):
|
||||||
if 'model' in key and key in theta_1:
|
if 'model' in key and key in theta_1:
|
||||||
theta_0[key] = theta_func(theta_0[key], theta_1[key], interp_amount)
|
theta_0[key] = theta_func(theta_0[key], theta_1[key], (float(1.0) - interp_amount)) # Need to reverse the interp_amount to match the desired mix ratio in the merged checkpoint
|
||||||
|
|
||||||
for key in theta_1.keys():
|
for key in theta_1.keys():
|
||||||
if 'model' in key and key not in theta_0:
|
if 'model' in key and key not in theta_0:
|
||||||
@ -188,7 +188,7 @@ def run_modelmerger(primary_model_name, secondary_model_name, interp_method, int
|
|||||||
|
|
||||||
output_modelname = 'models/' + primary_model_name + '_' + str(interp_amount) + '-' + secondary_model_name + '_' + str(float(1.0) - interp_amount) + '-' + interp_method.replace(" ", "_") + '-merged.ckpt'
|
output_modelname = 'models/' + primary_model_name + '_' + str(interp_amount) + '-' + secondary_model_name + '_' + str(float(1.0) - interp_amount) + '-' + interp_method.replace(" ", "_") + '-merged.ckpt'
|
||||||
print(f"Saving to {output_modelname}...")
|
print(f"Saving to {output_modelname}...")
|
||||||
torch.save(model_0, output_modelname)
|
torch.save(primary_model, output_modelname)
|
||||||
|
|
||||||
print(f"Checkpoint saved.")
|
print(f"Checkpoint saved.")
|
||||||
return "Checkpoint saved to " + output_modelname
|
return "Checkpoint saved to " + output_modelname
|
||||||
|
Loading…
Reference in New Issue
Block a user