Mirror of https://github.com/AUTOMATIC1111/stable-diffusion-webui.git (synced 2024-12-29 02:45:05 +08:00)
update commandline args for batch prompts to parse strings properly
This commit is contained in:
parent 58e62312c3
commit 7d6042b908
@@ -1,7 +1,9 @@
+import copy
 import math
 import os
 import sys
 import traceback
+import shlex
 
 import modules.scripts as scripts
 import gradio as gr
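
The newly imported shlex module is what lets quoted prompt text survive splitting; a quick standard-library comparison (illustrative only, not part of the commit):

import shlex

line = '--prompt "a photo of a cat" --steps 20'

# Naive whitespace splitting tears the quoted prompt apart.
print(line.split())
# ['--prompt', '"a', 'photo', 'of', 'a', 'cat"', '--steps', '20']

# shlex.split honors shell-style quoting and keeps the prompt as one token.
print(shlex.split(line))
# ['--prompt', 'a photo of a cat', '--steps', '20']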
@@ -10,6 +12,75 @@ from modules.processing import Processed, process_images
 from PIL import Image
 from modules.shared import opts, cmd_opts, state
 
+
+def process_string_tag(tag):
+    return tag
+
+
+def process_int_tag(tag):
+    return int(tag)
+
+
+def process_float_tag(tag):
+    return float(tag)
+
+
+def process_boolean_tag(tag):
+    return True if (tag == "true") else False
+
+
+prompt_tags = {
+    "sd_model": None,
+    "outpath_samples": process_string_tag,
+    "outpath_grids": process_string_tag,
+    "prompt_for_display": process_string_tag,
+    "prompt": process_string_tag,
+    "negative_prompt": process_string_tag,
+    "styles": process_string_tag,
+    "seed": process_int_tag,
+    "subseed_strength": process_float_tag,
+    "subseed": process_int_tag,
+    "seed_resize_from_h": process_int_tag,
+    "seed_resize_from_w": process_int_tag,
+    "sampler_index": process_int_tag,
+    "batch_size": process_int_tag,
+    "n_iter": process_int_tag,
+    "steps": process_int_tag,
+    "cfg_scale": process_float_tag,
+    "width": process_int_tag,
+    "height": process_int_tag,
+    "restore_faces": process_boolean_tag,
+    "tiling": process_boolean_tag,
+    "do_not_save_samples": process_boolean_tag,
+    "do_not_save_grid": process_boolean_tag
+}
+
+
+def cmdargs(line):
+    args = shlex.split(line)
+    pos = 0
+    res = {}
+
+    while pos < len(args):
+        arg = args[pos]
+
+        assert arg.startswith("--"), f'must start with "--": {arg}'
+        tag = arg[2:]
+
+        func = prompt_tags.get(tag, None)
+        assert func, f'unknown commandline option: {arg}'
+
+        assert pos+1 < len(args), f'missing argument for command line option {arg}'
+
+        val = args[pos+1]
+
+        res[tag] = func(val)
+
+        pos += 2
+
+    return res
+
+
 class Script(scripts.Script):
     def title(self):
         return "Prompts from file or textbox"
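
For context, a minimal standalone sketch of the parsing path introduced above; parse_line and the reduced prompt_tags subset here are illustrative stand-ins for the full cmdargs() and conversion table added in this hunk:

import shlex

# Reduced, illustrative subset of the prompt_tags table added above.
prompt_tags = {
    "prompt": str,
    "steps": int,
    "cfg_scale": float,
    "restore_faces": lambda v: v == "true",
}

def parse_line(line):
    # Same walk as cmdargs(): shlex.split yields "--tag", "value" pairs.
    args = shlex.split(line)
    res = {}
    pos = 0
    while pos < len(args):
        tag = args[pos][2:]          # strip the leading "--"
        res[tag] = prompt_tags[tag](args[pos + 1])
        pos += 2
    return res

print(parse_line('--prompt "a photo of a cat" --steps 20 --cfg_scale 7.5 --restore_faces true'))
# {'prompt': 'a photo of a cat', 'steps': 20, 'cfg_scale': 7.5, 'restore_faces': True}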
@@ -28,87 +99,52 @@ class Script(scripts.Script):
         checkbox_txt.change(fn=lambda x: [gr.File.update(visible = not x), gr.TextArea.update(visible = x)], inputs=[checkbox_txt], outputs=[file, prompt_txt])
         return [checkbox_txt, file, prompt_txt]
 
-    def process_string_tag(self, tag):
-        return tag[1:-2]
-
-    def process_int_tag(self, tag):
-        return int(tag)
-
-    def process_float_tag(self, tag):
-        return float(tag)
-
-    def process_boolean_tag(self, tag):
-        return True if (tag == "true") else False
-
-    prompt_tags = {
-        "sd_model": None,
-        "outpath_samples": process_string_tag,
-        "outpath_grids": process_string_tag,
-        "prompt_for_display": process_string_tag,
-        "prompt": process_string_tag,
-        "negative_prompt": process_string_tag,
-        "styles": process_string_tag,
-        "seed": process_int_tag,
-        "subseed_strength": process_float_tag,
-        "subseed": process_int_tag,
-        "seed_resize_from_h": process_int_tag,
-        "seed_resize_from_w": process_int_tag,
-        "sampler_index": process_int_tag,
-        "batch_size": process_int_tag,
-        "n_iter": process_int_tag,
-        "steps": process_int_tag,
-        "cfg_scale": process_float_tag,
-        "width": process_int_tag,
-        "height": process_int_tag,
-        "restore_faces": process_boolean_tag,
-        "tiling": process_boolean_tag,
-        "do_not_save_samples": process_boolean_tag,
-        "do_not_save_grid": process_boolean_tag
-    }
-
     def on_show(self, checkbox_txt, file, prompt_txt):
         return [ gr.Checkbox.update(visible = True), gr.File.update(visible = not checkbox_txt), gr.TextArea.update(visible = checkbox_txt) ]
 
     def run(self, p, checkbox_txt, data: bytes, prompt_txt: str):
-        if (checkbox_txt):
+        if checkbox_txt:
             lines = [x.strip() for x in prompt_txt.splitlines()]
         else:
             lines = [x.strip() for x in data.decode('utf8', errors='ignore').split("\n")]
         lines = [x for x in lines if len(x) > 0]
 
-        img_count = len(lines) * p.n_iter
-        batch_count = math.ceil(img_count / p.batch_size)
-        loop_count = math.ceil(batch_count / p.n_iter)
-        # These numbers no longer accurately reflect the total images and number of batches
-        print(f"Will process {img_count} images in {batch_count} batches.")
-
         p.do_not_save_grid = True
 
-        state.job_count = batch_count
+        job_count = 0
+        jobs = []
+
+        for line in lines:
+            if "--" in line:
+                try:
+                    args = cmdargs(line)
+                except Exception:
+                    print(f"Error parsing line {line} as commandline:", file=sys.stderr)
+                    print(traceback.format_exc(), file=sys.stderr)
+                    args = {"prompt": line}
+            else:
+                args = {"prompt": line}
+
+            n_iter = args.get("n_iter", 1)
+            if n_iter != 1:
+                job_count += n_iter
+            else:
+                job_count += 1
+
+            jobs.append(args)
+
+        print(f"Will process {len(lines)} lines in {job_count} jobs.")
+        state.job_count = job_count
 
         images = []
-        for loop_no in range(loop_count):
-            state.job = f"{loop_no + 1} out of {loop_count}"
-            # The following line may need revising to remove batch_size references
-            current_line = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter
-
+        for n, args in enumerate(jobs):
+            state.job = f"{state.job_no + 1} out of {state.job_count}"
+
-            # If the current line has no tags, parse the whole line as a prompt, else parse each tag
-            if(current_line[0][:2] != "--"):
-                p.prompt = current_line
-            else:
-                tokenized_line = current_line[0].split("--")
-
+            copy_p = copy.copy(p)
+            for k, v in args.items():
+                setattr(copy_p, k, v)
+
-                for tag in tokenized_line:
-                    tag_split = tag.split(" ", 1)
-                    if(tag_split[0] != ''):
-                        value_func = self.prompt_tags.get(tag_split[0], None)
-                        if(value_func != None):
-                            value = value_func(self, tag_split[1])
-                            setattr(p, tag_split[0], value)
-                        else:
-                            print(f"Unknown option \"{tag_split}\"")
-
-            proc = process_images(p)
+            proc = process_images(copy_p)
             images += proc.images
 
         return Processed(p, images, p.seed, "")
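
To illustrate the reworked run() flow: each non-empty line becomes an args dict, job_count is accumulated from n_iter, and overrides are applied to a copy of the processing object instead of mutating p. A sketch under the assumption that SimpleNamespace can stand in for the real StableDiffusionProcessing object:

import copy
from types import SimpleNamespace

# Hypothetical stand-in for the processing object "p"; the real
# StableDiffusionProcessing object has many more fields.
p = SimpleNamespace(prompt="", steps=20, n_iter=1)

# args dicts as cmdargs() or the plain-prompt fallback would produce them,
# one per non-empty input line (values here are made up for illustration).
jobs = [
    {"prompt": "a painting of a lighthouse"},
    {"prompt": "an astronaut riding a horse", "steps": 30, "n_iter": 2},
]

# Job accounting mirrors the new run(): each line contributes n_iter jobs (default 1).
job_count = sum(args.get("n_iter", 1) for args in jobs)
print(f"Will process {len(jobs)} lines in {job_count} jobs.")  # 2 lines, 3 jobs

for args in jobs:
    copy_p = copy.copy(p)      # per-line copy, so overrides never leak into later lines
    for k, v in args.items():
        setattr(copy_p, k, v)
    print(vars(copy_p))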