Merge pull request #13653 from antfu/feat/interrupted-end

Interrupt after current generation
AUTOMATIC1111 authored 2024-01-01 16:40:02 +03:00, committed by GitHub
commit 2a7ad70db5
8 changed files with 15 additions and 7 deletions
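
For context: state.interrupted requests an immediate stop, while the new state.interrupted_next is only consulted between jobs, so the image currently being generated is allowed to finish before the batch ends. A minimal sketch of that difference, assuming nothing beyond what the hunks below show; it is not webui code:

# Minimal sketch (not webui code) of the behaviour the new flag is meant to give:
# interrupted asks for an immediate stop, interrupted_next asks to stop only
# once the image currently being generated has finished.

class State:
    interrupted = False        # existing flag: stop as soon as possible
    interrupted_next = False   # new flag: stop after the current job

    def interrupt(self):
        self.interrupted = True

    def interrupt_next(self):
        self.interrupted_next = True


def generate_batch(state, n_iter):
    """Stand-in for the per-image loop in process_images_inner."""
    results = []
    for n in range(n_iter):
        # Both flags are checked once per iteration, so interrupted_next never
        # cuts an image short; it only prevents the next one from starting.
        if state.interrupted or state.interrupted_next:
            break
        results.append(f"image {n}")
        if n == 1:
            state.interrupt_next()   # as if the user clicked the softer interrupt here
    return results


print(generate_batch(State(), n_iter=4))   # -> ['image 0', 'image 1']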

View File

@@ -78,6 +78,7 @@ def wrap_gradio_call(func, extra_outputs=None, add_stats=False):
         shared.state.skipped = False
         shared.state.interrupted = False
+        shared.state.interrupted_next = False
         shared.state.job_count = 0
 
         if not add_stats:

View File

@@ -51,7 +51,7 @@ def process_batch(p, input_dir, output_dir, inpaint_mask_dir, args, to_scale=Fal
         if state.skipped:
             state.skipped = False
 
-        if state.interrupted:
+        if state.interrupted or state.interrupted_next:
             break
 
         try:

View File

@@ -865,7 +865,7 @@ def process_images_inner(p: StableDiffusionProcessing) -> Processed:
             if state.skipped:
                 state.skipped = False
 
-            if state.interrupted:
+            if state.interrupted or state.interrupted_next:
                 break
 
             sd_models.reload_model_weights()  # model can be changed for example by refiner

View File

@@ -120,6 +120,7 @@ options_templates.update(options_section(('system', "System", "system"), {
     "disable_mmap_load_safetensors": OptionInfo(False, "Disable memmapping for loading .safetensors files.").info("fixes very slow loading speed in some cases"),
     "hide_ldm_prints": OptionInfo(True, "Prevent Stability-AI's ldm/sgm modules from printing noise to console."),
     "dump_stacks_on_signal": OptionInfo(False, "Print stack traces before exiting the program with ctrl+c."),
+    "interrupt_after_current": OptionInfo(False, "Interrupt generation after current image is finished on batch processing"),
 }))
 
 options_templates.update(options_section(('API', "API", "system"), {
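
The hunk above only registers the checkbox; none of the hunks in this commit show how it is read. One plausible wiring, offered as an assumption rather than the shipped handler, would branch on the option when an interrupt is requested:

# Assumed wiring (not shown in this diff): pick the soft or hard interrupt
# depending on the new option. on_interrupt_pressed is a hypothetical handler.
from modules import shared

def on_interrupt_pressed():
    if shared.opts.interrupt_after_current and shared.state.job_count > 1:
        shared.state.interrupt_next()   # finish the current image, then stop
    else:
        shared.state.interrupt()        # stop as soon as possible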

View File

@@ -12,6 +12,7 @@ log = logging.getLogger(__name__)
 class State:
     skipped = False
     interrupted = False
+    interrupted_next = False
     job = ""
     job_no = 0
     job_count = 0
@@ -79,6 +80,10 @@ class State:
         self.interrupted = True
         log.info("Received interrupt request")
 
+    def interrupt_next(self):
+        self.interrupted_next = True
+        log.info("Received interrupt request, interrupt after current job")
+
     def nextjob(self):
         if shared.opts.live_previews_enable and shared.opts.show_progress_every_n_steps == -1:
             self.do_set_current_image()
@@ -91,6 +96,7 @@ class State:
         obj = {
             "skipped": self.skipped,
             "interrupted": self.interrupted,
+            "interrupted_next": self.interrupted_next,
             "job": self.job,
             "job_count": self.job_count,
             "job_timestamp": self.job_timestamp,
@@ -114,6 +120,7 @@ class State:
         self.id_live_preview = 0
         self.skipped = False
         self.interrupted = False
+        self.interrupted_next = False
         self.textinfo = None
         self.job = job
         devices.torch_gc()
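
Since the new flag is serialised alongside interrupted and cleared in the same places, code that polls generation status can tell a pending "stop after this image" apart from a hard interrupt. A usage sketch, assuming it runs inside the webui process and that the dictionary in the third hunk is built by State.dict(), as it is in the current webui source:

# Usage sketch, assuming the webui's modules.shared is importable; the printed
# strings are illustrative and not part of this PR.
from modules import shared

shared.state.interrupt_next()        # request a stop after the current image

status = shared.state.dict()
if status["interrupted_next"]:
    print("will stop once the current image is finished")
elif status["interrupted"]:
    print("stopping immediately")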

View File

@@ -177,7 +177,6 @@ def update_negative_prompt_token_counter(text, steps):
     return update_token_counter(text, steps, is_positive=False)
 
 
 def setup_progressbar(*args, **kwargs):
     pass

View File

@@ -95,7 +95,7 @@ class Script(scripts.Script):
                 processed = processing.process_images(p)
 
                 # Generation cancelled.
-                if state.interrupted:
+                if state.interrupted or state.interrupted_next:
                     break
 
                 if initial_seed is None:
@@ -122,8 +122,8 @@ class Script(scripts.Script):
             p.inpainting_fill = original_inpainting_fill
 
-            if state.interrupted:
-                break
+            if state.interrupted or state.interrupted_next:
+                break
 
             if len(history) > 1:
                 grid = images.image_grid(history, rows=1)
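
Scripts that drive their own loops, like loopback above, repeat the check because interrupt_next() sets only interrupted_next; a loop watching state.interrupted alone would start another pass after a soft interrupt. A sketch of the same pattern for a hypothetical custom script (run_passes and run_one_pass are made up; only the flag check mirrors the diff):

# Sketch of the pattern a custom script would follow; run_passes and
# run_one_pass are hypothetical, only the flag check mirrors the diff above.
from modules.shared import state

def run_passes(batch_count, run_one_pass):
    for i in range(batch_count):
        if state.interrupted or state.interrupted_next:
            break                # honour both the hard and the soft interrupt
        run_one_pass(i)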

View File

@@ -696,7 +696,7 @@ class Script(scripts.Script):
         grid_infotext = [None] * (1 + len(zs))
 
         def cell(x, y, z, ix, iy, iz):
-            if shared.state.interrupted:
+            if shared.state.interrupted or state.interrupted_next:
                 return Processed(p, [], p.seed, "")
 
             pc = copy(p)