
fix borked merge, rename fields to better match what they do, change setting default to true for #13653

AUTOMATIC1111 · 1 year ago
commit 0aa7c53c0b

+ 1 - 1
modules/call_queue.py

@@ -78,7 +78,7 @@ def wrap_gradio_call(func, extra_outputs=None, add_stats=False):
 
         shared.state.skipped = False
         shared.state.interrupted = False
-        shared.state.interrupted_next = False
+        shared.state.stopping_generation = False
         shared.state.job_count = 0
 
         if not add_stats:
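
Context, not part of the diff: in wrap_gradio_call these flags are cleared after the wrapped function has run, so a stop request left over from one run cannot leak into the next call. A simplified sketch of the surrounding structure (error handling and stats reporting omitted):

    from modules import shared

    def wrap_gradio_call(func, extra_outputs=None, add_stats=False):
        def f(*args, **kwargs):
            res = list(func(*args, **kwargs))          # run the actual job
            shared.state.skipped = False               # clear per-run flags so a stale
            shared.state.interrupted = False           # interrupt / stop-generation request
            shared.state.stopping_generation = False   # does not carry over into the next call
            shared.state.job_count = 0
            return tuple(res)
        return f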

+ 1 - 1
modules/img2img.py

@@ -51,7 +51,7 @@ def process_batch(p, input_dir, output_dir, inpaint_mask_dir, args, to_scale=Fal
         if state.skipped:
             state.skipped = False
 
-        if state.interrupted or state.interrupted_next:
+        if state.interrupted or state.stopping_generation:
             break
 
         try:

+ 1 - 1
modules/processing.py

@@ -865,7 +865,7 @@ def process_images_inner(p: StableDiffusionProcessing) -> Processed:
             if state.skipped:
                 state.skipped = False
 
-            if state.interrupted or state.interrupted_next:
+            if state.interrupted or state.stopping_generation:
                 break
 
             sd_models.reload_model_weights()  # model can be changed for example by refiner
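
Note for extension authors (a hypothetical sketch, not from this commit): code that polled the old interrupted_next field has to switch to stopping_generation. A getattr fallback keeps a script working on webui versions from before and after the rename:

    from modules import shared

    def generation_should_stop():
        state = shared.state
        # read the renamed flag, falling back to the pre-rename name on older versions
        stopping = getattr(state, "stopping_generation",
                           getattr(state, "interrupted_next", False))
        return state.interrupted or stopping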

+ 1 - 1
modules/shared_options.py

@@ -120,7 +120,6 @@ options_templates.update(options_section(('system', "System", "system"), {
     "disable_mmap_load_safetensors": OptionInfo(False, "Disable memmapping for loading .safetensors files.").info("fixes very slow loading speed in some cases"),
     "hide_ldm_prints": OptionInfo(True, "Prevent Stability-AI's ldm/sgm modules from printing noise to console."),
     "dump_stacks_on_signal": OptionInfo(False, "Print stack traces before exiting the program with ctrl+c."),
-    "interrupt_after_current": OptionInfo(False, "Interrupt generation after current image is finished on batch processing"),
 }))
 
 options_templates.update(options_section(('API', "API", "system"), {
@@ -286,6 +285,7 @@ options_templates.update(options_section(('ui_alternatives', "UI alternatives",
     "hires_fix_show_prompts": OptionInfo(False, "Hires fix: show hires prompt and negative prompt").needs_reload_ui(),
     "txt2img_settings_accordion": OptionInfo(False, "Settings in txt2img hidden under Accordion").needs_reload_ui(),
     "img2img_settings_accordion": OptionInfo(False, "Settings in img2img hidden under Accordion").needs_reload_ui(),
+    "interrupt_after_current": OptionInfo(True, "Don't Interrupt in the middle").info("when using Interrupt button, if generating more than one image, stop after the generation of an image has finished, instead of immediately"),
 }))
 
 options_templates.update(options_section(('ui', "User interface", "ui"), {
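
The option keeps its interrupt_after_current key but moves from the System section to UI alternatives, and its default flips from False to True. A sketch of how extension code might read it defensively (assumption: a getattr fallback to cover versions where the option does not exist):

    from modules import shared

    # defaults to True after this commit (it was False before);
    # fall back to False on versions that do not have the option at all
    defer_interrupt = getattr(shared.opts, "interrupt_after_current", False)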

+ 6 - 6
modules/shared_state.py

@@ -12,7 +12,7 @@ log = logging.getLogger(__name__)
 class State:
     skipped = False
     interrupted = False
-    interrupted_next = False
+    stopping_generation = False
     job = ""
     job_no = 0
     job_count = 0
@@ -80,9 +80,9 @@ class State:
         self.interrupted = True
         log.info("Received interrupt request")
 
-    def interrupt_next(self):
-        self.interrupted_next = True
-        log.info("Received interrupt request, interrupt after current job")
+    def stop_generating(self):
+        self.stopping_generation = True
+        log.info("Received stop generating request")
 
     def nextjob(self):
         if shared.opts.live_previews_enable and shared.opts.show_progress_every_n_steps == -1:
@@ -96,7 +96,7 @@ class State:
         obj = {
             "skipped": self.skipped,
             "interrupted": self.interrupted,
-            "interrupted_next": self.interrupted_next,
+            "stopping_generation": self.stopping_generation,
             "job": self.job,
             "job_count": self.job_count,
             "job_timestamp": self.job_timestamp,
@@ -120,7 +120,7 @@ class State:
         self.id_live_preview = 0
         self.skipped = False
         self.interrupted = False
-        self.interrupted_next = False
+        self.stopping_generation = False
         self.textinfo = None
         self.job = job
         devices.torch_gc()
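
After the rename there are two distinct ways for callers to cancel a run, and the new flag is also included in the serialized state dictionary shown above. A minimal usage sketch (hypothetical caller code, not part of this commit):

    from modules import shared

    shared.state.stop_generating()   # soft stop: let the image currently rendering finish,
                                     # then end the batch before the next item
    shared.state.interrupt()         # hard stop: abort immediately, mid-image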

+ 7 - 1
modules/ui_toprow.py

@@ -106,8 +106,14 @@ class Toprow:
                 outputs=[],
             )
 
+            def interrupt_function():
+                if shared.state.job_count > 1 and shared.opts.interrupt_after_current:
+                    shared.state.stop_generating()
+                else:
+                    shared.state.interrupt()
+
             self.interrupt.click(
-                fn=lambda: shared.state.interrupt(),
+                fn=interrupt_function,
                 inputs=[],
                 outputs=[],
             )
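
With this wiring the Interrupt button only defers the stop when more than one job is queued (job_count > 1) and the interrupt_after_current option is enabled; a single-image generation is still interrupted immediately, and turning the option off restores the old always-interrupt-mid-image behavior.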

+ 2 - 2
scripts/loopback.py

@@ -95,7 +95,7 @@ class Script(scripts.Script):
                 processed = processing.process_images(p)
 
                 # Generation cancelled.
-                if state.interrupted or state.interrupted_next:
+                if state.interrupted or state.stopping_generation:
                     break
 
                 if initial_seed is None:
@@ -122,7 +122,7 @@ class Script(scripts.Script):
 
             p.inpainting_fill = original_inpainting_fill
 
-            if state.interrupted or state.interrupted_next:
+            if state.interrupted or state.stopping_generation:
                 break
 
         if len(history) > 1:

+ 1 - 1
scripts/xyz_grid.py

@@ -696,7 +696,7 @@ class Script(scripts.Script):
         grid_infotext = [None] * (1 + len(zs))
 
         def cell(x, y, z, ix, iy, iz):
-            if shared.state.interrupted or state.interrupted_next:
+            if shared.state.interrupted or state.stopping_generation:
                 return Processed(p, [], p.seed, "")
 
             pc = copy(p)