add missing infotext entry for the pad cond/uncond option

AUTOMATIC
commit 4bd490c28d
2 changed files with 11 additions and 1 deletion
  1. +1 -0   modules/generation_parameters_copypaste.py
  2. +10 -1  modules/sd_samplers_kdiffusion.py
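
The webui stores generation parameters as a comma-separated "infotext" string embedded in output images. Before this commit, the pad cond/uncond option could affect results without being recorded (note the "# TODO add infotext entry" removed below). After it, "Pad conds: True" is written whenever padding actually took place, and the new mapping entry lets the paste feature restore the pad_cond_uncond setting from that key. An illustrative infotext line (all surrounding values here are made up):

    Steps: 20, Sampler: Euler a, CFG scale: 7, Seed: 1234, NGMS: 0.05, Pad conds: True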

+ 1 - 0
modules/generation_parameters_copypaste.py

@@ -357,6 +357,7 @@ infotext_to_setting_name_mapping = [
     ('Token merging ratio hr', 'token_merging_ratio_hr'),
     ('RNG', 'randn_source'),
     ('NGMS', 's_min_uncond'),
+    ('Pad conds', 'pad_cond_uncond'),
 ]
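
For context, a minimal sketch of how an (infotext key, setting name) mapping like the one above can drive paste-back of settings. This is an illustration only, not the webui's actual paste code; apply_setting, apply_infotext_settings, and the settings dict are hypothetical stand-ins:

    infotext_to_setting_name_mapping = [
        ('NGMS', 's_min_uncond'),
        ('Pad conds', 'pad_cond_uncond'),
    ]

    settings = {}

    def apply_setting(name, value):
        # Hypothetical stand-in for the webui's real settings store.
        settings[name] = value

    def apply_infotext_settings(params):
        # params: infotext fields already parsed into a dict,
        # e.g. {'Pad conds': 'True'}.
        for infotext_key, setting_name in infotext_to_setting_name_mapping:
            if infotext_key in params:
                apply_setting(setting_name, params[infotext_key])

    apply_infotext_settings({'Pad conds': 'True'})
    assert settings == {'pad_cond_uncond': 'True'}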

+ 10 - 1
modules/sd_samplers_kdiffusion.py

@@ -69,6 +69,7 @@ class CFGDenoiser(torch.nn.Module):
         self.init_latent = None
         self.step = 0
         self.image_cfg_scale = None
+        self.padded_cond_uncond = False

     def combine_denoised(self, x_out, conds_list, uncond, cond_scale):
         denoised_uncond = x_out[-uncond.shape[0]:]
@@ -133,15 +134,17 @@ class CFGDenoiser(torch.nn.Module):
             x_in = x_in[:-batch_size]
             sigma_in = sigma_in[:-batch_size]

-        # TODO add infotext entry
+        self.padded_cond_uncond = False
         if shared.opts.pad_cond_uncond and tensor.shape[1] != uncond.shape[1]:
             empty = shared.sd_model.cond_stage_model_empty_prompt
             num_repeats = (tensor.shape[1] - uncond.shape[1]) // empty.shape[1]

             if num_repeats < 0:
                 tensor = torch.cat([tensor, empty.repeat((tensor.shape[0], -num_repeats, 1))], axis=1)
+                self.padded_cond_uncond = True
             elif num_repeats > 0:
                 uncond = torch.cat([uncond, empty.repeat((uncond.shape[0], num_repeats, 1))], axis=1)
+                self.padded_cond_uncond = True

         if tensor.shape[1] == uncond.shape[1] or skip_uncond:
             if is_edit_model:
@@ -405,6 +408,9 @@ class KDiffusionSampler:

         samples = self.launch_sampling(t_enc + 1, lambda: self.func(self.model_wrap_cfg, xi, extra_args=extra_args, disable=False, callback=self.callback_state, **extra_params_kwargs))

+        if self.model_wrap_cfg.padded_cond_uncond:
+            p.extra_generation_params["Pad conds"] = True
+
         return samples

     def sample(self, p, x, conditioning, unconditional_conditioning, steps=None, image_conditioning=None):
@@ -438,5 +444,8 @@ class KDiffusionSampler:
             's_min_uncond': self.s_min_uncond
         }, disable=False, callback=self.callback_state, **extra_params_kwargs))

+        if self.model_wrap_cfg.padded_cond_uncond:
+            p.extra_generation_params["Pad conds"] = True
+
         return samples
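
A note on what the padded_cond_uncond flag tracks: the cond and uncond text embeddings must have the same sequence length to be batched through the model in one call, so when the prompt and negative prompt differ in token-chunk count, the shorter embedding is extended with repeats of the empty-prompt embedding. A standalone sketch of that shape logic, with random tensors standing in for real CLIP embeddings (the 77-token chunk length, 768-dim width, and variable values are assumptions for illustration):

    import torch

    empty = torch.randn(1, 77, 768)    # empty-prompt embedding, one chunk
    tensor = torch.randn(2, 154, 768)  # cond: prompt spilled into two chunks
    uncond = torch.randn(2, 77, 768)   # uncond: a single chunk

    padded = False
    num_repeats = (tensor.shape[1] - uncond.shape[1]) // empty.shape[1]
    if num_repeats < 0:
        # cond is shorter: pad cond up to uncond's length
        tensor = torch.cat([tensor, empty.repeat((tensor.shape[0], -num_repeats, 1))], axis=1)
        padded = True
    elif num_repeats > 0:
        # uncond is shorter: pad uncond up to cond's length
        uncond = torch.cat([uncond, empty.repeat((uncond.shape[0], num_repeats, 1))], axis=1)
        padded = True

    assert tensor.shape[1] == uncond.shape[1]
    assert padded  # the condition under which "Pad conds: True" is recorded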