sd_schedulers.py

import dataclasses

import torch

import k_diffusion


@dataclasses.dataclass
class Scheduler:
    """Metadata for a selectable noise-schedule generator."""
    name: str
    label: str
    function: any
    default_rho: float = -1
    need_inner_model: bool = False
    aliases: list = None


def uniform(n, sigma_min, sigma_max, inner_model, device):
    # Delegate to the model wrapper's own evenly spaced sigma schedule.
    return inner_model.get_sigmas(n)


def sgm_uniform(n, sigma_min, sigma_max, inner_model, device):
    # Space steps uniformly in t (timestep) rather than in sigma,
    # mirroring the SGM reference implementation's uniform spacing.
    start = inner_model.sigma_to_t(torch.tensor(sigma_max))
    end = inner_model.sigma_to_t(torch.tensor(sigma_min))
    sigs = [
        inner_model.t_to_sigma(ts)
        for ts in torch.linspace(start, end, n + 1)[:-1]
    ]
    sigs += [0.0]  # the final sigma is always zero
    return torch.FloatTensor(sigs).to(device)


schedulers = [
    Scheduler('automatic', 'Automatic', None),
    Scheduler('uniform', 'Uniform', uniform, need_inner_model=True),
    Scheduler('karras', 'Karras', k_diffusion.sampling.get_sigmas_karras, default_rho=7.0),
    Scheduler('exponential', 'Exponential', k_diffusion.sampling.get_sigmas_exponential),
    Scheduler('polyexponential', 'Polyexponential', k_diffusion.sampling.get_sigmas_polyexponential, default_rho=1.0),
    Scheduler('sgm_uniform', 'SGM Uniform', sgm_uniform, need_inner_model=True, aliases=["SGMUniform"]),
]

# Allow lookup by either internal name or UI label.
schedulers_map = {**{x.name: x for x in schedulers}, **{x.label: x for x in schedulers}}
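

# Usage sketch (added for illustration; not part of the upstream module).
# It shows looking up an entry in schedulers_map and calling its function.
# The Karras scheduler needs no inner model, so it runs standalone; the
# sigma range below is a typical Stable Diffusion range chosen for the
# example, not a value defined in this file.
if __name__ == '__main__':
    scheduler = schedulers_map['karras']  # same object as schedulers_map['Karras']
    sigmas = scheduler.function(n=20, sigma_min=0.0292, sigma_max=14.6146,
                                rho=scheduler.default_rho, device='cpu')
    # get_sigmas_karras returns n + 1 values, descending and ending in 0.0
    print(sigmas.shape, sigmas[0].item(), sigmas[-1].item())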