dat_model.py

import os

from modules import modelloader, errors
from modules.shared import cmd_opts, opts, hf_endpoint
from modules.upscaler import Upscaler, UpscalerData
from modules.upscaler_utils import upscale_with_model


class UpscalerDAT(Upscaler):
    def __init__(self, user_path):
        self.name = "DAT"
        self.user_path = user_path
        self.scalers = []
        super().__init__()

        # Register any DAT weights found on disk under the user's model path.
        for file in self.find_models(ext_filter=[".pt", ".pth"]):
            name = modelloader.friendly_name(file)
            scaler_data = UpscalerData(name, file, upscaler=self, scale=None)
            self.scalers.append(scaler_data)

        # Add the predefined downloadable models that the user has enabled in settings.
        for model in get_dat_models(self):
            if model.name in opts.dat_enabled_models:
                self.scalers.append(model)

    def do_upscale(self, img, path):
        # Fall back to the original image if the model cannot be resolved or downloaded.
        try:
            info = self.load_model(path)
        except Exception:
            errors.report(f"Unable to load DAT model {path}", exc_info=True)
            return img

        model_descriptor = modelloader.load_spandrel_model(
            info.local_data_path,
            device=self.device,
            prefer_half=(not cmd_opts.no_half and not cmd_opts.upcast_sampling),
            expected_architecture="DAT",
        )
        return upscale_with_model(
            model_descriptor,
            img,
            tile_size=opts.DAT_tile,
            tile_overlap=opts.DAT_tile_overlap,
        )

    def load_model(self, path):
        for scaler in self.scalers:
            if scaler.data_path == path:
                if scaler.local_data_path.startswith("http"):
                    scaler.local_data_path = modelloader.load_file_from_url(
                        scaler.data_path,
                        model_dir=self.model_download_path,
                        hash_prefix=scaler.sha256,
                    )
                    if os.path.getsize(scaler.local_data_path) < 200:
                        # Re-download if the file is too small, probably an LFS pointer
                        scaler.local_data_path = modelloader.load_file_from_url(
                            scaler.data_path,
                            model_dir=self.model_download_path,
                            hash_prefix=scaler.sha256,
                            re_download=True,
                        )
                if not os.path.exists(scaler.local_data_path):
                    raise FileNotFoundError(f"DAT data missing: {scaler.local_data_path}")
                return scaler
        raise ValueError(f"Unable to find model info: {path}")


def get_dat_models(scaler):
    return [
        UpscalerData(
            name="DAT x2",
            path=f"{hf_endpoint}/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x2.pth",
            scale=2,
            upscaler=scaler,
            sha256='7760aa96e4ee77e29d4f89c3a4486200042e019461fdb8aa286f49aa00b89b51',
        ),
        UpscalerData(
            name="DAT x3",
            path=f"{hf_endpoint}/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x3.pth",
            scale=3,
            upscaler=scaler,
            sha256='581973e02c06f90d4eb90acf743ec9604f56f3c2c6f9e1e2c2b38ded1f80d197',
        ),
        UpscalerData(
            name="DAT x4",
            path=f"{hf_endpoint}/w-e-w/DAT/resolve/main/experiments/pretrained_models/DAT/DAT_x4.pth",
            scale=4,
            upscaler=scaler,
            sha256='391a6ce69899dff5ea3214557e9d585608254579217169faf3d4c353caff049e',
        ),
    ]
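

# --- Usage sketch (illustrative only; not part of the upstream module) ---
# Assuming the webui runtime has already initialized `opts`, `cmd_opts`, and the
# model directories, the class above could be driven directly as shown below.
# Only `UpscalerDAT`, `do_upscale`, and `UpscalerData.data_path` come from this
# file; the Pillow image handling, the user_path value, and the choice of scaler
# are assumptions made for the example.
#
#   from PIL import Image
#
#   upscaler = UpscalerDAT(user_path="models/DAT")
#   scaler = upscaler.scalers[0]                          # e.g. "DAT x4", if enabled in settings
#   img = Image.open("input.png")
#   result = upscaler.do_upscale(img, scaler.data_path)   # weights are downloaded on first use
#   result.save("upscaled.png")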