initialize.py

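"""
Startup code for the Stable Diffusion web UI: heavy library imports, version
checks, and one-time initialization of models, scripts, and extra networks.
"""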

import importlib
import logging
import os
import sys
import warnings
from threading import Thread

from modules.timer import startup_timer
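

# Heavy third-party imports (torch, pytorch_lightning, gradio, ldm, sgm) are done up
# front here, with the time each one takes recorded in startup_timer.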
def imports():
    logging.getLogger("torch.distributed.nn").setLevel(logging.ERROR)  # sshh...
    logging.getLogger("xformers").addFilter(lambda record: 'A matching Triton is not available' not in record.getMessage())

    import torch  # noqa: F401
    startup_timer.record("import torch")
    import pytorch_lightning  # noqa: F401
  14. startup_timer.record("import torch")

    warnings.filterwarnings(action="ignore", category=DeprecationWarning, module="pytorch_lightning")
    warnings.filterwarnings(action="ignore", category=UserWarning, module="torchvision")

    os.environ.setdefault('GRADIO_ANALYTICS_ENABLED', 'False')
    import gradio  # noqa: F401
    startup_timer.record("import gradio")

    from modules import paths, timer, import_hook, errors  # noqa: F401
    startup_timer.record("setup paths")

    import ldm.modules.encoders.modules  # noqa: F401
    startup_timer.record("import ldm")

    import sgm.modules.encoders.modules  # noqa: F401
    startup_timer.record("import sgm")

    from modules import shared_init
    shared_init.initialize()
    startup_timer.record("initialize shared")

    from modules import processing, gradio_extensons, ui  # noqa: F401
    startup_timer.record("other imports")
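

# Runs the dependency version check from modules.errors unless --skip-version-check is set.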
def check_versions():
    from modules.shared_cmd_options import cmd_opts

    if not cmd_opts.skip_version_check:
        from modules import errors
        errors.check_versions()
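

# Full one-time startup: runtime fixes (torch version, asyncio policy, TLS, SIGINT),
# SD model / CodeFormer / GFPGAN setup, then the shared initialize_rest() path.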
def initialize():
    from modules import initialize_util

    initialize_util.fix_torch_version()
    initialize_util.fix_asyncio_event_loop_policy()
    initialize_util.validate_tls_options()
    initialize_util.configure_sigint_handler()
    initialize_util.configure_opts_onchange()

    from modules import sd_models
    sd_models.setup_model()
    startup_timer.record("setup SD model")

    from modules.shared_cmd_options import cmd_opts

    from modules import codeformer_model
    warnings.filterwarnings(action="ignore", category=UserWarning, module="torchvision.transforms.functional_tensor")
    codeformer_model.setup_model(cmd_opts.codeformer_models_path)
    startup_timer.record("setup codeformer")

    from modules import gfpgan_model
    gfpgan_model.setup_model(cmd_opts.gfpgan_models_path)
    startup_timer.record("setup gfpgan")

    initialize_rest(reload_script_modules=False)


def initialize_rest(*, reload_script_modules=False):
    """
    Called both from initialize() and when reloading the webui.
    """
    from modules.shared_cmd_options import cmd_opts

    from modules import sd_samplers
    sd_samplers.set_samplers()
    startup_timer.record("set samplers")

    from modules import extensions
    extensions.list_extensions()
    startup_timer.record("list extensions")

    from modules import initialize_util
    initialize_util.restore_config_state_file()
    startup_timer.record("restore config state file")

    from modules import shared, upscaler, scripts
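
    # In --ui-debug-mode, skip model loading entirely: install only the plain Lanczos
    # upscaler, load scripts, and return early.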
    if cmd_opts.ui_debug_mode:
        shared.sd_upscalers = upscaler.UpscalerLanczos().scalers
        scripts.load_scripts()
        return

    from modules import sd_models
    sd_models.list_models()
    startup_timer.record("list SD models")

    from modules import localization
    localization.list_localizations(cmd_opts.localizations_dir)
    startup_timer.record("list localizations")

    with startup_timer.subcategory("load scripts"):
        scripts.load_scripts()
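
    # When reloading the webui, re-import every loaded modules.ui* module so changes
    # to the UI code take effect without restarting the process.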
    if reload_script_modules:
        for module in [module for name, module in sys.modules.items() if name.startswith("modules.ui")]:
            importlib.reload(module)
        startup_timer.record("reload script modules")

    from modules import modelloader
    modelloader.load_upscalers()
    startup_timer.record("load upscalers")

    from modules import sd_vae
    sd_vae.refresh_vae_list()
    startup_timer.record("refresh VAE")

    from modules import textual_inversion
    textual_inversion.textual_inversion.list_textual_inversion_templates()
    startup_timer.record("refresh textual inversion templates")

    from modules import script_callbacks, sd_hijack_optimizations, sd_hijack
    script_callbacks.on_list_optimizers(sd_hijack_optimizations.list_optimizers)
    sd_hijack.list_optimizers()
    startup_timer.record("scripts list_optimizers")

    from modules import sd_unet
    sd_unet.list_unets()
    startup_timer.record("scripts list_unets")
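
    # Nested helper: accessing shared.sd_model is what triggers the checkpoint load;
    # it is started on a background thread just below.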
    def load_model():
        """
        Accesses the shared.sd_model property to load the model.
        If some extension already loaded the model before this point, its optimization
        may be None because the list of optimizers had not been filled yet, so we
        apply the optimizations again.
        """
        from modules import devices
        devices.torch_npu_set_device()

        shared.sd_model  # noqa: B018

        if sd_hijack.current_optimizer is None:
            sd_hijack.apply_optimizations()

        devices.first_time_calculation()

    if not shared.cmd_opts.skip_load_model_at_start:
        Thread(target=load_model).start()

    from modules import shared_items
    shared_items.reload_hypernetworks()
    startup_timer.record("reload hypernetworks")

    from modules import ui_extra_networks
    ui_extra_networks.initialize()
    ui_extra_networks.register_default_pages()

    from modules import extra_networks
    extra_networks.initialize()
    extra_networks.register_default_extra_networks()
    startup_timer.record("initialize extra networks")