# initialize.py
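# Startup helpers for the Stable Diffusion web UI: heavy third-party imports,
# optional version checks, and one-time / reload-time initialization of models,
# upscalers, scripts and extra networks.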

import importlib
import logging
import os
import sys
import warnings
from threading import Thread

from modules.timer import startup_timer


def imports():
    logging.getLogger("torch.distributed.nn").setLevel(logging.ERROR)  # sshh...
    logging.getLogger("xformers").addFilter(lambda record: 'A matching Triton is not available' not in record.getMessage())

    import torch  # noqa: F401
    startup_timer.record("import torch")
    from modules import patch_hf_hub_download  # noqa: F401

    import pytorch_lightning  # noqa: F401
    startup_timer.record("import pytorch_lightning")
    warnings.filterwarnings(action="ignore", category=DeprecationWarning, module="pytorch_lightning")
    warnings.filterwarnings(action="ignore", category=UserWarning, module="torchvision")

    os.environ.setdefault('GRADIO_ANALYTICS_ENABLED', 'False')
    import gradio  # noqa: F401
    startup_timer.record("import gradio")

    from modules import paths, timer, import_hook, errors  # noqa: F401
    startup_timer.record("setup paths")

    import ldm.modules.encoders.modules  # noqa: F401
    startup_timer.record("import ldm")

    import sgm.modules.encoders.modules  # noqa: F401
    startup_timer.record("import sgm")

    from modules import shared_init
    shared_init.initialize()
    startup_timer.record("initialize shared")

    from modules import processing, gradio_extensons, ui  # noqa: F401
    startup_timer.record("other imports")


def check_versions():
    from modules.shared_cmd_options import cmd_opts

    if not cmd_opts.skip_version_check:
        from modules import errors
        errors.check_versions()
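
# A minimal call-order sketch for reference. Assumption: this mirrors how the
# launcher (webui.py upstream) is expected to drive this module; the actual
# entry-point code lives outside this file.
#
#     from modules import initialize
#
#     initialize.imports()          # heavy third-party imports, timed via startup_timer
#     initialize.check_versions()   # skipped with --skip-version-check
#     initialize.initialize()       # models, face restorers, then initialize_rest()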


def initialize():
    from modules import initialize_util

    initialize_util.fix_torch_version()
    initialize_util.fix_pytorch_lightning()
    initialize_util.fix_asyncio_event_loop_policy()
    initialize_util.validate_tls_options()
    initialize_util.configure_sigint_handler()
    initialize_util.configure_opts_onchange()

    from modules import sd_models
    sd_models.setup_model()
    startup_timer.record("setup SD model")

    from modules.shared_cmd_options import cmd_opts

    from modules import codeformer_model
    warnings.filterwarnings(action="ignore", category=UserWarning, module="torchvision.transforms.functional_tensor")
    codeformer_model.setup_model(cmd_opts.codeformer_models_path)
    startup_timer.record("setup codeformer")

    from modules import gfpgan_model
    gfpgan_model.setup_model(cmd_opts.gfpgan_models_path)
    startup_timer.record("setup gfpgan")

    initialize_rest(reload_script_modules=False)


def initialize_rest(*, reload_script_modules=False):
    """
    Called both from initialize() and when reloading the webui.
    """
    from modules.shared_cmd_options import cmd_opts

    from modules import sd_samplers
    sd_samplers.set_samplers()
    startup_timer.record("set samplers")

    from modules import extensions
    extensions.list_extensions()
    startup_timer.record("list extensions")

    from modules import initialize_util
    initialize_util.restore_config_state_file()
    startup_timer.record("restore config state file")

    from modules import shared, upscaler, scripts
    if cmd_opts.ui_debug_mode:
        shared.sd_upscalers = upscaler.UpscalerLanczos().scalers
        scripts.load_scripts()
        return

    from modules import sd_models
    sd_models.list_models()
    startup_timer.record("list SD models")

    from modules import localization
    localization.list_localizations(cmd_opts.localizations_dir)
    startup_timer.record("list localizations")

    with startup_timer.subcategory("load scripts"):
        scripts.load_scripts()

    if reload_script_modules and shared.opts.enable_reloading_ui_scripts:
        for module in [module for name, module in sys.modules.items() if name.startswith("modules.ui")]:
            importlib.reload(module)
        startup_timer.record("reload script modules")

    from modules import modelloader
    modelloader.load_upscalers()
    startup_timer.record("load upscalers")

    from modules import sd_vae
    sd_vae.refresh_vae_list()
    startup_timer.record("refresh VAE")

    from modules import textual_inversion
    textual_inversion.textual_inversion.list_textual_inversion_templates()
    startup_timer.record("refresh textual inversion templates")

    from modules import script_callbacks, sd_hijack_optimizations, sd_hijack
    script_callbacks.on_list_optimizers(sd_hijack_optimizations.list_optimizers)
    sd_hijack.list_optimizers()
    startup_timer.record("scripts list_optimizers")

    from modules import sd_unet
    sd_unet.list_unets()
    startup_timer.record("scripts list_unets")
    def load_model():
        """
        Accesses the shared.sd_model property to load the model.
        If an extension has already triggered the load before this point, the applied
        optimization may be None because the list of optimizers had not been filled
        yet, so optimizations are applied again here.
        """
        from modules import devices
        devices.torch_npu_set_device()
        shared.sd_model  # noqa: B018

        if sd_hijack.current_optimizer is None:
            sd_hijack.apply_optimizations()

        devices.first_time_calculation()
    if not shared.cmd_opts.skip_load_model_at_start:
        Thread(target=load_model).start()

    from modules import shared_items
    shared_items.reload_hypernetworks()
    startup_timer.record("reload hypernetworks")

    from modules import ui_extra_networks
    ui_extra_networks.initialize()
    ui_extra_networks.register_default_pages()

    from modules import extra_networks
    extra_networks.initialize()
    extra_networks.register_default_extra_networks()
    startup_timer.record("initialize extra networks")
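
# Reload sketch: initialize_rest() is also the re-entry point when the UI is rebuilt
# without restarting the process. Assumption: a reload handler outside this module
# would call it roughly like this:
#
#     from modules import initialize
#
#     initialize.initialize_rest(reload_script_modules=True)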