launch_utils.py

# this script installs necessary requirements and launches the main program in webui.py
import logging
import re
import subprocess
import os
import shutil
import sys
import importlib.util
import importlib.metadata
import platform
import json
from functools import lru_cache

from modules import cmd_args, errors
from modules.paths_internal import script_path, extensions_dir
from modules.timer import startup_timer
from modules import logging_config

args, _ = cmd_args.parser.parse_known_args()
logging_config.setup_logging(args.loglevel)

python = sys.executable
git = os.environ.get('GIT', "git")
index_url = os.environ.get('INDEX_URL', "")
dir_repos = "repositories"

# Whether to default to printing command output
default_command_live = (os.environ.get('WEBUI_LAUNCH_LIVE_OUTPUT') == "1")

os.environ.setdefault('GRADIO_ANALYTICS_ENABLED', 'False')


def check_python_version():
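    """Print an explanatory warning (via modules.errors) if the running Python version is outside the tested range."""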
    is_windows = platform.system() == "Windows"
    major = sys.version_info.major
    minor = sys.version_info.minor
    micro = sys.version_info.micro

    if is_windows:
        supported_minors = [10]
    else:
        supported_minors = [7, 8, 9, 10, 11]

    if not (major == 3 and minor in supported_minors):
        import modules.errors

        modules.errors.print_error_explanation(f"""
INCOMPATIBLE PYTHON VERSION

This program is tested with 3.10.6 Python, but you have {major}.{minor}.{micro}.
If you encounter an error with "RuntimeError: Couldn't install torch." message,
or any other error regarding unsuccessful package (library) installation,
please downgrade (or upgrade) to the latest version of 3.10 Python
and delete current Python and "venv" folder in WebUI's directory.

You can download 3.10 Python from here: https://www.python.org/downloads/release/python-3106/

{"Alternatively, use a binary release of WebUI: https://github.com/AUTOMATIC1111/stable-diffusion-webui/releases/tag/v1.0.0-pre" if is_windows else ""}

Use --skip-python-version-check to suppress this warning.
""")


@lru_cache()
def commit_hash():
    try:
        return subprocess.check_output([git, "-C", script_path, "rev-parse", "HEAD"], shell=False, encoding='utf8').strip()
    except Exception:
        return "<none>"


@lru_cache()
def git_tag():
    try:
        return subprocess.check_output([git, "-C", script_path, "describe", "--tags"], shell=False, encoding='utf8').strip()
    except Exception:
        try:
            changelog_md = os.path.join(os.path.dirname(os.path.dirname(__file__)), "CHANGELOG.md")

            with open(changelog_md, "r", encoding="utf-8") as file:
                line = next((line.strip() for line in file if line.strip()), "<none>")
                line = line.replace("## ", "")
                return line
        except Exception:
            return "<none>"


def run(command, desc=None, errdesc=None, custom_env=None, live: bool = default_command_live) -> str:
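    """Run a shell command, optionally streaming its output; returns captured stdout and raises RuntimeError on a non-zero exit code."""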
    if desc is not None:
        print(desc)

    run_kwargs = {
        "args": command,
        "shell": True,
        "env": os.environ if custom_env is None else custom_env,
        "encoding": 'utf8',
        "errors": 'ignore',
    }

    if not live:
        run_kwargs["stdout"] = run_kwargs["stderr"] = subprocess.PIPE

    result = subprocess.run(**run_kwargs)

    if result.returncode != 0:
        error_bits = [
            f"{errdesc or 'Error running command'}.",
            f"Command: {command}",
            f"Error code: {result.returncode}",
        ]
        if result.stdout:
            error_bits.append(f"stdout: {result.stdout}")
        if result.stderr:
            error_bits.append(f"stderr: {result.stderr}")
        raise RuntimeError("\n".join(error_bits))

    return (result.stdout or "")


def is_installed(package):
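    """Return True if the package is available either as an installed distribution or as an importable module."""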
    try:
        dist = importlib.metadata.distribution(package)
    except importlib.metadata.PackageNotFoundError:
        try:
            spec = importlib.util.find_spec(package)
        except ModuleNotFoundError:
            return False

        return spec is not None

    return dist is not None


def repo_dir(name):
    return os.path.join(script_path, dir_repos, name)


def run_pip(command, desc=None, live=default_command_live):
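    """Invoke pip for this interpreter with --prefer-binary and the optional INDEX_URL; does nothing when --skip-install is set."""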
    if args.skip_install:
        return

    index_url_line = f' --index-url {index_url}' if index_url != '' else ''
    return run(f'"{python}" -m pip {command} --prefer-binary{index_url_line}', desc=f"Installing {desc}", errdesc=f"Couldn't install {desc}", live=live)


def check_run_python(code: str) -> bool:
    result = subprocess.run([python, "-c", code], capture_output=True, shell=False)
    return result.returncode == 0


def git_fix_workspace(dir, name):
    run(f'"{git}" -C "{dir}" fetch --refetch --no-auto-gc', f"Fetching all contents for {name}", f"Couldn't fetch {name}", live=True)
    run(f'"{git}" -C "{dir}" gc --aggressive --prune=now', f"Pruning {name}", f"Couldn't prune {name}", live=True)
    return


def run_git(dir, name, command, desc=None, errdesc=None, custom_env=None, live: bool = default_command_live, autofix=True):
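    """Run a git command in the given directory; on failure, optionally attempt git_fix_workspace() once and re-run the command."""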
    try:
        return run(f'"{git}" -C "{dir}" {command}', desc=desc, errdesc=errdesc, custom_env=custom_env, live=live)
    except RuntimeError:
        if not autofix:
            raise

    print(f"{errdesc}, attempting autofix...")
    git_fix_workspace(dir, name)

    return run(f'"{git}" -C "{dir}" {command}', desc=desc, errdesc=errdesc, custom_env=custom_env, live=live)


def git_clone(url, dir, name, commithash=None):
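    """Clone a repository into dir, or update an existing checkout to the requested commit hash."""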
    # TODO clone into temporary dir and move if successful

    if os.path.exists(dir):
        if commithash is None:
            return

        current_hash = run_git(dir, name, 'rev-parse HEAD', None, f"Couldn't determine {name}'s hash: {commithash}", live=False).strip()
        if current_hash == commithash:
            return

        if run_git(dir, name, 'config --get remote.origin.url', None, f"Couldn't determine {name}'s origin URL", live=False).strip() != url:
            run_git(dir, name, f'remote set-url origin "{url}"', None, f"Failed to set {name}'s origin URL", live=False)

        run_git(dir, name, 'fetch', f"Fetching updates for {name}...", f"Couldn't fetch {name}", autofix=False)

        run_git(dir, name, f'checkout {commithash}', f"Checking out commit for {name} with hash: {commithash}...", f"Couldn't checkout commit {commithash} for {name}", live=True)

        return

    try:
        run(f'"{git}" clone --config core.filemode=false "{url}" "{dir}"', f"Cloning {name} into {dir}...", f"Couldn't clone {name}", live=True)
    except RuntimeError:
        shutil.rmtree(dir, ignore_errors=True)
        raise

    if commithash is not None:
        run(f'"{git}" -C "{dir}" checkout {commithash}', None, f"Couldn't checkout {name}'s hash: {commithash}")


def git_pull_recursive(dir):
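    """Walk dir and run 'git pull --autostash' in every nested git repository found."""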
    for subdir, _, _ in os.walk(dir):
        if os.path.exists(os.path.join(subdir, '.git')):
            try:
                output = subprocess.check_output([git, '-C', subdir, 'pull', '--autostash'])
                print(f"Pulled changes for repository in '{subdir}':\n{output.decode('utf-8').strip()}\n")
            except subprocess.CalledProcessError as e:
                print(f"Couldn't perform 'git pull' on repository in '{subdir}':\n{e.output.decode('utf-8').strip()}\n")


def version_check(commit):
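    """Compare the local commit against the upstream master branch on GitHub and report whether the install is up to date."""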
    try:
        import requests
        commits = requests.get('https://api.github.com/repos/AUTOMATIC1111/stable-diffusion-webui/branches/master').json()
        if commit != "<none>" and commits['commit']['sha'] != commit:
            print("--------------------------------------------------------")
            print("| You are not up to date with the most recent release. |")
            print("| Consider running `git pull` to update.               |")
            print("--------------------------------------------------------")
        elif commits['commit']['sha'] == commit:
            print("You are up to date with the most recent release.")
        else:
            print("Not a git clone, can't perform version check.")
    except Exception as e:
        print("version check failed", e)


def run_extension_installer(extension_dir):
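    """Execute an extension's install.py (if present) with the webui root prepended to PYTHONPATH."""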
    path_installer = os.path.join(extension_dir, "install.py")
    if not os.path.isfile(path_installer):
        return

    try:
        env = os.environ.copy()
        env['PYTHONPATH'] = f"{os.path.abspath('.')}{os.pathsep}{env.get('PYTHONPATH', '')}"

        stdout = run(f'"{python}" "{path_installer}"', errdesc=f"Error running install.py for extension {extension_dir}", custom_env=env).strip()
        if stdout:
            print(stdout)
    except Exception as e:
        errors.report(str(e))


def list_extensions(settings_file):
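    """Return the extension directory names that are enabled according to the settings file and command-line flags."""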
    settings = {}

    try:
        with open(settings_file, "r", encoding="utf8") as file:
            settings = json.load(file)
    except FileNotFoundError:
        pass
    except Exception:
        errors.report(f'\nCould not load settings\nThe config file "{settings_file}" is likely corrupted\nIt has been moved to the "tmp/config.json"\nReverting config to default\n\n', exc_info=True)
        os.replace(settings_file, os.path.join(script_path, "tmp", "config.json"))

    disabled_extensions = set(settings.get('disabled_extensions', []))
    disable_all_extensions = settings.get('disable_all_extensions', 'none')

    if disable_all_extensions != 'none' or args.disable_extra_extensions or args.disable_all_extensions or not os.path.isdir(extensions_dir):
        return []

    return [x for x in os.listdir(extensions_dir) if x not in disabled_extensions]


def run_extensions_installers(settings_file):
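    """Run install.py for every enabled extension, timing each one under a startup-timer subcategory."""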
    if not os.path.isdir(extensions_dir):
        return

    with startup_timer.subcategory("run extensions installers"):
        for dirname_extension in list_extensions(settings_file):
            logging.debug(f"Installing {dirname_extension}")

            path = os.path.join(extensions_dir, dirname_extension)

            if os.path.isdir(path):
                run_extension_installer(path)
                startup_timer.record(dirname_extension)


re_requirement = re.compile(r"\s*([-_a-zA-Z0-9]+)\s*(?:==\s*([-+_.a-zA-Z0-9]+))?\s*")


def requirements_met(requirements_file):
    """
    Does a simple parse of a requirements.txt file to determine if all requirements in it
    are already installed. Returns True if so, False if not installed or parsing fails.
    """

    import importlib.metadata
    import packaging.version

    with open(requirements_file, "r", encoding="utf8") as file:
        for line in file:
            if line.strip() == "":
                continue

            m = re.match(re_requirement, line)
            if m is None:
                return False

            package = m.group(1).strip()
            version_required = (m.group(2) or "").strip()

            if version_required == "":
                continue

            try:
                version_installed = importlib.metadata.version(package)
            except Exception:
                return False

            if packaging.version.parse(version_required) != packaging.version.parse(version_installed):
                return False

    return True


def prepare_environment():
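    """Install torch and the other required packages, clone the model repositories, and run extension installers before the UI starts."""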
    torch_index_url = os.environ.get('TORCH_INDEX_URL', "https://download.pytorch.org/whl/cu121")
    torch_command = os.environ.get('TORCH_COMMAND', f"pip install torch==2.1.2 torchvision==0.16.2 --extra-index-url {torch_index_url}")
    if args.use_ipex:
        if platform.system() == "Windows":
            # The "Nuullll/intel-extension-for-pytorch" wheels were built from IPEX source for Intel Arc GPU: https://github.com/intel/intel-extension-for-pytorch/tree/xpu-main
            # This is NOT an Intel official release so please use it at your own risk!!
            # See https://github.com/Nuullll/intel-extension-for-pytorch/releases/tag/v2.0.110%2Bxpu-master%2Bdll-bundle for details.
            #
            # Strengths (over official IPEX 2.0.110 windows release):
            #   - AOT build (for Arc GPU only) to eliminate JIT compilation overhead: https://github.com/intel/intel-extension-for-pytorch/issues/399
            #   - Bundles minimal oneAPI 2023.2 dependencies into the python wheels, so users don't need to install oneAPI for the whole system.
            #   - Provides a compatible torchvision wheel: https://github.com/intel/intel-extension-for-pytorch/issues/465
            # Limitation:
            #   - Only works for python 3.10
            url_prefix = "https://github.com/Nuullll/intel-extension-for-pytorch/releases/download/v2.0.110%2Bxpu-master%2Bdll-bundle"
            torch_command = os.environ.get('TORCH_COMMAND', f"pip install {url_prefix}/torch-2.0.0a0+gite9ebda2-cp310-cp310-win_amd64.whl {url_prefix}/torchvision-0.15.2a0+fa99a53-cp310-cp310-win_amd64.whl {url_prefix}/intel_extension_for_pytorch-2.0.110+gitc6ea20b-cp310-cp310-win_amd64.whl")
        else:
            # Using official IPEX release for linux since it's already an AOT build.
            # However, users still have to install oneAPI toolkit and activate oneAPI environment manually.
            # See https://intel.github.io/intel-extension-for-pytorch/index.html#installation for details.
            torch_index_url = os.environ.get('TORCH_INDEX_URL', "https://pytorch-extension.intel.com/release-whl/stable/xpu/us/")
            torch_command = os.environ.get('TORCH_COMMAND', f"pip install torch==2.0.0a0 intel-extension-for-pytorch==2.0.110+gitba7f6c1 --extra-index-url {torch_index_url}")
    requirements_file = os.environ.get('REQS_FILE', "requirements_versions.txt")
    requirements_file_for_npu = os.environ.get('REQS_FILE_FOR_NPU', "requirements_npu.txt")

    xformers_package = os.environ.get('XFORMERS_PACKAGE', 'xformers==0.0.23.post1')
    clip_package = os.environ.get('CLIP_PACKAGE', "https://github.com/openai/CLIP/archive/d50d76daa670286dd6cacf3bcd80b5e4823fc8e1.zip")
    openclip_package = os.environ.get('OPENCLIP_PACKAGE', "https://github.com/mlfoundations/open_clip/archive/bb6e834e9c70d9c27d0dc3ecedeebeaeb1ffad6b.zip")

    assets_repo = os.environ.get('ASSETS_REPO', "https://github.com/AUTOMATIC1111/stable-diffusion-webui-assets.git")
    stable_diffusion_repo = os.environ.get('STABLE_DIFFUSION_REPO', "https://github.com/Stability-AI/stablediffusion.git")
    stable_diffusion_xl_repo = os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://github.com/Stability-AI/generative-models.git")
    k_diffusion_repo = os.environ.get('K_DIFFUSION_REPO', 'https://github.com/crowsonkb/k-diffusion.git')
    blip_repo = os.environ.get('BLIP_REPO', 'https://github.com/salesforce/BLIP.git')

    assets_commit_hash = os.environ.get('ASSETS_COMMIT_HASH', "6f7db241d2f8ba7457bac5ca9753331f0c266917")
    stable_diffusion_commit_hash = os.environ.get('STABLE_DIFFUSION_COMMIT_HASH', "cf1d67a6fd5ea1aa600c4df58e5b47da45f6bdbf")
    stable_diffusion_xl_commit_hash = os.environ.get('STABLE_DIFFUSION_XL_COMMIT_HASH', "45c443b316737a4ab6e40413d7794a7f5657c19f")
    k_diffusion_commit_hash = os.environ.get('K_DIFFUSION_COMMIT_HASH', "ab527a9a6d347f364e3d185ba6d714e22d80cb3c")
    blip_commit_hash = os.environ.get('BLIP_COMMIT_HASH', "48211a1594f1321b00f14c9f7a5b4813144b2fb9")

    try:
        # the existence of this file is a signal to webui.sh/bat that webui needs to be restarted when it stops execution
        os.remove(os.path.join(script_path, "tmp", "restart"))
        os.environ.setdefault('SD_WEBUI_RESTARTING', '1')
    except OSError:
        pass

    if not args.skip_python_version_check:
        check_python_version()

    startup_timer.record("checks")

    commit = commit_hash()
    tag = git_tag()
    startup_timer.record("git version info")

    print(f"Python {sys.version}")
    print(f"Version: {tag}")
    print(f"Commit hash: {commit}")

    if args.reinstall_torch or not is_installed("torch") or not is_installed("torchvision"):
        run(f'"{python}" -m {torch_command}', "Installing torch and torchvision", "Couldn't install torch", live=True)
        startup_timer.record("install torch")

    if args.use_ipex:
        args.skip_torch_cuda_test = True
    if not args.skip_torch_cuda_test and not check_run_python("import torch; assert torch.cuda.is_available()"):
        raise RuntimeError(
            'Torch is not able to use GPU; '
            'add --skip-torch-cuda-test to COMMANDLINE_ARGS variable to disable this check'
        )
    startup_timer.record("torch GPU test")

    if not is_installed("clip"):
        run_pip(f"install {clip_package}", "clip")
        startup_timer.record("install clip")

    if not is_installed("open_clip"):
        run_pip(f"install {openclip_package}", "open_clip")
        startup_timer.record("install open_clip")

    if (not is_installed("xformers") or args.reinstall_xformers) and args.xformers:
        run_pip(f"install -U -I --no-deps {xformers_package}", "xformers")
        startup_timer.record("install xformers")

    if not is_installed("ngrok") and args.ngrok:
        run_pip("install ngrok", "ngrok")
        startup_timer.record("install ngrok")

    os.makedirs(os.path.join(script_path, dir_repos), exist_ok=True)

    git_clone(assets_repo, repo_dir('stable-diffusion-webui-assets'), "assets", assets_commit_hash)
    git_clone(stable_diffusion_repo, repo_dir('stable-diffusion-stability-ai'), "Stable Diffusion", stable_diffusion_commit_hash)
    git_clone(stable_diffusion_xl_repo, repo_dir('generative-models'), "Stable Diffusion XL", stable_diffusion_xl_commit_hash)
    git_clone(k_diffusion_repo, repo_dir('k-diffusion'), "K-diffusion", k_diffusion_commit_hash)
    git_clone(blip_repo, repo_dir('BLIP'), "BLIP", blip_commit_hash)

    startup_timer.record("clone repositories")

    if not os.path.isfile(requirements_file):
        requirements_file = os.path.join(script_path, requirements_file)

    if not requirements_met(requirements_file):
        run_pip(f"install -r \"{requirements_file}\"", "requirements")
        startup_timer.record("install requirements")

    if not os.path.isfile(requirements_file_for_npu):
        requirements_file_for_npu = os.path.join(script_path, requirements_file_for_npu)

    if "torch_npu" in torch_command and not requirements_met(requirements_file_for_npu):
        run_pip(f"install -r \"{requirements_file_for_npu}\"", "requirements_for_npu")
        startup_timer.record("install requirements_for_npu")

    if not args.skip_install:
        run_extensions_installers(settings_file=args.ui_settings_file)

    if args.update_check:
        version_check(commit)
        startup_timer.record("check version")

    if args.update_all_extensions:
        git_pull_recursive(extensions_dir)
        startup_timer.record("update extensions")

    if "--exit" in sys.argv:
        print("Exiting because of --exit argument")
        exit(0)


def configure_for_tests():
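    """Append the command-line flags the test suite expects (API mode, dummy checkpoint, skipped GPU/NaN checks)."""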
  336. if "--api" not in sys.argv:
  337. sys.argv.append("--api")
  338. if "--ckpt" not in sys.argv:
  339. sys.argv.append("--ckpt")
  340. sys.argv.append(os.path.join(script_path, "test/test_files/empty.pt"))
  341. if "--skip-torch-cuda-test" not in sys.argv:
  342. sys.argv.append("--skip-torch-cuda-test")
  343. if "--disable-nan-check" not in sys.argv:
  344. sys.argv.append("--disable-nan-check")
  345. os.environ['COMMANDLINE_ARGS'] = ""
  346. def start():
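    """Import webui and launch either the API-only server or the full web UI, depending on --nowebui."""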
  347. print(f"Launching {'API server' if '--nowebui' in sys.argv else 'Web UI'} with arguments: {' '.join(sys.argv[1:])}")
  348. import webui
  349. if '--nowebui' in sys.argv:
  350. webui.api_only()
  351. else:
  352. webui.webui()
  353. def dump_sysinfo():
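    """Write the collected system information to a timestamped JSON file and return its filename."""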
    from modules import sysinfo
    import datetime

    text = sysinfo.get()
    filename = f"sysinfo-{datetime.datetime.utcnow().strftime('%Y-%m-%d-%H-%M')}.json"

    with open(filename, "w", encoding="utf8") as file:
        file.write(text)

    return filename