gclient_scm.py

  1. # Copyright (c) 2012 The Chromium Authors. All rights reserved.
  2. # Use of this source code is governed by a BSD-style license that can be
  3. # found in the LICENSE file.
  4. """Gclient-specific SCM-specific operations."""
  5. import collections
  6. import contextlib
  7. import errno
  8. import glob
  9. import json
  10. import logging
  11. import os
  12. import platform
  13. import posixpath
  14. import re
  15. import shutil
  16. import sys
  17. import tempfile
  18. import threading
  19. import traceback
  20. import gclient_utils
  21. import gerrit_util
  22. import git_auth
  23. import git_cache
  24. import git_common
  25. import scm
  26. import subprocess2
  27. # TODO: Should fix these warnings.
  28. # pylint: disable=line-too-long
  29. class NoUsableRevError(gclient_utils.Error):
  30. """Raised if requested revision isn't found in checkout."""
  31. class DiffFiltererWrapper(object):
  32. """Simple base class which tracks which file is being diffed and
  33. replaces instances of its file name in the original and
  34. working copy lines of the git diff output."""
  35. index_string = None
  36. original_prefix = "--- "
  37. working_prefix = "+++ "
  38. def __init__(self, relpath, print_func):
  39. # Note that we always use '/' as the path separator to be
  40. # consistent with cygwin-style output on Windows
  41. self._relpath = relpath.replace("\\", "/")
  42. self._current_file = None
  43. self._print_func = print_func
  44. def SetCurrentFile(self, current_file):
  45. self._current_file = current_file
  46. @property
  47. def _replacement_file(self):
  48. return posixpath.join(self._relpath, self._current_file)
  49. def _Replace(self, line):
  50. return line.replace(self._current_file, self._replacement_file)
  51. def Filter(self, line):
  52. if (line.startswith(self.index_string)):
  53. self.SetCurrentFile(line[len(self.index_string):])
  54. line = self._Replace(line)
  55. else:
  56. if (line.startswith(self.original_prefix)
  57. or line.startswith(self.working_prefix)):
  58. line = self._Replace(line)
  59. self._print_func(line)
  60. class GitDiffFilterer(DiffFiltererWrapper):
  61. index_string = "diff --git "
  62. def SetCurrentFile(self, current_file):
  63. # Get filename by parsing "a/<filename> b/<filename>"
  64. self._current_file = current_file[:(len(current_file) // 2)][2:]
  65. def _Replace(self, line):
  66. return re.sub("[a|b]/" + self._current_file, self._replacement_file,
  67. line)
  68. # SCMWrapper base class
  69. class SCMWrapper(object):
  70. """Add necessary glue between all the supported SCM.
  71. This is the abstraction layer to bind to different SCM.
  72. """
  73. def __init__(self,
  74. url=None,
  75. root_dir=None,
  76. relpath=None,
  77. out_fh=None,
  78. out_cb=None,
  79. print_outbuf=False):
  80. self.url = url
  81. self._root_dir = root_dir
  82. if self._root_dir:
  83. self._root_dir = self._root_dir.replace('/', os.sep)
  84. self.relpath = relpath
  85. if self.relpath:
  86. self.relpath = self.relpath.replace('/', os.sep)
  87. if self.relpath and self._root_dir:
  88. self.checkout_path = os.path.join(self._root_dir, self.relpath)
  89. if out_fh is None:
  90. out_fh = sys.stdout
  91. self.out_fh = out_fh
  92. self.out_cb = out_cb
  93. self.print_outbuf = print_outbuf
  94. def Print(self, *args, **kwargs):
  95. kwargs.setdefault('file', self.out_fh)
  96. if kwargs.pop('timestamp', True):
  97. self.out_fh.write('[%s] ' % gclient_utils.Elapsed())
  98. print(*args, **kwargs)
  99. def RunCommand(self, command, options, args, file_list=None):
  100. commands = [
  101. 'update', 'updatesingle', 'revert', 'revinfo', 'status', 'diff',
  102. 'pack', 'runhooks'
  103. ]
  104. if not command in commands:
  105. raise gclient_utils.Error('Unknown command %s' % command)
  106. if not command in dir(self):
  107. raise gclient_utils.Error(
  108. 'Command %s not implemented in %s wrapper' %
  109. (command, self.__class__.__name__))
  110. return getattr(self, command)(options, args, file_list)
  111. @staticmethod
  112. def _get_first_remote_url(checkout_path):
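# scm.GIT.YieldConfigRegexp yields (key, value) config pairs matching the
# regexp; the first value (typically remote.origin.url) is returned below.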
  113. log = scm.GIT.YieldConfigRegexp(checkout_path, r'remote.*.url')
  114. return next(log)[1]
  115. def GetCacheMirror(self):
  116. if getattr(self, 'cache_dir', None):
  117. url, _ = gclient_utils.SplitUrlRevision(self.url)
  118. return git_cache.Mirror(url)
  119. return None
  120. def GetActualRemoteURL(self, options):
  121. """Attempt to determine the remote URL for this SCMWrapper."""
  122. # Git
  123. if os.path.exists(os.path.join(self.checkout_path, '.git')):
  124. actual_remote_url = self._get_first_remote_url(self.checkout_path)
  125. mirror = self.GetCacheMirror()
  126. # If the cache is used, obtain the actual remote URL from there.
  127. if (mirror and mirror.exists() and mirror.mirror_path.replace(
  128. '\\', '/') == actual_remote_url.replace('\\', '/')):
  129. actual_remote_url = self._get_first_remote_url(
  130. mirror.mirror_path)
  131. return actual_remote_url
  132. return None
  133. def DoesRemoteURLMatch(self, options):
  134. """Determine whether the remote URL of this checkout is the expected URL."""
  135. if not os.path.exists(self.checkout_path):
  136. # A checkout which doesn't exist can't be broken.
  137. return True
  138. actual_remote_url = self.GetActualRemoteURL(options)
  139. if actual_remote_url:
  140. return (gclient_utils.SplitUrlRevision(actual_remote_url)[0].rstrip(
  141. '/') == gclient_utils.SplitUrlRevision(self.url)[0].rstrip('/'))
  142. # This may occur if the self.checkout_path exists but does not contain a
  143. # valid git checkout.
  144. return False
  145. def _DeleteOrMove(self, force):
  146. """Delete the checkout directory or move it out of the way.
  147. Args:
  148. force: bool; if True, delete the directory. Otherwise, just move it.
  149. """
  150. if force and os.environ.get('CHROME_HEADLESS') == '1':
  151. self.Print('_____ Conflicting directory found in %s. Removing.' %
  152. self.checkout_path)
  153. gclient_utils.AddWarning('Conflicting directory %s deleted.' %
  154. self.checkout_path)
  155. gclient_utils.rmtree(self.checkout_path)
  156. else:
  157. bad_scm_dir = os.path.join(self._root_dir, '_bad_scm',
  158. os.path.dirname(self.relpath))
  159. try:
  160. os.makedirs(bad_scm_dir)
  161. except OSError as e:
  162. if e.errno != errno.EEXIST:
  163. raise
  164. dest_path = tempfile.mkdtemp(prefix=os.path.basename(self.relpath),
  165. dir=bad_scm_dir)
  166. self.Print(
  167. '_____ Conflicting directory found in %s. Moving to %s.' %
  168. (self.checkout_path, dest_path))
  169. gclient_utils.AddWarning('Conflicting directory %s moved to %s.' %
  170. (self.checkout_path, dest_path))
  171. shutil.move(self.checkout_path, dest_path)
  172. class GitWrapper(SCMWrapper):
  173. """Wrapper for Git"""
  174. name = 'git'
  175. remote = 'origin'
  176. @property
  177. def cache_dir(self):
  178. try:
  179. return git_cache.Mirror.GetCachePath()
  180. except RuntimeError:
  181. return None
  182. def __init__(self, url=None, *args, **kwargs):
  183. """Removes 'git+' fake prefix from git URL."""
  184. if url and (url.startswith('git+http://')
  185. or url.startswith('git+https://')):
  186. url = url[4:]
  187. SCMWrapper.__init__(self, url, *args, **kwargs)
  188. filter_kwargs = {'time_throttle': 1, 'out_fh': self.out_fh}
  189. if self.out_cb:
  190. filter_kwargs['predicate'] = self.out_cb
  191. self.filter = gclient_utils.GitFilter(**filter_kwargs)
  192. self._running_under_rosetta = None
  193. self.current_revision = None
  194. def GetCheckoutRoot(self):
  195. return scm.GIT.GetCheckoutRoot(self.checkout_path)
  196. def GetRevisionDate(self, _revision):
  197. """Returns the given revision's date in ISO-8601 format (which contains the
  198. time zone)."""
  199. # TODO(floitsch): get the time-stamp of the given revision and not just
  200. # the time-stamp of the currently checked out revision.
  201. return self._Capture(['log', '-n', '1', '--format=%ai'])
  202. def _GetDiffFilenames(self, base):
  203. """Returns the names of files modified since base."""
  204. return self._Capture(
  205. # Filter to remove base if it is None.
  206. list(
  207. filter(
  208. bool,
  209. ['-c', 'core.quotePath=false', 'diff', '--name-only', base])
  210. )).split()
  211. def GetSubmoduleStateFromIndex(self):
  212. """Returns a map where keys are submodule names and values are commit
  213. hashes. It reads data from the Git index, so only committed values are
  214. present."""
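# A gitlink entry in `git ls-files -s` output looks like (path illustrative):
#   160000 <40-char commit hash> 0<TAB>third_party/foo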
  215. out = self._Capture(['ls-files', '-s'])
  216. result = {}
  217. for l in out.split('\n'):
  218. if not l.startswith('160000'):
  219. # Not a submodule
  220. continue
  221. (_, commit, _, filepath) = l.split(maxsplit=3)
  222. result[filepath] = commit
  223. return result
  224. def GetSubmoduleDiff(self):
  225. """Returns a map where keys are submodule names and values are tuples of
  226. (old_commit_hash, new_commit_hash). old_commit_hash matches the Git
  227. index, whereas new_commit_hash matches currently checked out commit
  228. hash."""
  229. out = self._Capture([
  230. 'diff',
  231. '--no-prefix',
  232. '--no-ext-diff',
  233. '--no-color',
  234. '--ignore-submodules=dirty',
  235. '--submodule=short',
  236. '-G',
  237. 'Subproject commit',
  238. ])
  239. NO_COMMIT = 40 * '0'
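# Forty zeros is the null object name git shows for a missing side of a
# gitlink diff (e.g. a submodule being added or removed); such entries are
# skipped below.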
  240. committed_submodule = None
  241. checked_submodule = None
  242. filepath = None
  243. state = 0
  244. diff = {}
  245. # Parsing git diff uses simple state machine. States:
  246. # 0 - start state
  247. # 1 - diff file/line detected, ready to process content
  248. # 2 - gitlink detected, ready to process gitlink past and current
  249. # content.
  250. # 3 - past gitlink content detected. It contains a commit hash that's in
  251. # git index.
  252. # 4 - new gitlink content detected. It contains currently checked
  253. # commit. At this point, we have all information needed, and we can
  254. # reset state to 0.
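# A hypothetical gitlink hunk (--no-prefix) as walked by the loop below:
#   diff --git third_party/foo third_party/foo    -> state 1
#   index 1111111..2222222 160000                 -> state 2
#   --- third_party/foo
#   +++ third_party/foo                           -> state 3 (filepath)
#   -Subproject commit 111...111                  -> state 4
#   +Subproject commit 222...222                  -> recorded, back to state 0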
  255. for l in out.split('\n'):
  256. if l.startswith('diff --git'):
  257. # New file detected, reset state.
  258. state = 1
  259. elif state == 1 and l.startswith('index') and l.endswith('160000'):
  260. # We detected gitlink
  261. state = 2
  262. elif state == 2 and l.startswith('+++ '):
  263. # This line contains filename
  264. filepath = l[4:]
  265. state = 3
  266. elif state == 3 and l.startswith('-Subproject commit '):
  267. # This line contains what commit hash Git index expects
  268. # (ls-files).
  269. committed_submodule = l.split(' ')[-1]
  270. state = 4
  271. elif state == 4 and l.startswith('+Subproject commit '):
  272. # This line contains the currently checked out commit for this submodule.
  273. checked_submodule = l.split(' ')[-1]
  274. if NO_COMMIT not in (committed_submodule, checked_submodule):
  275. diff[filepath] = (committed_submodule, checked_submodule)
  276. state = 0
  277. return diff
  278. def diff(self, options, _args, _file_list):
  279. _, revision = gclient_utils.SplitUrlRevision(self.url)
  280. if not revision:
  281. revision = 'refs/remotes/%s/main' % self.remote
  282. self._Run(['-c', 'core.quotePath=false', 'diff', revision], options)
  283. def pack(self, _options, _args, _file_list):
  284. """Generates a patch file which can be applied to the root of the
  285. repository.
  286. The patch file is generated from a diff of the merge base of HEAD and
  287. its upstream branch.
  288. """
  289. try:
  290. merge_base = [self._Capture(['merge-base', 'HEAD', self.remote])]
  291. except subprocess2.CalledProcessError:
  292. merge_base = []
  293. gclient_utils.CheckCallAndFilter(['git', 'diff'] + merge_base,
  294. cwd=self.checkout_path,
  295. filter_fn=GitDiffFilterer(
  296. self.relpath,
  297. print_func=self.Print).Filter)
  298. def _Scrub(self, target, options):
  299. """Scrubs out all changes in the local repo, back to the state of target."""
  300. quiet = []
  301. if not options.verbose:
  302. quiet = ['--quiet']
  303. self._Run(['reset', '--hard', target] + quiet, options)
  304. if options.force and options.delete_unversioned_trees:
  305. # where `target` is a commit that contains both upper and lower case
  306. # versions of the same file on a case insensitive filesystem, we are
  307. # actually in a broken state here. The index will have both 'a' and
  308. # 'A', but only one of them will exist on the disk. To progress, we
  309. # delete everything that status thinks is modified.
  310. output = self._Capture(
  311. ['-c', 'core.quotePath=false', 'status', '--porcelain'],
  312. strip=False)
  313. for line in output.splitlines():
  314. # --porcelain (v1) looks like:
  315. # XY filename
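#   e.g. ' M foo/bar.cc' or '?? foo/', so line[3:] is the path.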
  316. try:
  317. filename = line[3:]
  318. self.Print('_____ Deleting residual after reset: %r.' %
  319. filename)
  320. gclient_utils.rm_file_or_tree(
  321. os.path.join(self.checkout_path, filename))
  322. except OSError:
  323. pass
  324. def _FetchAndReset(self, revision, file_list, options):
  325. """Equivalent to git fetch; git reset."""
  326. self._SetFetchConfig(options)
  327. self._Fetch(options, prune=True, quiet=options.verbose)
  328. revision = self._AutoFetchRef(options, revision)
  329. self._Scrub(revision, options)
  330. if file_list is not None:
  331. files = self._Capture(['-c', 'core.quotePath=false',
  332. 'ls-files']).splitlines()
  333. file_list.extend(
  334. [os.path.join(self.checkout_path, f) for f in files])
  335. def _DisableHooks(self):
  336. hook_dir = os.path.join(self.checkout_path, '.git', 'hooks')
  337. if not os.path.isdir(hook_dir):
  338. return
  339. for f in os.listdir(hook_dir):
  340. if not f.endswith('.sample') and not f.endswith('.disabled'):
  341. disabled_hook_path = os.path.join(hook_dir, f + '.disabled')
  342. if os.path.exists(disabled_hook_path):
  343. os.remove(disabled_hook_path)
  344. os.rename(os.path.join(hook_dir, f), disabled_hook_path)
  345. def _maybe_break_locks(self, options):
  346. """This removes all .lock files from this repo's .git directory, if the
  347. user passed the --break_repo_locks command line flag.
  348. In particular, this will clean up index.lock files, as well as ref lock
  349. files.
  350. """
  351. if options.break_repo_locks:
  352. git_dir = os.path.join(self.checkout_path, '.git')
  353. for path, _, filenames in os.walk(git_dir):
  354. for filename in filenames:
  355. if filename.endswith('.lock'):
  356. to_break = os.path.join(path, filename)
  357. self.Print('breaking lock: %s' % (to_break, ))
  358. try:
  359. os.remove(to_break)
  360. except OSError as ex:
  361. self.Print('FAILED to break lock: %s: %s' %
  362. (to_break, ex))
  363. raise
  364. def _download_topics(self, patch_rev, googlesource_url):
  365. """This method returns new patch_revs to process that have the same topic.
  366. It does the following:
  367. 1. Finds the topic of the Gerrit change specified in the patch_rev.
  368. 2. Find all changes with that topic.
  369. 3. Append patch_rev of the changes with the same topic to the patch_revs
  370. to process.
  371. 4. Returns the new patch_revs to process.
  372. """
  373. patch_revs_to_process = []
  374. # Parse the patch_rev to extract the CL and patchset.
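# e.g. a Gerrit ref such as 'refs/changes/56/123456/7' (illustrative):
# patch_rev_tokens[-2] is then the change number '123456'.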
  375. patch_rev_tokens = patch_rev.split('/')
  376. change = patch_rev_tokens[-2]
  377. # Parse the googlesource_url.
  378. tokens = re.search(r'//(.+).googlesource.com/(.+?)(?:\.git)?$',
  379. googlesource_url)
  380. if not tokens or len(tokens.groups()) != 2:
  381. # googlesource_url is not in the expected format.
  382. return patch_revs_to_process
  383. # parse the gerrit host and repo out of googlesource_url.
  384. host, repo = tokens.groups()[:2]
  385. gerrit_host_url = '%s-review.googlesource.com' % host
  386. # 1. Find the topic of the Gerrit change specified in the patch_rev.
  387. change_object = gerrit_util.GetChange(gerrit_host_url, change)
  388. topic = change_object.get('topic')
  389. if not topic:
  390. # This change has no topic set.
  391. return patch_revs_to_process
  392. # 2. Find all changes with that topic.
  393. changes_with_same_topic = gerrit_util.QueryChanges(
  394. gerrit_host_url, [('topic', topic), ('status', 'open'),
  395. ('repo', repo)],
  396. o_params=['ALL_REVISIONS'])
  397. for c in changes_with_same_topic:
  398. if str(c['_number']) == change:
  399. # This change is already in the patch_rev.
  400. continue
  401. self.Print('Found CL %d with the topic name %s' %
  402. (c['_number'], topic))
  403. # 3. Append patch_rev of the changes with the same topic to the
  404. # patch_revs to process.
  405. curr_rev = c['current_revision']
  406. new_patch_rev = c['revisions'][curr_rev]['ref']
  407. patch_revs_to_process.append(new_patch_rev)
  408. # 4. Return the new patch_revs to process.
  409. return patch_revs_to_process
  410. def _ref_to_remote_ref(self, target_rev):
  411. """Helper function for scm.GIT.RefToRemoteRef with error checking.
  412. Joins the results of scm.GIT.RefToRemoteRef into a string, but raises a
  413. comprehensible error if RefToRemoteRef fails.
  414. Args:
  415. target_rev: a ref somewhere under refs/.
  416. """
  417. tmp_ref = scm.GIT.RefToRemoteRef(target_rev, self.remote)
  418. if not tmp_ref:
  419. raise gclient_utils.Error(
  420. 'Failed to turn target revision %r in repo %r into remote ref' %
  421. (target_rev, self.checkout_path))
  422. return ''.join(tmp_ref)
  423. def apply_patch_ref(self, patch_repo, patch_rev, target_rev, options,
  424. file_list):
  425. # type: (str, str, str, optparse.Values, Collection[str]) -> str
  426. """Apply a patch on top of the revision we're synced at.
  427. The patch ref is given by |patch_repo|@|patch_rev|.
  428. |target_rev| is usually the branch that the |patch_rev| was uploaded against
  429. (e.g. 'refs/heads/main'), but this is not required.
  430. We cherry-pick all commits reachable from |patch_rev| on top of the current
  431. HEAD, excluding those reachable from |target_rev|
  432. (i.e. git cherry-pick target_rev..patch_rev).
  433. Graphically, it looks like this:
  434. ... -> o -> [possibly already landed commits] -> target_rev
  435.                \
  436.                 -> [possibly not yet landed dependent CLs] -> patch_rev
  437. The final checkout state is then:
  438. ... -> HEAD -> [possibly not yet landed dependent CLs] -> patch_rev
  439. After application, if |options.reset_patch_ref| is specified, we soft reset
  440. the cherry-picked changes, keeping them in git index only.
  441. Args:
  442. patch_repo: The patch origin.
  443. e.g. 'https://foo.googlesource.com/bar'
  444. patch_rev: The revision to patch.
  445. e.g. 'refs/changes/1234/34/1'.
  446. target_rev: The revision to use when finding the merge base.
  447. Typically, the branch that the patch was uploaded against.
  448. e.g. 'refs/heads/main' or 'refs/heads/infra/config'.
  449. options: The options passed to gclient.
  450. file_list: A list where modified files will be appended.
  451. """
  452. # Abort any cherry-picks in progress.
  453. try:
  454. self._Capture(['cherry-pick', '--abort'])
  455. except subprocess2.CalledProcessError:
  456. pass
  457. base_rev = self.revinfo(None, None, None)
  458. if not target_rev:
  459. raise gclient_utils.Error(
  460. 'A target revision for the patch must be given')
  461. if target_rev.startswith(('refs/heads/', 'refs/branch-heads')):
  462. # If |target_rev| is in refs/heads/** or refs/branch-heads/**, try
  463. # first to find the corresponding remote ref for it, since
  464. # |target_rev| might point to a local ref which is not up to date
  465. # with the corresponding remote ref.
  466. remote_ref = self._ref_to_remote_ref(target_rev)
  467. self.Print('Trying the corresponding remote ref for %r: %r\n' %
  468. (target_rev, remote_ref))
  469. if scm.GIT.IsValidRevision(self.checkout_path, remote_ref):
  470. # refs/remotes may need to be updated to cleanly cherry-pick
  471. # changes. See https://crbug.com/1255178.
  472. url, _ = gclient_utils.SplitUrlRevision(self.url)
  473. mirror = self._GetMirror(url, options, target_rev, remote_ref)
  474. if mirror:
  475. self._UpdateMirrorIfNotContains(mirror, options, 'branch',
  476. target_rev)
  477. self._Capture(['fetch', '--no-tags', self.remote, target_rev])
  478. target_rev = remote_ref
  479. elif not scm.GIT.IsValidRevision(self.checkout_path, target_rev):
  480. # Fetch |target_rev| if it's not already available.
  481. url, _ = gclient_utils.SplitUrlRevision(self.url)
  482. mirror = self._GetMirror(url, options, target_rev, target_rev)
  483. if mirror:
  484. rev_type = 'branch' if target_rev.startswith(
  485. 'refs/') else 'hash'
  486. self._UpdateMirrorIfNotContains(mirror, options, rev_type,
  487. target_rev)
  488. self._Fetch(options, refspec=target_rev)
  489. patch_revs_to_process = [patch_rev]
  490. if hasattr(options, 'download_topics') and options.download_topics:
  491. patch_revs_to_process_from_topics = self._download_topics(
  492. patch_rev, self.url)
  493. patch_revs_to_process.extend(patch_revs_to_process_from_topics)
  494. self._Capture(['reset', '--hard'])
  495. for pr in patch_revs_to_process:
  496. self.Print('===Applying patch===')
  497. self.Print('Revision to patch is %r @ %r.' % (patch_repo, pr))
  498. self.Print('Current dir is %r' % self.checkout_path)
  499. self._Capture(['fetch', '--no-tags', patch_repo, pr])
  500. pr = self._Capture(['rev-parse', 'FETCH_HEAD'])
  501. if not options.rebase_patch_ref:
  502. self._Capture(['checkout', pr])
  503. # Adjust base_rev to be the first parent of our checked out
  504. # patch ref; This will allow us to correctly extend `file_list`,
  505. # and will show the correct file-list to programs which do `git
  506. # diff --cached` expecting to see the patch diff.
  507. base_rev = self._Capture(['rev-parse', pr + '~'])
  508. else:
  509. target_rev_hash = self._Capture(['rev-parse', target_rev])
  510. commit_list = self._Capture(
  511. ['log', '--oneline', target_rev + '..' + pr])
  512. self.Print('Will cherrypick %r (%r) .. %r on top of %r:' %
  513. (target_rev_hash, target_rev, pr, base_rev))
  514. self.Print(commit_list)
  515. try:
  516. if scm.GIT.IsAncestor(pr,
  517. target_rev,
  518. cwd=self.checkout_path):
  519. if len(patch_revs_to_process) > 1:
  520. # If there are multiple patch_revs_to_process then
  521. # we do not want to invalidate a previous patch
  522. # so throw an error.
  523. raise gclient_utils.Error(
  524. 'patch_rev %s is an ancestor of target_rev %s. This '
  525. 'situation is unsupported when we need to apply multiple '
  526. 'patch_revs: %s' %
  527. (pr, target_rev, patch_revs_to_process))
  528. # If |patch_rev| is an ancestor of |target_rev|, check
  529. # it out.
  530. self._Capture(['checkout', pr])
  531. else:
  532. # If a change was uploaded on top of another change,
  533. # which has already landed, one of the commits in the
  534. # cherry-pick range will be redundant, since it has
  535. # already landed and its changes incorporated in the
  536. # tree. We pass '--keep-redundant-commits' to ignore
  537. # those changes.
  538. self._Capture([
  539. 'cherry-pick', target_rev + '..' + pr,
  540. '--keep-redundant-commits'
  541. ])
  542. except subprocess2.CalledProcessError as e:
  543. self.Print('Failed to apply patch.')
  544. self.Print('Revision to patch was %r @ %r.' %
  545. (patch_repo, pr))
  546. self.Print('Tried to cherrypick %r .. %r on top of %r.' %
  547. (target_rev, pr, base_rev))
  548. self.Print('Current dir is %r' % self.checkout_path)
  549. self.Print('git returned non-zero exit status %s:\n%s' %
  550. (e.returncode, e.stderr.decode('utf-8')))
  551. # Print the current status so that developers know what
  552. # changes caused the patch failure, since git cherry-pick
  553. # doesn't show that information.
  554. self.Print(self._Capture(['status']))
  555. try:
  556. self._Capture(['cherry-pick', '--abort'])
  557. except subprocess2.CalledProcessError:
  558. pass
  559. raise
  560. if file_list is not None:
  561. file_list.extend(self._GetDiffFilenames(base_rev))
  562. latest_commit = self.revinfo(None, None, None)
  563. if options.reset_patch_ref:
  564. self._Capture(['reset', '--soft', base_rev])
  565. return latest_commit
  566. def check_diff(self, previous_commit, files=None):
  567. # type: (str, Optional[List[str]]) -> bool
  568. """Check if a diff exists between the current commit and `previous_commit`.
  569. Returns True if there were diffs or if an error was encountered.
  570. """
  571. cmd = ['diff', previous_commit, '--quiet']
  572. if files:
  573. cmd += ['--'] + files
  574. try:
  575. self._Capture(cmd)
  576. return False
  577. except subprocess2.CalledProcessError as e:
  578. # git diff --quiet exits with 1 if there were diffs.
  579. if e.returncode != 1:
  580. self.Print('git returned non-zero exit status %s:\n%s' %
  581. (e.returncode, e.stderr.decode('utf-8')))
  582. return True
  583. def set_config(f):
  584. def wrapper(*args):
  585. return_val = f(*args)
  586. checkout_path = args[0].checkout_path
  587. if os.path.exists(os.path.join(checkout_path, '.git')):
  588. # The config updates to the project are stored in this list
  589. # and updated consecutively after the reads. The updates
  590. # are done this way because `scm.GIT.GetConfig` caches
  591. # the config file and `scm.GIT.SetConfig` evicts the cache.
  592. # This ensures we don't interleave reads and writes causing
  593. # the cache to set and unset consecutively.
  594. config_updates = []
  595. blame_ignore_revs_cfg = scm.GIT.GetConfig(
  596. checkout_path, 'blame.ignorerevsfile')
  597. blame_ignore_revs_cfg_set = \
  598. blame_ignore_revs_cfg == \
  599. git_common.GIT_BLAME_IGNORE_REV_FILE
  600. blame_ignore_revs_exists = os.path.isfile(
  601. os.path.join(checkout_path,
  602. git_common.GIT_BLAME_IGNORE_REV_FILE))
  603. if not blame_ignore_revs_cfg_set and blame_ignore_revs_exists:
  604. config_updates.append(
  605. ('blame.ignoreRevsFile',
  606. git_common.GIT_BLAME_IGNORE_REV_FILE))
  607. elif blame_ignore_revs_cfg_set and not blame_ignore_revs_exists:
  608. # Some repos may have incorrect config set, unset this
  609. # value. Moreover, some repositories may decide to remove
  610. # git_common.GIT_BLAME_IGNORE_REV_FILE, which would break
  611. # blame without this check.
  612. # See https://crbug.com/368562244 for more details.
  613. config_updates.append(('blame.ignoreRevsFile', None))
  614. ignore_submodules = scm.GIT.GetConfig(checkout_path,
  615. 'diff.ignoresubmodules',
  616. None, 'local')
  617. if not ignore_submodules:
  618. config_updates.append(('diff.ignoreSubmodules', 'dirty'))
  619. elif ignore_submodules != 'dirty':
  620. warning_message = (
  621. "diff.ignoreSubmodules is not set to 'dirty' "
  622. "for this repository.\n"
  623. "This may cause unexpected behavior with submodules; "
  624. "see //docs/git_submodules.md\n"
  625. "Consider setting the config:\n"
  626. "\tgit config diff.ignoreSubmodules dirty\n"
  627. "or disable this warning by setting the "
  628. "GCLIENT_SUPPRESS_SUBMODULE_WARNING\n"
  629. "environment variable to 1.")
  630. if os.environ.get(
  631. 'GCLIENT_SUPPRESS_SUBMODULE_WARNING') != '1':
  632. gclient_utils.AddWarning(warning_message)
  633. if scm.GIT.GetConfig(checkout_path,
  634. 'fetch.recursesubmodules') != 'off':
  635. config_updates.append(('fetch.recurseSubmodules', 'off'))
  636. if scm.GIT.GetConfig(checkout_path,
  637. 'push.recursesubmodules') != 'off':
  638. # The default is off, but if user sets submodules.recurse to
  639. # on, this becomes on too. We never want to push submodules
  640. # for gclient managed repositories. Push, even if a no-op,
  641. # will increase `git cl upload` latency.
  642. config_updates.append(('push.recurseSubmodules', 'off'))
  643. for update in config_updates:
  644. scm.GIT.SetConfig(checkout_path,
  645. update[0],
  646. update[1],
  647. modify_all=True)
  648. return return_val
  649. return wrapper
  650. @set_config
  651. def update(self, options, args, file_list):
  652. """Runs git to update or transparently checkout the working copy.
  653. All updated files will be appended to file_list.
  654. Raises:
  655. Error: if can't get URL for relative path.
  656. """
  657. if args:
  658. raise gclient_utils.Error("Unsupported argument(s): %s" %
  659. ",".join(args))
  660. url, deps_revision = gclient_utils.SplitUrlRevision(self.url)
  661. revision = deps_revision
  662. managed = True
  663. if options.revision:
  664. # Override the revision number.
  665. revision = str(options.revision)
  666. if revision == 'unmanaged':
  667. # Check again for a revision in case an initial ref was specified
  668. # in the url, for example bla.git@refs/heads/custombranch
  669. revision = deps_revision
  670. managed = False
  671. if not revision:
  672. # If a dependency is not pinned, track the default remote branch.
  673. revision = scm.GIT.GetRemoteHeadRef(self.checkout_path, self.url,
  674. self.remote)
  675. if revision.startswith('origin/'):
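# e.g. 'origin/main' becomes 'refs/remotes/origin/main'.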
  676. revision = 'refs/remotes/' + revision
  677. if managed and platform.system() == 'Windows':
  678. self._DisableHooks()
  679. printed_path = False
  680. verbose = []
  681. if options.verbose:
  682. self.Print('_____ %s at %s' % (self.relpath, revision),
  683. timestamp=False)
  684. verbose = ['--verbose']
  685. printed_path = True
  686. revision_ref = revision
  687. if ':' in revision:
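# e.g. a pin of the form 'refs/branch-heads/1234:<sha>' (illustrative) splits
# into the ref to fetch and the exact commit to sync to.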
  688. revision_ref, _, revision = revision.partition(':')
  689. if revision_ref.startswith('refs/branch-heads'):
  690. options.with_branch_heads = True
  691. mirror = self._GetMirror(url, options, revision, revision_ref)
  692. if mirror:
  693. url = mirror.mirror_path
  694. remote_ref = scm.GIT.RefToRemoteRef(revision, self.remote)
  695. if remote_ref:
  696. # Rewrite remote refs to their local equivalents.
  697. revision = ''.join(remote_ref)
  698. rev_type = "branch"
  699. elif revision.startswith('refs/heads/'):
  700. # Local branch? We probably don't want to support, since DEPS should
  701. # always specify branches as they are in the upstream repo.
  702. rev_type = "branch"
  703. else:
  704. # A hash may also be a tag; we only make the distinction at checkout.
  705. # Any non-branch ref (e.g. refs/changes/*) is treated the same as
  706. # a hash.
  707. rev_type = "hash"
  708. # If we are going to introduce a new project, there is a possibility
  709. # that we are syncing back to a state where the project was originally a
  710. # sub-project rolled by DEPS (realistic case: crossing the Blink merge
  711. # point syncing backwards, when Blink was a DEPS entry and not part of
  712. # src.git). In such case, we might have a backup of the former .git
  713. # folder, which can be used to avoid re-fetching the entire repo again
  714. # (useful for bisects).
  715. backup_dir = self.GetGitBackupDirPath()
  716. target_dir = os.path.join(self.checkout_path, '.git')
  717. if os.path.exists(backup_dir) and not os.path.exists(target_dir):
  718. gclient_utils.safe_makedirs(self.checkout_path)
  719. os.rename(backup_dir, target_dir)
  720. # Reset to a clean state
  721. self._Scrub('HEAD', options)
  722. if (not os.path.exists(self.checkout_path) or
  723. (os.path.isdir(self.checkout_path)
  724. and not os.path.exists(os.path.join(self.checkout_path, '.git')))):
  725. if mirror:
  726. self._UpdateMirrorIfNotContains(mirror, options, rev_type,
  727. revision)
  728. try:
  729. self.current_revision = self._Clone(revision, url, options)
  730. except subprocess2.CalledProcessError as e:
  731. logging.warning('Clone failed due to: %s', e)
  732. self._DeleteOrMove(options.force)
  733. self.current_revision = self._Clone(revision, url, options)
  734. if file_list is not None:
  735. files = self._Capture(
  736. ['-c', 'core.quotePath=false', 'ls-files']).splitlines()
  737. file_list.extend(
  738. [os.path.join(self.checkout_path, f) for f in files])
  739. if mirror:
  740. self._Capture(
  741. ['remote', 'set-url', '--push', 'origin', mirror.url])
  742. if not verbose:
  743. # Make the output a little prettier. It's nice to have some
  744. # whitespace between projects when cloning.
  745. self.Print('')
  746. return self._Capture(['rev-parse', '--verify', 'HEAD'])
  747. if mirror:
  748. self._Capture(['remote', 'set-url', '--push', 'origin', mirror.url])
  749. if not managed:
  750. self._SetFetchConfig(options)
  751. self.Print('________ unmanaged solution; skipping %s' %
  752. self.relpath)
  753. return self._Capture(['rev-parse', '--verify', 'HEAD'])
  754. # Special case for rev_type = hash. If we use submodules, we can check
  755. # information already.
  756. if rev_type == 'hash':
  757. if self.current_revision == revision:
  758. if verbose:
  759. self.Print('Using submodule information to skip check')
  760. if options.reset or options.force:
  761. self._Scrub('HEAD', options)
  762. return revision
  763. self._maybe_break_locks(options)
  764. if mirror:
  765. self._UpdateMirrorIfNotContains(mirror, options, rev_type, revision)
  766. # See if the url has changed (the unittests use git://foo for the url,
  767. # let that through).
  768. current_url = scm.GIT.GetConfig(self.checkout_path,
  769. f'remote.{self.remote}.url',
  770. default='')
  771. return_early = False
  772. # TODO(maruel): Delete url != 'git://foo' since it's just to make the
  773. # unit test pass. (and update the comment above)
  774. strp_url = url[:-4] if url.endswith('.git') else url
  775. strp_current_url = current_url[:-4] if current_url.endswith(
  776. '.git') else current_url
  777. if (strp_current_url.rstrip('/') != strp_url.rstrip('/')
  778. and url != 'git://foo'):
  779. self.Print('_____ switching %s from %s to new upstream %s' %
  780. (self.relpath, current_url, url))
  781. if not (options.force or options.reset):
  782. # Make sure it's clean
  783. self._CheckClean(revision)
  784. # Switch over to the new upstream
  785. self._Run(['remote', 'set-url', self.remote, url], options)
  786. if mirror:
  787. if git_cache.Mirror.CacheDirToUrl(current_url.rstrip(
  788. '/')) == git_cache.Mirror.CacheDirToUrl(
  789. url.rstrip('/')):
  790. # Reset alternates when the cache dir is updated.
  791. with open(
  792. os.path.join(self.checkout_path, '.git', 'objects',
  793. 'info', 'alternates'), 'w') as fh:
  794. fh.write(os.path.join(url, 'objects'))
  795. else:
  796. # Because we use Git alternates, our existing repository
  797. # is not self-contained. It's possible that the new
  798. # alternate doesn't have all the objects that the
  799. # current repository needs. Instead of blindly hoping that the
  800. # new alternate contains all necessary objects, keep the
  801. # old alternate and just append a new one on top of it.
  802. with open(
  803. os.path.join(self.checkout_path, '.git', 'objects',
  804. 'info', 'alternates'), 'a') as fh:
  805. fh.write("\n" + os.path.join(url, 'objects'))
  806. current_revision = self._EnsureValidHeadObjectOrCheckout(
  807. revision, options, url)
  808. self._FetchAndReset(revision, file_list, options)
  809. return_early = True
  810. else:
  811. current_revision = self._EnsureValidHeadObjectOrCheckout(
  812. revision, options, url)
  813. if return_early:
  814. return current_revision or self._Capture(
  815. ['rev-parse', '--verify', 'HEAD'])
  816. cur_branch = self._GetCurrentBranch()
  817. # Cases:
  818. # 0) HEAD is detached. Probably from our initial clone.
  819. # - make sure HEAD is contained by a named ref, then update.
  820. # Cases 1-4. HEAD is a branch.
  821. # 1) current branch is not tracking a remote branch
  822. # - try to rebase onto the new hash or branch
  823. # 2) current branch is tracking a remote branch with local committed
  824. # changes, but the DEPS file switched to point to a hash
  825. # - rebase those changes on top of the hash
  826. # 3) current branch is tracking a remote branch w/or w/out changes, and
  827. # no DEPS switch
  828. # - see if we can FF, if not, prompt the user for rebase, merge, or stop
  829. # 4) current branch is tracking a remote branch, but DEPS switches to a
  830. # different remote branch, and a) current branch has no local changes,
  831. # and --force: - checkout new branch b) current branch has local
  832. # changes, and --force and --reset: - checkout new branch c) otherwise
  833. # exit
  834. # GetUpstreamBranch returns something like 'refs/remotes/origin/main'
  835. # for a tracking branch or 'main' if not a tracking branch (it's based
  836. # on a specific rev/hash) or it returns None if it couldn't find an
  837. # upstream
  838. if cur_branch is None:
  839. upstream_branch = None
  840. current_type = "detached"
  841. logging.debug("Detached HEAD")
  842. else:
  843. upstream_branch = scm.GIT.GetUpstreamBranch(self.checkout_path)
  844. if not upstream_branch or not upstream_branch.startswith(
  845. 'refs/remotes'):
  846. current_type = "hash"
  847. logging.debug(
  848. "Current branch is not tracking an upstream (remote)"
  849. " branch.")
  850. elif upstream_branch.startswith('refs/remotes'):
  851. current_type = "branch"
  852. else:
  853. raise gclient_utils.Error('Invalid Upstream: %s' %
  854. upstream_branch)
  855. self._SetFetchConfig(options)
  856. # Fetch upstream if we don't already have |revision|.
  857. if not scm.GIT.IsValidRevision(
  858. self.checkout_path, revision, sha_only=True):
  859. self._Fetch(options, prune=options.force)
  860. if not scm.GIT.IsValidRevision(
  861. self.checkout_path, revision, sha_only=True):
  862. # Update the remotes first so we have all the refs.
  863. remote_output = scm.GIT.Capture(['remote'] + verbose +
  864. ['update'],
  865. cwd=self.checkout_path)
  866. if verbose:
  867. self.Print(remote_output)
  868. revision = self._AutoFetchRef(options, revision)
  869. # This is a big hammer, debatable if it should even be here...
  870. if options.force or options.reset:
  871. target = 'HEAD'
  872. if options.upstream and upstream_branch:
  873. target = upstream_branch
  874. self._Scrub(target, options)
  875. if current_type == 'detached':
  876. # case 0
  877. # We just did a Scrub, this is as clean as it's going to get. In
  878. # particular if HEAD is a commit that contains two versions of the
  879. # same file on a case-insensitive filesystem (e.g. 'a' and 'A'),
  880. # there's no way to actually "Clean" the checkout; that commit is
  881. # uncheckoutable on this system. The best we can do is carry forward
  882. # to the checkout step.
  883. if not (options.force or options.reset):
  884. self._CheckClean(revision)
  885. self._CheckDetachedHead(revision, options)
  886. if not current_revision:
  887. current_revision = self._Capture(
  888. ['rev-list', '-n', '1', 'HEAD'])
  889. if current_revision == revision:
  890. self.Print('Up-to-date; skipping checkout.')
  891. else:
  892. # 'git checkout' may need to overwrite existing untracked files.
  893. # Allow it only when nuclear options are enabled.
  894. self._Checkout(
  895. options,
  896. revision,
  897. force=(options.force and options.delete_unversioned_trees),
  898. quiet=True,
  899. )
  900. if not printed_path:
  901. self.Print('_____ %s at %s' % (self.relpath, revision),
  902. timestamp=False)
  903. elif current_type == 'hash':
  904. # case 1
  905. # Can't find a merge-base since we don't know our upstream. That
  906. # makes this command VERY likely to produce a rebase failure. For
  907. # now we assume origin is our upstream since that's what the old
  908. # behavior was.
  909. upstream_branch = self.remote
  910. if options.revision or deps_revision:
  911. upstream_branch = revision
  912. self._AttemptRebase(upstream_branch,
  913. file_list,
  914. options,
  915. printed_path=printed_path,
  916. merge=options.merge)
  917. printed_path = True
  918. elif rev_type == 'hash':
  919. # case 2
  920. self._AttemptRebase(upstream_branch,
  921. file_list,
  922. options,
  923. newbase=revision,
  924. printed_path=printed_path,
  925. merge=options.merge)
  926. printed_path = True
  927. elif remote_ref and ''.join(remote_ref) != upstream_branch:
  928. # case 4
  929. new_base = ''.join(remote_ref)
  930. if not printed_path:
  931. self.Print('_____ %s at %s' % (self.relpath, revision),
  932. timestamp=False)
  933. switch_error = (
  934. "Could not switch upstream branch from %s to %s\n" %
  935. (upstream_branch, new_base) +
  936. "Please use --force or merge or rebase manually:\n" +
  937. "cd %s; git rebase %s\n" % (self.checkout_path, new_base) +
  938. "OR git checkout -b <some new branch> %s" % new_base)
  939. force_switch = False
  940. if options.force:
  941. try:
  942. self._CheckClean(revision)
  943. # case 4a
  944. force_switch = True
  945. except gclient_utils.Error as e:
  946. if options.reset:
  947. # case 4b
  948. force_switch = True
  949. else:
  950. switch_error = '%s\n%s' % (str(e), switch_error)
  951. if force_switch:
  952. self.Print("Switching upstream branch from %s to %s" %
  953. (upstream_branch, new_base))
  954. switch_branch = 'gclient_' + remote_ref[1]
  955. self._Capture(['branch', '-f', switch_branch, new_base])
  956. self._Checkout(options, switch_branch, force=True, quiet=True)
  957. else:
  958. # case 4c
  959. raise gclient_utils.Error(switch_error)
  960. else:
  961. # case 3 - the default case
  962. rebase_files = self._GetDiffFilenames(upstream_branch)
  963. if verbose:
  964. self.Print('Trying fast-forward merge to branch : %s' %
  965. upstream_branch)
  966. try:
  967. merge_args = ['merge']
  968. if options.merge:
  969. merge_args.append('--ff')
  970. else:
  971. merge_args.append('--ff-only')
  972. merge_args.append(upstream_branch)
  973. merge_output = self._Capture(merge_args)
  974. except subprocess2.CalledProcessError as e:
  975. rebase_files = []
  976. if re.search(b'fatal: Not possible to fast-forward, aborting.',
  977. e.stderr):
  978. if not printed_path:
  979. self.Print('_____ %s at %s' % (self.relpath, revision),
  980. timestamp=False)
  981. printed_path = True
  982. while True:
  983. if not options.auto_rebase:
  984. try:
  985. action = self._AskForData(
  986. 'Cannot %s, attempt to rebase? '
  987. '(y)es / (q)uit / (s)kip : ' %
  988. ('merge' if options.merge else
  989. 'fast-forward merge'), options)
  990. except ValueError:
  991. raise gclient_utils.Error('Invalid Character')
  992. if options.auto_rebase or re.match(
  993. r'yes|y', action, re.I):
  994. self._AttemptRebase(upstream_branch,
  995. rebase_files,
  996. options,
  997. printed_path=printed_path,
  998. merge=False)
  999. printed_path = True
  1000. break
  1001. if re.match(r'quit|q', action, re.I):
  1002. raise gclient_utils.Error(
  1003. "Can't fast-forward, please merge or "
  1004. "rebase manually.\n"
  1005. "cd %s && git " % self.checkout_path +
  1006. "rebase %s" % upstream_branch)
  1007. if re.match(r'skip|s', action, re.I):
  1008. self.Print('Skipping %s' % self.relpath)
  1009. return
  1010. self.Print('Input not recognized')
  1011. elif re.match(
  1012. b"error: Your local changes to '.*' would be "
  1013. b"overwritten by merge. Aborting.\nPlease, commit your "
  1014. b"changes or stash them before you can merge.\n",
  1015. e.stderr):
  1016. if not printed_path:
  1017. self.Print('_____ %s at %s' % (self.relpath, revision),
  1018. timestamp=False)
  1019. printed_path = True
  1020. raise gclient_utils.Error(e.stderr.decode('utf-8'))
  1021. else:
  1022. # Some other problem happened with the merge
  1023. logging.error("Error during fast-forward merge in %s!" %
  1024. self.relpath)
  1025. self.Print(e.stderr.decode('utf-8'))
  1026. raise
  1027. else:
  1028. # Fast-forward merge was successful
  1029. if not re.match('Already up-to-date.', merge_output) or verbose:
  1030. if not printed_path:
  1031. self.Print('_____ %s at %s' % (self.relpath, revision),
  1032. timestamp=False)
  1033. printed_path = True
  1034. self.Print(merge_output.strip())
  1035. if not verbose:
  1036. # Make the output a little prettier. It's nice to have
  1037. # some whitespace between projects when syncing.
  1038. self.Print('')
  1039. if file_list is not None:
  1040. file_list.extend(
  1041. [os.path.join(self.checkout_path, f) for f in rebase_files])
  1042. # If the rebase generated a conflict, abort and ask user to fix
  1043. if self._IsRebasing():
  1044. raise gclient_utils.Error(
  1045. '\n____ %s at %s\n'
  1046. '\nConflict while rebasing this branch.\n'
  1047. 'Fix the conflict and run gclient again.\n'
  1048. 'See man git-rebase for details.\n' % (self.relpath, revision))
  1049. # If --reset and --delete_unversioned_trees are specified, remove any
  1050. # untracked directories.
  1051. if options.reset and options.delete_unversioned_trees:
  1052. # GIT.CaptureStatus() uses 'git diff' to compare to a specific SHA1
  1053. # (the merge-base by default), so doesn't include untracked files.
  1054. # So we use 'git ls-files --directory --others --exclude-standard'
  1055. # here directly.
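# Untracked directories are printed with a trailing '/', which is what the
# loop below filters on.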
  1056. paths = scm.GIT.Capture([
  1057. '-c', 'core.quotePath=false', 'ls-files', '--directory',
  1058. '--others', '--exclude-standard'
  1059. ], self.checkout_path)
  1060. for path in (p for p in paths.splitlines() if p.endswith('/')):
  1061. full_path = os.path.join(self.checkout_path, path)
  1062. if not os.path.islink(full_path):
  1063. self.Print('_____ removing unversioned directory %s' % path)
  1064. gclient_utils.rmtree(full_path)
  1065. if not current_revision:
  1066. current_revision = self._Capture(['rev-parse', '--verify', 'HEAD'])
  1067. if verbose:
  1068. self.Print(f'Checked out revision {current_revision}',
  1069. timestamp=False)
  1070. return current_revision
  1071. def revert(self, options, _args, file_list):
  1072. """Reverts local modifications.
  1073. All reverted files will be appended to file_list.
  1074. """
  1075. if not os.path.isdir(self.checkout_path):
  1076. # revert won't work if the directory doesn't exist. It needs to
  1077. # checkout instead.
  1078. self.Print('_____ %s is missing, syncing instead' % self.relpath)
  1079. # Don't reuse the args.
  1080. return self.update(options, [], file_list)
  1081. default_rev = "refs/heads/main"
  1082. if options.upstream:
  1083. if self._GetCurrentBranch():
  1084. upstream_branch = scm.GIT.GetUpstreamBranch(self.checkout_path)
  1085. default_rev = upstream_branch or default_rev
  1086. _, deps_revision = gclient_utils.SplitUrlRevision(self.url)
  1087. if not deps_revision:
  1088. deps_revision = default_rev
  1089. if deps_revision.startswith('refs/heads/'):
  1090. deps_revision = deps_revision.replace('refs/heads/',
  1091. self.remote + '/')
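# e.g. 'refs/heads/main' becomes 'origin/main' when the remote is 'origin'.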
  1092. try:
  1093. deps_revision = self.GetUsableRev(deps_revision, options)
  1094. except NoUsableRevError as e:
  1095. # If the DEPS entry's url and hash changed, try to update the
  1096. # origin. See also http://crbug.com/520067.
  1097. logging.warning(
  1098. "Couldn't find usable revision; retrying with an update instead: %s",
  1099. str(e))
  1100. return self.update(options, [], file_list)
  1101. if file_list is not None:
  1102. files = self._GetDiffFilenames(deps_revision)
  1103. self._Scrub(deps_revision, options)
  1104. self._Run(['clean', '-f', '-d'], options)
  1105. if file_list is not None:
  1106. file_list.extend(
  1107. [os.path.join(self.checkout_path, f) for f in files])
  1108. def revinfo(self, _options, _args, _file_list):
  1109. """Returns revision"""
  1110. return self._Capture(['rev-parse', 'HEAD'])
  1111. def runhooks(self, options, args, file_list):
  1112. self.status(options, args, file_list)
  1113. def status(self, options, _args, file_list):
  1114. """Display status information."""
  1115. if not os.path.isdir(self.checkout_path):
  1116. self.Print('________ couldn\'t run status in %s:\n'
  1117. 'The directory does not exist.' % self.checkout_path)
  1118. else:
  1119. merge_base = []
  1120. if self.url:
  1121. _, base_rev = gclient_utils.SplitUrlRevision(self.url)
  1122. if base_rev:
  1123. if base_rev.startswith('refs/'):
  1124. base_rev = self._ref_to_remote_ref(base_rev)
  1125. merge_base = [base_rev]
  1126. self._Run(['-c', 'core.quotePath=false', 'diff', '--name-status'] +
  1127. merge_base,
  1128. options,
  1129. always_show_header=options.verbose)
  1130. if file_list is not None:
  1131. files = self._GetDiffFilenames(
  1132. merge_base[0] if merge_base else None)
  1133. file_list.extend(
  1134. [os.path.join(self.checkout_path, f) for f in files])
  1135. def GetUsableRev(self, rev, options):
  1136. """Finds a useful revision for this repository."""
  1137. sha1 = None
  1138. if not os.path.isdir(self.checkout_path):
  1139. raise NoUsableRevError(
  1140. 'This is not a git repo, so we cannot get a usable rev.')
  1141. if scm.GIT.IsValidRevision(cwd=self.checkout_path, rev=rev):
  1142. sha1 = rev
  1143. else:
  1144. # May exist in origin, but we don't have it yet, so fetch and look
  1145. # again.
  1146. self._Fetch(options)
  1147. if scm.GIT.IsValidRevision(cwd=self.checkout_path, rev=rev):
  1148. sha1 = rev
  1149. if not sha1:
  1150. raise NoUsableRevError(
  1151. 'Hash %s does not appear to be a valid hash in this repo.' %
  1152. rev)
  1153. return sha1
  1154. def GetGitBackupDirPath(self):
  1155. """Returns the path where the .git folder for the current project can be
  1156. staged/restored. Use case: subproject moved from DEPS <-> outer project."""
  1157. return os.path.join(self._root_dir,
  1158. 'old_' + self.relpath.replace(os.sep, '_')) + '.git'
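# For illustration: with root_dir='/src' and relpath='third_party/foo', the
# backup path built above is '/src/old_third_party_foo.git' (os.sep replaced
# by '_', '.git' appended). A hypothetical standalone equivalent:
def _example_backup_dir_path(root_dir, relpath):
    import os
    return os.path.join(root_dir, 'old_' + relpath.replace(os.sep, '_')) + '.git'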
  1159. def _GetMirror(self, url, options, revision=None, revision_ref=None):
  1160. """Get a git_cache.Mirror object for the argument url."""
  1161. if not self.cache_dir:
  1162. return None
  1163. mirror_kwargs = {
  1164. 'print_func': self.filter,
  1165. 'refs': [],
  1166. 'commits': [],
  1167. }
  1168. if hasattr(options, 'with_branch_heads') and options.with_branch_heads:
  1169. mirror_kwargs['refs'].append('refs/branch-heads/*')
  1170. elif revision_ref and revision_ref.startswith('refs/branch-heads/'):
  1171. mirror_kwargs['refs'].append(revision_ref)
  1172. if hasattr(options, 'with_tags') and options.with_tags:
  1173. mirror_kwargs['refs'].append('refs/tags/*')
  1174. elif revision_ref and revision_ref.startswith('refs/tags/'):
  1175. mirror_kwargs['refs'].append(revision_ref)
  1176. if revision and not revision.startswith('refs/'):
  1177. mirror_kwargs['commits'].append(revision)
  1178. return git_cache.Mirror(url, **mirror_kwargs)
  1179. def _UpdateMirrorIfNotContains(self, mirror, options, rev_type, revision):
  1180. """Update a git mirror unless it already contains a hash revision.
  1181. This raises an error if a SHA-1 revision isn't present even after
  1182. fetching from the remote.
  1183. """
  1184. # 'hash' is overloaded and can refer to a SHA-1 hash or refs/changes/*.
  1185. is_sha = gclient_utils.IsFullGitSha(revision)
  1186. if rev_type == 'hash' and is_sha and mirror.contains_revision(revision):
  1187. if options.verbose:
  1188. self.Print('skipping mirror update, it has rev=%s already' %
  1189. revision,
  1190. timestamp=False)
  1191. return
  1192. if getattr(options, 'shallow', False):
  1193. depth = 10000
  1194. else:
  1195. depth = None
  1196. mirror.populate(verbose=False,
  1197. bootstrap=not getattr(options, 'no_bootstrap', False),
  1198. depth=depth,
  1199. lock_timeout=getattr(options, 'lock_timeout', 0))
  1200. # Make sure we've actually fetched the revision we want, but only if it
  1201. # was specified as an explicit commit hash.
  1202. if rev_type == 'hash' and is_sha and not mirror.contains_revision(
  1203. revision):
  1204. raise gclient_utils.Error(f'Failed to fetch {revision}.')
  1205. def _Clone(self, revision, url, options):
  1206. """Clone a git repository from the given URL.
  1207. Once we've cloned the repo, we checkout a working branch if the
  1208. specified revision is a branch head. If it is a tag or a specific
  1209. commit, then we leave HEAD detached as it makes future updates simpler
  1210. -- in this case the user should first create a new branch or switch to
  1211. an existing branch before making changes in the repo."""
  1212. if self.print_outbuf:
  1213. print_stdout = True
  1214. filter_fn = None
  1215. else:
  1216. print_stdout = False
  1217. filter_fn = self.filter
  1218. if not options.verbose:
  1219. # git clone doesn't seem to insert a newline properly before
  1220. # printing to stdout
  1221. self.Print('')
  1222. # If the parent directory does not exist, Git clone on Windows will not
  1223. # create it, so we need to do it manually.
  1224. parent_dir = os.path.dirname(self.checkout_path)
  1225. gclient_utils.safe_makedirs(parent_dir)
  1226. if hasattr(options, 'no_history') and options.no_history:
  1227. self._Run(['init', self.checkout_path], options, cwd=self._root_dir)
  1228. self._Run(['remote', 'add', 'origin', url], options)
  1229. revision = self._AutoFetchRef(options, revision, depth=1)
  1230. remote_ref = scm.GIT.RefToRemoteRef(revision, self.remote)
  1231. self._Checkout(options, ''.join(remote_ref or revision), quiet=True)
  1232. else:
  1233. cfg = gclient_utils.DefaultIndexPackConfig(url)
  1234. clone_cmd = cfg + ['clone', '--no-checkout', '--progress']
  1235. if self.cache_dir:
  1236. clone_cmd.append('--shared')
  1237. if options.verbose:
  1238. clone_cmd.append('--verbose')
  1239. clone_cmd.append(url)
  1240. tmp_dir = tempfile.mkdtemp(prefix='_gclient_%s_' %
  1241. os.path.basename(self.checkout_path),
  1242. dir=parent_dir)
  1243. clone_cmd.append(tmp_dir)
  1244. try:
  1245. self._Run(clone_cmd,
  1246. options,
  1247. cwd=self._root_dir,
  1248. retry=True,
  1249. print_stdout=print_stdout,
  1250. filter_fn=filter_fn)
  1251. logging.debug(
  1252. 'Cloned into temporary dir, moving to checkout_path')
  1253. gclient_utils.safe_makedirs(self.checkout_path)
  1254. gclient_utils.safe_rename(
  1255. os.path.join(tmp_dir, '.git'),
  1256. os.path.join(self.checkout_path, '.git'))
  1257. except:
  1258. traceback.print_exc(file=self.out_fh)
  1259. raise
  1260. finally:
  1261. if os.listdir(tmp_dir):
  1262. self.Print('_____ removing non-empty tmp dir %s' % tmp_dir)
  1263. gclient_utils.rmtree(tmp_dir)
  1264. self._SetFetchConfig(options)
  1265. self._Fetch(options, prune=options.force)
  1266. revision = self._AutoFetchRef(options, revision)
  1267. remote_ref = scm.GIT.RefToRemoteRef(revision, self.remote)
  1268. self._Checkout(options, ''.join(remote_ref or revision), quiet=True)
  1269. if self._GetCurrentBranch() is None:
  1270. # Squelch git's very verbose detached HEAD warning and use our own
  1271. self.Print((
  1272. 'Checked out %s to a detached HEAD. Before making any commits\n'
'in this repo, you should use \'git checkout <branch>\' to switch\n'
  1274. 'to an existing branch or use \'git checkout %s -b <branch>\' to\n'
  1275. 'create a new branch for your work.') % (revision, self.remote))
  1276. return revision
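# Minimal sketch of the clone strategy above, assuming only the standard
# library: clone with --no-checkout into a temporary sibling directory, then
# move just the .git directory into the final checkout path so a failed clone
# never leaves a half-populated checkout behind. Helper name is illustrative.
def _example_clone_via_tmp_dir(url, checkout_path):
    import os
    import shutil
    import subprocess
    import tempfile
    parent_dir = os.path.dirname(checkout_path)
    os.makedirs(parent_dir, exist_ok=True)
    tmp_dir = tempfile.mkdtemp(prefix='_clone_', dir=parent_dir)
    try:
        subprocess.check_call(['git', 'clone', '--no-checkout', url, tmp_dir])
        os.makedirs(checkout_path, exist_ok=True)
        shutil.move(os.path.join(tmp_dir, '.git'),
                    os.path.join(checkout_path, '.git'))
    finally:
        shutil.rmtree(tmp_dir, ignore_errors=True)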
  1277. def _AskForData(self, prompt, options):
  1278. if options.jobs > 1:
  1279. self.Print(prompt)
  1280. raise gclient_utils.Error("Background task requires input. Rerun "
  1281. "gclient with --jobs=1 so that\n"
  1282. "interaction is possible.")
  1283. return gclient_utils.AskForData(prompt)
  1284. def _AttemptRebase(self,
  1285. upstream,
  1286. files,
  1287. options,
  1288. newbase=None,
  1289. branch=None,
  1290. printed_path=False,
  1291. merge=False):
  1292. """Attempt to rebase onto either upstream or, if specified, newbase."""
  1293. if files is not None:
  1294. files.extend(self._GetDiffFilenames(upstream))
  1295. revision = upstream
  1296. if newbase:
  1297. revision = newbase
  1298. action = 'merge' if merge else 'rebase'
  1299. if not printed_path:
  1300. self.Print('_____ %s : Attempting %s onto %s...' %
  1301. (self.relpath, action, revision))
  1302. printed_path = True
  1303. else:
  1304. self.Print('Attempting %s onto %s...' % (action, revision))
  1305. if merge:
  1306. merge_output = self._Capture(['merge', revision])
  1307. if options.verbose:
  1308. self.Print(merge_output)
  1309. return
  1310. # Build the rebase command here using the args
  1311. # git rebase [options] [--onto <newbase>] <upstream> [<branch>]
  1312. rebase_cmd = ['rebase']
  1313. if options.verbose:
  1314. rebase_cmd.append('--verbose')
  1315. if newbase:
  1316. rebase_cmd.extend(['--onto', newbase])
  1317. rebase_cmd.append(upstream)
  1318. if branch:
  1319. rebase_cmd.append(branch)
  1320. try:
  1321. rebase_output = scm.GIT.Capture(rebase_cmd, cwd=self.checkout_path)
  1322. except subprocess2.CalledProcessError as e:
  1323. if (re.match(
  1324. br'cannot rebase: you have unstaged changes', e.stderr
  1325. ) or re.match(
  1326. br'cannot rebase: your index contains uncommitted changes',
  1327. e.stderr)):
  1328. while True:
  1329. rebase_action = self._AskForData(
  1330. 'Cannot rebase because of unstaged changes.\n'
  1331. '\'git reset --hard HEAD\' ?\n'
  1332. 'WARNING: destroys any uncommitted work in your current branch!'
  1333. ' (y)es / (q)uit / (s)how : ', options)
  1334. if re.match(r'yes|y', rebase_action, re.I):
  1335. self._Scrub('HEAD', options)
  1336. # Should this be recursive?
  1337. rebase_output = scm.GIT.Capture(rebase_cmd,
  1338. cwd=self.checkout_path)
  1339. break
  1340. if re.match(r'quit|q', rebase_action, re.I):
  1341. raise gclient_utils.Error(
  1342. "Please merge or rebase manually\n"
  1343. "cd %s && git " % self.checkout_path +
  1344. "%s" % ' '.join(rebase_cmd))
  1345. if re.match(r'show|s', rebase_action, re.I):
  1346. self.Print('%s' % e.stderr.decode('utf-8').strip())
  1347. continue
self.Print('Input not recognized')
  1349. continue
  1350. elif re.search(br'^CONFLICT', e.stdout, re.M):
  1351. raise gclient_utils.Error(
  1352. "Conflict while rebasing this branch.\n"
  1353. "Fix the conflict and run gclient again.\n"
  1354. "See 'man git-rebase' for details.\n")
  1355. else:
  1356. self.Print(e.stdout.decode('utf-8').strip())
  1357. self.Print('Rebase produced error output:\n%s' %
  1358. e.stderr.decode('utf-8').strip())
  1359. raise gclient_utils.Error(
  1360. "Unrecognized error, please merge or rebase "
  1361. "manually.\ncd %s && git " % self.checkout_path +
  1362. "%s" % ' '.join(rebase_cmd))
  1363. self.Print(rebase_output.strip())
  1364. if not options.verbose:
  1365. # Make the output a little prettier. It's nice to have some
  1366. # whitespace between projects when syncing.
  1367. self.Print('')
  1368. def _EnsureValidHeadObjectOrCheckout(self, revision, options, url):
# Special-case handling if all three conditions are met:
# * the mirrors have recently changed, but the DEPS destination remains the same,
# * the git histories of the mirrors conflict,
# * the git cache is used.
# This manifests itself as the current checkout having an invalid HEAD commit
# on most git operations. Since the git cache is used, just delete the .git
# folder and re-create it by cloning.
  1375. try:
  1376. return self._Capture(['rev-list', '-n', '1', 'HEAD'])
  1377. except subprocess2.CalledProcessError as e:
  1378. if (b'fatal: bad object HEAD' in e.stderr and self.cache_dir
  1379. and self.cache_dir in url):
  1380. self.Print(
('Likely due to a DEPS change with git cache_dir, '
'the current commit points to a no-longer-existing object.\n'
  1383. '%s' % e))
  1384. self._DeleteOrMove(options.force)
  1385. return self._Clone(revision, url, options)
  1386. raise
  1387. def _IsRebasing(self):
# Check for any of REBASE-i/REBASE-m/REBASE/AM. Unfortunately git
# doesn't have a plumbing command to determine whether a rebase is in
# progress, so for now emulate (more or less) git-rebase.sh /
# git-completion.bash.
  1392. g = os.path.join(self.checkout_path, '.git')
  1393. return (os.path.isdir(os.path.join(g, "rebase-merge"))
  1394. or os.path.isdir(os.path.join(g, "rebase-apply")))
  1395. def _CheckClean(self, revision):
  1396. lockfile = os.path.join(self.checkout_path, ".git", "index.lock")
  1397. if os.path.exists(lockfile):
  1398. raise gclient_utils.Error(
  1399. '\n____ %s at %s\n'
  1400. '\tYour repo is locked, possibly due to a concurrent git process.\n'
  1401. '\tIf no git executable is running, then clean up %r and try again.\n'
  1402. % (self.relpath, revision, lockfile))
  1403. # Ensure that the tree is clean.
  1404. if scm.GIT.Capture([
  1405. 'status', '--porcelain', '--untracked-files=no',
  1406. '--ignore-submodules'
  1407. ],
  1408. cwd=self.checkout_path):
  1409. raise gclient_utils.Error(
  1410. '\n____ %s at %s\n'
  1411. '\tYou have uncommitted changes.\n'
  1412. '\tcd into %s, run git status to see changes,\n'
  1413. '\tand commit, stash, or reset.\n' %
  1414. (self.relpath, revision, self.relpath))
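# Standalone sketch of the cleanliness check above, assuming only the
# standard library: an index.lock file suggests another git process may be
# active, and any 'status --porcelain' output means uncommitted changes.
# Name is illustrative, not part of gclient_scm.
def _example_is_clean(checkout_path):
    import os
    import subprocess
    if os.path.exists(os.path.join(checkout_path, '.git', 'index.lock')):
        return False
    out = subprocess.check_output(
        ['git', 'status', '--porcelain', '--untracked-files=no',
         '--ignore-submodules'],
        cwd=checkout_path)
    return not out.strip()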
  1415. def _CheckDetachedHead(self, revision, _options):
# HEAD is detached. Make sure it is safe to move away from (i.e., it is
# reachable from a named ref). If not, error out -- most likely a rebase
# is in progress; try to detect that so we can give a better error.
  1419. try:
  1420. scm.GIT.Capture(['name-rev', '--no-undefined', 'HEAD'],
  1421. cwd=self.checkout_path)
  1422. except subprocess2.CalledProcessError:
  1423. # Commit is not contained by any rev. See if the user is rebasing:
  1424. if self._IsRebasing():
  1425. # Punt to the user
  1426. raise gclient_utils.Error(
  1427. '\n____ %s at %s\n'
  1428. '\tAlready in a conflict, i.e. (no branch).\n'
  1429. '\tFix the conflict and run gclient again.\n'
  1430. '\tOr to abort run:\n\t\tgit-rebase --abort\n'
  1431. '\tSee man git-rebase for details.\n' %
  1432. (self.relpath, revision))
  1433. # Let's just save off the commit so we can proceed.
  1434. name = ('saved-by-gclient-' +
  1435. self._Capture(['rev-parse', '--short', 'HEAD']))
  1436. self._Capture(['branch', '-f', name])
  1437. self.Print(
  1438. '_____ found an unreferenced commit and saved it as \'%s\'' %
  1439. name)
  1440. def _GetCurrentBranch(self):
  1441. # Returns name of current branch or None for detached HEAD
  1442. branch = self._Capture(['rev-parse', '--abbrev-ref=strict', 'HEAD'])
  1443. if branch == 'HEAD':
  1444. return None
  1445. return branch
  1446. def _Capture(self, args, **kwargs):
  1447. set_git_dir = 'cwd' not in kwargs
  1448. kwargs.setdefault('cwd', self.checkout_path)
  1449. kwargs.setdefault('stderr', subprocess2.PIPE)
  1450. strip = kwargs.pop('strip', True)
  1451. env = scm.GIT.ApplyEnvVars(kwargs)
# If an explicit cwd isn't set, then default to the .git/ subdir so we
# get stricter behavior. This can be useful in cases of slight
# corruption -- we don't accidentally go on to corrupt parent git
# checkouts too. See https://crbug.com/1000825 for an example.
  1456. if set_git_dir:
  1457. env.setdefault(
  1458. 'GIT_DIR',
  1459. os.path.abspath(os.path.join(self.checkout_path, '.git')))
  1460. kwargs.setdefault('env', env)
  1461. ret = git_common.run(*args, **kwargs)
  1462. if strip:
  1463. ret = ret.strip()
  1464. self.Print('Finished running: %s %s' % ('git', ' '.join(args)))
  1465. return ret
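# Sketch of the GIT_DIR pinning described above, using only the standard
# library: pointing GIT_DIR at this checkout's .git keeps git from walking up
# into (and possibly acting on) a parent checkout if this one is slightly
# corrupted. Name is illustrative.
def _example_capture_with_pinned_git_dir(checkout_path, args):
    import os
    import subprocess
    env = os.environ.copy()
    env['GIT_DIR'] = os.path.abspath(os.path.join(checkout_path, '.git'))
    return subprocess.check_output(['git'] + list(args),
                                   cwd=checkout_path,
                                   env=env).decode('utf-8').strip()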
  1466. def _Checkout(self, options, ref, force=False, quiet=None):
  1467. """Performs a 'git-checkout' operation.
  1468. Args:
  1469. options: The configured option set
ref: (str) The branch/commit to checkout
force: (bool) Whether the checkout should pass '--force'
quiet: (bool/None) Whether or not the checkout should pass '--quiet'; if
'None', the behavior is inferred from 'options.verbose'.
  1473. Returns: (str) The output of the checkout operation
  1474. """
  1475. if quiet is None:
  1476. quiet = (not options.verbose)
  1477. checkout_args = ['checkout']
  1478. if force:
  1479. checkout_args.append('--force')
  1480. if quiet:
  1481. checkout_args.append('--quiet')
  1482. checkout_args.append(ref)
  1483. return self._Capture(checkout_args)
  1484. def _Fetch(self,
  1485. options,
  1486. remote=None,
  1487. prune=False,
  1488. quiet=False,
  1489. refspec=None,
  1490. depth=None):
  1491. cfg = gclient_utils.DefaultIndexPackConfig(self.url)
# When updating, the ref is modified to be a remote ref
# (e.g. refs/heads/NAME becomes refs/remotes/REMOTE/NAME).
  1494. # Try to reverse that mapping.
  1495. original_ref = scm.GIT.RemoteRefToRef(refspec, self.remote)
  1496. if original_ref:
  1497. refspec = original_ref + ':' + refspec
  1498. # When a mirror is configured, it only fetches
  1499. # refs/{heads,branch-heads,tags}/*.
  1500. # If asked to fetch other refs, we must fetch those directly from
  1501. # the repository, and not from the mirror.
  1502. if not original_ref.startswith(
  1503. ('refs/heads/', 'refs/branch-heads/', 'refs/tags/')):
  1504. remote, _ = gclient_utils.SplitUrlRevision(self.url)
  1505. fetch_cmd = cfg + [
  1506. 'fetch',
  1507. remote or self.remote,
  1508. ]
  1509. if refspec:
  1510. fetch_cmd.append(refspec)
  1511. if prune:
  1512. fetch_cmd.append('--prune')
  1513. if options.verbose:
  1514. fetch_cmd.append('--verbose')
  1515. if not hasattr(options, 'with_tags') or not options.with_tags:
  1516. fetch_cmd.append('--no-tags')
  1517. elif quiet:
  1518. fetch_cmd.append('--quiet')
  1519. if depth:
  1520. fetch_cmd.append('--depth=' + str(depth))
  1521. self._Run(fetch_cmd, options, show_header=options.verbose, retry=True)
  1522. def _SetFetchConfig(self, options):
  1523. """Adds, and optionally fetches, "branch-heads" and "tags" refspecs
  1524. if requested."""
  1525. if options.force or options.reset:
  1526. try:
  1527. scm.GIT.SetConfig(self.checkout_path,
  1528. f'remote.{self.remote}.fetch',
  1529. modify_all=True)
  1530. scm.GIT.SetConfig(
  1531. self.checkout_path, f'remote.{self.remote}.fetch',
  1532. f'+refs/heads/*:refs/remotes/{self.remote}/*')
  1533. except subprocess2.CalledProcessError as e:
  1534. # If exit code was 5, it means we attempted to unset a config
  1535. # that didn't exist. Ignore it.
  1536. if e.returncode != 5:
  1537. raise
  1538. if hasattr(options, 'with_branch_heads') and options.with_branch_heads:
  1539. scm.GIT.SetConfig(
  1540. self.checkout_path,
  1541. f'remote.{self.remote}.fetch',
  1542. '+refs/branch-heads/*:refs/remotes/branch-heads/*',
  1543. value_pattern='^\\+refs/branch-heads/\\*:.*$',
  1544. modify_all=True)
  1545. if hasattr(options, 'with_tags') and options.with_tags:
  1546. scm.GIT.SetConfig(self.checkout_path,
  1547. f'remote.{self.remote}.fetch',
  1548. '+refs/tags/*:refs/tags/*',
  1549. value_pattern='^\\+refs/tags/\\*:.*$',
  1550. modify_all=True)
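# For illustration, with remote='origin' the refspecs written above amount to:
#   remote.origin.fetch = +refs/heads/*:refs/remotes/origin/*
#   remote.origin.fetch = +refs/branch-heads/*:refs/remotes/branch-heads/*  (with_branch_heads)
#   remote.origin.fetch = +refs/tags/*:refs/tags/*                          (with_tags)
# A rough standalone equivalent using plain 'git config --replace-all'
# (the remote name 'origin' is assumed; helper name is illustrative):
def _example_add_fetch_refspecs(checkout_path, with_branch_heads, with_tags):
    import subprocess
    def _set(value, pattern):
        subprocess.check_call(
            ['git', 'config', '--replace-all', 'remote.origin.fetch',
             value, pattern], cwd=checkout_path)
    _set('+refs/heads/*:refs/remotes/origin/*', r'\+refs/heads/\*:.*')
    if with_branch_heads:
        _set('+refs/branch-heads/*:refs/remotes/branch-heads/*',
             r'\+refs/branch-heads/\*:.*')
    if with_tags:
        _set('+refs/tags/*:refs/tags/*', r'\+refs/tags/\*:.*')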
  1551. def _AutoFetchRef(self, options, revision, depth=None):
  1552. """Attempts to fetch |revision| if not available in local repo.
  1553. Returns possibly updated revision."""
  1554. if not scm.GIT.IsValidRevision(self.checkout_path, revision):
  1555. self._Fetch(options, refspec=revision, depth=depth)
  1556. revision = self._Capture(['rev-parse', 'FETCH_HEAD'])
  1557. return revision
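# Standalone sketch of the fetch-on-demand above, assuming only the standard
# library and approximating scm.GIT.IsValidRevision with 'git cat-file -e':
# if the revision is unknown locally, fetch it and resolve FETCH_HEAD.
def _example_auto_fetch_rev(checkout_path, remote, revision):
    import subprocess
    known = subprocess.call(
        ['git', 'cat-file', '-e', revision + '^{commit}'],
        cwd=checkout_path,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL) == 0
    if not known:
        subprocess.check_call(['git', 'fetch', remote, revision],
                              cwd=checkout_path)
        revision = subprocess.check_output(
            ['git', 'rev-parse', 'FETCH_HEAD'],
            cwd=checkout_path).decode('utf-8').strip()
    return revision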
  1558. def _Run(self, args, options, **kwargs):
  1559. # Disable 'unused options' warning | pylint: disable=unused-argument
  1560. kwargs.setdefault('cwd', self.checkout_path)
  1561. kwargs.setdefault('filter_fn', self.filter)
  1562. kwargs.setdefault('show_header', True)
  1563. env = scm.GIT.ApplyEnvVars(kwargs)
  1564. cmd = ['git'] + args
  1565. gclient_utils.CheckCallAndFilter(cmd, env=env, **kwargs)
  1566. class CipdPackage(object):
  1567. """A representation of a single CIPD package."""
  1568. def __init__(self, name, version, authority_for_subdir):
  1569. self._authority_for_subdir = authority_for_subdir
  1570. self._name = name
  1571. self._version = version
  1572. @property
  1573. def authority_for_subdir(self):
  1574. """Whether this package has authority to act on behalf of its subdir.
  1575. Some operations should only be performed once per subdirectory. A package
  1576. that has authority for its subdirectory is the only package that should
  1577. perform such operations.
  1578. Returns:
  1579. bool; whether this package has subdir authority.
  1580. """
  1581. return self._authority_for_subdir
  1582. @property
  1583. def name(self):
  1584. return self._name
  1585. @property
  1586. def version(self):
  1587. return self._version
  1588. class CipdRoot(object):
  1589. """A representation of a single CIPD root."""
  1590. def __init__(self, root_dir, service_url, log_level=None):
  1591. self._all_packages = set()
  1592. self._mutator_lock = threading.Lock()
  1593. self._packages_by_subdir = collections.defaultdict(list)
  1594. self._root_dir = root_dir
  1595. self._service_url = service_url
  1596. self._resolved_packages = None
  1597. self._log_level = log_level or 'error'
  1598. def add_package(self, subdir, package, version):
  1599. """Adds a package to this CIPD root.
  1600. As far as clients are concerned, this grants both root and subdir authority
  1601. to packages arbitrarily. (The implementation grants root authority to the
  1602. first package added and subdir authority to the first package added for that
  1603. subdir, but clients should not depend on or expect that behavior.)
  1604. Args:
  1605. subdir: str; relative path to where the package should be installed from
  1606. the cipd root directory.
  1607. package: str; the cipd package name.
  1608. version: str; the cipd package version.
  1609. Returns:
  1610. CipdPackage; the package that was created and added to this root.
  1611. """
  1612. with self._mutator_lock:
  1613. cipd_package = CipdPackage(package, version,
  1614. not self._packages_by_subdir[subdir])
  1615. self._all_packages.add(cipd_package)
  1616. self._packages_by_subdir[subdir].append(cipd_package)
  1617. return cipd_package
  1618. def packages(self, subdir):
  1619. """Get the list of configured packages for the given subdir."""
  1620. return list(self._packages_by_subdir[subdir])
  1621. def resolved_packages(self):
  1622. if not self._resolved_packages:
  1623. self._resolved_packages = self.ensure_file_resolve()
  1624. return self._resolved_packages
  1625. def clobber(self):
  1626. """Remove the .cipd directory.
  1627. This is useful for forcing ensure to redownload and reinitialize all
  1628. packages.
  1629. """
  1630. with self._mutator_lock:
  1631. cipd_cache_dir = os.path.join(self.root_dir, '.cipd')
  1632. try:
  1633. gclient_utils.rmtree(os.path.join(cipd_cache_dir))
  1634. except OSError:
  1635. if os.path.exists(cipd_cache_dir):
  1636. raise
  1637. def expand_package_name(self, package_name_string, **kwargs):
  1638. """Run `cipd expand-package-name`.
CIPD package names can be declared with placeholder variables
such as '${platform}'; this command returns the package name
with those variables resolved. The resolution is based on the host
the command is executing on.
  1643. """
  1644. kwargs.setdefault('stderr', subprocess2.PIPE)
  1645. cmd = ['cipd', 'expand-package-name', package_name_string]
  1646. ret = subprocess2.check_output(cmd, **kwargs).decode('utf-8')
  1647. return ret.strip()
  1648. @contextlib.contextmanager
  1649. def _create_ensure_file(self):
  1650. try:
  1651. contents = '$ParanoidMode CheckPresence\n'
  1652. # TODO(crbug/1329641): Remove once cipd packages have been updated
  1653. # to always be created in copy mode.
  1654. contents += '$OverrideInstallMode copy\n\n'
  1655. for subdir, packages in sorted(self._packages_by_subdir.items()):
  1656. contents += '@Subdir %s\n' % subdir
  1657. for package in sorted(packages, key=lambda p: p.name):
  1658. contents += '%s %s\n' % (package.name, package.version)
  1659. contents += '\n'
  1660. ensure_file = None
  1661. with tempfile.NamedTemporaryFile(suffix='.ensure',
  1662. delete=False,
  1663. mode='wb') as ensure_file:
  1664. ensure_file.write(contents.encode('utf-8', 'replace'))
  1665. yield ensure_file.name
  1666. finally:
  1667. if ensure_file is not None and os.path.exists(ensure_file.name):
  1668. os.remove(ensure_file.name)
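# For illustration, the ensure file generated above for
# {'third_party/tools': [('infra/tools/foo/${platform}', 'latest')]} would be:
#   $ParanoidMode CheckPresence
#   $OverrideInstallMode copy
#
#   @Subdir third_party/tools
#   infra/tools/foo/${platform} latest
#
# A hypothetical standalone builder for the same format:
def _example_ensure_file_contents(packages_by_subdir):
    # packages_by_subdir: {subdir: [(name, version), ...]} -- assumed shape.
    contents = '$ParanoidMode CheckPresence\n'
    contents += '$OverrideInstallMode copy\n\n'
    for subdir, packages in sorted(packages_by_subdir.items()):
        contents += '@Subdir %s\n' % subdir
        for name, version in sorted(packages):
            contents += '%s %s\n' % (name, version)
        contents += '\n'
    return contents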
  1669. def ensure(self):
  1670. """Run `cipd ensure`."""
  1671. with self._mutator_lock:
  1672. with self._create_ensure_file() as ensure_file:
  1673. cmd = [
  1674. 'cipd',
  1675. 'ensure',
  1676. '-log-level',
  1677. self._log_level,
  1678. '-root',
  1679. self.root_dir,
  1680. '-ensure-file',
  1681. ensure_file,
  1682. ]
  1683. gclient_utils.CheckCallAndFilter(cmd,
  1684. print_stdout=True,
  1685. show_header=True)
  1686. @contextlib.contextmanager
  1687. def _create_ensure_file_for_resolve(self):
  1688. try:
  1689. contents = '$ResolvedVersions %s\n' % os.devnull
  1690. for subdir, packages in sorted(self._packages_by_subdir.items()):
  1691. contents += '@Subdir %s\n' % subdir
  1692. for package in sorted(packages, key=lambda p: p.name):
  1693. contents += '%s %s\n' % (package.name, package.version)
  1694. contents += '\n'
  1695. ensure_file = None
  1696. with tempfile.NamedTemporaryFile(suffix='.ensure',
  1697. delete=False,
  1698. mode='wb') as ensure_file:
  1699. ensure_file.write(contents.encode('utf-8', 'replace'))
  1700. yield ensure_file.name
  1701. finally:
  1702. if ensure_file is not None and os.path.exists(ensure_file.name):
  1703. os.remove(ensure_file.name)
  1704. def _create_resolved_file(self):
  1705. return tempfile.NamedTemporaryFile(suffix='.resolved',
  1706. delete=False,
  1707. mode='wb')
  1708. def ensure_file_resolve(self):
  1709. """Run `cipd ensure-file-resolve`."""
  1710. with self._mutator_lock:
  1711. with self._create_resolved_file() as output_file:
  1712. with self._create_ensure_file_for_resolve() as ensure_file:
  1713. cmd = [
  1714. 'cipd',
  1715. 'ensure-file-resolve',
  1716. '-log-level',
  1717. self._log_level,
  1718. '-ensure-file',
  1719. ensure_file,
  1720. '-json-output',
  1721. output_file.name,
  1722. ]
  1723. gclient_utils.CheckCallAndFilter(cmd,
  1724. print_stdout=False,
  1725. show_header=False)
  1726. with open(output_file.name) as f:
  1727. output_json = json.load(f)
  1728. return output_json.get('result', {})
  1729. def run(self, command):
  1730. if command == 'update':
  1731. self.ensure()
  1732. elif command == 'revert':
  1733. self.clobber()
  1734. self.ensure()
  1735. def created_package(self, package):
  1736. """Checks whether this root created the given package.
  1737. Args:
  1738. package: CipdPackage; the package to check.
  1739. Returns:
  1740. bool; whether this root created the given package.
  1741. """
  1742. return package in self._all_packages
  1743. @property
  1744. def root_dir(self):
  1745. return self._root_dir
  1746. @property
  1747. def service_url(self):
  1748. return self._service_url
  1749. class CipdWrapper(SCMWrapper):
  1750. """Wrapper for CIPD.
  1751. Currently only supports chrome-infra-packages.appspot.com.
  1752. """
  1753. name = 'cipd'
  1754. def __init__(self,
  1755. url=None,
  1756. root_dir=None,
  1757. relpath=None,
  1758. out_fh=None,
  1759. out_cb=None,
  1760. root=None,
  1761. package=None):
  1762. super(CipdWrapper, self).__init__(url=url,
  1763. root_dir=root_dir,
  1764. relpath=relpath,
  1765. out_fh=out_fh,
  1766. out_cb=out_cb)
  1767. assert root.created_package(package)
  1768. self._package = package
  1769. self._root = root
  1770. #override
  1771. def GetCacheMirror(self):
  1772. return None
  1773. #override
  1774. def GetActualRemoteURL(self, options):
  1775. return self._root.service_url
  1776. #override
  1777. def DoesRemoteURLMatch(self, options):
  1778. del options
  1779. return True
  1780. def revert(self, options, args, file_list):
  1781. """Does nothing.
  1782. CIPD packages should be reverted at the root by running
  1783. `CipdRoot.run('revert')`.
  1784. """
  1785. def diff(self, options, args, file_list):
  1786. """CIPD has no notion of diffing."""
  1787. def pack(self, options, args, file_list):
  1788. """CIPD has no notion of diffing."""
  1789. def revinfo(self, options, args, file_list):
  1790. """Grab the instance ID."""
  1791. try:
  1792. tmpdir = tempfile.mkdtemp()
  1793. # Attempt to get instance_id from the root resolved cache.
  1794. # Resolved cache will not match on any CIPD packages with
  1795. # variables such as ${platform}, they will fall back to
  1796. # the slower method below.
  1797. resolved = self._root.resolved_packages()
  1798. if resolved:
  1799. # CIPD uses POSIX separators across all platforms, so
  1800. # replace any Windows separators.
  1801. path_split = self.relpath.replace(os.sep, "/").split(":")
  1802. if len(path_split) > 1:
  1803. src_path, package = path_split
  1804. if src_path in resolved:
  1805. for resolved_package in resolved[src_path]:
  1806. if package == resolved_package.get(
  1807. 'pin', {}).get('package'):
  1808. return resolved_package.get(
  1809. 'pin', {}).get('instance_id')
  1810. describe_json_path = os.path.join(tmpdir, 'describe.json')
  1811. cmd = [
  1812. 'cipd', 'describe', self._package.name, '-log-level', 'error',
  1813. '-version', self._package.version, '-json-output',
  1814. describe_json_path
  1815. ]
  1816. gclient_utils.CheckCallAndFilter(cmd)
  1817. with open(describe_json_path) as f:
  1818. describe_json = json.load(f)
  1819. return describe_json.get('result', {}).get('pin',
  1820. {}).get('instance_id')
  1821. finally:
  1822. gclient_utils.rmtree(tmpdir)
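# Sketch of the resolved-cache lookup above, with a hypothetical 'resolved'
# dict shaped like cipd's ensure-file-resolve JSON 'result', e.g.
#   {'src/tools': [{'pin': {'package': 'infra/foo', 'instance_id': 'abc'}}]}
# and a relpath of the form 'src/tools:infra/foo'. Names are illustrative.
def _example_instance_id_from_resolved(resolved, relpath):
    import os
    path_split = relpath.replace(os.sep, '/').split(':')
    if len(path_split) != 2:
        return None
    src_path, package = path_split
    for entry in resolved.get(src_path, []):
        pin = entry.get('pin', {})
        if pin.get('package') == package:
            return pin.get('instance_id')
    return None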
  1823. def status(self, options, args, file_list):
  1824. pass
  1825. def update(self, options, args, file_list):
  1826. """Does nothing.
  1827. CIPD packages should be updated at the root by running
  1828. `CipdRoot.run('update')`.
  1829. """
  1830. class GcsRoot(object):
  1831. """Root to keep track of all GCS objects, per checkout"""
  1832. def __init__(self, root_dir):
  1833. self._mutator_lock = threading.Lock()
  1834. self._root_dir = root_dir
  1835. # Populated when the DEPS file is parsed
  1836. # The objects here have not yet been downloaded and written into
  1837. # the .gcs_entries file
  1838. self._parsed_objects = {}
  1839. # .gcs_entries keeps track of which GCS deps have already been installed
  1840. # Maps checkout_name -> {GCS dep path -> [object_name]}
  1841. # This file is in the same directory as .gclient
  1842. self._gcs_entries_file = os.path.join(self._root_dir, '.gcs_entries')
  1843. # Contents of the .gcs_entries file
  1844. self._gcs_entries = self.read_gcs_entries()
  1845. @property
  1846. def root_dir(self):
  1847. return self._root_dir
  1848. def add_object(self, checkout_name, dep_path, object_name):
  1849. """Records the object in the _parsed_objects variable
  1850. This does not actually download the object"""
  1851. with self._mutator_lock:
  1852. if checkout_name not in self._parsed_objects:
  1853. self._parsed_objects[checkout_name] = {}
  1854. if dep_path not in self._parsed_objects[checkout_name]:
  1855. self._parsed_objects[checkout_name][dep_path] = [object_name]
  1856. else:
  1857. self._parsed_objects[checkout_name][dep_path].append(
  1858. object_name)
  1859. def read_gcs_entries(self):
  1860. """Reads .gcs_entries file and loads the content into _gcs_entries"""
  1861. if not os.path.exists(self._gcs_entries_file):
  1862. return {}
  1863. with open(self._gcs_entries_file, 'r') as f:
  1864. content = f.read().rstrip()
  1865. if content:
  1866. return json.loads(content)
  1867. return {}
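# The .gcs_entries file read above is plain JSON mapping
# checkout_name -> {dep path -> [object names]}, for example (illustrative):
#   {"src": {"src/third_party/blob": ["object-v1.tar.gz"]}}
# A minimal round-trip sketch using the same format:
def _example_write_gcs_entries(path, entries):
    import json
    with open(path, 'w') as f:
        f.write(json.dumps(entries, indent=2))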
  1868. def resolve_objects(self, checkout_name):
  1869. """Updates .gcs_entries with objects in _parsed_objects
  1870. This should only be called after the objects have been downloaded
  1871. and extracted."""
  1872. with self._mutator_lock:
  1873. object_dict = self._parsed_objects.get(checkout_name)
  1874. if not object_dict:
  1875. return
  1876. self._gcs_entries[checkout_name] = object_dict
  1877. with open(self._gcs_entries_file, 'w') as f:
  1878. f.write(json.dumps(self._gcs_entries, indent=2))
  1879. self._parsed_objects[checkout_name] = {}
  1880. def clobber_deps_with_updated_objects(self, checkout_name):
  1881. """Clobber the path if an object or GCS dependency is removed/added
  1882. This must be called before the GCS dependencies are
  1883. downloaded and extracted."""
  1884. with self._mutator_lock:
  1885. parsed_object_dict = self._parsed_objects.get(checkout_name, {})
  1886. parsed_paths = set(parsed_object_dict.keys())
  1887. resolved_object_dict = self._gcs_entries.get(checkout_name, {})
  1888. resolved_paths = set(resolved_object_dict.keys())
  1889. # If any GCS deps are added or removed entirely, clobber that path
  1890. intersected_paths = parsed_paths.intersection(resolved_paths)
  1891. # If any objects within a GCS dep are added/removed, clobber its
  1892. # extracted contents and relevant gcs dotfiles
  1893. for path in intersected_paths:
  1894. resolved_objects = resolved_object_dict[path]
  1895. parsed_objects = parsed_object_dict[path]
  1896. full_path = os.path.join(self.root_dir, path)
  1897. if (len(resolved_objects) != len(parsed_objects)
  1898. and os.path.exists(full_path)):
  1899. self.clobber_tar_content_names(full_path)
  1900. self.clobber_hash_files(full_path)
  1901. self.clobber_migration_files(full_path)
  1902. def clobber_tar_content_names(self, entry_directory):
  1903. """Delete paths written in .*_content_names files"""
  1904. content_names_files = glob.glob(
  1905. os.path.join(entry_directory, '.*_content_names'))
  1906. for file in content_names_files:
  1907. with open(file, 'r') as f:
  1908. names = json.loads(f.read().strip())
  1909. for name in names:
  1910. name_path = os.path.join(entry_directory, name)
  1911. if os.path.isdir(
  1912. name_path) or not os.path.exists(name_path):
  1913. continue
  1914. os.remove(os.path.join(entry_directory, name))
  1915. os.remove(file)
  1916. def clobber_hash_files(self, entry_directory):
  1917. files = glob.glob(os.path.join(entry_directory, '.*_hash'))
  1918. for f in files:
  1919. os.remove(f)
  1920. def clobber_migration_files(self, entry_directory):
  1921. files = glob.glob(os.path.join(entry_directory,
  1922. '.*_is_first_class_gcs'))
  1923. for f in files:
  1924. os.remove(f)
  1925. def clobber(self):
  1926. """Remove all dep path gcs items and clear .gcs_entries"""
  1927. for _, objects_dict in self._gcs_entries.items():
  1928. for dep_path, _ in objects_dict.items():
  1929. full_path = os.path.join(self.root_dir, dep_path)
  1930. self.clobber_tar_content_names(full_path)
  1931. self.clobber_hash_files(full_path)
  1932. self.clobber_migration_files(full_path)
  1933. if os.path.exists(self._gcs_entries_file):
  1934. os.remove(self._gcs_entries_file)
  1935. with self._mutator_lock:
  1936. self._gcs_entries = {}
  1937. class GcsWrapper(SCMWrapper):
  1938. """Wrapper for GCS.
  1939. Currently only supports content from Google Cloud Storage.
  1940. """
  1941. name = 'gcs'
  1942. def __init__(self,
  1943. url=None,
  1944. root_dir=None,
  1945. relpath=None,
  1946. out_fh=None,
  1947. out_cb=None):
  1948. super(GcsWrapper, self).__init__(url=url,
  1949. root_dir=root_dir,
  1950. relpath=relpath,
  1951. out_fh=out_fh,
  1952. out_cb=out_cb)
  1953. #override
  1954. def GetCacheMirror(self):
  1955. return None
  1956. #override
  1957. def GetActualRemoteURL(self, options):
  1958. return None
  1959. #override
  1960. def DoesRemoteURLMatch(self, options):
  1961. del options
  1962. return True
  1963. def revert(self, options, args, file_list):
  1964. """Does nothing."""
  1965. def diff(self, options, args, file_list):
  1966. """GCS has no notion of diffing."""
  1967. def pack(self, options, args, file_list):
  1968. """GCS has no notion of diffing."""
  1969. def revinfo(self, options, args, file_list):
  1970. """Does nothing"""
  1971. def status(self, options, args, file_list):
  1972. pass
  1973. def update(self, options, args, file_list):
  1974. """Does nothing."""
  1975. class CogWrapper(SCMWrapper):
  1976. """Wrapper for Cog, all no-op."""
  1977. name = 'cog'
  1978. def __init__(self):
  1979. super(CogWrapper, self).__init__()
  1980. #override
  1981. def GetCacheMirror(self):
  1982. return None
  1983. #override
  1984. def GetActualRemoteURL(self, options):
  1985. return None
  1986. #override
  1987. def GetSubmoduleDiff(self):
  1988. return None
  1989. #override
  1990. def GetSubmoduleStateFromIndex(self):
  1991. return None
  1992. #override
  1993. def DoesRemoteURLMatch(self, options):
  1994. del options
  1995. return True
  1996. def revert(self, options, args, file_list):
  1997. pass
  1998. def diff(self, options, args, file_list):
  1999. pass
  2000. def pack(self, options, args, file_list):
  2001. pass
  2002. def revinfo(self, options, args, file_list):
  2003. pass
  2004. def status(self, options, args, file_list):
  2005. pass
  2006. def update(self, options, args, file_list):
  2007. pass