
Switch to 4-space indent

Leave the recipes/ code at 2-space indent to match the rest of the recipes
project in other repos.
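
yapf resolves the nearest .style.yapf above each file it formats, so the
one-line recipes/.style.yapf added here (see the diffstat below) keeps that
subtree at 2-space indent while the root config moves to 4. A hedged sketch of
how to check this, not part of the recorded commands; the recipes path is
hypothetical and used only for illustration:

./yapf --diff recipes/recipe_modules/foo/api.py   # 2-space files: expect no diff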

Reformatted using:
files=( $(
	git ls-tree -r --name-only HEAD | \
		grep -Ev -e '^(third_party|recipes)/' | \
		grep '\.py$';
	git grep -l '#!/usr/bin/env.*python' | grep -v '\.py$'
) )
parallel ./yapf -i -- "${files[@]}"
~/chromiumos/chromite/contrib/reflow_overlong_comments "${files[@]}"
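
To spot files that still had lines over the 80-column limit after the
automated pass (a hedged sketch using standard tools, not one of the recorded
commands; it reuses the files array from above):

grep -nE '.{81,}' "${files[@]}" | cut -d: -f1 | sort -u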

The files that still had strings that were too long were manually
reformatted, since the fixes were easy and there were only a few of them:
autoninja.py
clang_format.py
download_from_google_storage.py
fix_encoding.py
gclient_utils.py
git_cache.py
git_common.py
git_map_branches.py
git_reparent_branch.py
gn.py
my_activity.py
owners_finder.py
presubmit_canned_checks.py
reclient_helper.py
reclientreport.py
roll_dep.py
rustfmt.py
siso.py
split_cl.py
subcommand.py
subprocess2.py
swift_format.py
upload_to_google_storage.py

These files still had lines (strings) that were too long, so the pylint
warnings were suppressed with a TODO (see the note after this list):
auth.py
gclient.py
gclient_eval.py
gclient_paths.py
gclient_scm.py
gerrit_util.py
git_cl.py
presubmit_canned_checks.py
presubmit_support.py
scm.py
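
The suppression is a module-level "# pylint: disable=line-too-long" under a
short TODO comment, as the auth.py hunk later in this change shows. A hedged
one-liner (not part of the recorded commands) to list the modules carrying it:

git grep -l 'pylint: disable=line-too-long' -- '*.py'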

Change-Id: Ia6535c4f2c48d46b589ec1e791dde6c6b2ea858f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/4836379
Commit-Queue: Josip Sokcevic <sokcevic@chromium.org>
Auto-Submit: Mike Frysinger <vapier@chromium.org>
Reviewed-by: Josip Sokcevic <sokcevic@chromium.org>
Mike Frysinger, 1 year ago
Commit 124bb8e53c
100 files changed: 17,530 insertions(+), 16,792 deletions(-)
.style.yapf: +0 -1
PRESUBMIT.py: +133 -119
auth.py: +98 -92
autoninja.py: +239 -223
autosiso.py: +41 -41
bazel.py: +28 -28
bootstrap/bootstrap.py: +216 -213
breakpad.py: +0 -1
clang_format.py: +64 -61
compile_single_file.py: +45 -45
cpplint.py: +384 -379
cpplint_chromium.py: +8 -7
cros: +71 -69
detect_host_arch.py: +46 -43
download_from_google_storage.py: +592 -533
fetch.py: +236 -216
fix_encoding.py: +225 -223
gclient-new-workdir.py: +90 -77
gclient.py: +888 -851
gclient_eval.py: +599 -600
gclient_paths.py: +113 -110
gclient_scm.py: +1388 -1301
gclient_utils.py: +484 -471
gerrit_client.py: +395 -367
gerrit_util.py: +641 -622
git_cache.py: +807 -758
git_cl.py: +431 -413
git_common.py: +449 -443
git_dates.py: +34 -35
git_drover.py: +21 -20
git_find_releases.py: +36 -32
git_footers.py: +168 -161
git_freezer.py: +16 -16
git_hyper_blame.py: +295 -281
git_map.py: +103 -102
git_map_branches.py: +328 -314
git_mark_merge_base.py: +40 -40
git_migrate_default_branch.py: +71 -70
git_nav_downstream.py: +45 -43
git_new_branch.py: +68 -60
git_number.py: +187 -166
git_rebase_update.py: +309 -289
git_rename_branch.py: +40 -37
git_reparent_branch.py: +82 -79
git_retry.py: +155 -142
git_squash_branch.py: +18 -14
git_upstream_diff.py: +43 -38
gn.py: +47 -45
gsutil.py: +222 -219
infra_to_superproject.py: +34 -34
isort: +0 -1
lockfile.py: +75 -74
man/src/filter_demo_output.py: +104 -103
metadata/.style.yapf: +0 -2
metadata/fields/field_types.py: +1 -2
metadata/tests/dependency_metadata_test.py: +2 -4
metadata/tests/validate_test.py: +2 -2
metadata/validate.py: +1 -2
metadata/validation_result.py: +2 -3
metrics.py: +273 -271
metrics_utils.py: +198 -223
my_activity.py: +926 -878
ninja.py: +60 -59
ninja_reclient.py: +8 -8
ninjalog_uploader.py: +152 -152
ninjalog_uploader_wrapper.py: +71 -71
owners_client.py: +140 -137
owners_finder.py: +329 -322
post_build_ninja_summary.py: +123 -122
presubmit_canned_checks.py: +645 -590
presubmit_support.py: +582 -540
pylint-2.6: +1 -1
pylint-2.7: +1 -1
pylint_main.py: +31 -31
rdb_wrapper.py: +41 -43
recipes/.style.yapf: +1 -0
reclient_helper.py: +145 -142
reclient_metrics.py: +72 -72
reclientreport.py: +34 -34
repo: +26 -26
roll_dep.py: +262 -255
rustfmt.py: +38 -38
scm.py: +435 -421
setup_color.py: +102 -97
siso.py: +60 -56
split_cl.py: +223 -221
subcommand.py: +167 -166
subprocess2.py: +136 -136
swift_format.py: +42 -42
testing_support/.style.yapf: +0 -3
testing_support/coverage_utils.py: +2 -3
tests/.style.yapf: +0 -3
update_depot_tools_toggle.py: +20 -19
upload_metrics.py: +16 -15
upload_to_google_storage.py: +270 -249
utils.py: +15 -15
watchlists.py: +93 -92
weekly: +25 -24
win32imports.py: +7 -7
win_toolchain/get_toolchain_if_necessary.py: +498 -470

+ 0 - 1
.style.yapf

@@ -1,4 +1,3 @@
 [style]
 based_on_style = pep8
-indent_width = 2
 column_limit = 80
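
With indent_width = 2 removed, yapf falls back to the pep8 base style's
default of 4 spaces, which is what drives the reindent in every other file in
this change. A hedged sanity check (yapf formats stdin when given no files):

printf 'def f():\n  return 1\n' | ./yapf    # body comes back indented 4 spaces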

+ 133 - 119
PRESUBMIT.py

@@ -1,7 +1,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Top-level presubmit script for depot tools.
 
 See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
@@ -31,149 +30,164 @@ TEST_TIMEOUT_S = 330  # 5m 30s
 
 
 def CheckPylint(input_api, output_api):
-  """Gather all the pylint logic into one place to make it self-contained."""
-  files_to_check = [
-    r'^[^/]*\.py$',
-    r'^testing_support/[^/]*\.py$',
-    r'^tests/[^/]*\.py$',
-    r'^recipe_modules/.*\.py$',  # Allow recursive search in recipe modules.
-  ]
-  files_to_skip = list(input_api.DEFAULT_FILES_TO_SKIP)
-  if os.path.exists('.gitignore'):
-    with open('.gitignore', encoding='utf-8') as fh:
-      lines = [l.strip() for l in fh.readlines()]
-      files_to_skip.extend([fnmatch.translate(l) for l in lines if
-                         l and not l.startswith('#')])
-  if os.path.exists('.git/info/exclude'):
-    with open('.git/info/exclude', encoding='utf-8') as fh:
-      lines = [l.strip() for l in fh.readlines()]
-      files_to_skip.extend([fnmatch.translate(l) for l in lines if
-                         l and not l.startswith('#')])
-  disabled_warnings = [
-      'R0401',  # Cyclic import
-      'W0613',  # Unused argument
-      'C0415',  # import-outside-toplevel
-      'R1710',  # inconsistent-return-statements
-      'E1101',  # no-member
-      'E1120',  # no-value-for-parameter
-      'R1708',  # stop-iteration-return
-      'W1510',  # subprocess-run-check
-      # Checks which should be re-enabled after Python 2 support is removed.
-      'R0205',  # useless-object-inheritance
-      'R1725',  # super-with-arguments
-      'W0707',  # raise-missing-from
-      'W1113',  # keyword-arg-before-vararg
-  ]
-  return input_api.RunTests(input_api.canned_checks.GetPylint(
-      input_api,
-      output_api,
-      files_to_check=files_to_check,
-      files_to_skip=files_to_skip,
-      disabled_warnings=disabled_warnings,
-      version='2.7'), parallel=False)
+    """Gather all the pylint logic into one place to make it self-contained."""
+    files_to_check = [
+        r'^[^/]*\.py$',
+        r'^testing_support/[^/]*\.py$',
+        r'^tests/[^/]*\.py$',
+        r'^recipe_modules/.*\.py$',  # Allow recursive search in recipe modules.
+    ]
+    files_to_skip = list(input_api.DEFAULT_FILES_TO_SKIP)
+    if os.path.exists('.gitignore'):
+        with open('.gitignore', encoding='utf-8') as fh:
+            lines = [l.strip() for l in fh.readlines()]
+            files_to_skip.extend([
+                fnmatch.translate(l) for l in lines
+                if l and not l.startswith('#')
+            ])
+    if os.path.exists('.git/info/exclude'):
+        with open('.git/info/exclude', encoding='utf-8') as fh:
+            lines = [l.strip() for l in fh.readlines()]
+            files_to_skip.extend([
+                fnmatch.translate(l) for l in lines
+                if l and not l.startswith('#')
+            ])
+    disabled_warnings = [
+        'R0401',  # Cyclic import
+        'W0613',  # Unused argument
+        'C0415',  # import-outside-toplevel
+        'R1710',  # inconsistent-return-statements
+        'E1101',  # no-member
+        'E1120',  # no-value-for-parameter
+        'R1708',  # stop-iteration-return
+        'W1510',  # subprocess-run-check
+        # Checks which should be re-enabled after Python 2 support is removed.
+        'R0205',  # useless-object-inheritance
+        'R1725',  # super-with-arguments
+        'W0707',  # raise-missing-from
+        'W1113',  # keyword-arg-before-vararg
+    ]
+    return input_api.RunTests(input_api.canned_checks.GetPylint(
+        input_api,
+        output_api,
+        files_to_check=files_to_check,
+        files_to_skip=files_to_skip,
+        disabled_warnings=disabled_warnings,
+        version='2.7'),
+                              parallel=False)
 
 
 def CheckRecipes(input_api, output_api):
-  file_filter = lambda x: x.LocalPath() == 'infra/config/recipes.cfg'
-  return input_api.canned_checks.CheckJsonParses(input_api, output_api,
-                                                 file_filter=file_filter)
+    file_filter = lambda x: x.LocalPath() == 'infra/config/recipes.cfg'
+    return input_api.canned_checks.CheckJsonParses(input_api,
+                                                   output_api,
+                                                   file_filter=file_filter)
 
 
 def CheckUsePython3(input_api, output_api):
-  results = []
+    results = []
 
-  if sys.version_info.major != 3:
-    results.append(
-        output_api.PresubmitError(
-            'Did not use Python3 for //tests/PRESUBMIT.py.'))
+    if sys.version_info.major != 3:
+        results.append(
+            output_api.PresubmitError(
+                'Did not use Python3 for //tests/PRESUBMIT.py.'))
 
-  return results
+    return results
 
 
 def CheckJsonFiles(input_api, output_api):
-  return input_api.canned_checks.CheckJsonParses(
-      input_api, output_api)
+    return input_api.canned_checks.CheckJsonParses(input_api, output_api)
 
 
 def CheckUnitTestsOnCommit(input_api, output_api):
-  """ Do not run integration tests on upload since they are way too slow."""
-
-  input_api.SetTimeout(TEST_TIMEOUT_S)
-
-  # Run only selected tests on Windows.
-  test_to_run_list = [r'.*test\.py$']
-  tests_to_skip_list = []
-  if input_api.platform.startswith(('cygwin', 'win32')):
-    print('Warning: skipping most unit tests on Windows')
-    tests_to_skip_list.extend([
-        r'.*auth_test\.py$',
-        r'.*git_common_test\.py$',
-        r'.*git_hyper_blame_test\.py$',
-        r'.*git_map_test\.py$',
-        r'.*ninjalog_uploader_test\.py$',
-        r'.*recipes_test\.py$',
-    ])
-
-  tests = input_api.canned_checks.GetUnitTestsInDirectory(
-      input_api,
-      output_api,
-      'tests',
-      files_to_check=test_to_run_list,
-      files_to_skip=tests_to_skip_list)
-
-  return input_api.RunTests(tests)
+    """ Do not run integration tests on upload since they are way too slow."""
+
+    input_api.SetTimeout(TEST_TIMEOUT_S)
+
+    # Run only selected tests on Windows.
+    test_to_run_list = [r'.*test\.py$']
+    tests_to_skip_list = []
+    if input_api.platform.startswith(('cygwin', 'win32')):
+        print('Warning: skipping most unit tests on Windows')
+        tests_to_skip_list.extend([
+            r'.*auth_test\.py$',
+            r'.*git_common_test\.py$',
+            r'.*git_hyper_blame_test\.py$',
+            r'.*git_map_test\.py$',
+            r'.*ninjalog_uploader_test\.py$',
+            r'.*recipes_test\.py$',
+        ])
+
+    tests = input_api.canned_checks.GetUnitTestsInDirectory(
+        input_api,
+        output_api,
+        'tests',
+        files_to_check=test_to_run_list,
+        files_to_skip=tests_to_skip_list)
+
+    return input_api.RunTests(tests)
 
 
 def CheckCIPDManifest(input_api, output_api):
-  # Validate CIPD manifests.
-  root = input_api.os_path.normpath(
-    input_api.os_path.abspath(input_api.PresubmitLocalPath()))
-  rel_file = lambda rel: input_api.os_path.join(root, rel)
-  cipd_manifests = set(rel_file(input_api.os_path.join(*x)) for x in (
-    ('cipd_manifest.txt',),
-    ('bootstrap', 'manifest.txt'),
-    ('bootstrap', 'manifest_bleeding_edge.txt'),
-
-    # Also generate a file for the cipd client itself.
-    ('cipd_client_version',),
-  ))
-  affected_manifests = input_api.AffectedFiles(
-    include_deletes=False,
-    file_filter=lambda x:
-      input_api.os_path.normpath(x.AbsoluteLocalPath()) in cipd_manifests)
-  tests = []
-  for path in affected_manifests:
-    path = path.AbsoluteLocalPath()
-    if path.endswith('.txt'):
-      tests.append(input_api.canned_checks.CheckCIPDManifest(
-          input_api, output_api, path=path))
-    else:
-      pkg = 'infra/tools/cipd/${platform}'
-      ver = input_api.ReadFile(path)
-      tests.append(input_api.canned_checks.CheckCIPDManifest(
-          input_api, output_api,
-          content=CIPD_CLIENT_ENSURE_FILE_TEMPLATE % (pkg, ver)))
-      tests.append(input_api.canned_checks.CheckCIPDClientDigests(
-          input_api, output_api, client_version_file=path))
-
-  return input_api.RunTests(tests)
+    # Validate CIPD manifests.
+    root = input_api.os_path.normpath(
+        input_api.os_path.abspath(input_api.PresubmitLocalPath()))
+    rel_file = lambda rel: input_api.os_path.join(root, rel)
+    cipd_manifests = set(
+        rel_file(input_api.os_path.join(*x)) for x in (
+            ('cipd_manifest.txt', ),
+            ('bootstrap', 'manifest.txt'),
+            ('bootstrap', 'manifest_bleeding_edge.txt'),
+
+            # Also generate a file for the cipd client itself.
+            (
+                'cipd_client_version', ),
+        ))
+    affected_manifests = input_api.AffectedFiles(
+        include_deletes=False,
+        file_filter=lambda x: input_api.os_path.normpath(x.AbsoluteLocalPath()
+                                                         ) in cipd_manifests)
+    tests = []
+    for path in affected_manifests:
+        path = path.AbsoluteLocalPath()
+        if path.endswith('.txt'):
+            tests.append(
+                input_api.canned_checks.CheckCIPDManifest(input_api,
+                                                          output_api,
+                                                          path=path))
+        else:
+            pkg = 'infra/tools/cipd/${platform}'
+            ver = input_api.ReadFile(path)
+            tests.append(
+                input_api.canned_checks.CheckCIPDManifest(
+                    input_api,
+                    output_api,
+                    content=CIPD_CLIENT_ENSURE_FILE_TEMPLATE % (pkg, ver)))
+            tests.append(
+                input_api.canned_checks.CheckCIPDClientDigests(
+                    input_api, output_api, client_version_file=path))
+
+    return input_api.RunTests(tests)
 
 
 def CheckOwnersFormat(input_api, output_api):
-  return input_api.canned_checks.CheckOwnersFormat(input_api, output_api)
+    return input_api.canned_checks.CheckOwnersFormat(input_api, output_api)
 
 
 def CheckOwnersOnUpload(input_api, output_api):
-  return input_api.canned_checks.CheckOwners(input_api, output_api,
-                                             allow_tbr=False)
+    return input_api.canned_checks.CheckOwners(input_api,
+                                               output_api,
+                                               allow_tbr=False)
+
 
 def CheckDoNotSubmitOnCommit(input_api, output_api):
-  return input_api.canned_checks.CheckDoNotSubmit(input_api, output_api)
+    return input_api.canned_checks.CheckDoNotSubmit(input_api, output_api)
 
 
 def CheckPatchFormatted(input_api, output_api):
-  # TODO(https://crbug.com/979330) If clang-format is fixed for non-chromium
-  # repos, remove check_clang_format=False so that proto files can be formatted
-  return input_api.canned_checks.CheckPatchFormatted(input_api,
-                                                     output_api,
-                                                     check_clang_format=False)
+    # TODO(https://crbug.com/979330) If clang-format is fixed for non-chromium
+    # repos, remove check_clang_format=False so that proto files can be
+    # formatted
+    return input_api.canned_checks.CheckPatchFormatted(input_api,
+                                                       output_api,
+                                                       check_clang_format=False)

+ 98 - 92
auth.py

@@ -1,7 +1,6 @@
 # Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Google OAuth2 related functions."""
 
 from __future__ import print_function
@@ -16,6 +15,8 @@ import os
 
 import subprocess2
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
 
 # This is what most GAE apps require for authentication.
 OAUTH_SCOPE_EMAIL = 'https://www.googleapis.com/auth/userinfo.email'
@@ -27,89 +28,87 @@ OAUTH_SCOPES = OAUTH_SCOPE_EMAIL
 
 # Mockable datetime.datetime.utcnow for testing.
 def datetime_now():
-  return datetime.datetime.utcnow()
+    return datetime.datetime.utcnow()
 
 
 # OAuth access token with its expiration time (UTC datetime or None if unknown).
-class AccessToken(collections.namedtuple('AccessToken', [
-    'token',
-    'expires_at',
-  ])):
-
-  def needs_refresh(self):
-    """True if this AccessToken should be refreshed."""
-    if self.expires_at is not None:
-      # Allow 30s of clock skew between client and backend.
-      return datetime_now() + datetime.timedelta(seconds=30) >= self.expires_at
-    # Token without expiration time never expires.
-    return False
+class AccessToken(
+        collections.namedtuple('AccessToken', [
+            'token',
+            'expires_at',
+        ])):
+    def needs_refresh(self):
+        """True if this AccessToken should be refreshed."""
+        if self.expires_at is not None:
+            # Allow 30s of clock skew between client and backend.
+            return datetime_now() + datetime.timedelta(
+                seconds=30) >= self.expires_at
+        # Token without expiration time never expires.
+        return False
 
 
 class LoginRequiredError(Exception):
-  """Interaction with the user is required to authenticate."""
-
-  def __init__(self, scopes=OAUTH_SCOPE_EMAIL):
-    msg = (
-        'You are not logged in. Please login first by running:\n'
-        '  luci-auth login -scopes %s' % scopes)
-    super(LoginRequiredError, self).__init__(msg)
+    """Interaction with the user is required to authenticate."""
+    def __init__(self, scopes=OAUTH_SCOPE_EMAIL):
+        msg = ('You are not logged in. Please login first by running:\n'
+               '  luci-auth login -scopes %s' % scopes)
+        super(LoginRequiredError, self).__init__(msg)
 
 
 def has_luci_context_local_auth():
-  """Returns whether LUCI_CONTEXT should be used for ambient authentication."""
-  ctx_path = os.environ.get('LUCI_CONTEXT')
-  if not ctx_path:
-    return False
-  try:
-    with open(ctx_path) as f:
-      loaded = json.load(f)
-  except (OSError, IOError, ValueError):
-    return False
-  return loaded.get('local_auth', {}).get('default_account_id') is not None
+    """Returns whether LUCI_CONTEXT should be used for ambient authentication."""
+    ctx_path = os.environ.get('LUCI_CONTEXT')
+    if not ctx_path:
+        return False
+    try:
+        with open(ctx_path) as f:
+            loaded = json.load(f)
+    except (OSError, IOError, ValueError):
+        return False
+    return loaded.get('local_auth', {}).get('default_account_id') is not None
 
 
 class Authenticator(object):
-  """Object that knows how to refresh access tokens when needed.
+    """Object that knows how to refresh access tokens when needed.
 
   Args:
     scopes: space separated oauth scopes. Defaults to OAUTH_SCOPE_EMAIL.
   """
+    def __init__(self, scopes=OAUTH_SCOPE_EMAIL):
+        self._access_token = None
+        self._scopes = scopes
 
-  def __init__(self, scopes=OAUTH_SCOPE_EMAIL):
-    self._access_token = None
-    self._scopes = scopes
-
-  def has_cached_credentials(self):
-    """Returns True if credentials can be obtained.
+    def has_cached_credentials(self):
+        """Returns True if credentials can be obtained.
 
     If returns False, get_access_token() later will probably ask for interactive
     login by raising LoginRequiredError.
 
     If returns True, get_access_token() won't ask for interactive login.
     """
-    return bool(self._get_luci_auth_token())
+        return bool(self._get_luci_auth_token())
 
-  def get_access_token(self):
-    """Returns AccessToken, refreshing it if necessary.
+    def get_access_token(self):
+        """Returns AccessToken, refreshing it if necessary.
 
     Raises:
       LoginRequiredError if user interaction is required.
     """
-    if self._access_token and not self._access_token.needs_refresh():
-      return self._access_token
+        if self._access_token and not self._access_token.needs_refresh():
+            return self._access_token
 
-    # Token expired or missing. Maybe some other process already updated it,
-    # reload from the cache.
-    self._access_token = self._get_luci_auth_token()
-    if self._access_token and not self._access_token.needs_refresh():
-      return self._access_token
+        # Token expired or missing. Maybe some other process already updated it,
+        # reload from the cache.
+        self._access_token = self._get_luci_auth_token()
+        if self._access_token and not self._access_token.needs_refresh():
+            return self._access_token
 
-    # Nope, still expired. Needs user interaction.
-    logging.error('Failed to create access token')
-    raise LoginRequiredError(self._scopes)
+        # Nope, still expired. Needs user interaction.
+        logging.error('Failed to create access token')
+        raise LoginRequiredError(self._scopes)
 
-  def authorize(self, http):
-    """Monkey patches authentication logic of httplib2.Http instance.
+    def authorize(self, http):
+        """Monkey patches authentication logic of httplib2.Http instance.
 
     The modified http.request method will add authentication headers to each
     request.
@@ -120,46 +119,53 @@ class Authenticator(object):
     Returns:
        A modified instance of http that was passed in.
     """
-    # Adapted from oauth2client.OAuth2Credentials.authorize.
-    request_orig = http.request
-
-    @functools.wraps(request_orig)
-    def new_request(
-        uri, method='GET', body=None, headers=None,
-        redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-        connection_type=None):
-      headers = (headers or {}).copy()
-      headers['Authorization'] = 'Bearer %s' % self.get_access_token().token
-      return request_orig(
-          uri, method, body, headers, redirections, connection_type)
-
-    http.request = new_request
-    return http
-
-  ## Private methods.
-
-  def _run_luci_auth_login(self):
-    """Run luci-auth login.
+        # Adapted from oauth2client.OAuth2Credentials.authorize.
+        request_orig = http.request
+
+        @functools.wraps(request_orig)
+        def new_request(uri,
+                        method='GET',
+                        body=None,
+                        headers=None,
+                        redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+                        connection_type=None):
+            headers = (headers or {}).copy()
+            headers['Authorization'] = 'Bearer %s' % self.get_access_token(
+            ).token
+            return request_orig(uri, method, body, headers, redirections,
+                                connection_type)
+
+        http.request = new_request
+        return http
+
+    ## Private methods.
+
+    def _run_luci_auth_login(self):
+        """Run luci-auth login.
 
     Returns:
       AccessToken with credentials.
     """
-    logging.debug('Running luci-auth login')
-    subprocess2.check_call(['luci-auth', 'login', '-scopes', self._scopes])
-    return self._get_luci_auth_token()
-
-  def _get_luci_auth_token(self):
-    logging.debug('Running luci-auth token')
-    try:
-      out, err = subprocess2.check_call_out(
-          ['luci-auth', 'token', '-scopes', self._scopes, '-json-output', '-'],
-          stdout=subprocess2.PIPE, stderr=subprocess2.PIPE)
-      logging.debug('luci-auth token stderr:\n%s', err)
-      token_info = json.loads(out)
-      return AccessToken(
-          token_info['token'],
-          datetime.datetime.utcfromtimestamp(token_info['expiry']))
-    except subprocess2.CalledProcessError as e:
-      # subprocess2.CalledProcessError.__str__ nicely formats stdout/stderr.
-      logging.error('luci-auth token failed: %s', e)
-      return None
+        logging.debug('Running luci-auth login')
+        subprocess2.check_call(['luci-auth', 'login', '-scopes', self._scopes])
+        return self._get_luci_auth_token()
+
+    def _get_luci_auth_token(self):
+        logging.debug('Running luci-auth token')
+        try:
+            out, err = subprocess2.check_call_out([
+                'luci-auth', 'token', '-scopes', self._scopes, '-json-output',
+                '-'
+            ],
+                                                  stdout=subprocess2.PIPE,
+                                                  stderr=subprocess2.PIPE)
+            logging.debug('luci-auth token stderr:\n%s', err)
+            token_info = json.loads(out)
+            return AccessToken(
+                token_info['token'],
+                datetime.datetime.utcfromtimestamp(token_info['expiry']))
+        except subprocess2.CalledProcessError as e:
+            # subprocess2.CalledProcessError.__str__ nicely formats
+            # stdout/stderr.
+            logging.error('luci-auth token failed: %s', e)
+            return None

+ 239 - 223
autoninja.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 This script (intended to be invoked by autoninja or autoninja.bat) detects
 whether a build is accelerated using a service like goma. If so, it runs with a
@@ -19,256 +18,273 @@ import subprocess
 import sys
 
 if sys.platform == 'darwin':
-  import resource
+    import resource
 
 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
 
 
 def main(args):
-  # The -t tools are incompatible with -j
-  t_specified = False
-  j_specified = False
-  offline = False
-  output_dir = '.'
-  input_args = args
-  # On Windows the autoninja.bat script passes along the arguments enclosed in
-  # double quotes. This prevents multiple levels of parsing of the special '^'
-  # characters needed when compiling a single file but means that this script
-  # gets called with a single argument containing all of the actual arguments,
-  # separated by spaces. When this case is detected we need to do argument
-  # splitting ourselves. This means that arguments containing actual spaces are
-  # not supported by autoninja, but that is not a real limitation.
-  if (sys.platform.startswith('win') and len(args) == 2
-      and input_args[1].count(' ') > 0):
-    input_args = args[:1] + args[1].split()
+    # The -t tools are incompatible with -j
+    t_specified = False
+    j_specified = False
+    offline = False
+    output_dir = '.'
+    input_args = args
+    # On Windows the autoninja.bat script passes along the arguments enclosed in
+    # double quotes. This prevents multiple levels of parsing of the special '^'
+    # characters needed when compiling a single file but means that this script
+    # gets called with a single argument containing all of the actual arguments,
+    # separated by spaces. When this case is detected we need to do argument
+    # splitting ourselves. This means that arguments containing actual spaces
+    # are not supported by autoninja, but that is not a real limitation.
+    if (sys.platform.startswith('win') and len(args) == 2
+            and input_args[1].count(' ') > 0):
+        input_args = args[:1] + args[1].split()
+
+    # Ninja uses getopt_long, which allow to intermix non-option arguments.
+    # To leave non supported parameters untouched, we do not use getopt.
+    for index, arg in enumerate(input_args[1:]):
+        if arg.startswith('-j'):
+            j_specified = True
+        if arg.startswith('-t'):
+            t_specified = True
+        if arg == '-C':
+            # + 1 to get the next argument and +1 because we trimmed off
+            # input_args[0]
+            output_dir = input_args[index + 2]
+        elif arg.startswith('-C'):
+            # Support -Cout/Default
+            output_dir = arg[2:]
+        elif arg in ('-o', '--offline'):
+            offline = True
+        elif arg == '-h':
+            print('autoninja: Use -o/--offline to temporary disable goma.',
+                  file=sys.stderr)
+            print(file=sys.stderr)
 
-  # Ninja uses getopt_long, which allow to intermix non-option arguments.
-  # To leave non supported parameters untouched, we do not use getopt.
-  for index, arg in enumerate(input_args[1:]):
-    if arg.startswith('-j'):
-      j_specified = True
-    if arg.startswith('-t'):
-      t_specified = True
-    if arg == '-C':
-      # + 1 to get the next argument and +1 because we trimmed off input_args[0]
-      output_dir = input_args[index + 2]
-    elif arg.startswith('-C'):
-      # Support -Cout/Default
-      output_dir = arg[2:]
-    elif arg in ('-o', '--offline'):
-      offline = True
-    elif arg == '-h':
-      print('autoninja: Use -o/--offline to temporary disable goma.',
-            file=sys.stderr)
-      print(file=sys.stderr)
+    use_goma = False
+    use_remoteexec = False
+    use_rbe = False
+    use_siso = False
 
-  use_goma = False
-  use_remoteexec = False
-  use_rbe = False
-  use_siso = False
+    # Attempt to auto-detect remote build acceleration. We support gn-based
+    # builds, where we look for args.gn in the build tree, and cmake-based
+    # builds where we look for rules.ninja.
+    if os.path.exists(os.path.join(output_dir, 'args.gn')):
+        with open(os.path.join(output_dir, 'args.gn')) as file_handle:
+            for line in file_handle:
+                # use_goma, use_remoteexec, or use_rbe will activate build
+                # acceleration.
+                #
+                # This test can match multi-argument lines. Examples of this
+                # are: is_debug=false use_goma=true is_official_build=false
+                # use_goma=false# use_goma=true This comment is ignored
+                #
+                # Anything after a comment is not consider a valid argument.
+                line_without_comment = line.split('#')[0]
+                if re.search(r'(^|\s)(use_goma)\s*=\s*true($|\s)',
+                             line_without_comment):
+                    use_goma = True
+                    continue
+                if re.search(r'(^|\s)(use_remoteexec)\s*=\s*true($|\s)',
+                             line_without_comment):
+                    use_remoteexec = True
+                    continue
+                if re.search(r'(^|\s)(use_rbe)\s*=\s*true($|\s)',
+                             line_without_comment):
+                    use_rbe = True
+                    continue
+                if re.search(r'(^|\s)(use_siso)\s*=\s*true($|\s)',
+                             line_without_comment):
+                    use_siso = True
+                    continue
 
-  # Attempt to auto-detect remote build acceleration. We support gn-based
-  # builds, where we look for args.gn in the build tree, and cmake-based builds
-  # where we look for rules.ninja.
-  if os.path.exists(os.path.join(output_dir, 'args.gn')):
-    with open(os.path.join(output_dir, 'args.gn')) as file_handle:
-      for line in file_handle:
-        # use_goma, use_remoteexec, or use_rbe will activate build acceleration.
-        #
-        # This test can match multi-argument lines. Examples of this are:
-        # is_debug=false use_goma=true is_official_build=false
-        # use_goma=false# use_goma=true This comment is ignored
-        #
-        # Anything after a comment is not consider a valid argument.
-        line_without_comment = line.split('#')[0]
-        if re.search(r'(^|\s)(use_goma)\s*=\s*true($|\s)',
-                     line_without_comment):
-          use_goma = True
-          continue
-        if re.search(r'(^|\s)(use_remoteexec)\s*=\s*true($|\s)',
-                     line_without_comment):
-          use_remoteexec = True
-          continue
-        if re.search(r'(^|\s)(use_rbe)\s*=\s*true($|\s)', line_without_comment):
-          use_rbe = True
-          continue
-        if re.search(r'(^|\s)(use_siso)\s*=\s*true($|\s)',
-                     line_without_comment):
-          use_siso = True
-          continue
+        siso_marker = os.path.join(output_dir, '.siso_deps')
+        if use_siso:
+            ninja_marker = os.path.join(output_dir, '.ninja_log')
+            # autosiso generates a .ninja_log file so the mere existence of a
+            # .ninja_log file doesn't imply that a ninja build was done. However
+            # if there is a .ninja_log but no .siso_deps then that implies a
+            # ninja build.
+            if os.path.exists(ninja_marker) and not os.path.exists(siso_marker):
+                return (
+                    'echo Run gn clean before switching from ninja to siso in '
+                    '%s' % output_dir)
+            siso = ['autosiso'] if use_remoteexec else ['siso', 'ninja']
+            if sys.platform.startswith('win'):
+                # An explicit 'call' is needed to make sure the invocation of
+                # autosiso returns to autoninja.bat, and the command prompt
+                # title gets reset.
+                siso = ['call'] + siso
+            return ' '.join(siso + input_args[1:])
 
-    siso_marker = os.path.join(output_dir, '.siso_deps')
-    if use_siso:
-      ninja_marker = os.path.join(output_dir, '.ninja_log')
-      # autosiso generates a .ninja_log file so the mere existence of a
-      # .ninja_log file doesn't imply that a ninja build was done. However if
-      # there is a .ninja_log but no .siso_deps then that implies a ninja build.
-      if os.path.exists(ninja_marker) and not os.path.exists(siso_marker):
-        return ('echo Run gn clean before switching from ninja to siso in %s' %
+        if os.path.exists(siso_marker):
+            return (
+                'echo Run gn clean before switching from siso to ninja in %s' %
                 output_dir)
-      siso = ['autosiso'] if use_remoteexec else ['siso', 'ninja']
-      if sys.platform.startswith('win'):
-        # An explicit 'call' is needed to make sure the invocation of autosiso
-        # returns to autoninja.bat, and the command prompt title gets reset.
-        siso = ['call'] + siso
-      return ' '.join(siso + input_args[1:])
 
-    if os.path.exists(siso_marker):
-      return ('echo Run gn clean before switching from siso to ninja in %s' %
-              output_dir)
+    else:
+        for relative_path in [
+                '',  # GN keeps them in the root of output_dir
+                'CMakeFiles'
+        ]:
+            path = os.path.join(output_dir, relative_path, 'rules.ninja')
+            if os.path.exists(path):
+                with open(path) as file_handle:
+                    for line in file_handle:
+                        if re.match(r'^\s*command\s*=\s*\S+gomacc', line):
+                            use_goma = True
+                            break
 
-  else:
-    for relative_path in [
-        '',  # GN keeps them in the root of output_dir
-        'CMakeFiles'
-    ]:
-      path = os.path.join(output_dir, relative_path, 'rules.ninja')
-      if os.path.exists(path):
-        with open(path) as file_handle:
-          for line in file_handle:
-            if re.match(r'^\s*command\s*=\s*\S+gomacc', line):
-              use_goma = True
-              break
+    # Strip -o/--offline so ninja doesn't see them.
+    input_args = [arg for arg in input_args if arg not in ('-o', '--offline')]
 
-  # Strip -o/--offline so ninja doesn't see them.
-  input_args = [arg for arg in input_args if arg not in ('-o', '--offline')]
+    # If GOMA_DISABLED is set to "true", "t", "yes", "y", or "1"
+    # (case-insensitive) then gomacc will use the local compiler instead of
+    # doing a goma compile. This is convenient if you want to briefly disable
+    # goma. It avoids having to rebuild the world when transitioning between
+    # goma/non-goma builds. However, it is not as fast as doing a "normal"
+    # non-goma build because an extra process is created for each compile step.
+    # Checking this environment variable ensures that autoninja uses an
+    # appropriate -j value in this situation.
+    goma_disabled_env = os.environ.get('GOMA_DISABLED', '0').lower()
+    if offline or goma_disabled_env in ['true', 't', 'yes', 'y', '1']:
+        use_goma = False
 
-  # If GOMA_DISABLED is set to "true", "t", "yes", "y", or "1"
-  # (case-insensitive) then gomacc will use the local compiler instead of doing
-  # a goma compile. This is convenient if you want to briefly disable goma. It
-  # avoids having to rebuild the world when transitioning between goma/non-goma
-  # builds. However, it is not as fast as doing a "normal" non-goma build
-  # because an extra process is created for each compile step. Checking this
-  # environment variable ensures that autoninja uses an appropriate -j value in
-  # this situation.
-  goma_disabled_env = os.environ.get('GOMA_DISABLED', '0').lower()
-  if offline or goma_disabled_env in ['true', 't', 'yes', 'y', '1']:
-    use_goma = False
+    if use_goma:
+        gomacc_file = 'gomacc.exe' if sys.platform.startswith(
+            'win') else 'gomacc'
+        goma_dir = os.environ.get('GOMA_DIR',
+                                  os.path.join(SCRIPT_DIR, '.cipd_bin'))
+        gomacc_path = os.path.join(goma_dir, gomacc_file)
+        # Don't invoke gomacc if it doesn't exist.
+        if os.path.exists(gomacc_path):
+            # Check to make sure that goma is running. If not, don't start the
+            # build.
+            status = subprocess.call([gomacc_path, 'port'],
+                                     stdout=subprocess.PIPE,
+                                     stderr=subprocess.PIPE,
+                                     shell=False)
+            if status == 1:
+                print(
+                    'Goma is not running. Use "goma_ctl ensure_start" to start '
+                    'it.',
+                    file=sys.stderr)
+                if sys.platform.startswith('win'):
+                    # Set an exit code of 1 in the batch file.
+                    print('cmd "/c exit 1"')
+                else:
+                    # Set an exit code of 1 by executing 'false' in the bash
+                    # script.
+                    print('false')
+                sys.exit(1)
 
-  if use_goma:
-    gomacc_file = 'gomacc.exe' if sys.platform.startswith('win') else 'gomacc'
-    goma_dir = os.environ.get('GOMA_DIR', os.path.join(SCRIPT_DIR, '.cipd_bin'))
-    gomacc_path = os.path.join(goma_dir, gomacc_file)
-    # Don't invoke gomacc if it doesn't exist.
-    if os.path.exists(gomacc_path):
-      # Check to make sure that goma is running. If not, don't start the build.
-      status = subprocess.call([gomacc_path, 'port'],
-                               stdout=subprocess.PIPE,
-                               stderr=subprocess.PIPE,
-                               shell=False)
-      if status == 1:
-        print('Goma is not running. Use "goma_ctl ensure_start" to start it.',
-              file=sys.stderr)
-        if sys.platform.startswith('win'):
-          # Set an exit code of 1 in the batch file.
-          print('cmd "/c exit 1"')
-        else:
-          # Set an exit code of 1 by executing 'false' in the bash script.
-          print('false')
-        sys.exit(1)
-
-  # A large build (with or without goma) tends to hog all system resources.
-  # Launching the ninja process with 'nice' priorities improves this situation.
-  prefix_args = []
-  if (sys.platform.startswith('linux')
-      and os.environ.get('NINJA_BUILD_IN_BACKGROUND', '0') == '1'):
-    # nice -10 is process priority 10 lower than default 0
-    # ionice -c 3 is IO priority IDLE
-    prefix_args = ['nice'] + ['-10']
+    # A large build (with or without goma) tends to hog all system resources.
+    # Launching the ninja process with 'nice' priorities improves this
+    # situation.
+    prefix_args = []
+    if (sys.platform.startswith('linux')
+            and os.environ.get('NINJA_BUILD_IN_BACKGROUND', '0') == '1'):
+        # nice -10 is process priority 10 lower than default 0
+        # ionice -c 3 is IO priority IDLE
+        prefix_args = ['nice'] + ['-10']
 
-  # Tell goma or reclient to do local compiles. On Windows these environment
-  # variables are set by the wrapper batch file.
-  offline_env = ['RBE_remote_disabled=1', 'GOMA_DISABLED=1'
-                 ] if offline and not sys.platform.startswith('win') else []
+    # Tell goma or reclient to do local compiles. On Windows these environment
+    # variables are set by the wrapper batch file.
+    offline_env = ['RBE_remote_disabled=1', 'GOMA_DISABLED=1'
+                   ] if offline and not sys.platform.startswith('win') else []
 
-  # On macOS, the default limit of open file descriptors is too low (256).
-  # This causes a large j value to result in 'Too many open files' errors.
-  # Check whether the limit can be raised to a large enough value. If yes,
-  # use `ulimit -n .... &&` as a prefix to increase the limit when running
-  # ninja.
-  if sys.platform == 'darwin':
-    wanted_limit = 200000  # Large enough to avoid any risk of exhaustion.
-    fileno_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
-    if fileno_limit <= wanted_limit:
-      try:
-        resource.setrlimit(resource.RLIMIT_NOFILE, (wanted_limit, hard_limit))
-      except Exception as _:
-        pass
-      fileno_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
-      if fileno_limit >= wanted_limit:
-        prefix_args = ['ulimit', '-n', f'{wanted_limit}', '&&'] + offline_env
-        offline_env = []
+    # On macOS, the default limit of open file descriptors is too low (256).
+    # This causes a large j value to result in 'Too many open files' errors.
+    # Check whether the limit can be raised to a large enough value. If yes,
+    # use `ulimit -n .... &&` as a prefix to increase the limit when running
+    # ninja.
+    if sys.platform == 'darwin':
+        wanted_limit = 200000  # Large enough to avoid any risk of exhaustion.
+        fileno_limit, hard_limit = resource.getrlimit(resource.RLIMIT_NOFILE)
+        if fileno_limit <= wanted_limit:
+            try:
+                resource.setrlimit(resource.RLIMIT_NOFILE,
+                                   (wanted_limit, hard_limit))
+            except Exception as _:
+                pass
+            fileno_limit, hard_limit = resource.getrlimit(
+                resource.RLIMIT_NOFILE)
+            if fileno_limit >= wanted_limit:
+                prefix_args = ['ulimit', '-n', f'{wanted_limit}', '&&'
+                               ] + offline_env
+                offline_env = []
 
+    # Call ninja.py so that it can find ninja binary installed by DEPS or one in
+    # PATH.
+    ninja_path = os.path.join(SCRIPT_DIR, 'ninja.py')
+    # If using remoteexec, use ninja_reclient.py which wraps ninja.py with
+    # starting and stopping reproxy.
+    if use_remoteexec:
+        ninja_path = os.path.join(SCRIPT_DIR, 'ninja_reclient.py')
 
-  # Call ninja.py so that it can find ninja binary installed by DEPS or one in
-  # PATH.
-  ninja_path = os.path.join(SCRIPT_DIR, 'ninja.py')
-  # If using remoteexec, use ninja_reclient.py which wraps ninja.py with
-  # starting and stopping reproxy.
-  if use_remoteexec:
-    ninja_path = os.path.join(SCRIPT_DIR, 'ninja_reclient.py')
+    args = offline_env + prefix_args + [sys.executable, ninja_path
+                                        ] + input_args[1:]
 
-  args = offline_env + prefix_args + [sys.executable, ninja_path
-                                      ] + input_args[1:]
+    num_cores = multiprocessing.cpu_count()
+    if not j_specified and not t_specified:
+        if not offline and (use_goma or use_remoteexec or use_rbe):
+            args.append('-j')
+            default_core_multiplier = 80
+            if platform.machine() in ('x86_64', 'AMD64'):
+                # Assume simultaneous multithreading and therefore half as many
+                # cores as logical processors.
+                num_cores //= 2
 
-  num_cores = multiprocessing.cpu_count()
-  if not j_specified and not t_specified:
-    if not offline and (use_goma or use_remoteexec or use_rbe):
-      args.append('-j')
-      default_core_multiplier = 80
-      if platform.machine() in ('x86_64', 'AMD64'):
-        # Assume simultaneous multithreading and therefore half as many cores as
-        # logical processors.
-        num_cores //= 2
+            core_multiplier = int(
+                os.environ.get('NINJA_CORE_MULTIPLIER',
+                               default_core_multiplier))
+            j_value = num_cores * core_multiplier
 
-      core_multiplier = int(
-          os.environ.get('NINJA_CORE_MULTIPLIER', default_core_multiplier))
-      j_value = num_cores * core_multiplier
+            core_limit = int(os.environ.get('NINJA_CORE_LIMIT', j_value))
+            j_value = min(j_value, core_limit)
 
-      core_limit = int(os.environ.get('NINJA_CORE_LIMIT', j_value))
-      j_value = min(j_value, core_limit)
+            if sys.platform.startswith('win'):
+                # On windows, j value higher than 1000 does not improve build
+                # performance.
+                j_value = min(j_value, 1000)
+            elif sys.platform == 'darwin':
+                # If the number of open file descriptors is large enough (or it
+                # can be raised to a large enough value), then set j value to
+                # 1000. This limit comes from ninja which is limited to at most
+                # FD_SETSIZE (1024) open file descriptors (using 1000 leave a
+                # bit of head room).
+                #
+                # If the number of open file descriptors cannot be raised, then
+                # use a j value of 200 which is the maximum value that reliably
+                # work with the default limit of 256.
+                if fileno_limit >= wanted_limit:
+                    j_value = min(j_value, 1000)
+                else:
+                    j_value = min(j_value, 200)
 
-      if sys.platform.startswith('win'):
-        # On windows, j value higher than 1000 does not improve build
-        # performance.
-        j_value = min(j_value, 1000)
-      elif sys.platform == 'darwin':
-        # If the number of open file descriptors is large enough (or it can be
-        # raised to a large enough value), then set j value to 1000. This limit
-        # comes from ninja which is limited to at most FD_SETSIZE (1024) open
-        # file descriptors (using 1000 leave a bit of head room).
-        #
-        # If the number of open file descriptors cannot be raised, then use a
-        # j value of 200 which is the maximum value that reliably work with
-        # the default limit of 256.
-        if fileno_limit >= wanted_limit:
-          j_value = min(j_value, 1000)
+            args.append('%d' % j_value)
         else:
-          j_value = min(j_value, 200)
-
-      args.append('%d' % j_value)
-    else:
-      j_value = num_cores
-      # Ninja defaults to |num_cores + 2|
-      j_value += int(os.environ.get('NINJA_CORE_ADDITION', '2'))
-      args.append('-j')
-      args.append('%d' % j_value)
+            j_value = num_cores
+            # Ninja defaults to |num_cores + 2|
+            j_value += int(os.environ.get('NINJA_CORE_ADDITION', '2'))
+            args.append('-j')
+            args.append('%d' % j_value)
 
-  # On Windows, fully quote the path so that the command processor doesn't think
-  # the whole output is the command.
-  # On Linux and Mac, if people put depot_tools in directories with ' ',
-  # shell would misunderstand ' ' as a path separation.
-  # TODO(yyanagisawa): provide proper quoting for Windows.
-  # see https://cs.chromium.org/chromium/src/tools/mb/mb.py
-  for i in range(len(args)):
-    if (i == 0 and sys.platform.startswith('win')) or ' ' in args[i]:
-      args[i] = '"%s"' % args[i].replace('"', '\\"')
+    # On Windows, fully quote the path so that the command processor doesn't
+    # think the whole output is the command. On Linux and Mac, if people put
+    # depot_tools in directories with ' ', shell would misunderstand ' ' as a
+    # path separation. TODO(yyanagisawa): provide proper quoting for Windows.
+    # see https://cs.chromium.org/chromium/src/tools/mb/mb.py
+    for i in range(len(args)):
+        if (i == 0 and sys.platform.startswith('win')) or ' ' in args[i]:
+            args[i] = '"%s"' % args[i].replace('"', '\\"')
 
-  if os.environ.get('NINJA_SUMMARIZE_BUILD', '0') == '1':
-    args += ['-d', 'stats']
+    if os.environ.get('NINJA_SUMMARIZE_BUILD', '0') == '1':
+        args += ['-d', 'stats']
 
-  return ' '.join(args)
+    return ' '.join(args)
 
 
 if __name__ == '__main__':
-  print(main(sys.argv))
+    print(main(sys.argv))

+ 41 - 41
autosiso.py

@@ -18,53 +18,53 @@ import siso
 
 
 def _use_remoteexec(argv):
-  out_dir = reclient_helper.find_ninja_out_dir(argv)
-  gn_args_path = os.path.join(out_dir, 'args.gn')
-  if not os.path.exists(gn_args_path):
+    out_dir = reclient_helper.find_ninja_out_dir(argv)
+    gn_args_path = os.path.join(out_dir, 'args.gn')
+    if not os.path.exists(gn_args_path):
+        return False
+    with open(gn_args_path) as f:
+        for line in f:
+            line_without_comment = line.split('#')[0]
+            if re.search(r'(^|\s)use_remoteexec\s*=\s*true($|\s)',
+                         line_without_comment):
+                return True
     return False
-  with open(gn_args_path) as f:
-    for line in f:
-      line_without_comment = line.split('#')[0]
-      if re.search(r'(^|\s)use_remoteexec\s*=\s*true($|\s)',
-                   line_without_comment):
-        return True
-  return False
 
 
 def main(argv):
-  # On Windows the autosiso.bat script passes along the arguments enclosed in
-  # double quotes. This prevents multiple levels of parsing of the special '^'
-  # characters needed when compiling a single file but means that this script
-  # gets called with a single argument containing all of the actual arguments,
-  # separated by spaces. When this case is detected we need to do argument
-  # splitting ourselves. This means that arguments containing actual spaces are
-  # not supported by autoninja, but that is not a real limitation.
-  if (sys.platform.startswith('win') and len(argv) == 2
-      and argv[1].count(' ') > 0):
-    argv = argv[:1] + argv[1].split()
+    # On Windows the autosiso.bat script passes along the arguments enclosed in
+    # double quotes. This prevents multiple levels of parsing of the special '^'
+    # characters needed when compiling a single file but means that this script
+    # gets called with a single argument containing all of the actual arguments,
+    # separated by spaces. When this case is detected we need to do argument
+    # splitting ourselves. This means that arguments containing actual spaces
+    # are not supported by autoninja, but that is not a real limitation.
+    if (sys.platform.startswith('win') and len(argv) == 2
+            and argv[1].count(' ') > 0):
+        argv = argv[:1] + argv[1].split()
 
-  if not _use_remoteexec(argv):
-    print(
-        "`use_remoteexec=true` is not detected.\n"
-        "Please run `siso` command directly.",
-        file=sys.stderr)
-    return 1
+    if not _use_remoteexec(argv):
+        print(
+            "`use_remoteexec=true` is not detected.\n"
+            "Please run `siso` command directly.",
+            file=sys.stderr)
+        return 1
 
-  with reclient_helper.build_context(argv, 'autosiso') as ret_code:
-    if ret_code:
-      return ret_code
-    argv = [
-        argv[0],
-        'ninja',
-        # Do not authenticate when using Reproxy.
-        '-project=',
-        '-reapi_instance=',
-    ] + argv[1:]
-    return siso.main(argv)
+    with reclient_helper.build_context(argv, 'autosiso') as ret_code:
+        if ret_code:
+            return ret_code
+        argv = [
+            argv[0],
+            'ninja',
+            # Do not authenticate when using Reproxy.
+            '-project=',
+            '-reapi_instance=',
+        ] + argv[1:]
+        return siso.main(argv)
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv))
-  except KeyboardInterrupt:
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv))
+    except KeyboardInterrupt:
+        sys.exit(1)

+ 28 - 28
bazel.py

@@ -25,29 +25,29 @@ from typing import List, Optional
 
 
 def _find_bazel_cros() -> Optional[Path]:
-  """Find the bazel launcher for ChromiumOS."""
-  cwd = Path.cwd()
-  for parent in itertools.chain([cwd], cwd.parents):
-    bazel_launcher = parent / "chromite" / "bin" / "bazel"
-    if bazel_launcher.exists():
-      return bazel_launcher
-  return None
+    """Find the bazel launcher for ChromiumOS."""
+    cwd = Path.cwd()
+    for parent in itertools.chain([cwd], cwd.parents):
+        bazel_launcher = parent / "chromite" / "bin" / "bazel"
+        if bazel_launcher.exists():
+            return bazel_launcher
+    return None
 
 
 def _find_next_bazel_in_path() -> Optional[Path]:
-  """The fallback method: search the remainder of PATH for bazel."""
-  # Remove depot_tools from PATH if present.
-  depot_tools = Path(__file__).resolve().parent
-  path_env = os.environ.get("PATH", os.defpath)
-  search_paths = []
-  for path in path_env.split(os.pathsep):
-    if Path(path).resolve() != depot_tools:
-      search_paths.append(path)
-  new_path_env = os.pathsep.join(search_paths)
-  bazel = shutil.which("bazel", path=new_path_env)
-  if bazel:
-    return Path(bazel)
-  return None
+    """The fallback method: search the remainder of PATH for bazel."""
+    # Remove depot_tools from PATH if present.
+    depot_tools = Path(__file__).resolve().parent
+    path_env = os.environ.get("PATH", os.defpath)
+    search_paths = []
+    for path in path_env.split(os.pathsep):
+        if Path(path).resolve() != depot_tools:
+            search_paths.append(path)
+    new_path_env = os.pathsep.join(search_paths)
+    bazel = shutil.which("bazel", path=new_path_env)
+    if bazel:
+        return Path(bazel)
+    return None
 
 
 # All functions used to search for Bazel (in order of search).
@@ -71,15 +71,15 @@ it's actually installed."""
 
 
 def main(argv: List[str]) -> int:
-  """Main."""
-  for search_func in _SEARCH_FUNCTIONS:
-    bazel = search_func()
-    if bazel:
-      os.execv(bazel, [str(bazel), *argv])
+    """Main."""
+    for search_func in _SEARCH_FUNCTIONS:
+        bazel = search_func()
+        if bazel:
+            os.execv(bazel, [str(bazel), *argv])
 
-  print(_FIND_FAILURE_MSG, file=sys.stderr)
-  return 1
+    print(_FIND_FAILURE_MSG, file=sys.stderr)
+    return 1
 
 
 if __name__ == "__main__":
-  sys.exit(main(sys.argv[1:]))
+    sys.exit(main(sys.argv[1:]))

+ 216 - 213
bootstrap/bootstrap.py

@@ -17,7 +17,6 @@ import subprocess
 import sys
 import tempfile
 
-
 THIS_DIR = os.path.abspath(os.path.dirname(__file__))
 ROOT_DIR = os.path.abspath(os.path.join(THIS_DIR, '..'))
 
@@ -29,26 +28,31 @@ BAT_EXT = '.bat' if IS_WIN else ''
 # Top-level stubs to generate that fall through to executables within the Git
 # directory.
 WIN_GIT_STUBS = {
-  'git.bat': 'cmd\\git.exe',
-  'gitk.bat': 'cmd\\gitk.exe',
-  'ssh.bat': 'usr\\bin\\ssh.exe',
-  'ssh-keygen.bat': 'usr\\bin\\ssh-keygen.exe',
+    'git.bat': 'cmd\\git.exe',
+    'gitk.bat': 'cmd\\gitk.exe',
+    'ssh.bat': 'usr\\bin\\ssh.exe',
+    'ssh-keygen.bat': 'usr\\bin\\ssh-keygen.exe',
 }
 
 
 # Accumulated template parameters for generated stubs.
-class Template(collections.namedtuple('Template', (
-    'PYTHON_RELDIR', 'PYTHON_BIN_RELDIR', 'PYTHON_BIN_RELDIR_UNIX',
-    'PYTHON3_BIN_RELDIR', 'PYTHON3_BIN_RELDIR_UNIX', 'GIT_BIN_RELDIR',
-    'GIT_BIN_RELDIR_UNIX', 'GIT_PROGRAM',
-    ))):
-
-  @classmethod
-  def empty(cls):
-    return cls(**{k: None for k in cls._fields})
-
-  def maybe_install(self, name, dst_path):
-    """Installs template |name| to |dst_path| if it has changed.
+class Template(
+        collections.namedtuple('Template', (
+            'PYTHON_RELDIR',
+            'PYTHON_BIN_RELDIR',
+            'PYTHON_BIN_RELDIR_UNIX',
+            'PYTHON3_BIN_RELDIR',
+            'PYTHON3_BIN_RELDIR_UNIX',
+            'GIT_BIN_RELDIR',
+            'GIT_BIN_RELDIR_UNIX',
+            'GIT_PROGRAM',
+        ))):
+    @classmethod
+    def empty(cls):
+        return cls(**{k: None for k in cls._fields})
+
+    def maybe_install(self, name, dst_path):
+        """Installs template |name| to |dst_path| if it has changed.
 
     This loads the template |name| from THIS_DIR, resolves template parameters,
     and installs it to |dst_path|. See `maybe_update` for more information.
@@ -59,14 +63,14 @@ class Template(collections.namedtuple('Template', (
 
     Returns (bool): True if |dst_path| was updated, False otherwise.
     """
-    template_path = os.path.join(THIS_DIR, name)
-    with open(template_path, 'r', encoding='utf8') as fd:
-      t = string.Template(fd.read())
-    return maybe_update(t.safe_substitute(self._asdict()), dst_path)
+        template_path = os.path.join(THIS_DIR, name)
+        with open(template_path, 'r', encoding='utf8') as fd:
+            t = string.Template(fd.read())
+        return maybe_update(t.safe_substitute(self._asdict()), dst_path)
 
 
 def maybe_update(content, dst_path):
-  """Writes |content| to |dst_path| if |dst_path| does not already match.
+    """Writes |content| to |dst_path| if |dst_path| does not already match.
 
   This function will ensure that there is a file at |dst_path| containing
   |content|. If |dst_path| already exists and contains |content|, no operation
@@ -79,22 +83,22 @@ def maybe_update(content, dst_path):
 
   Returns (bool): True if |dst_path| was updated, False otherwise.
   """
-  # If the path already exists and matches the new content, refrain from writing
-  # a new one.
-  if os.path.exists(dst_path):
-    with open(dst_path, 'r', encoding='utf-8') as fd:
-      if fd.read() == content:
-        return False
-
-  logging.debug('Updating %r', dst_path)
-  with open(dst_path, 'w', encoding='utf-8') as fd:
-    fd.write(content)
-  os.chmod(dst_path, 0o755)
-  return True
+    # If the path already exists and matches the new content, refrain from
+    # writing a new one.
+    if os.path.exists(dst_path):
+        with open(dst_path, 'r', encoding='utf-8') as fd:
+            if fd.read() == content:
+                return False
+
+    logging.debug('Updating %r', dst_path)
+    with open(dst_path, 'w', encoding='utf-8') as fd:
+        fd.write(content)
+    os.chmod(dst_path, 0o755)
+    return True
 
 
 def maybe_copy(src_path, dst_path):
-  """Writes the content of |src_path| to |dst_path| if needed.
+    """Writes the content of |src_path| to |dst_path| if needed.
 
   See `maybe_update` for more information.
 
@@ -104,13 +108,13 @@ def maybe_copy(src_path, dst_path):
 
   Returns (bool): True if |dst_path| was updated, False otherwise.
   """
-  with open(src_path, 'r', encoding='utf-8') as fd:
-    content = fd.read()
-  return maybe_update(content, dst_path)
+    with open(src_path, 'r', encoding='utf-8') as fd:
+        content = fd.read()
+    return maybe_update(content, dst_path)
 
 
 def call_if_outdated(stamp_path, stamp_version, fn):
-  """Invokes |fn| if the stamp at |stamp_path| doesn't match |stamp_version|.
+    """Invokes |fn| if the stamp at |stamp_path| doesn't match |stamp_version|.
 
   This can be used to keep a filesystem record of whether an operation has been
   performed. The record is stored at |stamp_path|. To invalidate a record,
@@ -128,22 +132,22 @@ def call_if_outdated(stamp_path, stamp_version, fn):
   Returns (bool): True if an update occurred.
   """
 
-  stamp_version = stamp_version.strip()
-  if os.path.isfile(stamp_path):
-    with open(stamp_path, 'r', encoding='utf-8') as fd:
-      current_version = fd.read().strip()
-    if current_version == stamp_version:
-      return False
+    stamp_version = stamp_version.strip()
+    if os.path.isfile(stamp_path):
+        with open(stamp_path, 'r', encoding='utf-8') as fd:
+            current_version = fd.read().strip()
+        if current_version == stamp_version:
+            return False
 
-  fn()
+    fn()
 
-  with open(stamp_path, 'w', encoding='utf-8') as fd:
-    fd.write(stamp_version)
-  return True
+    with open(stamp_path, 'w', encoding='utf-8') as fd:
+        fd.write(stamp_version)
+    return True
 
 
 def _in_use(path):
-  """Checks if a Windows file is in use.
+    """Checks if a Windows file is in use.
 
   When Windows is using an executable, it prevents other writers from
   modifying or deleting that executable. We can safely test for an in-use
@@ -152,93 +156,93 @@ def _in_use(path):
 
   Returns (bool): True if the file was in use, False if not.
   """
-  try:
-    with open(path, 'r+'):
-      return False
-  except IOError:
-    return True
+    try:
+        with open(path, 'r+'):
+            return False
+    except IOError:
+        return True
 
 
 def _toolchain_in_use(toolchain_path):
-  """Returns (bool): True if a toolchain rooted at |path| is in use.
+    """Returns (bool): True if a toolchain rooted at |path| is in use.
   """
-  # Look for Python files that may be in use.
-  for python_dir in (
-      os.path.join(toolchain_path, 'python', 'bin'), # CIPD
-      toolchain_path, # Legacy ZIP distributions.
-      ):
-    for component in (
-        os.path.join(python_dir, 'python.exe'),
-        os.path.join(python_dir, 'DLLs', 'unicodedata.pyd'),
+    # Look for Python files that may be in use.
+    for python_dir in (
+            os.path.join(toolchain_path, 'python', 'bin'),  # CIPD
+            toolchain_path,  # Legacy ZIP distributions.
+    ):
+        for component in (
+                os.path.join(python_dir, 'python.exe'),
+                os.path.join(python_dir, 'DLLs', 'unicodedata.pyd'),
         ):
-      if os.path.isfile(component) and _in_use(component):
-        return True
-  # Look for Python 3 files that may be in use.
-  python_dir = os.path.join(toolchain_path, 'python3', 'bin')
-  for component in (
-      os.path.join(python_dir, 'python3.exe'),
-      os.path.join(python_dir, 'DLLs', 'unicodedata.pyd'),
-      ):
-    if os.path.isfile(component) and _in_use(component):
-      return True
-  return False
-
+            if os.path.isfile(component) and _in_use(component):
+                return True
+    # Look for Python 3 files that may be in use.
+    python_dir = os.path.join(toolchain_path, 'python3', 'bin')
+    for component in (
+            os.path.join(python_dir, 'python3.exe'),
+            os.path.join(python_dir, 'DLLs', 'unicodedata.pyd'),
+    ):
+        if os.path.isfile(component) and _in_use(component):
+            return True
+    return False
 
 
 def _check_call(argv, stdin_input=None, **kwargs):
-  """Wrapper for subprocess.check_call that adds logging."""
-  logging.info('running %r', argv)
-  if stdin_input is not None:
-    kwargs['stdin'] = subprocess.PIPE
-  proc = subprocess.Popen(argv, **kwargs)
-  proc.communicate(input=stdin_input)
-  if proc.returncode:
-    raise subprocess.CalledProcessError(proc.returncode, argv, None)
+    """Wrapper for subprocess.check_call that adds logging."""
+    logging.info('running %r', argv)
+    if stdin_input is not None:
+        kwargs['stdin'] = subprocess.PIPE
+    proc = subprocess.Popen(argv, **kwargs)
+    proc.communicate(input=stdin_input)
+    if proc.returncode:
+        raise subprocess.CalledProcessError(proc.returncode, argv, None)
 
 
 def _safe_rmtree(path):
-  if not os.path.exists(path):
-    return
-
-  def _make_writable_and_remove(path):
-    st = os.stat(path)
-    new_mode = st.st_mode | 0o200
-    if st.st_mode == new_mode:
-      return False
-    try:
-      os.chmod(path, new_mode)
-      os.remove(path)
-      return True
-    except Exception:
-      return False
+    if not os.path.exists(path):
+        return
+
+    def _make_writable_and_remove(path):
+        st = os.stat(path)
+        new_mode = st.st_mode | 0o200
+        if st.st_mode == new_mode:
+            return False
+        try:
+            os.chmod(path, new_mode)
+            os.remove(path)
+            return True
+        except Exception:
+            return False
 
-  def _on_error(function, path, excinfo):
-    if not _make_writable_and_remove(path):
-      logging.warning('Failed to %s: %s (%s)', function, path, excinfo)
+    def _on_error(function, path, excinfo):
+        if not _make_writable_and_remove(path):
+            logging.warning('Failed to %s: %s (%s)', function, path, excinfo)
 
-  shutil.rmtree(path, onerror=_on_error)
+    shutil.rmtree(path, onerror=_on_error)
 
 
 def clean_up_old_installations(skip_dir):
-  """Removes Python installations other than |skip_dir|.
+    """Removes Python installations other than |skip_dir|.
 
   This includes an "in-use" check against the "python.exe" in a given directory
   to avoid removing Python executables that are currently running. We need
   this because our Python bootstrap may be run after (and by) other software
   that is using the bootstrapped Python!
   """
-  root_contents = os.listdir(ROOT_DIR)
-  for f in ('win_tools-*_bin', 'python27*_bin', 'git-*_bin', 'bootstrap-*_bin'):
-    for entry in fnmatch.filter(root_contents, f):
-      full_entry = os.path.join(ROOT_DIR, entry)
-      if full_entry == skip_dir or not os.path.isdir(full_entry):
-        continue
+    root_contents = os.listdir(ROOT_DIR)
+    for f in ('win_tools-*_bin', 'python27*_bin', 'git-*_bin',
+              'bootstrap-*_bin'):
+        for entry in fnmatch.filter(root_contents, f):
+            full_entry = os.path.join(ROOT_DIR, entry)
+            if full_entry == skip_dir or not os.path.isdir(full_entry):
+                continue
 
-      logging.info('Cleaning up old installation %r', entry)
-      if not _toolchain_in_use(full_entry):
-        _safe_rmtree(full_entry)
-      else:
-        logging.info('Toolchain at %r is in-use; skipping', full_entry)
+            logging.info('Cleaning up old installation %r', entry)
+            if not _toolchain_in_use(full_entry):
+                _safe_rmtree(full_entry)
+            else:
+                logging.info('Toolchain at %r is in-use; skipping', full_entry)
 
 
 # Version of "git_postprocess" system configuration (see |git_postprocess|).
@@ -246,111 +250,110 @@ GIT_POSTPROCESS_VERSION = '2'
 
 
 def git_get_mingw_dir(git_directory):
-  """Returns (str) The "mingw" directory in a Git installation, or None."""
-  for candidate in ('mingw64', 'mingw32'):
-    mingw_dir = os.path.join(git_directory, candidate)
-    if os.path.isdir(mingw_dir):
-      return mingw_dir
-  return None
+    """Returns (str) The "mingw" directory in a Git installation, or None."""
+    for candidate in ('mingw64', 'mingw32'):
+        mingw_dir = os.path.join(git_directory, candidate)
+        if os.path.isdir(mingw_dir):
+            return mingw_dir
+    return None
 
 
 def git_postprocess(template, git_directory):
-  # Update depot_tools files for "git help <command>"
-  mingw_dir = git_get_mingw_dir(git_directory)
-  if mingw_dir:
-    docsrc = os.path.join(ROOT_DIR, 'man', 'html')
-    git_docs_dir = os.path.join(mingw_dir, 'share', 'doc', 'git-doc')
-    for name in os.listdir(docsrc):
-      maybe_copy(
-          os.path.join(docsrc, name),
-          os.path.join(git_docs_dir, name))
-  else:
-    logging.info('Could not find mingw directory for %r.', git_directory)
-
-  # Create Git templates and configure its base layout.
-  for stub_name, relpath in WIN_GIT_STUBS.items():
-    stub_template = template._replace(GIT_PROGRAM=relpath)
-    stub_template.maybe_install(
-        'git.template.bat',
-        os.path.join(ROOT_DIR, stub_name))
-
-  # Set-up our system configuration environment. The following set of
-  # parameters is versioned by "GIT_POSTPROCESS_VERSION". If they change,
-  # update "GIT_POSTPROCESS_VERSION" accordingly.
-  def configure_git_system():
-    git_bat_path = os.path.join(ROOT_DIR, 'git.bat')
-    _check_call([git_bat_path, 'config', '--system', 'core.autocrlf', 'false'])
-    _check_call([git_bat_path, 'config', '--system', 'core.filemode', 'false'])
-    _check_call([git_bat_path, 'config', '--system', 'core.preloadindex',
-                 'true'])
-    _check_call([git_bat_path, 'config', '--system', 'core.fscache', 'true'])
-    _check_call([git_bat_path, 'config', '--system', 'protocol.version', '2'])
-
-  call_if_outdated(
-      os.path.join(git_directory, '.git_postprocess'),
-      GIT_POSTPROCESS_VERSION,
-      configure_git_system)
+    # Update depot_tools files for "git help <command>"
+    mingw_dir = git_get_mingw_dir(git_directory)
+    if mingw_dir:
+        docsrc = os.path.join(ROOT_DIR, 'man', 'html')
+        git_docs_dir = os.path.join(mingw_dir, 'share', 'doc', 'git-doc')
+        for name in os.listdir(docsrc):
+            maybe_copy(os.path.join(docsrc, name),
+                       os.path.join(git_docs_dir, name))
+    else:
+        logging.info('Could not find mingw directory for %r.', git_directory)
+
+    # Create Git templates and configure its base layout.
+    for stub_name, relpath in WIN_GIT_STUBS.items():
+        stub_template = template._replace(GIT_PROGRAM=relpath)
+        stub_template.maybe_install('git.template.bat',
+                                    os.path.join(ROOT_DIR, stub_name))
+
+    # Set-up our system configuration environment. The following set of
+    # parameters is versioned by "GIT_POSTPROCESS_VERSION". If they change,
+    # update "GIT_POSTPROCESS_VERSION" accordingly.
+    def configure_git_system():
+        git_bat_path = os.path.join(ROOT_DIR, 'git.bat')
+        _check_call(
+            [git_bat_path, 'config', '--system', 'core.autocrlf', 'false'])
+        _check_call(
+            [git_bat_path, 'config', '--system', 'core.filemode', 'false'])
+        _check_call(
+            [git_bat_path, 'config', '--system', 'core.preloadindex', 'true'])
+        _check_call(
+            [git_bat_path, 'config', '--system', 'core.fscache', 'true'])
+        _check_call(
+            [git_bat_path, 'config', '--system', 'protocol.version', '2'])
+
+    call_if_outdated(os.path.join(git_directory, '.git_postprocess'),
+                     GIT_POSTPROCESS_VERSION, configure_git_system)
 
 
 def main(argv):
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--verbose', action='store_true')
-  parser.add_argument('--bootstrap-name', required=True,
-                      help='The directory of the Python installation.')
-  args = parser.parse_args(argv)
-
-  logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)
-
-  template = Template.empty()._replace(
-      PYTHON_RELDIR=os.path.join(args.bootstrap_name, 'python'),
-      PYTHON_BIN_RELDIR=os.path.join(args.bootstrap_name, 'python', 'bin'),
-      PYTHON_BIN_RELDIR_UNIX=posixpath.join(
-          args.bootstrap_name, 'python', 'bin'),
-      PYTHON3_BIN_RELDIR=os.path.join(args.bootstrap_name, 'python3', 'bin'),
-      PYTHON3_BIN_RELDIR_UNIX=posixpath.join(
-          args.bootstrap_name, 'python3', 'bin'),
-      GIT_BIN_RELDIR=os.path.join(args.bootstrap_name, 'git'),
-      GIT_BIN_RELDIR_UNIX=posixpath.join(args.bootstrap_name, 'git'))
-
-  bootstrap_dir = os.path.join(ROOT_DIR, args.bootstrap_name)
-
-  # Clean up any old Python and Git installations.
-  clean_up_old_installations(bootstrap_dir)
-
-  if IS_WIN:
-    git_postprocess(template, os.path.join(bootstrap_dir, 'git'))
-    templates = [
-        ('git-bash.template.sh', 'git-bash', ROOT_DIR),
-        ('python27.bat', 'python.bat', ROOT_DIR),
-        ('python3.bat', 'python3.bat', ROOT_DIR),
-    ]
-    for src_name, dst_name, dst_dir in templates:
-      # Re-evaluate and regenerate our root templated files.
-      template.maybe_install(src_name, os.path.join(dst_dir, dst_name))
-
-  # Emit our Python bin depot-tools-relative directory. This is read by
-  # python.bat, python3.bat, vpython[.bat] and vpython3[.bat] to identify the
-  # path of the current Python installation.
-  #
-  # We use this indirection so that upgrades can change this pointer to
-  # redirect "python.bat" to a new Python installation. We can't just update
-  # "python.bat" because batch file executions reload the batch file and seek
-  # to the previous cursor in between every command, so changing the batch
-  # file contents could invalidate any existing executions.
-  #
-  # The intention is that the batch file itself never needs to change when
-  # switching Python versions.
-
-  maybe_update(
-      template.PYTHON_BIN_RELDIR,
-      os.path.join(ROOT_DIR, 'python_bin_reldir.txt'))
-
-  maybe_update(
-      template.PYTHON3_BIN_RELDIR,
-      os.path.join(ROOT_DIR, 'python3_bin_reldir.txt'))
-
-  return 0
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--verbose', action='store_true')
+    parser.add_argument('--bootstrap-name',
+                        required=True,
+                        help='The directory of the Python installation.')
+    args = parser.parse_args(argv)
+
+    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.WARN)
+
+    template = Template.empty()._replace(
+        PYTHON_RELDIR=os.path.join(args.bootstrap_name, 'python'),
+        PYTHON_BIN_RELDIR=os.path.join(args.bootstrap_name, 'python', 'bin'),
+        PYTHON_BIN_RELDIR_UNIX=posixpath.join(args.bootstrap_name, 'python',
+                                              'bin'),
+        PYTHON3_BIN_RELDIR=os.path.join(args.bootstrap_name, 'python3', 'bin'),
+        PYTHON3_BIN_RELDIR_UNIX=posixpath.join(args.bootstrap_name, 'python3',
+                                               'bin'),
+        GIT_BIN_RELDIR=os.path.join(args.bootstrap_name, 'git'),
+        GIT_BIN_RELDIR_UNIX=posixpath.join(args.bootstrap_name, 'git'))
+
+    bootstrap_dir = os.path.join(ROOT_DIR, args.bootstrap_name)
+
+    # Clean up any old Python and Git installations.
+    clean_up_old_installations(bootstrap_dir)
+
+    if IS_WIN:
+        git_postprocess(template, os.path.join(bootstrap_dir, 'git'))
+        templates = [
+            ('git-bash.template.sh', 'git-bash', ROOT_DIR),
+            ('python27.bat', 'python.bat', ROOT_DIR),
+            ('python3.bat', 'python3.bat', ROOT_DIR),
+        ]
+        for src_name, dst_name, dst_dir in templates:
+            # Re-evaluate and regenerate our root templated files.
+            template.maybe_install(src_name, os.path.join(dst_dir, dst_name))
+
+    # Emit our Python bin depot-tools-relative directory. This is read by
+    # python.bat, python3.bat, vpython[.bat] and vpython3[.bat] to identify the
+    # path of the current Python installation.
+    #
+    # We use this indirection so that upgrades can change this pointer to
+    # redirect "python.bat" to a new Python installation. We can't just update
+    # "python.bat" because batch file executions reload the batch file and seek
+    # to the previous cursor in between every command, so changing the batch
+    # file contents could invalidate any existing executions.
+    #
+    # The intention is that the batch file itself never needs to change when
+    # switching Python versions.
+
+    maybe_update(template.PYTHON_BIN_RELDIR,
+                 os.path.join(ROOT_DIR, 'python_bin_reldir.txt'))
+
+    maybe_update(template.PYTHON3_BIN_RELDIR,
+                 os.path.join(ROOT_DIR, 'python3_bin_reldir.txt'))
+
+    return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+    sys.exit(main(sys.argv[1:]))
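
The stamp mechanism implemented by call_if_outdated can be exercised on its own; in this sketch the stamp path, version string, and action are made up for illustration:

    import os
    import tempfile

    def _configure():
        print('running one-time configuration...')

    stamp = os.path.join(tempfile.gettempdir(), '.demo_postprocess')
    # First call: the stamp is missing (or holds a different version), so
    # _configure() runs and the stamp is rewritten with '2' -> prints True.
    print(call_if_outdated(stamp, '2', _configure))
    # Second call: versions match, so nothing runs -> prints False.
    print(call_if_outdated(stamp, '2', _configure))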

+ 0 - 1
breakpad.py

@@ -1,7 +1,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """This file remains here because of multiple find_depot_tools.py scripts
 that attempt to import it as a way to find depot_tools.
 

+ 64 - 61
clang_format.py

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Redirects to the version of clang-format checked into the Chrome tree.
 
 clang-format binaries are pulled down from Google Cloud Storage whenever you
@@ -18,77 +17,81 @@ import sys
 
 
 class NotFoundError(Exception):
-  """A file could not be found."""
-  def __init__(self, e):
-    Exception.__init__(self,
-        'Problem while looking for clang-format in Chromium source tree:\n'
-        '%s' % e)
+    """A file could not be found."""
+    def __init__(self, e):
+        Exception.__init__(
+            self,
+            'Problem while looking for clang-format in Chromium source tree:\n'
+            '%s' % e)
 
 
 def FindClangFormatToolInChromiumTree():
-  """Return a path to the clang-format executable, or die trying."""
-  primary_solution_path = gclient_paths.GetPrimarySolutionPath()
-  if primary_solution_path:
-    bin_path = os.path.join(primary_solution_path, 'third_party',
-                            'clang-format',
-                            'clang-format' + gclient_paths.GetExeSuffix())
-    if os.path.exists(bin_path):
-      return bin_path
-
-  bin_path = gclient_paths.GetBuildtoolsPlatformBinaryPath()
-  if not bin_path:
-    raise NotFoundError(
-        'Could not find checkout in any parent of the current path.\n'
-        'Set CHROMIUM_BUILDTOOLS_PATH to use outside of a chromium checkout.')
-
-  tool_path = os.path.join(bin_path,
-                           'clang-format' + gclient_paths.GetExeSuffix())
-  if not os.path.exists(tool_path):
-    raise NotFoundError('File does not exist: %s' % tool_path)
-  return tool_path
+    """Return a path to the clang-format executable, or die trying."""
+    primary_solution_path = gclient_paths.GetPrimarySolutionPath()
+    if primary_solution_path:
+        bin_path = os.path.join(primary_solution_path, 'third_party',
+                                'clang-format',
+                                'clang-format' + gclient_paths.GetExeSuffix())
+        if os.path.exists(bin_path):
+            return bin_path
+
+    bin_path = gclient_paths.GetBuildtoolsPlatformBinaryPath()
+    if not bin_path:
+        raise NotFoundError(
+            'Could not find checkout in any parent of the current path.\n'
+            'Set CHROMIUM_BUILDTOOLS_PATH to use outside of a chromium '
+            'checkout.')
+
+    tool_path = os.path.join(bin_path,
+                             'clang-format' + gclient_paths.GetExeSuffix())
+    if not os.path.exists(tool_path):
+        raise NotFoundError('File does not exist: %s' % tool_path)
+    return tool_path
 
 
 def FindClangFormatScriptInChromiumTree(script_name):
-  """Return a path to a clang-format helper script, or die trying."""
-  primary_solution_path = gclient_paths.GetPrimarySolutionPath()
-  if primary_solution_path:
-    script_path = os.path.join(primary_solution_path, 'third_party',
-                               'clang-format', 'script', script_name)
-    if os.path.exists(script_path):
-      return script_path
-
-  tools_path = gclient_paths.GetBuildtoolsPath()
-  if not tools_path:
-    raise NotFoundError(
-        'Could not find checkout in any parent of the current path.\n',
-        'Set CHROMIUM_BUILDTOOLS_PATH to use outside of a chromium checkout.')
-
-  script_path = os.path.join(tools_path, 'clang_format', 'script', script_name)
-  if not os.path.exists(script_path):
-    raise NotFoundError('File does not exist: %s' % script_path)
-  return script_path
+    """Return a path to a clang-format helper script, or die trying."""
+    primary_solution_path = gclient_paths.GetPrimarySolutionPath()
+    if primary_solution_path:
+        script_path = os.path.join(primary_solution_path, 'third_party',
+                                   'clang-format', 'script', script_name)
+        if os.path.exists(script_path):
+            return script_path
+
+    tools_path = gclient_paths.GetBuildtoolsPath()
+    if not tools_path:
+        raise NotFoundError(
+            'Could not find checkout in any parent of the current path.\n',
+            'Set CHROMIUM_BUILDTOOLS_PATH to use outside of a chromium '
+            'checkout.')
+
+    script_path = os.path.join(tools_path, 'clang_format', 'script',
+                               script_name)
+    if not os.path.exists(script_path):
+        raise NotFoundError('File does not exist: %s' % script_path)
+    return script_path
 
 
 def main(args):
-  try:
-    tool = FindClangFormatToolInChromiumTree()
-  except NotFoundError as e:
-    sys.stderr.write("%s\n" % str(e))
-    return 1
+    try:
+        tool = FindClangFormatToolInChromiumTree()
+    except NotFoundError as e:
+        sys.stderr.write("%s\n" % str(e))
+        return 1
 
-  # Add some visibility to --help showing where the tool lives, since this
-  # redirection can be a little opaque.
-  help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
-  if any(match in args for match in help_syntax):
-    print(
-        '\nDepot tools redirects you to the clang-format at:\n    %s\n' % tool)
+    # Add some visibility to --help showing where the tool lives, since this
+    # redirection can be a little opaque.
+    help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
+    if any(match in args for match in help_syntax):
+        print('\nDepot tools redirects you to the clang-format at:\n    %s\n' %
+              tool)
 
-  return subprocess.call([tool] + args)
+    return subprocess.call([tool] + args)
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
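
Assuming depot_tools is on sys.path, the redirect can also be driven from another script; the file name below is hypothetical, and outside a Chromium checkout (with no CHROMIUM_BUILDTOOLS_PATH set) the call returns 1 after printing the NotFoundError message:

    import clang_format

    # Formats the file in place using the clang-format checked into the tree.
    rc = clang_format.main(['-i', 'base/logging.cc'])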

+ 45 - 45
compile_single_file.py

@@ -10,64 +10,64 @@ import os
 import subprocess
 import sys
 
-
 DEPOT_TOOLS_DIR = os.path.dirname(os.path.realpath(__file__))
 
+
 # This function is inspired from the one in src/tools/vim/ninja-build.vim in the
 # Chromium repository.
 def path_to_source_root(path):
-  """Returns the absolute path to the chromium source root."""
-  candidate = os.path.dirname(path)
-  # This is a list of directories that need to identify the src directory. The
-  # shorter it is, the more likely it's wrong (checking for just
-  # "build/common.gypi" would find "src/v8" for files below "src/v8", as
-  # "src/v8/build/common.gypi" exists). The longer it is, the more likely it is
-  # to break when we rename directories.
-  fingerprints = ['chrome', 'net', 'v8', 'build', 'skia']
-  while candidate and not all(
-      os.path.isdir(os.path.join(candidate, fp)) for fp in fingerprints):
-    new_candidate = os.path.dirname(candidate)
-    if new_candidate == candidate:
-      raise Exception("Couldn't find source-dir from %s" % path)
-    candidate = os.path.dirname(candidate)
-  return candidate
+    """Returns the absolute path to the chromium source root."""
+    candidate = os.path.dirname(path)
+    # This is a list of directories that need to identify the src directory. The
+    # shorter it is, the more likely it's wrong (checking for just
+    # "build/common.gypi" would find "src/v8" for files below "src/v8", as
+    # "src/v8/build/common.gypi" exists). The longer it is, the more likely it
+    # is to break when we rename directories.
+    fingerprints = ['chrome', 'net', 'v8', 'build', 'skia']
+    while candidate and not all(
+            os.path.isdir(os.path.join(candidate, fp)) for fp in fingerprints):
+        new_candidate = os.path.dirname(candidate)
+        if new_candidate == candidate:
+            raise Exception("Couldn't find source-dir from %s" % path)
+        candidate = os.path.dirname(candidate)
+    return candidate
 
 
 def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument(
-      '--file-path',
-      help='The file path, could be absolute or relative to the current '
-           'directory.',
-      required=True)
-  parser.add_argument(
-      '--build-dir',
-      help='The build directory, relative to the source directory.',
-      required=True)
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--file-path',
+        help='The file path, could be absolute or relative to the current '
+        'directory.',
+        required=True)
+    parser.add_argument(
+        '--build-dir',
+        help='The build directory, relative to the source directory.',
+        required=True)
 
-  options = parser.parse_args()
+    options = parser.parse_args()
 
-  src_dir = path_to_source_root(os.path.abspath(options.file_path))
-  abs_build_dir = os.path.join(src_dir, options.build_dir)
-  src_relpath = os.path.relpath(options.file_path, abs_build_dir)
+    src_dir = path_to_source_root(os.path.abspath(options.file_path))
+    abs_build_dir = os.path.join(src_dir, options.build_dir)
+    src_relpath = os.path.relpath(options.file_path, abs_build_dir)
 
-  print('Building %s' % options.file_path)
+    print('Building %s' % options.file_path)
 
-  carets = '^'
-  if sys.platform == 'win32':
-    # The caret character has to be escaped on Windows as it's an escape
-    # character.
-    carets = '^^'
+    carets = '^'
+    if sys.platform == 'win32':
+        # The caret character has to be escaped on Windows as it's an escape
+        # character.
+        carets = '^^'
 
-  command = [
-      'python3',
-      os.path.join(DEPOT_TOOLS_DIR, 'ninja.py'), '-C', abs_build_dir,
-      '%s%s' % (src_relpath, carets)
-  ]
-  # |shell| should be set to True on Windows otherwise the carets characters
-  # get dropped from the command line.
-  return subprocess.call(command, shell=sys.platform=='win32')
+    command = [
+        'python3',
+        os.path.join(DEPOT_TOOLS_DIR, 'ninja.py'), '-C', abs_build_dir,
+        '%s%s' % (src_relpath, carets)
+    ]
+    # |shell| should be set to True on Windows otherwise the carets characters
+    # get dropped from the command line.
+    return subprocess.call(command, shell=sys.platform == 'win32')
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
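
As a concrete illustration of the fingerprint walk-up, with a hypothetical checkout at /work/chromium/src that contains chrome/, net/, v8/, build/ and skia/:

    # Climbs parent directories until all fingerprint directories exist.
    root = path_to_source_root('/work/chromium/src/net/http/http_cache.cc')
    assert root == '/work/chromium/src'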

File diff is too large to display
+ 384 - 379
cpplint.py


+ 8 - 7
cpplint_chromium.py

@@ -36,7 +36,7 @@ _RE_PATTERN_POINTER_DECLARATION_WHITESPACE = re.compile(
 
 
 def CheckPointerDeclarationWhitespace(filename, clean_lines, linenum, error):
-  """Checks for Foo *foo declarations.
+    """Checks for Foo *foo declarations.
 
   Args:
     filename: The name of the current file.
@@ -44,9 +44,10 @@ def CheckPointerDeclarationWhitespace(filename, clean_lines, linenum, error):
     linenum: The number of the line to check.
     error: The function to call with any errors found.
   """
-  line = clean_lines.elided[linenum]
-  matched = _RE_PATTERN_POINTER_DECLARATION_WHITESPACE.match(line)
-  if matched:
-    error(filename, linenum, 'whitespace/declaration', 3,
-          'Declaration has space between type name and %s in %s' %
-          (matched.group('pointer_operator'), matched.group(0).strip()))
+    line = clean_lines.elided[linenum]
+    matched = _RE_PATTERN_POINTER_DECLARATION_WHITESPACE.match(line)
+    if matched:
+        error(
+            filename, linenum, 'whitespace/declaration', 3,
+            'Declaration has space between type name and %s in %s' %
+            (matched.group('pointer_operator'), matched.group(0).strip()))

+ 71 - 69
cros

@@ -2,7 +2,6 @@
 # Copyright 2011 The ChromiumOS Authors
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Wrapper for chromite tools.
 
 The script is intend to be symlinked to any number of chromite tools, attempts
@@ -17,7 +16,6 @@ import pathlib
 import subprocess
 import sys
 
-
 # Min version of Python that we *want*.  We warn for older versions.
 MIN_PYTHON_VER_SOFT = (3, 8)
 # Min version of Python that we *require*.  We abort for older versions.
@@ -30,91 +28,95 @@ CIPD_CACHE_DIR = DEPOT_TOOLS_DIR / '.cipd_bin_cros_python2'
 
 
 def _FindChromite(path):
-  """Find the chromite dir in a repo, gclient, or submodule checkout."""
-  path = os.path.abspath(path)
-  # Depending on the checkout type (whether repo chromeos or gclient chrome)
-  # Chromite lives in a different location.
-  roots = (
-      ('.repo', 'chromite/.git'),
-      ('.gclient', 'src/third_party/chromite/.git'),
-      ('src/.gitmodules', 'src/third_party/chromite/.git'),
-  )
-
-  while path != '/':
-    for root, chromite_git_dir in roots:
-      if all(os.path.exists(os.path.join(path, x))
-             for x in [root, chromite_git_dir]):
-        return os.path.dirname(os.path.join(path, chromite_git_dir))
-    path = os.path.dirname(path)
-  return None
+    """Find the chromite dir in a repo, gclient, or submodule checkout."""
+    path = os.path.abspath(path)
+    # Depending on the checkout type (whether repo chromeos or gclient chrome)
+    # Chromite lives in a different location.
+    roots = (
+        ('.repo', 'chromite/.git'),
+        ('.gclient', 'src/third_party/chromite/.git'),
+        ('src/.gitmodules', 'src/third_party/chromite/.git'),
+    )
+
+    while path != '/':
+        for root, chromite_git_dir in roots:
+            if all(
+                    os.path.exists(os.path.join(path, x))
+                    for x in [root, chromite_git_dir]):
+                return os.path.dirname(os.path.join(path, chromite_git_dir))
+        path = os.path.dirname(path)
+    return None
 
 
 def _MissingErrorOut(target):
-  sys.stderr.write("""ERROR: Couldn't find the chromite tool %s.
+    sys.stderr.write("""ERROR: Couldn't find the chromite tool %s.
 
 Please change to a directory inside your ChromiumOS source tree
 and retry.  If you need to setup a ChromiumOS source tree, see
   https://chromium.googlesource.com/chromiumos/docs/+/HEAD/developer_guide.md
 """ % target)
-  return 127
+    return 127
 
 
 def _CheckPythonVersion():
-  """Verify active Python is new enough."""
-  if sys.version_info >= MIN_PYTHON_VER_SOFT:
-    return
-
-  progname = os.path.basename(sys.argv[0])
-  print('%s: ChromiumOS requires Python-%s+, but "%s" is "%s"' %
-        (progname, '.'.join(str(x) for x in MIN_PYTHON_VER_SOFT),
-         sys.executable, sys.version.replace('\n', ' ')),
-        file=sys.stderr)
-  if sys.version_info < MIN_PYTHON_VER_HARD:
-    print('%s: fatal: giving up since Python is too old.' % (progname,),
+    """Verify active Python is new enough."""
+    if sys.version_info >= MIN_PYTHON_VER_SOFT:
+        return
+
+    progname = os.path.basename(sys.argv[0])
+    print('%s: ChromiumOS requires Python-%s+, but "%s" is "%s"' %
+          (progname, '.'.join(str(x) for x in MIN_PYTHON_VER_SOFT),
+           sys.executable, sys.version.replace('\n', ' ')),
           file=sys.stderr)
-    sys.exit(1)
-
-  print('warning: temporarily continuing anyways; you must upgrade soon to '
-        'maintain support.', file=sys.stderr)
+    if sys.version_info < MIN_PYTHON_VER_HARD:
+        print('%s: fatal: giving up since Python is too old.' % (progname, ),
+              file=sys.stderr)
+        sys.exit(1)
+
+    print(
+        'warning: temporarily continuing anyways; you must upgrade soon to '
+        'maintain support.',
+        file=sys.stderr)
 
 
 def _BootstrapVpython27():
-  """Installs the vpython2.7 packages into the cipd cache directory."""
-  subprocess.run(['cipd', 'ensure',
-                  '-log-level', 'info',
-                  '-ensure-file',
-                  DEPOT_TOOLS_DIR / 'cipd_manifest_cros_python2.txt',
-                  '-root', CIPD_CACHE_DIR],
-                 check=True)
+    """Installs the vpython2.7 packages into the cipd cache directory."""
+    subprocess.run([
+        'cipd', 'ensure', '-log-level', 'info', '-ensure-file',
+        DEPOT_TOOLS_DIR / 'cipd_manifest_cros_python2.txt', '-root',
+        CIPD_CACHE_DIR
+    ],
+                   check=True)
 
 
 def main():
-  _CheckPythonVersion()
-
-  chromite_dir = _FindChromite(os.getcwd())
-  target = os.path.basename(sys.argv[0])
-  if chromite_dir is None:
-    return _MissingErrorOut(target)
-
-  path = os.path.join(chromite_dir, 'bin', target)
-
-  # Check to see if this is a script requiring vpython2.7.
-  with open(path, 'rb') as fp:
-    shebang = next(fp).strip()
-  interpreter = shebang.split()[-1]
-  if interpreter in (b'python', b'python2', b'python2.7', b'vpython'):
-    _BootstrapVpython27()
-    vpython = CIPD_CACHE_DIR / 'vpython'
-    args = [vpython]
-    if interpreter != b'vpython':
-      args.extend(['-vpython-spec', DEPOT_TOOLS_DIR / 'cros_python2.vpython'])
-    args.append(path)
-    path = vpython
-  else:
-    args = [path]
-
-  os.execv(path, args + sys.argv[1:])
+    _CheckPythonVersion()
+
+    chromite_dir = _FindChromite(os.getcwd())
+    target = os.path.basename(sys.argv[0])
+    if chromite_dir is None:
+        return _MissingErrorOut(target)
+
+    path = os.path.join(chromite_dir, 'bin', target)
+
+    # Check to see if this is a script requiring vpython2.7.
+    with open(path, 'rb') as fp:
+        shebang = next(fp).strip()
+    interpreter = shebang.split()[-1]
+    if interpreter in (b'python', b'python2', b'python2.7', b'vpython'):
+        _BootstrapVpython27()
+        vpython = CIPD_CACHE_DIR / 'vpython'
+        args = [vpython]
+        if interpreter != b'vpython':
+            args.extend(
+                ['-vpython-spec', DEPOT_TOOLS_DIR / 'cros_python2.vpython'])
+        args.append(path)
+        path = vpython
+    else:
+        args = [path]
+
+    os.execv(path, args + sys.argv[1:])
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
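
For reference, the checkout detection walks upward until one of the root markers and its chromite .git directory are both present; with the helper defined above (paths below are hypothetical):

    # Anywhere under a ChromiumOS repo checkout rooted at /home/me/chromiumos,
    # this returns '/home/me/chromiumos/chromite'.
    print(_FindChromite('/home/me/chromiumos/src/platform2'))
    # Outside any checkout it returns None.
    print(_FindChromite('/tmp'))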

+ 46 - 43
detect_host_arch.py

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Outputs host CPU architecture in format recognized by gyp."""
 
 from __future__ import print_function
@@ -10,55 +9,59 @@ from __future__ import print_function
 import platform
 import re
 
+
 def HostArch():
-  """Returns the host architecture with a predictable string."""
-  host_arch = platform.machine().lower()
-  host_processor = platform.processor().lower()
+    """Returns the host architecture with a predictable string."""
+    host_arch = platform.machine().lower()
+    host_processor = platform.processor().lower()
 
-  # Convert machine type to format recognized by gyp.
-  if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
-    host_arch = 'x86'
-  elif host_arch in ['x86_64', 'amd64']:
-    host_arch = 'x64'
-  elif host_arch == 'arm64' or host_arch.startswith('aarch64'):
-    host_arch = 'arm64'
-  elif host_arch.startswith('arm'):
-    host_arch = 'arm'
-  elif host_arch.startswith('mips64'):
-    host_arch = 'mips64'
-  elif host_arch.startswith('mips'):
-    host_arch = 'mips'
-  elif host_arch.startswith('ppc') or host_processor == 'powerpc':
-    host_arch = 'ppc'
-  elif host_arch.startswith('s390'):
-    host_arch = 's390'
-  elif host_arch.startswith('riscv'):
-    host_arch = 'riscv64'
+    # Convert machine type to format recognized by gyp.
+    if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
+        host_arch = 'x86'
+    elif host_arch in ['x86_64', 'amd64']:
+        host_arch = 'x64'
+    elif host_arch == 'arm64' or host_arch.startswith('aarch64'):
+        host_arch = 'arm64'
+    elif host_arch.startswith('arm'):
+        host_arch = 'arm'
+    elif host_arch.startswith('mips64'):
+        host_arch = 'mips64'
+    elif host_arch.startswith('mips'):
+        host_arch = 'mips'
+    elif host_arch.startswith('ppc') or host_processor == 'powerpc':
+        host_arch = 'ppc'
+    elif host_arch.startswith('s390'):
+        host_arch = 's390'
+    elif host_arch.startswith('riscv'):
+        host_arch = 'riscv64'
 
-  if host_arch == 'arm64':
-    host_platform = platform.architecture()
-    if len(host_platform) > 1:
-      if host_platform[1].lower() == 'windowspe':
-        # Special case for Windows on Arm: windows-386 packages no longer work
-        # so use the x64 emulation (this restricts us to Windows 11). Python
-        # 32-bit returns the host_arch as arm64, 64-bit does not.
-        return 'x64'
+    if host_arch == 'arm64':
+        host_platform = platform.architecture()
+        if len(host_platform) > 1:
+            if host_platform[1].lower() == 'windowspe':
+                # Special case for Windows on Arm: windows-386 packages no
+                # longer work so use the x64 emulation (this restricts us to
+                # Windows 11). Python 32-bit returns the host_arch as arm64,
+                # 64-bit does not.
+                return 'x64'
 
-  # platform.machine is based on running kernel. It's possible to use 64-bit
-  # kernel with 32-bit userland, e.g. to give linker slightly more memory.
-  # Distinguish between different userland bitness by querying
-  # the python binary.
-  if host_arch == 'x64' and platform.architecture()[0] == '32bit':
-    host_arch = 'x86'
-  if host_arch == 'arm64' and platform.architecture()[0] == '32bit':
-    host_arch = 'arm'
+    # platform.machine is based on running kernel. It's possible to use 64-bit
+    # kernel with 32-bit userland, e.g. to give linker slightly more memory.
+    # Distinguish between different userland bitness by querying
+    # the python binary.
+    if host_arch == 'x64' and platform.architecture()[0] == '32bit':
+        host_arch = 'x86'
+    if host_arch == 'arm64' and platform.architecture()[0] == '32bit':
+        host_arch = 'arm'
+
+    return host_arch
 
-  return host_arch
 
 def DoMain(_):
-  """Hook to be called from gyp without starting a separate python
+    """Hook to be called from gyp without starting a separate python
   interpreter."""
-  return HostArch()
+    return HostArch()
+
 
 if __name__ == '__main__':
-  print(DoMain([]))
+    print(DoMain([]))
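
A quick way to see the normalization in action is to import the module directly (assuming depot_tools is on sys.path); the printed value depends on the host, so the comments below are typical examples rather than guarantees:

    import detect_host_arch

    # e.g. 'x64' on 64-bit Intel/AMD hosts, 'arm64' on Apple Silicon,
    # 'arm' on a 32-bit ARM userland.
    print(detect_host_arch.HostArch())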

+ 592 - 533
download_from_google_storage.py

@@ -21,14 +21,13 @@ import time
 
 import subprocess2
 
-
 # Env vars that tempdir can be gotten from; minimally, this
 # needs to match python's tempfile module and match normal
 # unix standards.
 _TEMPDIR_ENV_VARS = ('TMPDIR', 'TEMP', 'TMP')
 
-GSUTIL_DEFAULT_PATH = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
+GSUTIL_DEFAULT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+                                   'gsutil.py')
 # Maps sys.platform to what we actually want to call them.
 PLATFORM_MAPPING = {
     'cygwin': 'win',
@@ -42,345 +41,368 @@ PLATFORM_MAPPING = {
 
 
 class InvalidFileError(IOError):
-  pass
+    pass
 
 
 class InvalidPlatformError(Exception):
-  pass
+    pass
 
 
 def GetNormalizedPlatform():
-  """Returns the result of sys.platform accounting for cygwin.
+    """Returns the result of sys.platform accounting for cygwin.
   Under cygwin, this will always return "win32" like the native Python."""
-  if sys.platform == 'cygwin':
-    return 'win32'
-  return sys.platform
+    if sys.platform == 'cygwin':
+        return 'win32'
+    return sys.platform
+
 
 # Common utilities
 class Gsutil(object):
-  """Call gsutil with some predefined settings.  This is a convenience object,
+    """Call gsutil with some predefined settings.  This is a convenience object,
   and is also immutable.
 
   HACK: This object is used directly by the external script
     `<depot_tools>/win_toolchain/get_toolchain_if_necessary.py`
   """
 
-  MAX_TRIES = 5
-  RETRY_BASE_DELAY = 5.0
-  RETRY_DELAY_MULTIPLE = 1.3
-  VPYTHON3 = ('vpython3.bat'
-              if GetNormalizedPlatform() == 'win32' else 'vpython3')
-
-  def __init__(self, path, boto_path=None):
-    if not os.path.exists(path):
-      raise FileNotFoundError('GSUtil not found in %s' % path)
-    self.path = path
-    self.boto_path = boto_path
-
-  def get_sub_env(self):
-    env = os.environ.copy()
-    if self.boto_path == os.devnull:
-      env['AWS_CREDENTIAL_FILE'] = ''
-      env['BOTO_CONFIG'] = ''
-    elif self.boto_path:
-      env['AWS_CREDENTIAL_FILE'] = self.boto_path
-      env['BOTO_CONFIG'] = self.boto_path
-
-    if PLATFORM_MAPPING[sys.platform] != 'win':
-      env.update((x, "/tmp") for x in _TEMPDIR_ENV_VARS)
-
-    return env
-
-  def call(self, *args):
-    cmd = [self.VPYTHON3, self.path]
-    cmd.extend(args)
-    return subprocess2.call(cmd, env=self.get_sub_env())
-
-  def check_call(self, *args):
-    cmd = [self.VPYTHON3, self.path]
-    cmd.extend(args)
-    ((out, err), code) = subprocess2.communicate(
-        cmd,
-        stdout=subprocess2.PIPE,
-        stderr=subprocess2.PIPE,
-        env=self.get_sub_env())
-
-    out = out.decode('utf-8', 'replace')
-    err = err.decode('utf-8', 'replace')
-
-    # Parse output.
-    status_code_match = re.search('status=([0-9]+)', err)
-    if status_code_match:
-      return (int(status_code_match.group(1)), out, err)
-    if ('ServiceException: 401 Anonymous' in err):
-      return (401, out, err)
-    if ('You are attempting to access protected data with '
-        'no configured credentials.' in err):
-      return (403, out, err)
-    if 'matched no objects' in err or 'No URLs matched' in err:
-      return (404, out, err)
-    return (code, out, err)
-
-  def check_call_with_retries(self, *args):
-    delay = self.RETRY_BASE_DELAY
-    for i in range(self.MAX_TRIES):
-      code, out, err = self.check_call(*args)
-      if not code or i == self.MAX_TRIES - 1:
-        break
-
-      time.sleep(delay)
-      delay *= self.RETRY_DELAY_MULTIPLE
-
-    return code, out, err
+    MAX_TRIES = 5
+    RETRY_BASE_DELAY = 5.0
+    RETRY_DELAY_MULTIPLE = 1.3
+    VPYTHON3 = ('vpython3.bat'
+                if GetNormalizedPlatform() == 'win32' else 'vpython3')
+
+    def __init__(self, path, boto_path=None):
+        if not os.path.exists(path):
+            raise FileNotFoundError('GSUtil not found in %s' % path)
+        self.path = path
+        self.boto_path = boto_path
+
+    def get_sub_env(self):
+        env = os.environ.copy()
+        if self.boto_path == os.devnull:
+            env['AWS_CREDENTIAL_FILE'] = ''
+            env['BOTO_CONFIG'] = ''
+        elif self.boto_path:
+            env['AWS_CREDENTIAL_FILE'] = self.boto_path
+            env['BOTO_CONFIG'] = self.boto_path
+
+        if PLATFORM_MAPPING[sys.platform] != 'win':
+            env.update((x, "/tmp") for x in _TEMPDIR_ENV_VARS)
+
+        return env
+
+    def call(self, *args):
+        cmd = [self.VPYTHON3, self.path]
+        cmd.extend(args)
+        return subprocess2.call(cmd, env=self.get_sub_env())
+
+    def check_call(self, *args):
+        cmd = [self.VPYTHON3, self.path]
+        cmd.extend(args)
+        ((out, err), code) = subprocess2.communicate(cmd,
+                                                     stdout=subprocess2.PIPE,
+                                                     stderr=subprocess2.PIPE,
+                                                     env=self.get_sub_env())
+
+        out = out.decode('utf-8', 'replace')
+        err = err.decode('utf-8', 'replace')
+
+        # Parse output.
+        status_code_match = re.search('status=([0-9]+)', err)
+        if status_code_match:
+            return (int(status_code_match.group(1)), out, err)
+        if ('ServiceException: 401 Anonymous' in err):
+            return (401, out, err)
+        if ('You are attempting to access protected data with '
+                'no configured credentials.' in err):
+            return (403, out, err)
+        if 'matched no objects' in err or 'No URLs matched' in err:
+            return (404, out, err)
+        return (code, out, err)
+
+    def check_call_with_retries(self, *args):
+        delay = self.RETRY_BASE_DELAY
+        for i in range(self.MAX_TRIES):
+            code, out, err = self.check_call(*args)
+            if not code or i == self.MAX_TRIES - 1:
+                break
+
+            time.sleep(delay)
+            delay *= self.RETRY_DELAY_MULTIPLE
+
+        return code, out, err
 
 
 def check_platform(target):
-  """Checks if any parent directory of target matches (win|mac|linux)."""
-  assert os.path.isabs(target)
-  root, target_name = os.path.split(target)
-  if not target_name:
-    return None
-  if target_name in ('linux', 'mac', 'win'):
-    return target_name
-  return check_platform(root)
+    """Checks if any parent directory of target matches (win|mac|linux)."""
+    assert os.path.isabs(target)
+    root, target_name = os.path.split(target)
+    if not target_name:
+        return None
+    if target_name in ('linux', 'mac', 'win'):
+        return target_name
+    return check_platform(root)
 
 
 def get_sha1(filename):
-  sha1 = hashlib.sha1()
-  with open(filename, 'rb') as f:
-    while True:
-      # Read in 1mb chunks, so it doesn't all have to be loaded into memory.
-      chunk = f.read(1024*1024)
-      if not chunk:
-        break
-      sha1.update(chunk)
-  return sha1.hexdigest()
+    sha1 = hashlib.sha1()
+    with open(filename, 'rb') as f:
+        while True:
+            # Read in 1mb chunks, so it doesn't all have to be loaded into
+            # memory.
+            chunk = f.read(1024 * 1024)
+            if not chunk:
+                break
+            sha1.update(chunk)
+    return sha1.hexdigest()
 
 
 # Download-specific code starts here
 
+
 def enumerate_input(input_filename, directory, recursive, ignore_errors, output,
                     sha1_file, auto_platform):
-  if sha1_file:
-    if not os.path.exists(input_filename):
-      if not ignore_errors:
-        raise FileNotFoundError(
-          '{} not found when attempting enumerate files to download.'.format(
-          input_filename))
-      print('%s not found.' % input_filename, file=sys.stderr)
-    with open(input_filename, 'rb') as f:
-      sha1_match = re.match(b'^([A-Za-z0-9]{40})$', f.read(1024).rstrip())
-      if sha1_match:
-        yield (sha1_match.groups(1)[0].decode('utf-8'), output)
-        return
-    if not ignore_errors:
-      raise InvalidFileError('No sha1 sum found in %s.' % input_filename)
-    print('No sha1 sum found in %s.' % input_filename, file=sys.stderr)
-    return
-
-  if not directory:
-    yield (input_filename, output)
-    return
-
-  for root, dirs, files in os.walk(input_filename):
-    if not recursive:
-      for item in dirs[:]:
-        dirs.remove(item)
-    else:
-      for exclude in ['.svn', '.git']:
-        if exclude in dirs:
-          dirs.remove(exclude)
-    for filename in files:
-      full_path = os.path.join(root, filename)
-      if full_path.endswith('.sha1'):
-        if auto_platform:
-          # Skip if the platform does not match.
-          target_platform = check_platform(os.path.abspath(full_path))
-          if not target_platform:
-            err = ('--auto_platform passed in but no platform name found in '
-                   'the path of %s' % full_path)
+    if sha1_file:
+        if not os.path.exists(input_filename):
             if not ignore_errors:
-              raise InvalidFileError(err)
-            print(err, file=sys.stderr)
-            continue
-          current_platform = PLATFORM_MAPPING[sys.platform]
-          if current_platform != target_platform:
-            continue
+                raise FileNotFoundError(
+                    '{} not found when attempting enumerate files to download.'.
+                    format(input_filename))
+            print('%s not found.' % input_filename, file=sys.stderr)
+        with open(input_filename, 'rb') as f:
+            sha1_match = re.match(b'^([A-Za-z0-9]{40})$', f.read(1024).rstrip())
+            if sha1_match:
+                yield (sha1_match.groups(1)[0].decode('utf-8'), output)
+                return
+        if not ignore_errors:
+            raise InvalidFileError('No sha1 sum found in %s.' % input_filename)
+        print('No sha1 sum found in %s.' % input_filename, file=sys.stderr)
+        return
+
+    if not directory:
+        yield (input_filename, output)
+        return
 
-        with open(full_path, 'rb') as f:
-          sha1_match = re.match(b'^([A-Za-z0-9]{40})$', f.read(1024).rstrip())
-        if sha1_match:
-          yield (
-              sha1_match.groups(1)[0].decode('utf-8'),
-              full_path.replace('.sha1', '')
-          )
+    for root, dirs, files in os.walk(input_filename):
+        if not recursive:
+            for item in dirs[:]:
+                dirs.remove(item)
         else:
-          if not ignore_errors:
-            raise InvalidFileError('No sha1 sum found in %s.' % filename)
-          print('No sha1 sum found in %s.' % filename, file=sys.stderr)
+            for exclude in ['.svn', '.git']:
+                if exclude in dirs:
+                    dirs.remove(exclude)
+        for filename in files:
+            full_path = os.path.join(root, filename)
+            if full_path.endswith('.sha1'):
+                if auto_platform:
+                    # Skip if the platform does not match.
+                    target_platform = check_platform(os.path.abspath(full_path))
+                    if not target_platform:
+                        err = ('--auto_platform passed in but no platform name '
+                               'found in the path of %s' % full_path)
+                        if not ignore_errors:
+                            raise InvalidFileError(err)
+                        print(err, file=sys.stderr)
+                        continue
+                    current_platform = PLATFORM_MAPPING[sys.platform]
+                    if current_platform != target_platform:
+                        continue
+
+                with open(full_path, 'rb') as f:
+                    sha1_match = re.match(b'^([A-Za-z0-9]{40})$',
+                                          f.read(1024).rstrip())
+                if sha1_match:
+                    yield (sha1_match.groups(1)[0].decode('utf-8'),
+                           full_path.replace('.sha1', ''))
+                else:
+                    if not ignore_errors:
+                        raise InvalidFileError('No sha1 sum found in %s.' %
+                                               filename)
+                    print('No sha1 sum found in %s.' % filename,
+                          file=sys.stderr)
 
 
 def _validate_tar_file(tar, prefix):
-  def _validate(tarinfo):
-    """Returns false if the tarinfo is something we explicitly forbid."""
-    if tarinfo.issym() or tarinfo.islnk():
-      return False
-    if ('../' in tarinfo.name or
-        '..\\' in tarinfo.name or
-        not tarinfo.name.startswith(prefix)):
-      return False
-    return True
-  return all(map(_validate, tar.getmembers()))
-
-def _downloader_worker_thread(thread_num, q, force, base_url,
-                              gsutil, out_q, ret_codes, verbose, extract,
-                              delete=True):
-  while True:
-    input_sha1_sum, output_filename = q.get()
-    if input_sha1_sum is None:
-      return
-    extract_dir = None
-    if extract:
-      if not output_filename.endswith('.tar.gz'):
-        out_q.put('%d> Error: %s is not a tar.gz archive.' % (
-                  thread_num, output_filename))
-        ret_codes.put((1, '%s is not a tar.gz archive.' % (output_filename)))
-        continue
-      extract_dir = output_filename[:-len('.tar.gz')]
-    if os.path.exists(output_filename) and not force:
-      skip = get_sha1(output_filename) == input_sha1_sum
-      if extract:
-        # Additional condition for extract:
-        # 1) extract_dir must exist
-        # 2) .tmp flag file mustn't exist
-        if not os.path.exists(extract_dir):
-          out_q.put('%d> Extract dir %s does not exist, re-downloading...' %
-                    (thread_num, extract_dir))
-          skip = False
-        # .tmp file is created just before extraction and removed just after
-        # extraction. If such file exists, it means the process was terminated
-        # mid-extraction and therefore needs to be extracted again.
-        elif os.path.exists(extract_dir + '.tmp'):
-          out_q.put('%d> Detected tmp flag file for %s, '
-                    're-downloading...' % (thread_num, output_filename))
-          skip = False
-      if skip:
-        continue
-
-    file_url = '%s/%s' % (base_url, input_sha1_sum)
+    def _validate(tarinfo):
+        """Returns false if the tarinfo is something we explicitly forbid."""
+        if tarinfo.issym() or tarinfo.islnk():
+            return False
+        if ('../' in tarinfo.name or '..\\' in tarinfo.name
+                or not tarinfo.name.startswith(prefix)):
+            return False
+        return True
 
-    try:
-      if delete:
-        os.remove(output_filename)  # Delete the file if it exists already.
-    except OSError:
-      if os.path.exists(output_filename):
-        out_q.put('%d> Warning: deleting %s failed.' % (
-            thread_num, output_filename))
-    if verbose:
-      out_q.put('%d> Downloading %s@%s...' % (
-          thread_num, output_filename, input_sha1_sum))
-    code, _, err = gsutil.check_call('cp', file_url, output_filename)
-    if code != 0:
-      if code == 404:
-        out_q.put('%d> File %s for %s does not exist, skipping.' % (
-            thread_num, file_url, output_filename))
-        ret_codes.put((1, 'File %s for %s does not exist.' % (
-            file_url, output_filename)))
-      elif code == 401:
-        out_q.put(
-            """%d> Failed to fetch file %s for %s due to unauthorized access,
-            skipping. Try running `gsutil.py config`.""" %
-            (thread_num, file_url, output_filename))
-        ret_codes.put(
-            (1, 'Failed to fetch file %s for %s due to unauthorized access.' %
-             (file_url, output_filename)))
-      else:
-        # Other error, probably auth related (bad ~/.boto, etc).
-        out_q.put('%d> Failed to fetch file %s for %s, skipping. [Err: %s]' %
-                  (thread_num, file_url, output_filename, err))
-        ret_codes.put((code, 'Failed to fetch file %s for %s. [Err: %s]' %
-                       (file_url, output_filename, err)))
-      continue
-
-    remote_sha1 = get_sha1(output_filename)
-    if remote_sha1 != input_sha1_sum:
-      msg = ('%d> ERROR remote sha1 (%s) does not match expected sha1 (%s).' %
-             (thread_num, remote_sha1, input_sha1_sum))
-      out_q.put(msg)
-      ret_codes.put((20, msg))
-      continue
+    return all(map(_validate, tar.getmembers()))
 
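_validate_tar_file() is the archive-safety gate used before extraction below: it rejects symlinks and hardlinks, any '../' component, and entries that do not live under the expected top-level prefix. A small self-contained check, building throwaway archives in memory (file names invented):

    import io
    import tarfile

    def _make_tar(names):
        # Build an in-memory tar.gz holding empty files with the given names.
        buf = io.BytesIO()
        with tarfile.open(fileobj=buf, mode='w:gz') as tar:
            for name in names:
                tar.addfile(tarfile.TarInfo(name), io.BytesIO(b''))
        buf.seek(0)
        return tarfile.open(fileobj=buf, mode='r:gz')

    print(_validate_tar_file(_make_tar(['pkg/a.txt', 'pkg/sub/b.txt']), 'pkg'))  # True
    print(_validate_tar_file(_make_tar(['pkg/a.txt', '../escape.txt']), 'pkg'))  # False
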
-    if extract:
-      if not tarfile.is_tarfile(output_filename):
-        out_q.put('%d> Error: %s is not a tar.gz archive.' % (
-                  thread_num, output_filename))
-        ret_codes.put((1, '%s is not a tar.gz archive.' % (output_filename)))
-        continue
-      with tarfile.open(output_filename, 'r:gz') as tar:
-        dirname = os.path.dirname(os.path.abspath(output_filename))
-        # If there are long paths inside the tarball we can get extraction
-        # errors on windows due to the 260 path length limit (this includes
-        # pwd). Use the extended path syntax.
-        if sys.platform == 'win32':
-          dirname = '\\\\?\\%s' % dirname
-        if not _validate_tar_file(tar, os.path.basename(extract_dir)):
-          out_q.put('%d> Error: %s contains files outside %s.' % (
-                    thread_num, output_filename, extract_dir))
-          ret_codes.put((1, '%s contains invalid entries.' % (output_filename)))
-          continue
-        if os.path.exists(extract_dir):
-          try:
-            shutil.rmtree(extract_dir)
-            out_q.put('%d> Removed %s...' % (thread_num, extract_dir))
-          except OSError:
-            out_q.put('%d> Warning: Can\'t delete: %s' % (
-                      thread_num, extract_dir))
-            ret_codes.put((1, 'Can\'t delete %s.' % (extract_dir)))
+
+def _downloader_worker_thread(thread_num,
+                              q,
+                              force,
+                              base_url,
+                              gsutil,
+                              out_q,
+                              ret_codes,
+                              verbose,
+                              extract,
+                              delete=True):
+    while True:
+        input_sha1_sum, output_filename = q.get()
+        if input_sha1_sum is None:
+            return
+        extract_dir = None
+        if extract:
+            if not output_filename.endswith('.tar.gz'):
+                out_q.put('%d> Error: %s is not a tar.gz archive.' %
+                          (thread_num, output_filename))
+                ret_codes.put(
+                    (1, '%s is not a tar.gz archive.' % (output_filename)))
+                continue
+            extract_dir = output_filename[:-len('.tar.gz')]
+        if os.path.exists(output_filename) and not force:
+            skip = get_sha1(output_filename) == input_sha1_sum
+            if extract:
+                # Additional condition for extract:
+                # 1) extract_dir must exist
+                # 2) .tmp flag file mustn't exist
+                if not os.path.exists(extract_dir):
+                    out_q.put(
+                        '%d> Extract dir %s does not exist, re-downloading...' %
+                        (thread_num, extract_dir))
+                    skip = False
+                # .tmp file is created just before extraction and removed just
+                # after extraction. If such a file exists, it means the
+                # process was terminated mid-extraction and therefore needs
+                # to be extracted again.
+                elif os.path.exists(extract_dir + '.tmp'):
+                    out_q.put('%d> Detected tmp flag file for %s, '
+                              're-downloading...' %
+                              (thread_num, output_filename))
+                    skip = False
+            if skip:
+                continue
+
+        file_url = '%s/%s' % (base_url, input_sha1_sum)
+
+        try:
+            if delete:
+                os.remove(
+                    output_filename)  # Delete the file if it exists already.
+        except OSError:
+            if os.path.exists(output_filename):
+                out_q.put('%d> Warning: deleting %s failed.' %
+                          (thread_num, output_filename))
+        if verbose:
+            out_q.put('%d> Downloading %s@%s...' %
+                      (thread_num, output_filename, input_sha1_sum))
+        code, _, err = gsutil.check_call('cp', file_url, output_filename)
+        if code != 0:
+            if code == 404:
+                out_q.put('%d> File %s for %s does not exist, skipping.' %
+                          (thread_num, file_url, output_filename))
+                ret_codes.put((1, 'File %s for %s does not exist.' %
+                               (file_url, output_filename)))
+            elif code == 401:
+                out_q.put(
+                    '%d> Failed to fetch file %s for %s due to unauthorized '
+                    'access, skipping. Try running `gsutil.py config`.' %
+                    (thread_num, file_url, output_filename))
+                ret_codes.put((
+                    1,
+                    'Failed to fetch file %s for %s due to unauthorized access.'
+                    % (file_url, output_filename)))
+            else:
+                # Other error, probably auth related (bad ~/.boto, etc).
+                out_q.put(
+                    '%d> Failed to fetch file %s for %s, skipping. [Err: %s]' %
+                    (thread_num, file_url, output_filename, err))
+                ret_codes.put(
+                    (code, 'Failed to fetch file %s for %s. [Err: %s]' %
+                     (file_url, output_filename, err)))
+            continue
+
+        remote_sha1 = get_sha1(output_filename)
+        if remote_sha1 != input_sha1_sum:
+            msg = (
+                '%d> ERROR remote sha1 (%s) does not match expected sha1 (%s).'
+                % (thread_num, remote_sha1, input_sha1_sum))
+            out_q.put(msg)
+            ret_codes.put((20, msg))
             continue
-        out_q.put('%d> Extracting %d entries from %s to %s' %
-                  (thread_num, len(tar.getmembers()),output_filename,
-                   extract_dir))
-        with open(extract_dir + '.tmp', 'a'):
-          tar.extractall(path=dirname)
-        os.remove(extract_dir + '.tmp')
-    # Set executable bit.
-    if sys.platform == 'cygwin':
-      # Under cygwin, mark all files as executable. The executable flag in
-      # Google Storage will not be set when uploading from Windows, so if
-      # this script is running under cygwin and we're downloading an
-      # executable, it will be unrunnable from inside cygwin without this.
-      st = os.stat(output_filename)
-      os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
-    elif sys.platform != 'win32':
-      # On non-Windows platforms, key off of the custom header
-      # "x-goog-meta-executable".
-      code, out, err = gsutil.check_call('stat', file_url)
-      if code != 0:
-        out_q.put('%d> %s' % (thread_num, err))
-        ret_codes.put((code, err))
-      elif re.search(r'executable:\s*1', out):
-        st = os.stat(output_filename)
-        os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
+
+        if extract:
+            if not tarfile.is_tarfile(output_filename):
+                out_q.put('%d> Error: %s is not a tar.gz archive.' %
+                          (thread_num, output_filename))
+                ret_codes.put(
+                    (1, '%s is not a tar.gz archive.' % (output_filename)))
+                continue
+            with tarfile.open(output_filename, 'r:gz') as tar:
+                dirname = os.path.dirname(os.path.abspath(output_filename))
+                # If there are long paths inside the tarball we can get
+                # extraction errors on windows due to the 260 path length limit
+                # (this includes pwd). Use the extended path syntax.
+                if sys.platform == 'win32':
+                    dirname = '\\\\?\\%s' % dirname
+                if not _validate_tar_file(tar, os.path.basename(extract_dir)):
+                    out_q.put('%d> Error: %s contains files outside %s.' %
+                              (thread_num, output_filename, extract_dir))
+                    ret_codes.put(
+                        (1, '%s contains invalid entries.' % (output_filename)))
+                    continue
+                if os.path.exists(extract_dir):
+                    try:
+                        shutil.rmtree(extract_dir)
+                        out_q.put('%d> Removed %s...' %
+                                  (thread_num, extract_dir))
+                    except OSError:
+                        out_q.put('%d> Warning: Can\'t delete: %s' %
+                                  (thread_num, extract_dir))
+                        ret_codes.put((1, 'Can\'t delete %s.' % (extract_dir)))
+                        continue
+                out_q.put('%d> Extracting %d entries from %s to %s' %
+                          (thread_num, len(
+                              tar.getmembers()), output_filename, extract_dir))
+                with open(extract_dir + '.tmp', 'a'):
+                    tar.extractall(path=dirname)
+                os.remove(extract_dir + '.tmp')
+        # Set executable bit.
+        if sys.platform == 'cygwin':
+            # Under cygwin, mark all files as executable. The executable flag in
+            # Google Storage will not be set when uploading from Windows, so if
+            # this script is running under cygwin and we're downloading an
+            # executable, it will be unrunnable from inside cygwin without this.
+            st = os.stat(output_filename)
+            os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
+        elif sys.platform != 'win32':
+            # On non-Windows platforms, key off of the custom header
+            # "x-goog-meta-executable".
+            code, out, err = gsutil.check_call('stat', file_url)
+            if code != 0:
+                out_q.put('%d> %s' % (thread_num, err))
+                ret_codes.put((code, err))
+            elif re.search(r'executable:\s*1', out):
+                st = os.stat(output_filename)
+                os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
 
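The worker above follows the usual sentinel-terminated queue pattern: each thread blocks on q.get() and returns once it pulls the (None, None) sentinel that download_from_google_storage() enqueues once per worker. The same shape in miniature, with a toy payload standing in for the gsutil cp call:

    import queue
    import threading

    def worker(q, results):
        while True:
            item = q.get()
            if item is None:            # sentinel: no more work for this thread
                return
            results.append(item * 2)    # stand-in for downloading one file

    q, results = queue.Queue(), []
    threads = [threading.Thread(target=worker, args=(q, results)) for _ in range(4)]
    for t in threads:
        t.daemon = True
        t.start()
    for item in range(10):
        q.put(item)
    for _ in threads:
        q.put(None)                     # one sentinel per worker
    for t in threads:
        t.join()
    print(sorted(results))              # [0, 2, 4, ..., 18]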
 
 class PrinterThread(threading.Thread):
-  def __init__(self, output_queue):
-    super(PrinterThread, self).__init__()
-    self.output_queue = output_queue
-    self.did_print_anything = False
+    def __init__(self, output_queue):
+        super(PrinterThread, self).__init__()
+        self.output_queue = output_queue
+        self.did_print_anything = False
 
-  def run(self):
-    while True:
-      line = self.output_queue.get()
-      # It's plausible we want to print empty lines: Explicit `is None`.
-      if line is None:
-        break
-      self.did_print_anything = True
-      print(line)
+    def run(self):
+        while True:
+            line = self.output_queue.get()
+            # It's plausible we want to print empty lines: Explicit `is None`.
+            if line is None:
+                break
+            self.did_print_anything = True
+            print(line)
 
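The explicit `line is None` check matters because an empty string is falsy but still something a worker may legitimately want printed; only the None sentinel stops the printer. For example:

    import queue

    q = queue.Queue()
    for line in ('first', '', 'last', None):
        q.put(line)
    t = PrinterThread(q)
    t.start()
    t.join()
    print(t.did_print_anything)   # True: 'first', '' and 'last' were printed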
 
 def _data_exists(input_sha1_sum, output_filename, extract):
-  """Returns True if the data exists locally and matches the sha1.
+    """Returns True if the data exists locally and matches the sha1.
 
   This conservatively returns False for error cases.
 
@@ -394,247 +416,284 @@ def _data_exists(input_sha1_sum, output_filename, extract):
         the target directory already exists. The content of the target directory
         is not checked.
   """
-  extract_dir = None
-  if extract:
-    if not output_filename.endswith('.tar.gz'):
-      # This will cause an error later. Conservativly return False to not bail
-      # out too early.
-      return False
-    extract_dir = output_filename[:-len('.tar.gz')]
-  if os.path.exists(output_filename):
-    if not extract or os.path.exists(extract_dir):
-      if get_sha1(output_filename) == input_sha1_sum:
-        return True
-  return False
-
-
-def download_from_google_storage(
-    input_filename, base_url, gsutil, num_threads, directory, recursive,
-    force, output, ignore_errors, sha1_file, verbose, auto_platform, extract):
-
-  # Tuples of sha1s and paths.
-  input_data = list(enumerate_input(
-      input_filename, directory, recursive, ignore_errors, output, sha1_file,
-      auto_platform))
-
-  # Sequentially check for the most common case and see if we can bail out
-  # early before making any slow calls to gsutil.
-  if not force and all(
-      _data_exists(sha1, path, extract) for sha1, path in input_data):
-    return 0
-
-  # Call this once to ensure gsutil's update routine is called only once. Only
-  # needs to be done if we'll process input data in parallel, which can lead to
-  # a race in gsutil's self-update on the first call. Note, this causes a
-  # network call, therefore any fast bailout should be done before this point.
-  if len(input_data) > 1:
-    gsutil.check_call('version')
-
-  # Start up all the worker threads.
-  all_threads = []
-  download_start = time.time()
-  stdout_queue = queue.Queue()
-  work_queue = queue.Queue()
-  ret_codes = queue.Queue()
-  ret_codes.put((0, None))
-  for thread_num in range(num_threads):
-    t = threading.Thread(
-        target=_downloader_worker_thread,
-        args=[thread_num, work_queue, force, base_url,
-              gsutil, stdout_queue, ret_codes, verbose, extract])
-    t.daemon = True
-    t.start()
-    all_threads.append(t)
-  printer_thread = PrinterThread(stdout_queue)
-  printer_thread.daemon = True
-  printer_thread.start()
-
-  # Populate our work queue.
-  for sha1, path in input_data:
-    work_queue.put((sha1, path))
-  for _ in all_threads:
-    work_queue.put((None, None))  # Used to tell worker threads to stop.
-
-  # Wait for all downloads to finish.
-  for t in all_threads:
-    t.join()
-  stdout_queue.put(None)
-  printer_thread.join()
-
-  # See if we ran into any errors.
-  max_ret_code = 0
-  for ret_code, message in ret_codes.queue:
-    max_ret_code = max(ret_code, max_ret_code)
-    if message:
-      print(message, file=sys.stderr)
-
-  # Only print summary if any work was done.
-  if printer_thread.did_print_anything:
-    print('Downloading %d files took %1f second(s)' %
-          (len(input_data), time.time() - download_start))
-  return max_ret_code
+    extract_dir = None
+    if extract:
+        if not output_filename.endswith('.tar.gz'):
+            # This will cause an error later. Conservatively return False to
+            # not bail out too early.
+            return False
+        extract_dir = output_filename[:-len('.tar.gz')]
+    if os.path.exists(output_filename):
+        if not extract or os.path.exists(extract_dir):
+            if get_sha1(output_filename) == input_sha1_sum:
+                return True
+    return False
+
+
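A quick sanity check of _data_exists() for the non-extract case, assuming get_sha1() (defined earlier in this file) hashes the file contents; the payload below is made up:

    import hashlib
    import tempfile

    payload = b'hello world\n'
    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(payload)
    print(_data_exists(hashlib.sha1(payload).hexdigest(), f.name, False))  # True
    print(_data_exists('0' * 40, f.name, False))                           # False
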
+def download_from_google_storage(input_filename, base_url, gsutil, num_threads,
+                                 directory, recursive, force, output,
+                                 ignore_errors, sha1_file, verbose,
+                                 auto_platform, extract):
+
+    # Tuples of sha1s and paths.
+    input_data = list(
+        enumerate_input(input_filename, directory, recursive, ignore_errors,
+                        output, sha1_file, auto_platform))
+
+    # Sequentially check for the most common case and see if we can bail out
+    # early before making any slow calls to gsutil.
+    if not force and all(
+            _data_exists(sha1, path, extract) for sha1, path in input_data):
+        return 0
+
+    # Call this once to ensure gsutil's update routine is called only once. Only
+    # needs to be done if we'll process input data in parallel, which can lead
+    # to a race in gsutil's self-update on the first call. Note, this causes a
+    # network call, therefore any fast bailout should be done before this point.
+    if len(input_data) > 1:
+        gsutil.check_call('version')
+
+    # Start up all the worker threads.
+    all_threads = []
+    download_start = time.time()
+    stdout_queue = queue.Queue()
+    work_queue = queue.Queue()
+    ret_codes = queue.Queue()
+    ret_codes.put((0, None))
+    for thread_num in range(num_threads):
+        t = threading.Thread(target=_downloader_worker_thread,
+                             args=[
+                                 thread_num, work_queue, force, base_url,
+                                 gsutil, stdout_queue, ret_codes, verbose,
+                                 extract
+                             ])
+        t.daemon = True
+        t.start()
+        all_threads.append(t)
+    printer_thread = PrinterThread(stdout_queue)
+    printer_thread.daemon = True
+    printer_thread.start()
+
+    # Populate our work queue.
+    for sha1, path in input_data:
+        work_queue.put((sha1, path))
+    for _ in all_threads:
+        work_queue.put((None, None))  # Used to tell worker threads to stop.
+
+    # Wait for all downloads to finish.
+    for t in all_threads:
+        t.join()
+    stdout_queue.put(None)
+    printer_thread.join()
+
+    # See if we ran into any errors.
+    max_ret_code = 0
+    for ret_code, message in ret_codes.queue:
+        max_ret_code = max(ret_code, max_ret_code)
+        if message:
+            print(message, file=sys.stderr)
+
+    # Only print summary if any work was done.
+    if printer_thread.did_print_anything:
+        print('Downloading %d files took %1f second(s)' %
+              (len(input_data), time.time() - download_start))
+    return max_ret_code
 
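For reference, a hedged sketch of calling download_from_google_storage() directly rather than through main(); the bucket and file names are invented, and Gsutil/GSUTIL_DEFAULT_PATH are wired up exactly as main() does below (assuming the bundled gsutil is present):

    gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=None)
    ret = download_from_google_storage(
        input_filename='build/toolchain.tar.gz.sha1',   # invented path
        base_url='gs://my-example-bucket',              # invented bucket
        gsutil=gsutil,
        num_threads=4,
        directory=False,
        recursive=False,
        force=False,
        output='build/toolchain.tar.gz',
        ignore_errors=False,
        sha1_file=True,
        verbose=True,
        auto_platform=False,
        extract=True)
    print(ret)   # 0 on success, non-zero on any per-file failure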
 
 def main(args):
-  usage = ('usage: %prog [options] target\n'
-           'Target must be:\n'
-           '  (default) a sha1 sum ([A-Za-z0-9]{40}).\n'
-           '  (-s or --sha1_file) a .sha1 file, containing a sha1 sum on '
-           'the first line.\n'
-           '  (-d or --directory) A directory to scan for .sha1 files.')
-  parser = optparse.OptionParser(usage)
-  parser.add_option('-o', '--output',
-                    help='Specify the output file name. Defaults to: '
-                         '(a) Given a SHA1 hash, the name is the SHA1 hash. '
-                         '(b) Given a .sha1 file or directory, the name will '
-                         'match (.*).sha1.')
-  parser.add_option('-b', '--bucket',
-                    help='Google Storage bucket to fetch from.')
-  parser.add_option('-e', '--boto',
-                    help='Specify a custom boto file.')
-  parser.add_option('-c', '--no_resume', action='store_true',
-                    help='DEPRECATED: Resume download if file is '
-                         'partially downloaded.')
-  parser.add_option('-f', '--force', action='store_true',
-                    help='Force download even if local file exists.')
-  parser.add_option('-i', '--ignore_errors', action='store_true',
-                    help='Don\'t throw error if we find an invalid .sha1 file.')
-  parser.add_option('-r', '--recursive', action='store_true',
-                    help='Scan folders recursively for .sha1 files. '
-                         'Must be used with -d/--directory')
-  parser.add_option('-t', '--num_threads', default=1, type='int',
-                    help='Number of downloader threads to run.')
-  parser.add_option('-d', '--directory', action='store_true',
-                    help='The target is a directory.  '
-                         'Cannot be used with -s/--sha1_file.')
-  parser.add_option('-s', '--sha1_file', action='store_true',
-                    help='The target is a file containing a sha1 sum.  '
-                         'Cannot be used with -d/--directory.')
-  parser.add_option('-g', '--config', action='store_true',
-                    help='Alias for "gsutil config".  Run this if you want '
-                         'to initialize your saved Google Storage '
-                         'credentials.  This will create a read-only '
-                         'credentials file in ~/.boto.depot_tools.')
-  parser.add_option('-n', '--no_auth', action='store_true',
-                    help='Skip auth checking.  Use if it\'s known that the '
-                         'target bucket is a public bucket.')
-  parser.add_option('-p', '--platform',
-                    help='A regular expression that is compared against '
-                         'Python\'s sys.platform. If this option is specified, '
-                         'the download will happen only if there is a match.')
-  parser.add_option('-a', '--auto_platform',
-                    action='store_true',
-                    help='Detects if any parent folder of the target matches '
-                         '(linux|mac|win).  If so, the script will only '
-                         'process files that are in the paths that '
-                         'that matches the current platform.')
-  parser.add_option('-u', '--extract',
-                    action='store_true',
-                    help='Extract a downloaded tar.gz file. '
-                         'Leaves the tar.gz file around for sha1 verification'
-                         'If a directory with the same name as the tar.gz '
-                         'file already exists, is deleted (to get a '
-                         'clean state in case of update.)')
-  parser.add_option('-v', '--verbose', action='store_true', default=True,
-                    help='DEPRECATED: Defaults to True.  Use --no-verbose '
-                         'to suppress.')
-  parser.add_option('-q', '--quiet', action='store_false', dest='verbose',
-                    help='Suppresses diagnostic and progress information.')
-
-  (options, args) = parser.parse_args()
-
-  # Make sure we should run at all based on platform matching.
-  if options.platform:
-    if options.auto_platform:
-      parser.error('--platform can not be specified with --auto_platform')
-    if not re.match(options.platform, GetNormalizedPlatform()):
-      if options.verbose:
-        print('The current platform doesn\'t match "%s", skipping.' %
-              options.platform)
-      return 0
-
-  # Set the boto file to /dev/null if we don't need auth.
-  if options.no_auth:
-    if (set(('http_proxy', 'https_proxy')).intersection(
-        env.lower() for env in os.environ) and
-        'NO_AUTH_BOTO_CONFIG' not in os.environ):
-      print('NOTICE: You have PROXY values set in your environment, but gsutil'
-            'in depot_tools does not (yet) obey them.',
-            file=sys.stderr)
-      print('Also, --no_auth prevents the normal BOTO_CONFIG environment'
-            'variable from being used.',
-            file=sys.stderr)
-      print('To use a proxy in this situation, please supply those settings'
-            'in a .boto file pointed to by the NO_AUTH_BOTO_CONFIG environment'
-            'variable.',
-            file=sys.stderr)
-    options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull)
-
-  # Make sure gsutil exists where we expect it to.
-  if os.path.exists(GSUTIL_DEFAULT_PATH):
-    gsutil = Gsutil(GSUTIL_DEFAULT_PATH,
-                    boto_path=options.boto)
-  else:
-    parser.error('gsutil not found in %s, bad depot_tools checkout?' %
-                 GSUTIL_DEFAULT_PATH)
-
-  # Passing in -g/--config will run our copy of GSUtil, then quit.
-  if options.config:
-    print('===Note from depot_tools===')
-    print('If you do not have a project ID, enter "0" when asked for one.')
-    print('===End note from depot_tools===')
-    print()
-    gsutil.check_call('version')
-    return gsutil.call('config')
-
-  if not args:
-    parser.error('Missing target.')
-  if len(args) > 1:
-    parser.error('Too many targets.')
-  if not options.bucket:
-    parser.error('Missing bucket.  Specify bucket with --bucket.')
-  if options.sha1_file and options.directory:
-    parser.error('Both --directory and --sha1_file are specified, '
-                 'can only specify one.')
-  if options.recursive and not options.directory:
-    parser.error('--recursive specified but --directory not specified.')
-  if options.output and options.directory:
-    parser.error('--directory is specified, so --output has no effect.')
-  if (not (options.sha1_file or options.directory)
-      and options.auto_platform):
-    parser.error('--auto_platform must be specified with either '
-                 '--sha1_file or --directory')
-
-  input_filename = args[0]
-
-  # Set output filename if not specified.
-  if not options.output and not options.directory:
-    if not options.sha1_file:
-      # Target is a sha1 sum, so output filename would also be the sha1 sum.
-      options.output = input_filename
-    elif options.sha1_file:
-      # Target is a .sha1 file.
-      if not input_filename.endswith('.sha1'):
-        parser.error('--sha1_file is specified, but the input filename '
-                     'does not end with .sha1, and no --output is specified. '
-                     'Either make sure the input filename has a .sha1 '
-                     'extension, or specify --output.')
-      options.output = input_filename[:-5]
+    usage = ('usage: %prog [options] target\n'
+             'Target must be:\n'
+             '  (default) a sha1 sum ([A-Za-z0-9]{40}).\n'
+             '  (-s or --sha1_file) a .sha1 file, containing a sha1 sum on '
+             'the first line.\n'
+             '  (-d or --directory) A directory to scan for .sha1 files.')
+    parser = optparse.OptionParser(usage)
+    parser.add_option('-o',
+                      '--output',
+                      help='Specify the output file name. Defaults to: '
+                      '(a) Given a SHA1 hash, the name is the SHA1 hash. '
+                      '(b) Given a .sha1 file or directory, the name will '
+                      'match (.*).sha1.')
+    parser.add_option('-b',
+                      '--bucket',
+                      help='Google Storage bucket to fetch from.')
+    parser.add_option('-e', '--boto', help='Specify a custom boto file.')
+    parser.add_option('-c',
+                      '--no_resume',
+                      action='store_true',
+                      help='DEPRECATED: Resume download if file is '
+                      'partially downloaded.')
+    parser.add_option('-f',
+                      '--force',
+                      action='store_true',
+                      help='Force download even if local file exists.')
+    parser.add_option(
+        '-i',
+        '--ignore_errors',
+        action='store_true',
+        help='Don\'t throw error if we find an invalid .sha1 file.')
+    parser.add_option('-r',
+                      '--recursive',
+                      action='store_true',
+                      help='Scan folders recursively for .sha1 files. '
+                      'Must be used with -d/--directory')
+    parser.add_option('-t',
+                      '--num_threads',
+                      default=1,
+                      type='int',
+                      help='Number of downloader threads to run.')
+    parser.add_option('-d',
+                      '--directory',
+                      action='store_true',
+                      help='The target is a directory.  '
+                      'Cannot be used with -s/--sha1_file.')
+    parser.add_option('-s',
+                      '--sha1_file',
+                      action='store_true',
+                      help='The target is a file containing a sha1 sum.  '
+                      'Cannot be used with -d/--directory.')
+    parser.add_option('-g',
+                      '--config',
+                      action='store_true',
+                      help='Alias for "gsutil config".  Run this if you want '
+                      'to initialize your saved Google Storage '
+                      'credentials.  This will create a read-only '
+                      'credentials file in ~/.boto.depot_tools.')
+    parser.add_option('-n',
+                      '--no_auth',
+                      action='store_true',
+                      help='Skip auth checking.  Use if it\'s known that the '
+                      'target bucket is a public bucket.')
+    parser.add_option('-p',
+                      '--platform',
+                      help='A regular expression that is compared against '
+                      'Python\'s sys.platform. If this option is specified, '
+                      'the download will happen only if there is a match.')
+    parser.add_option('-a',
+                      '--auto_platform',
+                      action='store_true',
+                      help='Detects if any parent folder of the target matches '
+                      '(linux|mac|win).  If so, the script will only '
+                      'process files that are in the paths that '
+                      'match the current platform.')
+    parser.add_option('-u',
+                      '--extract',
+                      action='store_true',
+                      help='Extract a downloaded tar.gz file. '
+                      'Leaves the tar.gz file around for sha1 verification. '
+                      'If a directory with the same name as the tar.gz '
+                      'file already exists, it is deleted (to get a '
+                      'clean state in case of update).')
+    parser.add_option('-v',
+                      '--verbose',
+                      action='store_true',
+                      default=True,
+                      help='DEPRECATED: Defaults to True.  Use --no-verbose '
+                      'to suppress.')
+    parser.add_option('-q',
+                      '--quiet',
+                      action='store_false',
+                      dest='verbose',
+                      help='Suppresses diagnostic and progress information.')
+
+    (options, args) = parser.parse_args()
+
+    # Make sure we should run at all based on platform matching.
+    if options.platform:
+        if options.auto_platform:
+            parser.error('--platform can not be specified with --auto_platform')
+        if not re.match(options.platform, GetNormalizedPlatform()):
+            if options.verbose:
+                print('The current platform doesn\'t match "%s", skipping.' %
+                      options.platform)
+            return 0
+
+    # Set the boto file to /dev/null if we don't need auth.
+    if options.no_auth:
+        if (set(
+            ('http_proxy', 'https_proxy')).intersection(env.lower()
+                                                        for env in os.environ)
+                and 'NO_AUTH_BOTO_CONFIG' not in os.environ):
+            print(
+                'NOTICE: You have PROXY values set in your environment, but '
+                'gsutil in depot_tools does not (yet) obey them.',
+                file=sys.stderr)
+            print(
+                'Also, --no_auth prevents the normal BOTO_CONFIG environment '
+                'variable from being used.',
+                file=sys.stderr)
+            print(
+                'To use a proxy in this situation, please supply those '
+                'settings in a .boto file pointed to by the '
+                'NO_AUTH_BOTO_CONFIG environment variable.',
+                file=sys.stderr)
+        options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull)
+
+    # Make sure gsutil exists where we expect it to.
+    if os.path.exists(GSUTIL_DEFAULT_PATH):
+        gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=options.boto)
     else:
-      parser.error('Unreachable state.')
+        parser.error('gsutil not found in %s, bad depot_tools checkout?' %
+                     GSUTIL_DEFAULT_PATH)
+
+    # Passing in -g/--config will run our copy of GSUtil, then quit.
+    if options.config:
+        print('===Note from depot_tools===')
+        print('If you do not have a project ID, enter "0" when asked for one.')
+        print('===End note from depot_tools===')
+        print()
+        gsutil.check_call('version')
+        return gsutil.call('config')
+
+    if not args:
+        parser.error('Missing target.')
+    if len(args) > 1:
+        parser.error('Too many targets.')
+    if not options.bucket:
+        parser.error('Missing bucket.  Specify bucket with --bucket.')
+    if options.sha1_file and options.directory:
+        parser.error('Both --directory and --sha1_file are specified, '
+                     'can only specify one.')
+    if options.recursive and not options.directory:
+        parser.error('--recursive specified but --directory not specified.')
+    if options.output and options.directory:
+        parser.error('--directory is specified, so --output has no effect.')
+    if (not (options.sha1_file or options.directory) and options.auto_platform):
+        parser.error('--auto_platform must be specified with either '
+                     '--sha1_file or --directory')
+
+    input_filename = args[0]
+
+    # Set output filename if not specified.
+    if not options.output and not options.directory:
+        if not options.sha1_file:
+            # Target is a sha1 sum, so output filename would also be the sha1
+            # sum.
+            options.output = input_filename
+        elif options.sha1_file:
+            # Target is a .sha1 file.
+            if not input_filename.endswith('.sha1'):
+                parser.error(
+                    '--sha1_file is specified, but the input filename '
+                    'does not end with .sha1, and no --output is specified. '
+                    'Either make sure the input filename has a .sha1 '
+                    'extension, or specify --output.')
+            options.output = input_filename[:-5]
+        else:
+            parser.error('Unreachable state.')
 
-  base_url = 'gs://%s' % options.bucket
+    base_url = 'gs://%s' % options.bucket
 
-  try:
-    return download_from_google_storage(
-      input_filename, base_url, gsutil, options.num_threads, options.directory,
-      options.recursive, options.force, options.output, options.ignore_errors,
-      options.sha1_file, options.verbose, options.auto_platform,
-      options.extract)
-  except FileNotFoundError as e:
-    print("Fatal error: {}".format(e))
-    return 1
+    try:
+        return download_from_google_storage(
+            input_filename, base_url, gsutil, options.num_threads,
+            options.directory, options.recursive, options.force, options.output,
+            options.ignore_errors, options.sha1_file, options.verbose,
+            options.auto_platform, options.extract)
+    except FileNotFoundError as e:
+        print("Fatal error: {}".format(e))
+        return 1
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+    sys.exit(main(sys.argv))

+ 236 - 216
fetch.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 Tool to perform checkouts in one easy command line!
 
@@ -31,14 +30,14 @@ import git_common
 
 from distutils import spawn
 
-
 SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
 
+
 #################################################
 # Checkout class definitions.
 #################################################
 class Checkout(object):
-  """Base class for implementing different types of checkouts.
+    """Base class for implementing different types of checkouts.
 
   Attributes:
     |base|: the absolute path of the directory in which this script is run.
@@ -47,212 +46,230 @@ class Checkout(object):
     |root|: the directory into which the checkout will be performed, as returned
         by the config. This is a relative path from |base|.
   """
-  def __init__(self, options, spec, root):
-    self.base = os.getcwd()
-    self.options = options
-    self.spec = spec
-    self.root = root
-
-  def exists(self):
-    """Check does this checkout already exist on desired location"""
-
-  def init(self):
-    pass
-
-  def run(self, cmd, return_stdout=False, **kwargs):
-    print('Running: %s' % (' '.join(pipes.quote(x) for x in cmd)))
-    if self.options.dry_run:
-      return ''
-    if return_stdout:
-      return subprocess.check_output(cmd, **kwargs).decode()
-
-    try:
-      subprocess.check_call(cmd, **kwargs)
-    except subprocess.CalledProcessError as e:
-      # If the subprocess failed, it likely emitted its own distress message
-      # already - don't scroll that message off the screen with a stack trace
-      # from this program as well. Emit a terse message and bail out here;
-      # otherwise a later step will try doing more work and may hide the
-      # subprocess message.
-      print('Subprocess failed with return code %d.' % e.returncode)
-      sys.exit(e.returncode)
-    return ''
+    def __init__(self, options, spec, root):
+        self.base = os.getcwd()
+        self.options = options
+        self.spec = spec
+        self.root = root
+
+    def exists(self):
+        """Check does this checkout already exist on desired location"""
+
+    def init(self):
+        pass
+
+    def run(self, cmd, return_stdout=False, **kwargs):
+        print('Running: %s' % (' '.join(pipes.quote(x) for x in cmd)))
+        if self.options.dry_run:
+            return ''
+        if return_stdout:
+            return subprocess.check_output(cmd, **kwargs).decode()
+
+        try:
+            subprocess.check_call(cmd, **kwargs)
+        except subprocess.CalledProcessError as e:
+            # If the subprocess failed, it likely emitted its own distress
+            # message already - don't scroll that message off the screen with a
+            # stack trace from this program as well. Emit a terse message and
+            # bail out here; otherwise a later step will try doing more work and
+            # may hide the subprocess message.
+            print('Subprocess failed with return code %d.' % e.returncode)
+            sys.exit(e.returncode)
+        return ''
 
 
 class GclientCheckout(Checkout):
-
-  def run_gclient(self, *cmd, **kwargs):
-    if not spawn.find_executable('gclient'):
-      cmd_prefix = (sys.executable, os.path.join(SCRIPT_PATH, 'gclient.py'))
-    else:
-      cmd_prefix = ('gclient',)
-    return self.run(cmd_prefix + cmd, **kwargs)
-
-  def exists(self):
-    try:
-      gclient_root = self.run_gclient('root', return_stdout=True).strip()
-      return (os.path.exists(os.path.join(gclient_root, '.gclient')) or
-              os.path.exists(os.path.join(os.getcwd(), self.root, '.git')))
-    except subprocess.CalledProcessError:
-      pass
-    return os.path.exists(os.path.join(os.getcwd(), self.root))
+    def run_gclient(self, *cmd, **kwargs):
+        if not spawn.find_executable('gclient'):
+            cmd_prefix = (sys.executable, os.path.join(SCRIPT_PATH,
+                                                       'gclient.py'))
+        else:
+            cmd_prefix = ('gclient', )
+        return self.run(cmd_prefix + cmd, **kwargs)
+
+    def exists(self):
+        try:
+            gclient_root = self.run_gclient('root', return_stdout=True).strip()
+            return (os.path.exists(os.path.join(gclient_root, '.gclient'))
+                    or os.path.exists(
+                        os.path.join(os.getcwd(), self.root, '.git')))
+        except subprocess.CalledProcessError:
+            pass
+        return os.path.exists(os.path.join(os.getcwd(), self.root))
 
 
 class GitCheckout(Checkout):
-
-  def run_git(self, *cmd, **kwargs):
-    print('Running: git %s' % (' '.join(pipes.quote(x) for x in cmd)))
-    if self.options.dry_run:
-      return ''
-    return git_common.run(*cmd, **kwargs)
+    def run_git(self, *cmd, **kwargs):
+        print('Running: git %s' % (' '.join(pipes.quote(x) for x in cmd)))
+        if self.options.dry_run:
+            return ''
+        return git_common.run(*cmd, **kwargs)
 
 
 class GclientGitCheckout(GclientCheckout, GitCheckout):
-
-  def __init__(self, options, spec, root):
-    super(GclientGitCheckout, self).__init__(options, spec, root)
-    assert 'solutions' in self.spec
-
-  def _format_spec(self):
-    def _format_literal(lit):
-      if isinstance(lit, str):
-        return '"%s"' % lit
-      if isinstance(lit, list):
-        return '[%s]' % ', '.join(_format_literal(i) for i in lit)
-      return '%r' % lit
-    soln_strings = []
-    for soln in self.spec['solutions']:
-      soln_string = '\n'.join('    "%s": %s,' % (key, _format_literal(value))
-                              for key, value in soln.items())
-      soln_strings.append('  {\n%s\n  },' % soln_string)
-    gclient_spec = 'solutions = [\n%s\n]\n' % '\n'.join(soln_strings)
-    extra_keys = ['target_os', 'target_os_only', 'cache_dir']
-    gclient_spec += ''.join('%s = %s\n' % (key, _format_literal(self.spec[key]))
-                             for key in extra_keys if key in self.spec)
-    return gclient_spec
-
-  def init(self):
-    # Configure and do the gclient checkout.
-    self.run_gclient('config', '--spec', self._format_spec())
-    sync_cmd = ['sync']
-    if self.options.nohooks:
-      sync_cmd.append('--nohooks')
-    if self.options.nohistory:
-      sync_cmd.append('--no-history')
-    if self.spec.get('with_branch_heads', False):
-      sync_cmd.append('--with_branch_heads')
-    self.run_gclient(*sync_cmd)
-
-    # Configure git.
-    wd = os.path.join(self.base, self.root)
-    if self.options.dry_run:
-      print('cd %s' % wd)
-    self.run_git(
-        'submodule', 'foreach',
-        'git config -f $toplevel/.git/config submodule.$name.ignore all',
-        cwd=wd)
-    if not self.options.nohistory:
-      self.run_git(
-          'config', '--add', 'remote.origin.fetch',
-          '+refs/tags/*:refs/tags/*', cwd=wd)
-    self.run_git('config', 'diff.ignoreSubmodules', 'dirty', cwd=wd)
+    def __init__(self, options, spec, root):
+        super(GclientGitCheckout, self).__init__(options, spec, root)
+        assert 'solutions' in self.spec
+
+    def _format_spec(self):
+        def _format_literal(lit):
+            if isinstance(lit, str):
+                return '"%s"' % lit
+            if isinstance(lit, list):
+                return '[%s]' % ', '.join(_format_literal(i) for i in lit)
+            return '%r' % lit
+
+        soln_strings = []
+        for soln in self.spec['solutions']:
+            soln_string = '\n'.join('    "%s": %s,' %
+                                    (key, _format_literal(value))
+                                    for key, value in soln.items())
+            soln_strings.append('  {\n%s\n  },' % soln_string)
+        gclient_spec = 'solutions = [\n%s\n]\n' % '\n'.join(soln_strings)
+        extra_keys = ['target_os', 'target_os_only', 'cache_dir']
+        gclient_spec += ''.join('%s = %s\n' %
+                                (key, _format_literal(self.spec[key]))
+                                for key in extra_keys if key in self.spec)
+        return gclient_spec
+
+    def init(self):
+        # Configure and do the gclient checkout.
+        self.run_gclient('config', '--spec', self._format_spec())
+        sync_cmd = ['sync']
+        if self.options.nohooks:
+            sync_cmd.append('--nohooks')
+        if self.options.nohistory:
+            sync_cmd.append('--no-history')
+        if self.spec.get('with_branch_heads', False):
+            sync_cmd.append('--with_branch_heads')
+        self.run_gclient(*sync_cmd)
+
+        # Configure git.
+        wd = os.path.join(self.base, self.root)
+        if self.options.dry_run:
+            print('cd %s' % wd)
+        self.run_git(
+            'submodule',
+            'foreach',
+            'git config -f $toplevel/.git/config submodule.$name.ignore all',
+            cwd=wd)
+        if not self.options.nohistory:
+            self.run_git('config',
+                         '--add',
+                         'remote.origin.fetch',
+                         '+refs/tags/*:refs/tags/*',
+                         cwd=wd)
+        self.run_git('config', 'diff.ignoreSubmodules', 'dirty', cwd=wd)
 
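To make the spec formatting concrete, a hedged sketch of what _format_spec() emits for a one-solution spec (field values invented); the options argument is unused by this method, so None suffices here:

    spec = {
        'solutions': [{
            'name': 'src',
            'url': 'https://chromium.googlesource.com/chromium/src.git',
        }],
        'target_os': ['android'],
    }
    print(GclientGitCheckout(None, spec, 'src')._format_spec())
    # solutions = [
    #   {
    #     "name": "src",
    #     "url": "https://chromium.googlesource.com/chromium/src.git",
    #   },
    # ]
    # target_os = ["android"]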
 
 CHECKOUT_TYPE_MAP = {
-    'gclient':         GclientCheckout,
-    'gclient_git':     GclientGitCheckout,
-    'git':             GitCheckout,
+    'gclient': GclientCheckout,
+    'gclient_git': GclientGitCheckout,
+    'git': GitCheckout,
 }
 
 
 def CheckoutFactory(type_name, options, spec, root):
-  """Factory to build Checkout class instances."""
-  class_ = CHECKOUT_TYPE_MAP.get(type_name)
-  if not class_:
-    raise KeyError('unrecognized checkout type: %s' % type_name)
-  return class_(options, spec, root)
+    """Factory to build Checkout class instances."""
+    class_ = CHECKOUT_TYPE_MAP.get(type_name)
+    if not class_:
+        raise KeyError('unrecognized checkout type: %s' % type_name)
+    return class_(options, spec, root)
+
 
 def handle_args(argv):
-  """Gets the config name from the command line arguments."""
+    """Gets the config name from the command line arguments."""
 
-  configs_dir = os.path.join(SCRIPT_PATH, 'fetch_configs')
-  configs = [f[:-3] for f in os.listdir(configs_dir) if f.endswith('.py')]
-  configs.sort()
+    configs_dir = os.path.join(SCRIPT_PATH, 'fetch_configs')
+    configs = [f[:-3] for f in os.listdir(configs_dir) if f.endswith('.py')]
+    configs.sort()
 
-  parser = argparse.ArgumentParser(
-    formatter_class=argparse.RawDescriptionHelpFormatter,
-    description='''
+    parser = argparse.ArgumentParser(
+      formatter_class=argparse.RawDescriptionHelpFormatter,
+      description='''
     This script can be used to download the Chromium sources. See
     http://www.chromium.org/developers/how-tos/get-the-code
     for full usage instructions.''',
-    epilog='Valid fetch configs:\n' + \
-      '\n'.join(map(lambda s: '  ' + s, configs))
-    )
-
-  parser.add_argument('-n', '--dry-run', action='store_true', default=False,
-    help='Don\'t run commands, only print them.')
-  parser.add_argument('--nohooks',
-                      '--no-hooks',
-                      action='store_true',
-                      default=False,
-                      help='Don\'t run hooks after checkout.')
-  parser.add_argument(
-      '--nohistory',
-      '--no-history',
-      action='store_true',
-      default=False,
-      help='Perform shallow clones, don\'t fetch the full git history.')
-  parser.add_argument('--force', action='store_true', default=False,
-    help='(dangerous) Don\'t look for existing .gclient file.')
-  parser.add_argument(
-    '-p',
-    '--protocol-override',
-    type=str,
-    default=None,
-    help='Protocol to use to fetch dependencies, defaults to https.')
-
-  parser.add_argument('config', type=str,
-    help="Project to fetch, e.g. chromium.")
-  parser.add_argument('props', metavar='props', type=str,
-    nargs=argparse.REMAINDER, default=[])
-
-  args = parser.parse_args(argv[1:])
-
-  # props passed to config must be of the format --<name>=<value>
-  looks_like_arg = lambda arg: arg.startswith('--') and arg.count('=') == 1
-  bad_param = [x for x in args.props if not looks_like_arg(x)]
-  if bad_param:
-    print('Error: Got bad arguments %s' % bad_param)
-    parser.print_help()
-    sys.exit(1)
-
-  return args
+      epilog='Valid fetch configs:\n' + \
+        '\n'.join(map(lambda s: '  ' + s, configs))
+      )
+
+    parser.add_argument('-n',
+                        '--dry-run',
+                        action='store_true',
+                        default=False,
+                        help='Don\'t run commands, only print them.')
+    parser.add_argument('--nohooks',
+                        '--no-hooks',
+                        action='store_true',
+                        default=False,
+                        help='Don\'t run hooks after checkout.')
+    parser.add_argument(
+        '--nohistory',
+        '--no-history',
+        action='store_true',
+        default=False,
+        help='Perform shallow clones, don\'t fetch the full git history.')
+    parser.add_argument(
+        '--force',
+        action='store_true',
+        default=False,
+        help='(dangerous) Don\'t look for existing .gclient file.')
+    parser.add_argument(
+        '-p',
+        '--protocol-override',
+        type=str,
+        default=None,
+        help='Protocol to use to fetch dependencies, defaults to https.')
+
+    parser.add_argument('config',
+                        type=str,
+                        help="Project to fetch, e.g. chromium.")
+    parser.add_argument('props',
+                        metavar='props',
+                        type=str,
+                        nargs=argparse.REMAINDER,
+                        default=[])
+
+    args = parser.parse_args(argv[1:])
+
+    # props passed to config must be of the format --<name>=<value>
+    looks_like_arg = lambda arg: arg.startswith('--') and arg.count('=') == 1
+    bad_param = [x for x in args.props if not looks_like_arg(x)]
+    if bad_param:
+        print('Error: Got bad arguments %s' % bad_param)
+        parser.print_help()
+        sys.exit(1)
+
+    return args
+
 
 def run_config_fetch(config, props, aliased=False):
-  """Invoke a config's fetch method with the passed-through args
+    """Invoke a config's fetch method with the passed-through args
   and return its json output as a python object."""
-  config_path = os.path.abspath(
-      os.path.join(SCRIPT_PATH, 'fetch_configs', config))
-  if not os.path.exists(config_path + '.py'):
-    print("Could not find a config for %s" % config)
-    sys.exit(1)
-
-  cmd = [sys.executable, config_path + '.py', 'fetch'] + props
-  result = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
-
-  spec = json.loads(result.decode("utf-8"))
-  if 'alias' in spec:
-    assert not aliased
-    return run_config_fetch(
-        spec['alias']['config'], spec['alias']['props'] + props, aliased=True)
-  cmd = [sys.executable, config_path + '.py', 'root']
-  result = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
-  root = json.loads(result.decode("utf-8"))
-  return spec, root
+    config_path = os.path.abspath(
+        os.path.join(SCRIPT_PATH, 'fetch_configs', config))
+    if not os.path.exists(config_path + '.py'):
+        print("Could not find a config for %s" % config)
+        sys.exit(1)
+
+    cmd = [sys.executable, config_path + '.py', 'fetch'] + props
+    result = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
+
+    spec = json.loads(result.decode("utf-8"))
+    if 'alias' in spec:
+        assert not aliased
+        return run_config_fetch(spec['alias']['config'],
+                                spec['alias']['props'] + props,
+                                aliased=True)
+    cmd = [sys.executable, config_path + '.py', 'root']
+    result = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
+    root = json.loads(result.decode("utf-8"))
+    return spec, root
 
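run_config_fetch() expects each fetch_configs/<name>.py script to print JSON from its 'fetch' and 'root' subcommands. A hedged sketch of the shapes run() consumes, with invented values; a config may instead emit an 'alias' redirect, which is followed at most once:

    # What `<config>.py fetch` might print, json.loads()-ed into `spec`:
    example_spec = {
        'type': 'gclient_git',
        'gclient_git_spec': {
            'solutions': [{
                'name': 'src',
                'url': 'https://chromium.googlesource.com/chromium/src.git',
            }],
        },
    }
    # What `<config>.py root` might print: the checkout directory name.
    example_root = 'src'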
 
 def run(options, spec, root):
-  """Perform a checkout with the given type and configuration.
+    """Perform a checkout with the given type and configuration.
 
     Args:
       options: Options instance.
@@ -260,45 +277,48 @@ def run(options, spec, root):
           method (checkout type, repository url, etc.).
       root: The directory into which the repo expects to be checkout out.
   """
-  assert 'type' in spec
-  checkout_type = spec['type']
-  checkout_spec = spec['%s_spec' % checkout_type]
-
-  # Use sso:// by default if the env is cog
-  if not options.protocol_override and \
-    (any(os.getcwd().startswith(x) for x in [
-        '/google/src/cloud', '/google/cog/cloud'])):
-    options.protocol_override = 'sso'
-
-  # Update solutions with protocol_override field
-  if options.protocol_override is not None:
-    for solution in checkout_spec['solutions']:
-      solution['protocol_override'] = options.protocol_override
-
-  try:
-    checkout = CheckoutFactory(checkout_type, options, checkout_spec, root)
-  except KeyError:
-    return 1
-  if not options.force and checkout.exists():
-    print('Your current directory appears to already contain, or be part of, ')
-    print('a checkout. "fetch" is used only to get new checkouts. Use ')
-    print('"gclient sync" to update existing checkouts.')
-    print()
-    print('Fetch also does not yet deal with partial checkouts, so if fetch')
-    print('failed, delete the checkout and start over (crbug.com/230691).')
-    return 1
-  return checkout.init()
+    assert 'type' in spec
+    checkout_type = spec['type']
+    checkout_spec = spec['%s_spec' % checkout_type]
+
+    # Use sso:// by default if the env is cog
+    if not options.protocol_override and \
+      (any(os.getcwd().startswith(x) for x in [
+          '/google/src/cloud', '/google/cog/cloud'])):
+        options.protocol_override = 'sso'
+
+    # Update solutions with protocol_override field
+    if options.protocol_override is not None:
+        for solution in checkout_spec['solutions']:
+            solution['protocol_override'] = options.protocol_override
+
+    try:
+        checkout = CheckoutFactory(checkout_type, options, checkout_spec, root)
+    except KeyError:
+        return 1
+    if not options.force and checkout.exists():
+        print(
+            'Your current directory appears to already contain, or be part of, '
+        )
+        print('a checkout. "fetch" is used only to get new checkouts. Use ')
+        print('"gclient sync" to update existing checkouts.')
+        print()
+        print(
+            'Fetch also does not yet deal with partial checkouts, so if fetch')
+        print('failed, delete the checkout and start over (crbug.com/230691).')
+        return 1
+    return checkout.init()
 
 
 def main():
-  args = handle_args(sys.argv)
-  spec, root = run_config_fetch(args.config, args.props)
-  return run(args, spec, root)
+    args = handle_args(sys.argv)
+    spec, root = run_config_fetch(args.config, args.props)
+    return run(args, spec, root)
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main())
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)

+ 225 - 223
fix_encoding.py

@@ -1,7 +1,6 @@
 # Copyright (c) 2011 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Collection of functions and classes to fix various encoding problems on
 multiple platforms with python.
 """
@@ -15,266 +14,269 @@ import sys
 
 
 def complain(message):
-  """If any exception occurs in this file, we'll probably try to print it
+    """If any exception occurs in this file, we'll probably try to print it
   on stderr, which makes for frustrating debugging if stderr is directed
   to our wrapper. So be paranoid about catching errors and reporting them
   to sys.__stderr__, so that the user has a higher chance to see them.
   """
-  print(
-      isinstance(message, str) and message or repr(message),
-      file=sys.__stderr__)
+    print(isinstance(message, str) and message or repr(message),
+          file=sys.__stderr__)
 
 
 def fix_default_encoding():
-  """Forces utf8 solidly on all platforms.
+    """Forces utf8 solidly on all platforms.
 
   By default python execution environment is lazy and defaults to ascii
   encoding.
 
   http://uucode.com/blog/2007/03/23/shut-up-you-dummy-7-bit-python/
   """
-  if sys.getdefaultencoding() == 'utf-8':
-    return False
-
-  # Regenerate setdefaultencoding.
-  reload(sys)
-  # Module 'sys' has no 'setdefaultencoding' member
-  # pylint: disable=no-member
-  sys.setdefaultencoding('utf-8')
-  for attr in dir(locale):
-    if attr[0:3] != 'LC_':
-      continue
-    aref = getattr(locale, attr)
+    if sys.getdefaultencoding() == 'utf-8':
+        return False
+
+    # Regenerate setdefaultencoding.
+    reload(sys)
+    # Module 'sys' has no 'setdefaultencoding' member
+    # pylint: disable=no-member
+    sys.setdefaultencoding('utf-8')
+    for attr in dir(locale):
+        if attr[0:3] != 'LC_':
+            continue
+        aref = getattr(locale, attr)
+        try:
+            locale.setlocale(aref, '')
+        except locale.Error:
+            continue
+        try:
+            lang, _ = locale.getdefaultlocale()
+        except (TypeError, ValueError):
+            continue
+        if lang:
+            try:
+                locale.setlocale(aref, (lang, 'UTF-8'))
+            except locale.Error:
+                os.environ[attr] = lang + '.UTF-8'
     try:
-      locale.setlocale(aref, '')
+        locale.setlocale(locale.LC_ALL, '')
     except locale.Error:
-      continue
-    try:
-      lang, _ = locale.getdefaultlocale()
-    except (TypeError, ValueError):
-      continue
-    if lang:
-      try:
-        locale.setlocale(aref, (lang, 'UTF-8'))
-      except locale.Error:
-        os.environ[attr] = lang + '.UTF-8'
-  try:
-    locale.setlocale(locale.LC_ALL, '')
-  except locale.Error:
-    pass
-  return True
+        pass
+    return True
 
 
 ###############################
 # Windows specific
 
+
 def fix_win_codec():
-  """Works around <http://bugs.python.org/issue6058>."""
-  # <http://msdn.microsoft.com/en-us/library/dd317756.aspx>
-  try:
-    codecs.lookup('cp65001')
-    return False
-  except LookupError:
-    codecs.register(
-        lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)
-    return True
+    """Works around <http://bugs.python.org/issue6058>."""
+    # <http://msdn.microsoft.com/en-us/library/dd317756.aspx>
+    try:
+        codecs.lookup('cp65001')
+        return False
+    except LookupError:
+        codecs.register(
+            lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)
+        return True
 
 
 class WinUnicodeOutputBase(object):
-  """Base class to adapt sys.stdout or sys.stderr to behave correctly on
+    """Base class to adapt sys.stdout or sys.stderr to behave correctly on
   Windows.
 
   Setting encoding to utf-8 is recommended.
   """
-  def __init__(self, fileno, name, encoding):
-    # Corresponding file handle.
-    self._fileno = fileno
-    self.encoding = encoding
-    self.name = name
+    def __init__(self, fileno, name, encoding):
+        # Corresponding file handle.
+        self._fileno = fileno
+        self.encoding = encoding
+        self.name = name
 
-    self.closed = False
-    self.softspace = False
-    self.mode = 'w'
+        self.closed = False
+        self.softspace = False
+        self.mode = 'w'
 
-  @staticmethod
-  def isatty():
-    return False
+    @staticmethod
+    def isatty():
+        return False
 
-  def close(self):
-    # Don't really close the handle, that would only cause problems.
-    self.closed = True
+    def close(self):
+        # Don't really close the handle, that would only cause problems.
+        self.closed = True
 
-  def fileno(self):
-    return self._fileno
+    def fileno(self):
+        return self._fileno
 
-  def flush(self):
-    raise NotImplementedError()
+    def flush(self):
+        raise NotImplementedError()
 
-  def write(self, text):
-    raise NotImplementedError()
+    def write(self, text):
+        raise NotImplementedError()
 
-  def writelines(self, lines):
-    try:
-      for line in lines:
-        self.write(line)
-    except Exception as e:
-      complain('%s.writelines: %r' % (self.name, e))
-      raise
+    def writelines(self, lines):
+        try:
+            for line in lines:
+                self.write(line)
+        except Exception as e:
+            complain('%s.writelines: %r' % (self.name, e))
+            raise
 
 
 class WinUnicodeConsoleOutput(WinUnicodeOutputBase):
-  """Output adapter to a Windows Console.
+    """Output adapter to a Windows Console.
 
   Understands how to use the win32 console API.
   """
-  def __init__(self, console_handle, fileno, stream_name, encoding):
-    super(WinUnicodeConsoleOutput, self).__init__(
-        fileno, '<Unicode console %s>' % stream_name, encoding)
-    # Handle to use for WriteConsoleW
-    self._console_handle = console_handle
-
-    # Loads the necessary function.
-    # These types are available on linux but not Mac.
-    # pylint: disable=no-name-in-module,F0401
-    from ctypes import byref, GetLastError, POINTER, windll, WINFUNCTYPE
-    from ctypes.wintypes import BOOL, DWORD, HANDLE, LPWSTR
-    from ctypes.wintypes import LPVOID  # pylint: disable=no-name-in-module
-
-    self._DWORD = DWORD
-    self._byref = byref
-
-    # <http://msdn.microsoft.com/en-us/library/ms687401.aspx>
-    self._WriteConsoleW = WINFUNCTYPE(
-        BOOL, HANDLE, LPWSTR, DWORD, POINTER(DWORD), LPVOID)(
-            ('WriteConsoleW', windll.kernel32))
-    self._GetLastError = GetLastError
-
-  def flush(self):
-    # No need to flush the console since it's immediate.
-    pass
-
-  def write(self, text):
-    try:
-      if isinstance(text, bytes):
-        # Bytestrings need to be decoded to a string before being passed to
-        # Windows.
-        text = text.decode(self.encoding, 'replace')
-      remaining = len(text)
-      while remaining > 0:
-        n = self._DWORD(0)
-        # There is a shorter-than-documented limitation on the length of the
-        # string passed to WriteConsoleW. See
-        # <http://tahoe-lafs.org/trac/tahoe-lafs/ticket/1232>.
-        retval = self._WriteConsoleW(
-            self._console_handle, text,
-            min(remaining, 10000),
-            self._byref(n), None)
-        if retval == 0 or n.value == 0:
-          raise IOError(
-              'WriteConsoleW returned %r, n.value = %r, last error = %r' % (
-                retval, n.value, self._GetLastError()))
-        remaining -= n.value
-        if not remaining:
-          break
-        text = text[int(n.value):]
-    except Exception as e:
-      complain('%s.write: %r' % (self.name, e))
-      raise
+    def __init__(self, console_handle, fileno, stream_name, encoding):
+        super(WinUnicodeConsoleOutput,
+              self).__init__(fileno, '<Unicode console %s>' % stream_name,
+                             encoding)
+        # Handle to use for WriteConsoleW
+        self._console_handle = console_handle
+
+        # Loads the necessary function.
+        # These types are available on linux but not Mac.
+        # pylint: disable=no-name-in-module,F0401
+        from ctypes import byref, GetLastError, POINTER, windll, WINFUNCTYPE
+        from ctypes.wintypes import BOOL, DWORD, HANDLE, LPWSTR
+        from ctypes.wintypes import LPVOID  # pylint: disable=no-name-in-module
+
+        self._DWORD = DWORD
+        self._byref = byref
+
+        # <http://msdn.microsoft.com/en-us/library/ms687401.aspx>
+        self._WriteConsoleW = WINFUNCTYPE(BOOL, HANDLE, LPWSTR, DWORD,
+                                          POINTER(DWORD),
+                                          LPVOID)(('WriteConsoleW',
+                                                   windll.kernel32))
+        self._GetLastError = GetLastError
+
+    def flush(self):
+        # No need to flush the console since it's immediate.
+        pass
+
+    def write(self, text):
+        try:
+            if isinstance(text, bytes):
+                # Bytestrings need to be decoded to a string before being passed
+                # to Windows.
+                text = text.decode(self.encoding, 'replace')
+            remaining = len(text)
+            while remaining > 0:
+                n = self._DWORD(0)
+                # There is a shorter-than-documented limitation on the length of
+                # the string passed to WriteConsoleW. See
+                # <http://tahoe-lafs.org/trac/tahoe-lafs/ticket/1232>.
+                retval = self._WriteConsoleW(self._console_handle, text,
+                                             min(remaining, 10000),
+                                             self._byref(n), None)
+                if retval == 0 or n.value == 0:
+                    raise IOError('WriteConsoleW returned %r, n.value = %r, '
+                                  'last error = %r' %
+                                  (retval, n.value, self._GetLastError()))
+                remaining -= n.value
+                if not remaining:
+                    break
+                text = text[int(n.value):]
+        except Exception as e:
+            complain('%s.write: %r' % (self.name, e))
+            raise
 
 
 class WinUnicodeOutput(WinUnicodeOutputBase):
-  """Output adaptor to a file output on Windows.
+    """Output adaptor to a file output on Windows.
 
   If the standard FileWrite function is used, it will be encoded in the current
   code page. WriteConsoleW() permits writing any character.
   """
-  def __init__(self, stream, fileno, encoding):
-    super(WinUnicodeOutput, self).__init__(
-        fileno, '<Unicode redirected %s>' % stream.name, encoding)
-    # Output stream
-    self._stream = stream
+    def __init__(self, stream, fileno, encoding):
+        super(WinUnicodeOutput,
+              self).__init__(fileno, '<Unicode redirected %s>' % stream.name,
+                             encoding)
+        # Output stream
+        self._stream = stream
+
+        # Flush right now.
+        self.flush()
+
+    def flush(self):
+        try:
+            self._stream.flush()
+        except Exception as e:
+            complain('%s.flush: %r from %r' % (self.name, e, self._stream))
+            raise
+
+    def write(self, text):
+        try:
+            if isinstance(text, bytes):
+                # Replace characters that cannot be printed instead of failing.
+                text = text.decode(self.encoding, 'replace')
+            # When redirecting to a file or process any \n characters will be
+            # replaced with \r\n. If the text to be printed already has \r\n
+            # line endings then \r\r\n line endings will be generated, leading
+            # to double-spacing of some output. Normalizing line endings to \n
+            # avoids this problem.
+            text = text.replace('\r\n', '\n')
+            self._stream.write(text)
+        except Exception as e:
+            complain('%s.write: %r' % (self.name, e))
+            raise
 
-    # Flush right now.
-    self.flush()
 
-  def flush(self):
-    try:
-      self._stream.flush()
-    except Exception as e:
-      complain('%s.flush: %r from %r' % (self.name, e, self._stream))
-      raise
+def win_handle_is_a_console(handle):
+    """Returns True if a Windows file handle is a handle to a console."""
+    # These types are available on linux but not Mac.
+    # pylint: disable=no-name-in-module,F0401
+    from ctypes import byref, POINTER, windll, WINFUNCTYPE
+    from ctypes.wintypes import BOOL, DWORD, HANDLE
 
-  def write(self, text):
-    try:
-      if isinstance(text, bytes):
-        # Replace characters that cannot be printed instead of failing.
-        text = text.decode(self.encoding, 'replace')
-      # When redirecting to a file or process any \n characters will be replaced
-      # with \r\n. If the text to be printed already has \r\n line endings then
-      # \r\r\n line endings will be generated, leading to double-spacing of some
-      # output. Normalizing line endings to \n avoids this problem.
-      text = text.replace('\r\n', '\n')
-      self._stream.write(text)
-    except Exception as e:
-      complain('%s.write: %r' % (self.name, e))
-      raise
+    FILE_TYPE_CHAR = 0x0002
+    FILE_TYPE_REMOTE = 0x8000
+    INVALID_HANDLE_VALUE = DWORD(-1).value
 
+    # <http://msdn.microsoft.com/en-us/library/ms683167.aspx>
+    GetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))(
+        ('GetConsoleMode', windll.kernel32))
+    # <http://msdn.microsoft.com/en-us/library/aa364960.aspx>
+    GetFileType = WINFUNCTYPE(DWORD, DWORD)(('GetFileType', windll.kernel32))
 
-def win_handle_is_a_console(handle):
-  """Returns True if a Windows file handle is a handle to a console."""
-  # These types are available on linux but not Mac.
-  # pylint: disable=no-name-in-module,F0401
-  from ctypes import byref, POINTER, windll, WINFUNCTYPE
-  from ctypes.wintypes import BOOL, DWORD, HANDLE
-
-  FILE_TYPE_CHAR   = 0x0002
-  FILE_TYPE_REMOTE = 0x8000
-  INVALID_HANDLE_VALUE = DWORD(-1).value
-
-  # <http://msdn.microsoft.com/en-us/library/ms683167.aspx>
-  GetConsoleMode = WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))(
-      ('GetConsoleMode', windll.kernel32))
-  # <http://msdn.microsoft.com/en-us/library/aa364960.aspx>
-  GetFileType = WINFUNCTYPE(DWORD, DWORD)(('GetFileType', windll.kernel32))
-
-  # GetStdHandle returns INVALID_HANDLE_VALUE, NULL, or a valid handle.
-  if handle == INVALID_HANDLE_VALUE or handle is None:
-    return False
-  return (
-      (GetFileType(handle) & ~FILE_TYPE_REMOTE) == FILE_TYPE_CHAR and
-       GetConsoleMode(handle, byref(DWORD())))
+    # GetStdHandle returns INVALID_HANDLE_VALUE, NULL, or a valid handle.
+    if handle == INVALID_HANDLE_VALUE or handle is None:
+        return False
+    return ((GetFileType(handle) & ~FILE_TYPE_REMOTE) == FILE_TYPE_CHAR
+            and GetConsoleMode(handle, byref(DWORD())))
 
 
 def win_get_unicode_stream(stream, excepted_fileno, output_handle, encoding):
-  """Returns a unicode-compatible stream.
+    """Returns a unicode-compatible stream.
 
   This function will return a direct-Console writing object only if:
   - the file number is the expected console file number
   - the handle is the expected file handle
   - the 'real' handle is in fact a handle to a console.
   """
-  old_fileno = getattr(stream, 'fileno', lambda: None)()
-  if old_fileno == excepted_fileno:
-    # These types are available on linux but not Mac.
-    # pylint: disable=no-name-in-module,F0401
-    from ctypes import windll, WINFUNCTYPE
-    from ctypes.wintypes import DWORD, HANDLE
+    old_fileno = getattr(stream, 'fileno', lambda: None)()
+    if old_fileno == excepted_fileno:
+        # These types are available on linux but not Mac.
+        # pylint: disable=no-name-in-module,F0401
+        from ctypes import windll, WINFUNCTYPE
+        from ctypes.wintypes import DWORD, HANDLE
 
-    # <http://msdn.microsoft.com/en-us/library/ms683231.aspx>
-    GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(('GetStdHandle', windll.kernel32))
+        # <http://msdn.microsoft.com/en-us/library/ms683231.aspx>
+        GetStdHandle = WINFUNCTYPE(HANDLE,
+                                   DWORD)(('GetStdHandle', windll.kernel32))
 
-    real_output_handle = GetStdHandle(DWORD(output_handle))
-    if win_handle_is_a_console(real_output_handle):
-      # It's a console.
-      return WinUnicodeConsoleOutput(
-          real_output_handle, old_fileno, stream.name, encoding)
+        real_output_handle = GetStdHandle(DWORD(output_handle))
+        if win_handle_is_a_console(real_output_handle):
+            # It's a console.
+            return WinUnicodeConsoleOutput(real_output_handle, old_fileno,
+                                           stream.name, encoding)
 
-  # It's something else. Create an auto-encoding stream.
-  return WinUnicodeOutput(stream, old_fileno, encoding)
+    # It's something else. Create an auto-encoding stream.
+    return WinUnicodeOutput(stream, old_fileno, encoding)
 
 
 def fix_win_console(encoding):
-  """Makes Unicode console output work independently of the current code page.
+    """Makes Unicode console output work independently of the current code page.
 
   This also fixes <http://bugs.python.org/issue1602>.
   Credit to Michael Kaplan
@@ -282,41 +284,41 @@ def fix_win_console(encoding):
   TZOmegaTZIOY
   <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
   """
-  if (isinstance(sys.stdout, WinUnicodeOutputBase) or
-      isinstance(sys.stderr, WinUnicodeOutputBase)):
-    return False
-
-  try:
-    # SetConsoleCP and SetConsoleOutputCP could be used to change the code page
-    # but it's not really useful since the code here is using WriteConsoleW().
-    # Also, changing the code page is 'permanent' to the console and needs to be
-    # reverted manually.
-    # In practice one needs to set the console font to a TTF font to be able to
-    # see all the characters but it failed for me in practice. In any case, it
-    # won't throw any exception when printing, which is the important part.
-    # -11 and -12 are defined in stdio.h
-    sys.stdout = win_get_unicode_stream(sys.stdout, 1, -11, encoding)
-    sys.stderr = win_get_unicode_stream(sys.stderr, 2, -12, encoding)
-    # TODO(maruel): Do sys.stdin with ReadConsoleW(). Albeit the limitation is
-    # "It doesn't appear to be possible to read Unicode characters in UTF-8
-    # mode" and this appears to be a limitation of cmd.exe.
-  except Exception as e:
-    complain('exception %r while fixing up sys.stdout and sys.stderr' % e)
-  return True
+    if (isinstance(sys.stdout, WinUnicodeOutputBase)
+            or isinstance(sys.stderr, WinUnicodeOutputBase)):
+        return False
+
+    try:
+        # SetConsoleCP and SetConsoleOutputCP could be used to change the code
+        # page but it's not really useful since the code here is using
+        # WriteConsoleW(). Also, changing the code page is 'permanent' to the
+        # console and needs to be reverted manually. In practice one needs to
+        # set the console font to a TTF font to be able to see all the
+        # characters but it failed for me in practice. In any case, it won't
+        # throw any exception when printing, which is the important part. -11
+        # and -12 are defined in stdio.h
+        sys.stdout = win_get_unicode_stream(sys.stdout, 1, -11, encoding)
+        sys.stderr = win_get_unicode_stream(sys.stderr, 2, -12, encoding)
+        # TODO(maruel): Do sys.stdin with ReadConsoleW(). Albeit the limitation
+        # is "It doesn't appear to be possible to read Unicode characters in
+        # UTF-8 mode" and this appears to be a limitation of cmd.exe.
+    except Exception as e:
+        complain('exception %r while fixing up sys.stdout and sys.stderr' % e)
+    return True
 
 
 def fix_encoding():
-  """Fixes various encoding problems on all platforms.
+    """Fixes various encoding problems on all platforms.
 
   Should be called at the very beginning of the process.
   """
-  ret = True
-  if sys.platform == 'win32':
-    ret &= fix_win_codec()
+    ret = True
+    if sys.platform == 'win32':
+        ret &= fix_win_codec()
 
-  ret &= fix_default_encoding()
+    ret &= fix_default_encoding()
 
-  if sys.platform == 'win32':
-    encoding = sys.getdefaultencoding()
-    ret &= fix_win_console(encoding)
-  return ret
+    if sys.platform == 'win32':
+        encoding = sys.getdefaultencoding()
+        ret &= fix_win_console(encoding)
+    return ret
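
As a usage note for the module reformatted above: depot_tools entry points such as gclient.py call fix_encoding() once at startup, before emitting any output. A minimal sketch, assuming fix_encoding.py is importable (e.g. the script sits next to it in depot_tools); the script itself is hypothetical.

    import sys

    import fix_encoding


    def main():
        # Apply the platform-specific stdout/stderr fixes; the boolean return
        # value (True if anything was changed) is typically ignored.
        fix_encoding.fix_encoding()
        print('console output is now safe for non-ASCII text')
        return 0


    if __name__ == '__main__':
        sys.exit(main())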

+ 90 - 77
gclient-new-workdir.py

@@ -18,105 +18,118 @@ import git_common
 
 
 def parse_options():
-  if sys.platform == 'win32':
-    print('ERROR: This script cannot run on Windows because it uses symlinks.')
-    sys.exit(1)
+    if sys.platform == 'win32':
+        print(
+            'ERROR: This script cannot run on Windows because it uses symlinks.'
+        )
+        sys.exit(1)
 
-  parser = argparse.ArgumentParser(description='''\
+    parser = argparse.ArgumentParser(description='''\
       Clone an existing gclient directory, taking care of all sub-repositories.
       Works similarly to 'git new-workdir'.''')
-  parser.add_argument('repository', type=os.path.abspath,
-                      help='should contain a .gclient file')
-  parser.add_argument('new_workdir', help='must not exist')
-  parser.add_argument('--reflink', action='store_true', default=None,
-                      help='''force to use "cp --reflink" for speed and disk
+    parser.add_argument('repository',
+                        type=os.path.abspath,
+                        help='should contain a .gclient file')
+    parser.add_argument('new_workdir', help='must not exist')
+    parser.add_argument('--reflink',
+                        action='store_true',
+                        default=None,
+                        help='''force to use "cp --reflink" for speed and disk
                               space. need supported FS like btrfs or ZFS.''')
-  parser.add_argument('--no-reflink', action='store_false', dest='reflink',
-                      help='''force not to use "cp --reflink" even on supported
+    parser.add_argument(
+        '--no-reflink',
+        action='store_false',
+        dest='reflink',
+        help='''force not to use "cp --reflink" even on supported
                               FS like btrfs or ZFS.''')
-  args = parser.parse_args()
+    args = parser.parse_args()
 
-  if not os.path.exists(args.repository):
-    parser.error('Repository "%s" does not exist.' % args.repository)
+    if not os.path.exists(args.repository):
+        parser.error('Repository "%s" does not exist.' % args.repository)
 
-  gclient = os.path.join(args.repository, '.gclient')
-  if not os.path.exists(gclient):
-    parser.error('No .gclient file at "%s".' % gclient)
+    gclient = os.path.join(args.repository, '.gclient')
+    if not os.path.exists(gclient):
+        parser.error('No .gclient file at "%s".' % gclient)
 
-  if os.path.exists(args.new_workdir):
-    parser.error('New workdir "%s" already exists.' % args.new_workdir)
+    if os.path.exists(args.new_workdir):
+        parser.error('New workdir "%s" already exists.' % args.new_workdir)
 
-  return args
+    return args
 
 
 def support_cow(src, dest):
-  # 'cp --reflink' always succeeds when 'src' is a symlink or a directory
-  assert os.path.isfile(src) and not os.path.islink(src)
-  try:
-    subprocess.check_output(['cp', '-a', '--reflink', src, dest],
-                            stderr=subprocess.STDOUT)
-  except subprocess.CalledProcessError:
-    return False
-  finally:
-    if os.path.isfile(dest):
-      os.remove(dest)
-  return True
+    # 'cp --reflink' always succeeds when 'src' is a symlink or a directory
+    assert os.path.isfile(src) and not os.path.islink(src)
+    try:
+        subprocess.check_output(['cp', '-a', '--reflink', src, dest],
+                                stderr=subprocess.STDOUT)
+    except subprocess.CalledProcessError:
+        return False
+    finally:
+        if os.path.isfile(dest):
+            os.remove(dest)
+    return True
 
 
 def try_vol_snapshot(src, dest):
-  try:
-    subprocess.check_call(['btrfs', 'subvol', 'snapshot', src, dest],
-                            stderr=subprocess.STDOUT)
-  except (subprocess.CalledProcessError, OSError):
-    return False
-  return True
+    try:
+        subprocess.check_call(['btrfs', 'subvol', 'snapshot', src, dest],
+                              stderr=subprocess.STDOUT)
+    except (subprocess.CalledProcessError, OSError):
+        return False
+    return True
 
 
 def main():
-  args = parse_options()
-
-  gclient = os.path.join(args.repository, '.gclient')
-  if os.path.islink(gclient):
-    gclient = os.path.realpath(gclient)
-  new_gclient = os.path.join(args.new_workdir, '.gclient')
-
-  if try_vol_snapshot(args.repository, args.new_workdir):
-    args.reflink = True
-  else:
-    os.makedirs(args.new_workdir)
-    if args.reflink is None:
-      args.reflink = support_cow(gclient, new_gclient)
-      if args.reflink:
-        print('Copy-on-write support is detected.')
-    os.symlink(gclient, new_gclient)
-
-  for root, dirs, _ in os.walk(args.repository):
-    if '.git' in dirs:
-      workdir = root.replace(args.repository, args.new_workdir, 1)
-      print('Creating: %s' % workdir)
-
-      if args.reflink:
-        if not os.path.exists(workdir):
-          print('Copying: %s' % workdir)
-          subprocess.check_call(['cp', '-a', '--reflink', root, workdir])
-        shutil.rmtree(os.path.join(workdir, '.git'))
-
-      git_common.make_workdir(os.path.join(root, '.git'),
-                              os.path.join(workdir, '.git'))
-      if args.reflink:
-        subprocess.check_call(['cp', '-a', '--reflink',
-                              os.path.join(root, '.git', 'index'),
-                              os.path.join(workdir, '.git', 'index')])
-      else:
-        subprocess.check_call(['git', 'checkout', '-f'], cwd=workdir)
-
-  if args.reflink:
-    print(textwrap.dedent('''\
+    args = parse_options()
+
+    gclient = os.path.join(args.repository, '.gclient')
+    if os.path.islink(gclient):
+        gclient = os.path.realpath(gclient)
+    new_gclient = os.path.join(args.new_workdir, '.gclient')
+
+    if try_vol_snapshot(args.repository, args.new_workdir):
+        args.reflink = True
+    else:
+        os.makedirs(args.new_workdir)
+        if args.reflink is None:
+            args.reflink = support_cow(gclient, new_gclient)
+            if args.reflink:
+                print('Copy-on-write support is detected.')
+        os.symlink(gclient, new_gclient)
+
+    for root, dirs, _ in os.walk(args.repository):
+        if '.git' in dirs:
+            workdir = root.replace(args.repository, args.new_workdir, 1)
+            print('Creating: %s' % workdir)
+
+            if args.reflink:
+                if not os.path.exists(workdir):
+                    print('Copying: %s' % workdir)
+                    subprocess.check_call(
+                        ['cp', '-a', '--reflink', root, workdir])
+                shutil.rmtree(os.path.join(workdir, '.git'))
+
+            git_common.make_workdir(os.path.join(root, '.git'),
+                                    os.path.join(workdir, '.git'))
+            if args.reflink:
+                subprocess.check_call([
+                    'cp', '-a', '--reflink',
+                    os.path.join(root, '.git', 'index'),
+                    os.path.join(workdir, '.git', 'index')
+                ])
+            else:
+                subprocess.check_call(['git', 'checkout', '-f'], cwd=workdir)
+
+    if args.reflink:
+        print(
+            textwrap.dedent('''\
       The repo was copied with copy-on-write, and the artifacts were retained.
       More details on http://crbug.com/721585.
 
       Depending on your usage pattern, you might want to do "gn gen"
       on the output directories. More details: http://crbug.com/723856.'''))
 
+
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
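
A short invocation sketch for the script above; both paths are placeholders. The first argument must be an existing checkout containing a .gclient file, the second must not exist yet, and --reflink / --no-reflink force or disable the copy-on-write fast path.

    gclient-new-workdir.py /path/to/existing/checkout /path/to/new_workdir
    gclient-new-workdir.py --reflink /path/to/existing/checkout /path/to/new_workdir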

The file diff is too large to display
+ 888 - 851
gclient.py


+ 599 - 600
gclient_eval.py

@@ -12,6 +12,8 @@ import tokenize
 import gclient_utils
 from third_party import schema
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
 
 # git_dependencies migration states. Used within the DEPS file to indicate
 # the current migration state.
@@ -21,81 +23,82 @@ SUBMODULES = 'SUBMODULES'
 
 
 class ConstantString(object):
-  def __init__(self, value):
-    self.value = value
+    def __init__(self, value):
+        self.value = value
 
-  def __format__(self, format_spec):
-    del format_spec
-    return self.value
+    def __format__(self, format_spec):
+        del format_spec
+        return self.value
 
-  def __repr__(self):
-    return "Str('" + self.value + "')"
+    def __repr__(self):
+        return "Str('" + self.value + "')"
 
-  def __eq__(self, other):
-    if isinstance(other, ConstantString):
-      return self.value == other.value
+    def __eq__(self, other):
+        if isinstance(other, ConstantString):
+            return self.value == other.value
 
-    return self.value == other
+        return self.value == other
 
-  def __hash__(self):
-    return self.value.__hash__()
+    def __hash__(self):
+        return self.value.__hash__()
 
 
 class _NodeDict(collections.abc.MutableMapping):
-  """Dict-like type that also stores information on AST nodes and tokens."""
-  def __init__(self, data=None, tokens=None):
-    self.data = collections.OrderedDict(data or [])
-    self.tokens = tokens
+    """Dict-like type that also stores information on AST nodes and tokens."""
+    def __init__(self, data=None, tokens=None):
+        self.data = collections.OrderedDict(data or [])
+        self.tokens = tokens
 
-  def __str__(self):
-    return str({k: v[0] for k, v in self.data.items()})
+    def __str__(self):
+        return str({k: v[0] for k, v in self.data.items()})
 
-  def __repr__(self):
-    return self.__str__()
+    def __repr__(self):
+        return self.__str__()
 
-  def __getitem__(self, key):
-    return self.data[key][0]
+    def __getitem__(self, key):
+        return self.data[key][0]
 
-  def __setitem__(self, key, value):
-    self.data[key] = (value, None)
+    def __setitem__(self, key, value):
+        self.data[key] = (value, None)
 
-  def __delitem__(self, key):
-    del self.data[key]
+    def __delitem__(self, key):
+        del self.data[key]
 
-  def __iter__(self):
-    return iter(self.data)
+    def __iter__(self):
+        return iter(self.data)
 
-  def __len__(self):
-    return len(self.data)
+    def __len__(self):
+        return len(self.data)
 
-  def MoveTokens(self, origin, delta):
-    if self.tokens:
-      new_tokens = {}
-      for pos, token in self.tokens.items():
-        if pos[0] >= origin:
-          pos = (pos[0] + delta, pos[1])
-          token = token[:2] + (pos,) + token[3:]
-        new_tokens[pos] = token
+    def MoveTokens(self, origin, delta):
+        if self.tokens:
+            new_tokens = {}
+            for pos, token in self.tokens.items():
+                if pos[0] >= origin:
+                    pos = (pos[0] + delta, pos[1])
+                    token = token[:2] + (pos, ) + token[3:]
+                new_tokens[pos] = token
 
-    for value, node in self.data.values():
-      if node.lineno >= origin:
-        node.lineno += delta
-        if isinstance(value, _NodeDict):
-          value.MoveTokens(origin, delta)
+        for value, node in self.data.values():
+            if node.lineno >= origin:
+                node.lineno += delta
+                if isinstance(value, _NodeDict):
+                    value.MoveTokens(origin, delta)
 
-  def GetNode(self, key):
-    return self.data[key][1]
+    def GetNode(self, key):
+        return self.data[key][1]
 
-  def SetNode(self, key, value, node):
-    self.data[key] = (value, node)
+    def SetNode(self, key, value, node):
+        self.data[key] = (value, node)
 
 
 def _NodeDictSchema(dict_schema):
-  """Validate dict_schema after converting _NodeDict to a regular dict."""
-  def validate(d):
-    schema.Schema(dict_schema).validate(dict(d))
-    return True
-  return validate
+    """Validate dict_schema after converting _NodeDict to a regular dict."""
+    def validate(d):
+        schema.Schema(dict_schema).validate(dict(d))
+        return True
+
+    return validate
 
 
 # See https://github.com/keleshev/schema for docs how to configure schema.
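
To make the surrounding reformat easier to follow, here is a minimal, made-up DEPS fragment of the kind this module evaluates with Exec()/Parse() below: vars entries are plain strings (or Str()), and deps values may reference them through Var() or '{var}' formatting. The names and URL are placeholders.

    vars = {
        'example_git': 'https://example.googlesource.com',
        'example_revision': 'abc123',
    }
    deps = {
        'src/third_party/example':
            Var('example_git') + '/example.git@' + Var('example_revision'),
    }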
@@ -269,245 +272,252 @@ _GCLIENT_SCHEMA = schema.Schema(
 
 
 def _gclient_eval(node_or_string, filename='<unknown>', vars_dict=None):
-  """Safely evaluates a single expression. Returns the result."""
-  _allowed_names = {'None': None, 'True': True, 'False': False}
-  if isinstance(node_or_string, ConstantString):
-    return node_or_string.value
-  if isinstance(node_or_string, str):
-    node_or_string = ast.parse(node_or_string, filename=filename, mode='eval')
-  if isinstance(node_or_string, ast.Expression):
-    node_or_string = node_or_string.body
-  def _convert(node):
-    if isinstance(node, ast.Str):
-      if vars_dict is None:
-        return node.s
-      try:
-        return node.s.format(**vars_dict)
-      except KeyError as e:
-        raise KeyError(
-            '%s was used as a variable, but was not declared in the vars dict '
-            '(file %r, line %s)' % (
-                e.args[0], filename, getattr(node, 'lineno', '<unknown>')))
-    elif isinstance(node, ast.Num):
-      return node.n
-    elif isinstance(node, ast.Tuple):
-      return tuple(map(_convert, node.elts))
-    elif isinstance(node, ast.List):
-      return list(map(_convert, node.elts))
-    elif isinstance(node, ast.Dict):
-      node_dict = _NodeDict()
-      for key_node, value_node in zip(node.keys, node.values):
-        key = _convert(key_node)
-        if key in node_dict:
-          raise ValueError(
-              'duplicate key in dictionary: %s (file %r, line %s)' % (
-                  key, filename, getattr(key_node, 'lineno', '<unknown>')))
-        node_dict.SetNode(key, _convert(value_node), value_node)
-      return node_dict
-    elif isinstance(node, ast.Name):
-      if node.id not in _allowed_names:
-        raise ValueError(
-            'invalid name %r (file %r, line %s)' % (
-                node.id, filename, getattr(node, 'lineno', '<unknown>')))
-      return _allowed_names[node.id]
-    elif not sys.version_info[:2] < (3, 4) and isinstance(
-        node, ast.NameConstant):  # Since Python 3.4
-      return node.value
-    elif isinstance(node, ast.Call):
-      if (not isinstance(node.func, ast.Name) or
-          (node.func.id not in ('Str', 'Var'))):
-        raise ValueError(
-            'Str and Var are the only allowed functions (file %r, line %s)' % (
-                filename, getattr(node, 'lineno', '<unknown>')))
-      if node.keywords or getattr(node, 'starargs', None) or getattr(
-          node, 'kwargs', None) or len(node.args) != 1:
-        raise ValueError(
-            '%s takes exactly one argument (file %r, line %s)' % (
-                node.func.id, filename, getattr(node, 'lineno', '<unknown>')))
-
-      if node.func.id == 'Str':
-        if isinstance(node.args[0], ast.Str):
-          return ConstantString(node.args[0].s)
-        raise ValueError('Passed a non-string to Str() (file %r, line%s)' % (
-            filename, getattr(node, 'lineno', '<unknown>')))
-
-      arg = _convert(node.args[0])
-      if not isinstance(arg, str):
-        raise ValueError(
-            'Var\'s argument must be a variable name (file %r, line %s)' % (
-                filename, getattr(node, 'lineno', '<unknown>')))
-      if vars_dict is None:
-        return '{' + arg + '}'
-      if arg not in vars_dict:
-        raise KeyError(
-            '%s was used as a variable, but was not declared in the vars dict '
-            '(file %r, line %s)' % (
-                arg, filename, getattr(node, 'lineno', '<unknown>')))
-      val = vars_dict[arg]
-      if isinstance(val, ConstantString):
-        val = val.value
-      return val
-    elif isinstance(node, ast.BinOp) and isinstance(node.op, ast.Add):
-      return _convert(node.left) + _convert(node.right)
-    elif isinstance(node, ast.BinOp) and isinstance(node.op, ast.Mod):
-      return _convert(node.left) % _convert(node.right)
-    else:
-      raise ValueError(
-          'unexpected AST node: %s %s (file %r, line %s)' % (
-              node, ast.dump(node), filename,
-              getattr(node, 'lineno', '<unknown>')))
-  return _convert(node_or_string)
+    """Safely evaluates a single expression. Returns the result."""
+    _allowed_names = {'None': None, 'True': True, 'False': False}
+    if isinstance(node_or_string, ConstantString):
+        return node_or_string.value
+    if isinstance(node_or_string, str):
+        node_or_string = ast.parse(node_or_string,
+                                   filename=filename,
+                                   mode='eval')
+    if isinstance(node_or_string, ast.Expression):
+        node_or_string = node_or_string.body
+
+    def _convert(node):
+        if isinstance(node, ast.Str):
+            if vars_dict is None:
+                return node.s
+            try:
+                return node.s.format(**vars_dict)
+            except KeyError as e:
+                raise KeyError(
+                    '%s was used as a variable, but was not declared in the vars dict '
+                    '(file %r, line %s)' %
+                    (e.args[0], filename, getattr(node, 'lineno', '<unknown>')))
+        elif isinstance(node, ast.Num):
+            return node.n
+        elif isinstance(node, ast.Tuple):
+            return tuple(map(_convert, node.elts))
+        elif isinstance(node, ast.List):
+            return list(map(_convert, node.elts))
+        elif isinstance(node, ast.Dict):
+            node_dict = _NodeDict()
+            for key_node, value_node in zip(node.keys, node.values):
+                key = _convert(key_node)
+                if key in node_dict:
+                    raise ValueError(
+                        'duplicate key in dictionary: %s (file %r, line %s)' %
+                        (key, filename, getattr(key_node, 'lineno',
+                                                '<unknown>')))
+                node_dict.SetNode(key, _convert(value_node), value_node)
+            return node_dict
+        elif isinstance(node, ast.Name):
+            if node.id not in _allowed_names:
+                raise ValueError(
+                    'invalid name %r (file %r, line %s)' %
+                    (node.id, filename, getattr(node, 'lineno', '<unknown>')))
+            return _allowed_names[node.id]
+        elif not sys.version_info[:2] < (3, 4) and isinstance(
+                node, ast.NameConstant):  # Since Python 3.4
+            return node.value
+        elif isinstance(node, ast.Call):
+            if (not isinstance(node.func, ast.Name)
+                    or (node.func.id not in ('Str', 'Var'))):
+                raise ValueError(
+                    'Str and Var are the only allowed functions (file %r, line %s)'
+                    % (filename, getattr(node, 'lineno', '<unknown>')))
+            if node.keywords or getattr(node, 'starargs', None) or getattr(
+                    node, 'kwargs', None) or len(node.args) != 1:
+                raise ValueError(
+                    '%s takes exactly one argument (file %r, line %s)' %
+                    (node.func.id, filename, getattr(node, 'lineno',
+                                                     '<unknown>')))
+
+            if node.func.id == 'Str':
+                if isinstance(node.args[0], ast.Str):
+                    return ConstantString(node.args[0].s)
+                raise ValueError(
+                    'Passed a non-string to Str() (file %r, line%s)' %
+                    (filename, getattr(node, 'lineno', '<unknown>')))
+
+            arg = _convert(node.args[0])
+            if not isinstance(arg, str):
+                raise ValueError(
+                    'Var\'s argument must be a variable name (file %r, line %s)'
+                    % (filename, getattr(node, 'lineno', '<unknown>')))
+            if vars_dict is None:
+                return '{' + arg + '}'
+            if arg not in vars_dict:
+                raise KeyError(
+                    '%s was used as a variable, but was not declared in the vars dict '
+                    '(file %r, line %s)' %
+                    (arg, filename, getattr(node, 'lineno', '<unknown>')))
+            val = vars_dict[arg]
+            if isinstance(val, ConstantString):
+                val = val.value
+            return val
+        elif isinstance(node, ast.BinOp) and isinstance(node.op, ast.Add):
+            return _convert(node.left) + _convert(node.right)
+        elif isinstance(node, ast.BinOp) and isinstance(node.op, ast.Mod):
+            return _convert(node.left) % _convert(node.right)
+        else:
+            raise ValueError('unexpected AST node: %s %s (file %r, line %s)' %
+                             (node, ast.dump(node), filename,
+                              getattr(node, 'lineno', '<unknown>')))
+
+    return _convert(node_or_string)
 
 
 def Exec(content, filename='<unknown>', vars_override=None, builtin_vars=None):
-  """Safely execs a set of assignments."""
-  def _validate_statement(node, local_scope):
-    if not isinstance(node, ast.Assign):
-      raise ValueError(
-          'unexpected AST node: %s %s (file %r, line %s)' % (
-              node, ast.dump(node), filename,
-              getattr(node, 'lineno', '<unknown>')))
-
-    if len(node.targets) != 1:
-      raise ValueError(
-          'invalid assignment: use exactly one target (file %r, line %s)' % (
-              filename, getattr(node, 'lineno', '<unknown>')))
-
-    target = node.targets[0]
-    if not isinstance(target, ast.Name):
-      raise ValueError(
-          'invalid assignment: target should be a name (file %r, line %s)' % (
-              filename, getattr(node, 'lineno', '<unknown>')))
-    if target.id in local_scope:
-      raise ValueError(
-          'invalid assignment: overrides var %r (file %r, line %s)' % (
-              target.id, filename, getattr(node, 'lineno', '<unknown>')))
-
-  node_or_string = ast.parse(content, filename=filename, mode='exec')
-  if isinstance(node_or_string, ast.Expression):
-    node_or_string = node_or_string.body
-
-  if not isinstance(node_or_string, ast.Module):
-    raise ValueError(
-        'unexpected AST node: %s %s (file %r, line %s)' % (
-            node_or_string,
-            ast.dump(node_or_string),
-            filename,
-            getattr(node_or_string, 'lineno', '<unknown>')))
-
-  statements = {}
-  for statement in node_or_string.body:
-    _validate_statement(statement, statements)
-    statements[statement.targets[0].id] = statement.value
-
-  # The tokenized representation needs to end with a newline token, otherwise
-  # untokenization will trigger an assert later on.
-  # In Python 2.7 on Windows we need to ensure the input ends with a newline
-  # for a newline token to be generated.
-  # In other cases a newline token is always generated during tokenization so
-  # this has no effect.
-  # TODO: Remove this workaround after migrating to Python 3.
-  content += '\n'
-  tokens = {
-      token[2]: list(token) for token in tokenize.generate_tokens(
-          StringIO(content).readline)
-  }
-
-  local_scope = _NodeDict({}, tokens)
-
-  # Process vars first, so we can expand variables in the rest of the DEPS file.
-  vars_dict = {}
-  if 'vars' in statements:
-    vars_statement = statements['vars']
-    value = _gclient_eval(vars_statement, filename)
-    local_scope.SetNode('vars', value, vars_statement)
-    # Update the parsed vars with the overrides, but only if they are already
-    # present (overrides do not introduce new variables).
-    vars_dict.update(value)
-
-  if builtin_vars:
-    vars_dict.update(builtin_vars)
-
-  if vars_override:
-    vars_dict.update({k: v for k, v in vars_override.items() if k in vars_dict})
-
-  for name, node in statements.items():
-    value = _gclient_eval(node, filename, vars_dict)
-    local_scope.SetNode(name, value, node)
-
-  try:
-    return _GCLIENT_SCHEMA.validate(local_scope)
-  except schema.SchemaError as e:
-    raise gclient_utils.Error(str(e))
+    """Safely execs a set of assignments."""
+    def _validate_statement(node, local_scope):
+        if not isinstance(node, ast.Assign):
+            raise ValueError('unexpected AST node: %s %s (file %r, line %s)' %
+                             (node, ast.dump(node), filename,
+                              getattr(node, 'lineno', '<unknown>')))
+
+        if len(node.targets) != 1:
+            raise ValueError(
+                'invalid assignment: use exactly one target (file %r, line %s)'
+                % (filename, getattr(node, 'lineno', '<unknown>')))
+
+        target = node.targets[0]
+        if not isinstance(target, ast.Name):
+            raise ValueError(
+                'invalid assignment: target should be a name (file %r, line %s)'
+                % (filename, getattr(node, 'lineno', '<unknown>')))
+        if target.id in local_scope:
+            raise ValueError(
+                'invalid assignment: overrides var %r (file %r, line %s)' %
+                (target.id, filename, getattr(node, 'lineno', '<unknown>')))
+
+    node_or_string = ast.parse(content, filename=filename, mode='exec')
+    if isinstance(node_or_string, ast.Expression):
+        node_or_string = node_or_string.body
+
+    if not isinstance(node_or_string, ast.Module):
+        raise ValueError('unexpected AST node: %s %s (file %r, line %s)' %
+                         (node_or_string, ast.dump(node_or_string), filename,
+                          getattr(node_or_string, 'lineno', '<unknown>')))
+
+    statements = {}
+    for statement in node_or_string.body:
+        _validate_statement(statement, statements)
+        statements[statement.targets[0].id] = statement.value
+
+    # The tokenized representation needs to end with a newline token, otherwise
+    # untokenization will trigger an assert later on.
+    # In Python 2.7 on Windows we need to ensure the input ends with a newline
+    # for a newline token to be generated.
+    # In other cases a newline token is always generated during tokenization so
+    # this has no effect.
+    # TODO: Remove this workaround after migrating to Python 3.
+    content += '\n'
+    tokens = {
+        token[2]: list(token)
+        for token in tokenize.generate_tokens(StringIO(content).readline)
+    }
+
+    local_scope = _NodeDict({}, tokens)
+
+    # Process vars first, so we can expand variables in the rest of the DEPS
+    # file.
+    vars_dict = {}
+    if 'vars' in statements:
+        vars_statement = statements['vars']
+        value = _gclient_eval(vars_statement, filename)
+        local_scope.SetNode('vars', value, vars_statement)
+        # Update the parsed vars with the overrides, but only if they are
+        # already present (overrides do not introduce new variables).
+        vars_dict.update(value)
+
+    if builtin_vars:
+        vars_dict.update(builtin_vars)
+
+    if vars_override:
+        vars_dict.update(
+            {k: v
+             for k, v in vars_override.items() if k in vars_dict})
+
+    for name, node in statements.items():
+        value = _gclient_eval(node, filename, vars_dict)
+        local_scope.SetNode(name, value, node)
+
+    try:
+        return _GCLIENT_SCHEMA.validate(local_scope)
+    except schema.SchemaError as e:
+        raise gclient_utils.Error(str(e))
 
 
 def _StandardizeDeps(deps_dict, vars_dict):
-  """"Standardizes the deps_dict.
+    """"Standardizes the deps_dict.
 
   For each dependency:
   - Expands the variable in the dependency name.
   - Ensures the dependency is a dictionary.
   - Set's the 'dep_type' to be 'git' by default.
   """
-  new_deps_dict = {}
-  for dep_name, dep_info in deps_dict.items():
-    dep_name = dep_name.format(**vars_dict)
-    if not isinstance(dep_info, collections.abc.Mapping):
-      dep_info = {'url': dep_info}
-    dep_info.setdefault('dep_type', 'git')
-    new_deps_dict[dep_name] = dep_info
-  return new_deps_dict
+    new_deps_dict = {}
+    for dep_name, dep_info in deps_dict.items():
+        dep_name = dep_name.format(**vars_dict)
+        if not isinstance(dep_info, collections.abc.Mapping):
+            dep_info = {'url': dep_info}
+        dep_info.setdefault('dep_type', 'git')
+        new_deps_dict[dep_name] = dep_info
+    return new_deps_dict
 
 
 def _MergeDepsOs(deps_dict, os_deps_dict, os_name):
-  """Merges the deps in os_deps_dict into conditional dependencies in deps_dict.
+    """Merges the deps in os_deps_dict into conditional dependencies in deps_dict.
 
   The dependencies in os_deps_dict are transformed into conditional dependencies
   using |'checkout_' + os_name|.
   If the dependency is already present, the URL and revision must coincide.
   """
-  for dep_name, dep_info in os_deps_dict.items():
-    # Make this condition very visible, so it's not a silent failure.
-    # It's unclear how to support None override in deps_os.
-    if dep_info['url'] is None:
-      logging.error('Ignoring %r:%r in %r deps_os', dep_name, dep_info, os_name)
-      continue
+    for dep_name, dep_info in os_deps_dict.items():
+        # Make this condition very visible, so it's not a silent failure.
+        # It's unclear how to support None override in deps_os.
+        if dep_info['url'] is None:
+            logging.error('Ignoring %r:%r in %r deps_os', dep_name, dep_info,
+                          os_name)
+            continue
 
-    os_condition = 'checkout_' + (os_name if os_name != 'unix' else 'linux')
-    UpdateCondition(dep_info, 'and', os_condition)
+        os_condition = 'checkout_' + (os_name if os_name != 'unix' else 'linux')
+        UpdateCondition(dep_info, 'and', os_condition)
 
-    if dep_name in deps_dict:
-      if deps_dict[dep_name]['url'] != dep_info['url']:
-        raise gclient_utils.Error(
-            'Value from deps_os (%r; %r: %r) conflicts with existing deps '
-            'entry (%r).' % (
-                os_name, dep_name, dep_info, deps_dict[dep_name]))
+        if dep_name in deps_dict:
+            if deps_dict[dep_name]['url'] != dep_info['url']:
+                raise gclient_utils.Error(
+                    'Value from deps_os (%r; %r: %r) conflicts with existing deps '
+                    'entry (%r).' %
+                    (os_name, dep_name, dep_info, deps_dict[dep_name]))
 
-      UpdateCondition(dep_info, 'or', deps_dict[dep_name].get('condition'))
+            UpdateCondition(dep_info, 'or',
+                            deps_dict[dep_name].get('condition'))
 
-    deps_dict[dep_name] = dep_info
+        deps_dict[dep_name] = dep_info
 
 
 def UpdateCondition(info_dict, op, new_condition):
-  """Updates info_dict's condition with |new_condition|.
+    """Updates info_dict's condition with |new_condition|.
 
   An absent value is treated as implicitly True.
   """
-  curr_condition = info_dict.get('condition')
-  # Easy case: Both are present.
-  if curr_condition and new_condition:
-    info_dict['condition'] = '(%s) %s (%s)' % (
-        curr_condition, op, new_condition)
-  # If |op| == 'and', and at least one condition is present, then use it.
-  elif op == 'and' and (curr_condition or new_condition):
-    info_dict['condition'] = curr_condition or new_condition
-  # Otherwise, no condition should be set
-  elif curr_condition:
-    del info_dict['condition']
+    curr_condition = info_dict.get('condition')
+    # Easy case: Both are present.
+    if curr_condition and new_condition:
+        info_dict['condition'] = '(%s) %s (%s)' % (curr_condition, op,
+                                                   new_condition)
+    # If |op| == 'and', and at least one condition is present, then use it.
+    elif op == 'and' and (curr_condition or new_condition):
+        info_dict['condition'] = curr_condition or new_condition
+    # Otherwise, no condition should be set
+    elif curr_condition:
+        del info_dict['condition']
 
 
 def Parse(content, filename, vars_override=None, builtin_vars=None):
-  """Parses DEPS strings.
+    """Parses DEPS strings.
 
   Executes the Python-like string stored in content, resulting in a Python
   dictionary specified by the schema above. Supports syntax validation and
@@ -526,408 +536,397 @@ def Parse(content, filename, vars_override=None, builtin_vars=None):
     A Python dict with the parsed contents of the DEPS file, as specified by the
     schema above.
   """
-  result = Exec(content, filename, vars_override, builtin_vars)
+    result = Exec(content, filename, vars_override, builtin_vars)
 
-  vars_dict = result.get('vars', {})
-  if 'deps' in result:
-    result['deps'] = _StandardizeDeps(result['deps'], vars_dict)
+    vars_dict = result.get('vars', {})
+    if 'deps' in result:
+        result['deps'] = _StandardizeDeps(result['deps'], vars_dict)
 
-  if 'deps_os' in result:
-    deps = result.setdefault('deps', {})
-    for os_name, os_deps in result['deps_os'].items():
-      os_deps = _StandardizeDeps(os_deps, vars_dict)
-      _MergeDepsOs(deps, os_deps, os_name)
-    del result['deps_os']
+    if 'deps_os' in result:
+        deps = result.setdefault('deps', {})
+        for os_name, os_deps in result['deps_os'].items():
+            os_deps = _StandardizeDeps(os_deps, vars_dict)
+            _MergeDepsOs(deps, os_deps, os_name)
+        del result['deps_os']
 
-  if 'hooks_os' in result:
-    hooks = result.setdefault('hooks', [])
-    for os_name, os_hooks in result['hooks_os'].items():
-      for hook in os_hooks:
-        UpdateCondition(hook, 'and', 'checkout_' + os_name)
-      hooks.extend(os_hooks)
-    del result['hooks_os']
+    if 'hooks_os' in result:
+        hooks = result.setdefault('hooks', [])
+        for os_name, os_hooks in result['hooks_os'].items():
+            for hook in os_hooks:
+                UpdateCondition(hook, 'and', 'checkout_' + os_name)
+            hooks.extend(os_hooks)
+        del result['hooks_os']
 
-  return result
+    return result
 
 
 def EvaluateCondition(condition, variables, referenced_variables=None):
-  """Safely evaluates a boolean condition. Returns the result."""
-  if not referenced_variables:
-    referenced_variables = set()
-  _allowed_names = {'None': None, 'True': True, 'False': False}
-  main_node = ast.parse(condition, mode='eval')
-  if isinstance(main_node, ast.Expression):
-    main_node = main_node.body
-  def _convert(node, allow_tuple=False):
-    if isinstance(node, ast.Str):
-      return node.s
-
-    if isinstance(node, ast.Tuple) and allow_tuple:
-      return tuple(map(_convert, node.elts))
-
-    if isinstance(node, ast.Name):
-      if node.id in referenced_variables:
-        raise ValueError(
-            'invalid cyclic reference to %r (inside %r)' % (
-                node.id, condition))
-
-      if node.id in _allowed_names:
-        return _allowed_names[node.id]
-
-      if node.id in variables:
-        value = variables[node.id]
-
-        # Allow using "native" types, without wrapping everything in strings.
-        # Note that schema constraints still apply to variables.
-        if not isinstance(value, str):
-          return value
-
-        # Recursively evaluate the variable reference.
-        return EvaluateCondition(
-            variables[node.id],
-            variables,
-            referenced_variables.union([node.id]))
-
-      # Implicitly convert unrecognized names to strings.
-      # If we want to change this, we'll need to explicitly distinguish
-      # between arguments for GN to be passed verbatim, and ones to
-      # be evaluated.
-      return node.id
-
-    if not sys.version_info[:2] < (3, 4) and isinstance(
-        node, ast.NameConstant):  # Since Python 3.4
-      return node.value
-
-    if isinstance(node, ast.BoolOp) and isinstance(node.op, ast.Or):
-      bool_values = []
-      for value in node.values:
-        bool_values.append(_convert(value))
-        if not isinstance(bool_values[-1], bool):
-          raise ValueError(
-              'invalid "or" operand %r (inside %r)' % (
-                  bool_values[-1], condition))
-      return any(bool_values)
-
-    if isinstance(node, ast.BoolOp) and isinstance(node.op, ast.And):
-      bool_values = []
-      for value in node.values:
-        bool_values.append(_convert(value))
-        if not isinstance(bool_values[-1], bool):
-          raise ValueError(
-              'invalid "and" operand %r (inside %r)' % (
-                  bool_values[-1], condition))
-      return all(bool_values)
-
-    if isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
-      value = _convert(node.operand)
-      if not isinstance(value, bool):
-        raise ValueError(
-            'invalid "not" operand %r (inside %r)' % (value, condition))
-      return not value
-
-    if isinstance(node, ast.Compare):
-      if len(node.ops) != 1:
-        raise ValueError(
-            'invalid compare: exactly 1 operator required (inside %r)' % (
-                condition))
-      if len(node.comparators) != 1:
-        raise ValueError(
-            'invalid compare: exactly 1 comparator required (inside %r)' % (
-                condition))
-
-      left = _convert(node.left)
-      right = _convert(
-          node.comparators[0], allow_tuple=isinstance(node.ops[0], ast.In))
-
-      if isinstance(node.ops[0], ast.Eq):
-        return left == right
-      if isinstance(node.ops[0], ast.NotEq):
-        return left != right
-      if isinstance(node.ops[0], ast.In):
-        return left in right
-
-      raise ValueError(
-          'unexpected operator: %s %s (inside %r)' % (
-              node.ops[0], ast.dump(node), condition))
-
-    raise ValueError(
-        'unexpected AST node: %s %s (inside %r)' % (
-            node, ast.dump(node), condition))
-  return _convert(main_node)
+    """Safely evaluates a boolean condition. Returns the result."""
+    if not referenced_variables:
+        referenced_variables = set()
+    _allowed_names = {'None': None, 'True': True, 'False': False}
+    main_node = ast.parse(condition, mode='eval')
+    if isinstance(main_node, ast.Expression):
+        main_node = main_node.body
+
+    def _convert(node, allow_tuple=False):
+        if isinstance(node, ast.Str):
+            return node.s
+
+        if isinstance(node, ast.Tuple) and allow_tuple:
+            return tuple(map(_convert, node.elts))
+
+        if isinstance(node, ast.Name):
+            if node.id in referenced_variables:
+                raise ValueError('invalid cyclic reference to %r (inside %r)' %
+                                 (node.id, condition))
+
+            if node.id in _allowed_names:
+                return _allowed_names[node.id]
+
+            if node.id in variables:
+                value = variables[node.id]
+
+                # Allow using "native" types, without wrapping everything in
+                # strings. Note that schema constraints still apply to
+                # variables.
+                if not isinstance(value, str):
+                    return value
+
+                # Recursively evaluate the variable reference.
+                return EvaluateCondition(variables[node.id], variables,
+                                         referenced_variables.union([node.id]))
+
+            # Implicitly convert unrecognized names to strings.
+            # If we want to change this, we'll need to explicitly distinguish
+            # between arguments for GN to be passed verbatim, and ones to
+            # be evaluated.
+            return node.id
+
+        if not sys.version_info[:2] < (3, 4) and isinstance(
+                node, ast.NameConstant):  # Since Python 3.4
+            return node.value
+
+        if isinstance(node, ast.BoolOp) and isinstance(node.op, ast.Or):
+            bool_values = []
+            for value in node.values:
+                bool_values.append(_convert(value))
+                if not isinstance(bool_values[-1], bool):
+                    raise ValueError('invalid "or" operand %r (inside %r)' %
+                                     (bool_values[-1], condition))
+            return any(bool_values)
+
+        if isinstance(node, ast.BoolOp) and isinstance(node.op, ast.And):
+            bool_values = []
+            for value in node.values:
+                bool_values.append(_convert(value))
+                if not isinstance(bool_values[-1], bool):
+                    raise ValueError('invalid "and" operand %r (inside %r)' %
+                                     (bool_values[-1], condition))
+            return all(bool_values)
+
+        if isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
+            value = _convert(node.operand)
+            if not isinstance(value, bool):
+                raise ValueError('invalid "not" operand %r (inside %r)' %
+                                 (value, condition))
+            return not value
+
+        if isinstance(node, ast.Compare):
+            if len(node.ops) != 1:
+                raise ValueError(
+                    'invalid compare: exactly 1 operator required (inside %r)' %
+                    (condition))
+            if len(node.comparators) != 1:
+                raise ValueError(
+                    'invalid compare: exactly 1 comparator required (inside %r)'
+                    % (condition))
+
+            left = _convert(node.left)
+            right = _convert(node.comparators[0],
+                             allow_tuple=isinstance(node.ops[0], ast.In))
+
+            if isinstance(node.ops[0], ast.Eq):
+                return left == right
+            if isinstance(node.ops[0], ast.NotEq):
+                return left != right
+            if isinstance(node.ops[0], ast.In):
+                return left in right
+
+            raise ValueError('unexpected operator: %s %s (inside %r)' %
+                             (node.ops[0], ast.dump(node), condition))
+
+        raise ValueError('unexpected AST node: %s %s (inside %r)' %
+                         (node, ast.dump(node), condition))
+
+    return _convert(main_node)
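
For reference, a minimal sketch of how EvaluateCondition resolves a DEPS-style
condition string against a variables dict; the variable names below are made up
for illustration.

# Hypothetical variables, mirroring what gclient derives from DEPS vars and
# the built-in checkout_* flags.
variables = {
    'checkout_linux': True,
    'checkout_android': False,
    'target_cpu': 'x64',
}
# Boolean operators, comparisons and variable references are all handled by
# the ast-based _convert() above.
assert EvaluateCondition('checkout_linux and target_cpu == "x64"', variables)
assert not EvaluateCondition('checkout_android', variables)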
 
 
 def RenderDEPSFile(gclient_dict):
-  contents = sorted(gclient_dict.tokens.values(), key=lambda token: token[2])
-  # The last token is a newline, which we ensure in Exec() for compatibility.
-  # However tests pass in inputs not ending with a newline and expect the same
-  # back, so for backwards compatibility need to remove that newline character.
-  # TODO: Fix tests to expect the newline
-  return tokenize.untokenize(contents)[:-1]
+    contents = sorted(gclient_dict.tokens.values(), key=lambda token: token[2])
+    # The last token is a newline, which we ensure in Exec() for compatibility.
+    # However tests pass in inputs not ending with a newline and expect the
+    # same back, so for backwards compatibility we need to remove that
+    # newline character. TODO: Fix tests to expect the newline
+    return tokenize.untokenize(contents)[:-1]
 
 
 def _UpdateAstString(tokens, node, value):
-  if isinstance(node, ast.Call):
-    node = node.args[0]
-  position = node.lineno, node.col_offset
-  quote_char = ''
-  if isinstance(node, ast.Str):
-    quote_char = tokens[position][1][0]
-    value = value.encode('unicode_escape').decode('utf-8')
-  tokens[position][1] = quote_char + value + quote_char
-  node.s = value
+    if isinstance(node, ast.Call):
+        node = node.args[0]
+    position = node.lineno, node.col_offset
+    quote_char = ''
+    if isinstance(node, ast.Str):
+        quote_char = tokens[position][1][0]
+        value = value.encode('unicode_escape').decode('utf-8')
+    tokens[position][1] = quote_char + value + quote_char
+    node.s = value
 
 
 def _ShiftLinesInTokens(tokens, delta, start):
-  new_tokens = {}
-  for token in tokens.values():
-    if token[2][0] >= start:
-      token[2] = token[2][0] + delta, token[2][1]
-      token[3] = token[3][0] + delta, token[3][1]
-    new_tokens[token[2]] = token
-  return new_tokens
+    new_tokens = {}
+    for token in tokens.values():
+        if token[2][0] >= start:
+            token[2] = token[2][0] + delta, token[2][1]
+            token[3] = token[3][0] + delta, token[3][1]
+        new_tokens[token[2]] = token
+    return new_tokens
 
 
 def AddVar(gclient_dict, var_name, value):
-  if not isinstance(gclient_dict, _NodeDict) or gclient_dict.tokens is None:
-    raise ValueError(
-        "Can't use SetVar for the given gclient dict. It contains no "
-        "formatting information.")
+    if not isinstance(gclient_dict, _NodeDict) or gclient_dict.tokens is None:
+        raise ValueError(
+            "Can't use SetVar for the given gclient dict. It contains no "
+            "formatting information.")
 
-  if 'vars' not in gclient_dict:
-    raise KeyError("vars dict is not defined.")
+    if 'vars' not in gclient_dict:
+        raise KeyError("vars dict is not defined.")
 
-  if var_name in gclient_dict['vars']:
-    raise ValueError(
-        "%s has already been declared in the vars dict. Consider using SetVar "
-        "instead." % var_name)
+    if var_name in gclient_dict['vars']:
+        raise ValueError(
+            "%s has already been declared in the vars dict. Consider using SetVar "
+            "instead." % var_name)
 
-  if not gclient_dict['vars']:
-    raise ValueError('vars dict is empty. This is not yet supported.')
+    if not gclient_dict['vars']:
+        raise ValueError('vars dict is empty. This is not yet supported.')
 
-  # We will attempt to add the var right after 'vars = {'.
-  node = gclient_dict.GetNode('vars')
-  if node is None:
-    raise ValueError(
-        "The vars dict has no formatting information." % var_name)
-  line = node.lineno + 1
+    # We will attempt to add the var right after 'vars = {'.
+    node = gclient_dict.GetNode('vars')
+    if node is None:
+        raise ValueError("The vars dict has no formatting information." %
+                         var_name)
+    line = node.lineno + 1
 
-  # We will try to match the new var's indentation to the next variable.
-  col = node.keys[0].col_offset
+    # We will try to match the new var's indentation to the next variable.
+    col = node.keys[0].col_offset
 
-  # We use a minimal Python dictionary, so that ast can parse it.
-  var_content = '{\n%s"%s": "%s",\n}\n' % (' ' * col, var_name, value)
-  var_ast = ast.parse(var_content).body[0].value
+    # We use a minimal Python dictionary, so that ast can parse it.
+    var_content = '{\n%s"%s": "%s",\n}\n' % (' ' * col, var_name, value)
+    var_ast = ast.parse(var_content).body[0].value
 
-  # Set the ast nodes for the key and value.
-  vars_node = gclient_dict.GetNode('vars')
+    # Set the ast nodes for the key and value.
+    vars_node = gclient_dict.GetNode('vars')
 
-  var_name_node = var_ast.keys[0]
-  var_name_node.lineno += line - 2
-  vars_node.keys.insert(0, var_name_node)
+    var_name_node = var_ast.keys[0]
+    var_name_node.lineno += line - 2
+    vars_node.keys.insert(0, var_name_node)
 
-  value_node = var_ast.values[0]
-  value_node.lineno += line - 2
-  vars_node.values.insert(0, value_node)
+    value_node = var_ast.values[0]
+    value_node.lineno += line - 2
+    vars_node.values.insert(0, value_node)
 
-  # Update the tokens.
-  var_tokens = list(tokenize.generate_tokens(StringIO(var_content).readline))
-  var_tokens = {
-      token[2]: list(token)
-      # Ignore the tokens corresponding to braces and new lines.
-      for token in var_tokens[2:-3]
-  }
+    # Update the tokens.
+    var_tokens = list(tokenize.generate_tokens(StringIO(var_content).readline))
+    var_tokens = {
+        token[2]: list(token)
+        # Ignore the tokens corresponding to braces and new lines.
+        for token in var_tokens[2:-3]
+    }
 
-  gclient_dict.tokens = _ShiftLinesInTokens(gclient_dict.tokens, 1, line)
-  gclient_dict.tokens.update(_ShiftLinesInTokens(var_tokens, line - 2, 0))
+    gclient_dict.tokens = _ShiftLinesInTokens(gclient_dict.tokens, 1, line)
+    gclient_dict.tokens.update(_ShiftLinesInTokens(var_tokens, line - 2, 0))
 
 
 def SetVar(gclient_dict, var_name, value):
-  if not isinstance(gclient_dict, _NodeDict) or gclient_dict.tokens is None:
-    raise ValueError(
-        "Can't use SetVar for the given gclient dict. It contains no "
-        "formatting information.")
-  tokens = gclient_dict.tokens
+    if not isinstance(gclient_dict, _NodeDict) or gclient_dict.tokens is None:
+        raise ValueError(
+            "Can't use SetVar for the given gclient dict. It contains no "
+            "formatting information.")
+    tokens = gclient_dict.tokens
 
-  if 'vars' not in gclient_dict:
-    raise KeyError("vars dict is not defined.")
+    if 'vars' not in gclient_dict:
+        raise KeyError("vars dict is not defined.")
 
-  if var_name not in gclient_dict['vars']:
-    raise ValueError(
-        "%s has not been declared in the vars dict. Consider using AddVar "
-        "instead." % var_name)
+    if var_name not in gclient_dict['vars']:
+        raise ValueError(
+            "%s has not been declared in the vars dict. Consider using AddVar "
+            "instead." % var_name)
 
-  node = gclient_dict['vars'].GetNode(var_name)
-  if node is None:
-    raise ValueError(
-        "The vars entry for %s has no formatting information." % var_name)
+    node = gclient_dict['vars'].GetNode(var_name)
+    if node is None:
+        raise ValueError(
+            "The vars entry for %s has no formatting information." % var_name)
 
-  _UpdateAstString(tokens, node, value)
-  gclient_dict['vars'].SetNode(var_name, value, node)
+    _UpdateAstString(tokens, node, value)
+    gclient_dict['vars'].SetNode(var_name, value, node)
 
 
 def _GetVarName(node):
-  if isinstance(node, ast.Call):
-    return node.args[0].s
+    if isinstance(node, ast.Call):
+        return node.args[0].s
 
-  if node.s.endswith('}'):
-    last_brace = node.s.rfind('{')
-    return node.s[last_brace+1:-1]
-  return None
+    if node.s.endswith('}'):
+        last_brace = node.s.rfind('{')
+        return node.s[last_brace + 1:-1]
+    return None
 
 
 def SetCIPD(gclient_dict, dep_name, package_name, new_version):
-  if not isinstance(gclient_dict, _NodeDict) or gclient_dict.tokens is None:
-    raise ValueError(
-        "Can't use SetCIPD for the given gclient dict. It contains no "
-        "formatting information.")
-  tokens = gclient_dict.tokens
-
-  if 'deps' not in gclient_dict or dep_name not in gclient_dict['deps']:
-    raise KeyError(
-        "Could not find any dependency called %s." % dep_name)
-
-  # Find the package with the given name
-  packages = [
-      package
-      for package in gclient_dict['deps'][dep_name]['packages']
-      if package['package'] == package_name
-  ]
-  if len(packages) != 1:
-    raise ValueError(
-        "There must be exactly one package with the given name (%s), "
-        "%s were found." % (package_name, len(packages)))
-
-  # TODO(ehmaldonado): Support Var in package's version.
-  node = packages[0].GetNode('version')
-  if node is None:
-    raise ValueError(
-        "The deps entry for %s:%s has no formatting information." %
-        (dep_name, package_name))
-
-  if not isinstance(node, ast.Call) and not isinstance(node, ast.Str):
-    raise ValueError(
-        "Unsupported dependency revision format. Please file a bug to the "
-        "Infra>SDK component in crbug.com")
-
-  var_name = _GetVarName(node)
-  if var_name is not None:
-    SetVar(gclient_dict, var_name, new_version)
-  else:
-    _UpdateAstString(tokens, node, new_version)
-    packages[0].SetNode('version', new_version, node)
-
-
-def SetRevision(gclient_dict, dep_name, new_revision):
-  def _UpdateRevision(dep_dict, dep_key, new_revision):
-    dep_node = dep_dict.GetNode(dep_key)
-    if dep_node is None:
-      raise ValueError(
-          "The deps entry for %s has no formatting information." % dep_name)
-
-    node = dep_node
-    if isinstance(node, ast.BinOp):
-      node = node.right
-
-    if isinstance(node, ast.Str):
-      token = _gclient_eval(tokens[node.lineno, node.col_offset][1])
-      if token != node.s:
+    if not isinstance(gclient_dict, _NodeDict) or gclient_dict.tokens is None:
+        raise ValueError(
+            "Can't use SetCIPD for the given gclient dict. It contains no "
+            "formatting information.")
+    tokens = gclient_dict.tokens
+
+    if 'deps' not in gclient_dict or dep_name not in gclient_dict['deps']:
+        raise KeyError("Could not find any dependency called %s." % dep_name)
+
+    # Find the package with the given name
+    packages = [
+        package for package in gclient_dict['deps'][dep_name]['packages']
+        if package['package'] == package_name
+    ]
+    if len(packages) != 1:
         raise ValueError(
-            'Can\'t update value for %s. Multiline strings and implicitly '
-            'concatenated strings are not supported.\n'
-            'Consider reformatting the DEPS file.' % dep_key)
+            "There must be exactly one package with the given name (%s), "
+            "%s were found." % (package_name, len(packages)))
 
+    # TODO(ehmaldonado): Support Var in package's version.
+    node = packages[0].GetNode('version')
+    if node is None:
+        raise ValueError(
+            "The deps entry for %s:%s has no formatting information." %
+            (dep_name, package_name))
 
     if not isinstance(node, ast.Call) and not isinstance(node, ast.Str):
-      raise ValueError(
-          "Unsupported dependency revision format. Please file a bug to the "
-          "Infra>SDK component in crbug.com")
+        raise ValueError(
+            "Unsupported dependency revision format. Please file a bug to the "
+            "Infra>SDK component in crbug.com")
 
     var_name = _GetVarName(node)
     if var_name is not None:
-      SetVar(gclient_dict, var_name, new_revision)
+        SetVar(gclient_dict, var_name, new_version)
     else:
-      if '@' in node.s:
-        # '@' is part of the last string, which we want to modify. Discard
-        # whatever was after the '@' and put the new revision in its place.
-        new_revision = node.s.split('@')[0] + '@' + new_revision
-      elif '@' not in dep_dict[dep_key]:
-        # '@' is not part of the URL at all. This mean the dependency is
-        # unpinned and we should pin it.
-        new_revision = node.s + '@' + new_revision
-      _UpdateAstString(tokens, node, new_revision)
-      dep_dict.SetNode(dep_key, new_revision, node)
-
-  if not isinstance(gclient_dict, _NodeDict) or gclient_dict.tokens is None:
-    raise ValueError(
-        "Can't use SetRevision for the given gclient dict. It contains no "
-        "formatting information.")
-  tokens = gclient_dict.tokens
-
-  if 'deps' not in gclient_dict or dep_name not in gclient_dict['deps']:
-    raise KeyError(
-        "Could not find any dependency called %s." % dep_name)
-
-  if isinstance(gclient_dict['deps'][dep_name], _NodeDict):
-    _UpdateRevision(gclient_dict['deps'][dep_name], 'url', new_revision)
-  else:
-    _UpdateRevision(gclient_dict['deps'], dep_name, new_revision)
+        _UpdateAstString(tokens, node, new_version)
+        packages[0].SetNode('version', new_version, node)
 
 
-def GetVar(gclient_dict, var_name):
-  if 'vars' not in gclient_dict or var_name not in gclient_dict['vars']:
-    raise KeyError(
-        "Could not find any variable called %s." % var_name)
+def SetRevision(gclient_dict, dep_name, new_revision):
+    def _UpdateRevision(dep_dict, dep_key, new_revision):
+        dep_node = dep_dict.GetNode(dep_key)
+        if dep_node is None:
+            raise ValueError(
+                "The deps entry for %s has no formatting information." %
+                dep_name)
+
+        node = dep_node
+        if isinstance(node, ast.BinOp):
+            node = node.right
+
+        if isinstance(node, ast.Str):
+            token = _gclient_eval(tokens[node.lineno, node.col_offset][1])
+            if token != node.s:
+                raise ValueError(
+                    'Can\'t update value for %s. Multiline strings and implicitly '
+                    'concatenated strings are not supported.\n'
+                    'Consider reformatting the DEPS file.' % dep_key)
+
+        if not isinstance(node, ast.Call) and not isinstance(node, ast.Str):
+            raise ValueError(
+                "Unsupported dependency revision format. Please file a bug to the "
+                "Infra>SDK component in crbug.com")
+
+        var_name = _GetVarName(node)
+        if var_name is not None:
+            SetVar(gclient_dict, var_name, new_revision)
+        else:
+            if '@' in node.s:
+                # '@' is part of the last string, which we want to modify.
+                # Discard whatever was after the '@' and put the new revision in
+                # its place.
+                new_revision = node.s.split('@')[0] + '@' + new_revision
+            elif '@' not in dep_dict[dep_key]:
+                # '@' is not part of the URL at all. This means the dependency
+                # is unpinned and we should pin it.
+                new_revision = node.s + '@' + new_revision
+            _UpdateAstString(tokens, node, new_revision)
+            dep_dict.SetNode(dep_key, new_revision, node)
+
+    if not isinstance(gclient_dict, _NodeDict) or gclient_dict.tokens is None:
+        raise ValueError(
+            "Can't use SetRevision for the given gclient dict. It contains no "
+            "formatting information.")
+    tokens = gclient_dict.tokens
 
-  val = gclient_dict['vars'][var_name]
-  if isinstance(val, ConstantString):
-    return val.value
-  return val
+    if 'deps' not in gclient_dict or dep_name not in gclient_dict['deps']:
+        raise KeyError("Could not find any dependency called %s." % dep_name)
 
+    if isinstance(gclient_dict['deps'][dep_name], _NodeDict):
+        _UpdateRevision(gclient_dict['deps'][dep_name], 'url', new_revision)
+    else:
+        _UpdateRevision(gclient_dict['deps'], dep_name, new_revision)
 
-def GetCIPD(gclient_dict, dep_name, package_name):
-  if 'deps' not in gclient_dict or dep_name not in gclient_dict['deps']:
-    raise KeyError(
-        "Could not find any dependency called %s." % dep_name)
 
-  # Find the package with the given name
-  packages = [
-      package
-      for package in gclient_dict['deps'][dep_name]['packages']
-      if package['package'] == package_name
-  ]
-  if len(packages) != 1:
-    raise ValueError(
-        "There must be exactly one package with the given name (%s), "
-        "%s were found." % (package_name, len(packages)))
+def GetVar(gclient_dict, var_name):
+    if 'vars' not in gclient_dict or var_name not in gclient_dict['vars']:
+        raise KeyError("Could not find any variable called %s." % var_name)
 
-  return packages[0]['version']
+    val = gclient_dict['vars'][var_name]
+    if isinstance(val, ConstantString):
+        return val.value
+    return val
 
 
-def GetRevision(gclient_dict, dep_name):
-  if 'deps' not in gclient_dict or dep_name not in gclient_dict['deps']:
-    suggestions = []
-    if 'deps' in gclient_dict:
-      for key in gclient_dict['deps']:
-        if dep_name in key:
-          suggestions.append(key)
-    if suggestions:
-      raise KeyError(
-          "Could not find any dependency called %s. Did you mean %s" %
-          (dep_name, ' or '.join(suggestions)))
-    raise KeyError(
-        "Could not find any dependency called %s." % dep_name)
-
-  dep = gclient_dict['deps'][dep_name]
-  if dep is None:
-    return None
+def GetCIPD(gclient_dict, dep_name, package_name):
+    if 'deps' not in gclient_dict or dep_name not in gclient_dict['deps']:
+        raise KeyError("Could not find any dependency called %s." % dep_name)
+
+    # Find the package with the given name
+    packages = [
+        package for package in gclient_dict['deps'][dep_name]['packages']
+        if package['package'] == package_name
+    ]
+    if len(packages) != 1:
+        raise ValueError(
+            "There must be exactly one package with the given name (%s), "
+            "%s were found." % (package_name, len(packages)))
 
-  if isinstance(dep, str):
-    _, _, revision = dep.partition('@')
-    return revision or None
+    return packages[0]['version']
 
-  if isinstance(dep, collections.abc.Mapping) and 'url' in dep:
-    _, _, revision = dep['url'].partition('@')
-    return revision or None
 
-  raise ValueError(
-      '%s is not a valid git dependency.' % dep_name)
+def GetRevision(gclient_dict, dep_name):
+    if 'deps' not in gclient_dict or dep_name not in gclient_dict['deps']:
+        suggestions = []
+        if 'deps' in gclient_dict:
+            for key in gclient_dict['deps']:
+                if dep_name in key:
+                    suggestions.append(key)
+        if suggestions:
+            raise KeyError(
+                "Could not find any dependency called %s. Did you mean %s" %
+                (dep_name, ' or '.join(suggestions)))
+        raise KeyError("Could not find any dependency called %s." % dep_name)
+
+    dep = gclient_dict['deps'][dep_name]
+    if dep is None:
+        return None
+
+    if isinstance(dep, str):
+        _, _, revision = dep.partition('@')
+        return revision or None
+
+    if isinstance(dep, collections.abc.Mapping) and 'url' in dep:
+        _, _, revision = dep['url'].partition('@')
+        return revision or None
+
+    raise ValueError('%s is not a valid git dependency.' % dep_name)
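
As a quick orientation for the editing helpers above, here is a hypothetical
sketch of rolling a pinned dependency in a DEPS file. The dependency path is
invented, and gclient_eval.Parse (defined earlier in this module) is assumed to
return the token-preserving _NodeDict that these helpers require.

import gclient_eval

with open('DEPS') as f:
    contents = f.read()

# Assumed: Parse() returns a _NodeDict carrying the original tokens, which is
# what SetRevision/SetVar need in order to rewrite the file in place.
local_scope = gclient_eval.Parse(contents, 'DEPS')

old = gclient_eval.GetRevision(local_scope, 'src/third_party/example')
gclient_eval.SetRevision(local_scope, 'src/third_party/example', '0' * 40)
print('rolled %s -> %s' % (old, '0' * 40))

with open('DEPS', 'w') as f:
    f.write(gclient_eval.RenderDEPSFile(local_scope))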

+ 113 - 110
gclient_paths.py

@@ -15,136 +15,139 @@ import sys
 import gclient_utils
 import subprocess2
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 def FindGclientRoot(from_dir, filename='.gclient'):
-  """Tries to find the gclient root."""
-  real_from_dir = os.path.abspath(from_dir)
-  path = real_from_dir
-  while not os.path.exists(os.path.join(path, filename)):
-    split_path = os.path.split(path)
-    if not split_path[1]:
-      return None
-    path = split_path[0]
-
-  logging.info('Found gclient root at ' + path)
-
-  if path == real_from_dir:
-    return path
-
-  # If we did not find the file in the current directory, make sure we are in a
-  # sub directory that is controlled by this configuration.
-  entries_filename = os.path.join(path, filename + '_entries')
-  if not os.path.exists(entries_filename):
-    # If .gclient_entries does not exist, a previous call to gclient sync
-    # might have failed. In that case, we cannot verify that the .gclient
-    # is the one we want to use. In order to not to cause too much trouble,
-    # just issue a warning and return the path anyway.
-    print(
-        "%s missing, %s file in parent directory %s might not be the file "
-        "you want to use." % (entries_filename, filename, path),
-        file=sys.stderr)
-    return path
-
-  entries_content = gclient_utils.FileRead(entries_filename)
-  scope = {}
-  try:
-    exec(entries_content, scope)
-  except (SyntaxError, Exception) as e:
-    gclient_utils.SyntaxErrorToError(filename, e)
-
-  all_directories = scope['entries'].keys()
-  path_to_check = os.path.relpath(real_from_dir, path)
-  while path_to_check:
-    if path_to_check in all_directories:
-      return path
-    path_to_check = os.path.dirname(path_to_check)
-
-  return None
+    """Tries to find the gclient root."""
+    real_from_dir = os.path.abspath(from_dir)
+    path = real_from_dir
+    while not os.path.exists(os.path.join(path, filename)):
+        split_path = os.path.split(path)
+        if not split_path[1]:
+            return None
+        path = split_path[0]
+
+    logging.info('Found gclient root at ' + path)
+
+    if path == real_from_dir:
+        return path
+
+    # If we did not find the file in the current directory, make sure we are in
+    # a sub directory that is controlled by this configuration.
+    entries_filename = os.path.join(path, filename + '_entries')
+    if not os.path.exists(entries_filename):
+        # If .gclient_entries does not exist, a previous call to gclient sync
+        # might have failed. In that case, we cannot verify that the .gclient
+        # is the one we want to use. In order not to cause too much trouble,
+        # just issue a warning and return the path anyway.
+        print(
+            "%s missing, %s file in parent directory %s might not be the file "
+            "you want to use." % (entries_filename, filename, path),
+            file=sys.stderr)
+        return path
+
+    entries_content = gclient_utils.FileRead(entries_filename)
+    scope = {}
+    try:
+        exec(entries_content, scope)
+    except (SyntaxError, Exception) as e:
+        gclient_utils.SyntaxErrorToError(filename, e)
+
+    all_directories = scope['entries'].keys()
+    path_to_check = os.path.relpath(real_from_dir, path)
+    while path_to_check:
+        if path_to_check in all_directories:
+            return path
+        path_to_check = os.path.dirname(path_to_check)
+
+    return None
 
 
 def GetPrimarySolutionPath():
-  """Returns the full path to the primary solution. (gclient_root + src)"""
-
-  gclient_root = FindGclientRoot(os.getcwd())
-  if gclient_root:
-    # Some projects' top directory is not named 'src'.
-    source_dir_name = GetGClientPrimarySolutionName(gclient_root) or 'src'
-    return os.path.join(gclient_root, source_dir_name)
-
-  # Some projects might not use .gclient. Try to see whether we're in a git
-  # checkout that contains a 'buildtools' subdir.
-  top_dir = os.getcwd()
-  try:
-    top_dir = subprocess2.check_output(['git', 'rev-parse', '--show-toplevel'],
-                                       stderr=subprocess2.DEVNULL)
-    top_dir = top_dir.decode('utf-8', 'replace')
-    top_dir = os.path.normpath(top_dir.strip())
-  except subprocess2.CalledProcessError:
-    pass
-
-  if os.path.exists(os.path.join(top_dir, 'buildtools')):
-    return top_dir
-  return None
+    """Returns the full path to the primary solution. (gclient_root + src)"""
+
+    gclient_root = FindGclientRoot(os.getcwd())
+    if gclient_root:
+        # Some projects' top directory is not named 'src'.
+        source_dir_name = GetGClientPrimarySolutionName(gclient_root) or 'src'
+        return os.path.join(gclient_root, source_dir_name)
+
+    # Some projects might not use .gclient. Try to see whether we're in a git
+    # checkout that contains a 'buildtools' subdir.
+    top_dir = os.getcwd()
+    try:
+        top_dir = subprocess2.check_output(
+            ['git', 'rev-parse', '--show-toplevel'], stderr=subprocess2.DEVNULL)
+        top_dir = top_dir.decode('utf-8', 'replace')
+        top_dir = os.path.normpath(top_dir.strip())
+    except subprocess2.CalledProcessError:
+        pass
+
+    if os.path.exists(os.path.join(top_dir, 'buildtools')):
+        return top_dir
+    return None
 
 
 def GetBuildtoolsPath():
-  """Returns the full path to the buildtools directory.
+    """Returns the full path to the buildtools directory.
   This is based on the root of the checkout containing the current directory."""
 
-  # Overriding the build tools path by environment is highly unsupported and may
-  # break without warning.  Do not rely on this for anything important.
-  override = os.environ.get('CHROMIUM_BUILDTOOLS_PATH')
-  if override is not None:
-    return override
+    # Overriding the build tools path by environment is highly unsupported and
+    # may break without warning.  Do not rely on this for anything important.
+    override = os.environ.get('CHROMIUM_BUILDTOOLS_PATH')
+    if override is not None:
+        return override
 
-  primary_solution = GetPrimarySolutionPath()
-  if not primary_solution:
-    return None
+    primary_solution = GetPrimarySolutionPath()
+    if not primary_solution:
+        return None
 
-  buildtools_path = os.path.join(primary_solution, 'buildtools')
-  if os.path.exists(buildtools_path):
-    return buildtools_path
+    buildtools_path = os.path.join(primary_solution, 'buildtools')
+    if os.path.exists(buildtools_path):
+        return buildtools_path
 
-  # buildtools may be in the gclient root.
-  gclient_root = FindGclientRoot(os.getcwd())
-  buildtools_path = os.path.join(gclient_root, 'buildtools')
-  if os.path.exists(buildtools_path):
-    return buildtools_path
+    # buildtools may be in the gclient root.
+    gclient_root = FindGclientRoot(os.getcwd())
+    buildtools_path = os.path.join(gclient_root, 'buildtools')
+    if os.path.exists(buildtools_path):
+        return buildtools_path
 
-  return None
+    return None
 
 
 def GetBuildtoolsPlatformBinaryPath():
-  """Returns the full path to the binary directory for the current platform."""
-  buildtools_path = GetBuildtoolsPath()
-  if not buildtools_path:
-    return None
-
-  if sys.platform.startswith(('cygwin', 'win')):
-    subdir = 'win'
-  elif sys.platform == 'darwin':
-    subdir = 'mac'
-  elif sys.platform.startswith('linux'):
-    subdir = 'linux64'
-  else:
-    raise gclient_utils.Error('Unknown platform: ' + sys.platform)
-  return os.path.join(buildtools_path, subdir)
+    """Returns the full path to the binary directory for the current platform."""
+    buildtools_path = GetBuildtoolsPath()
+    if not buildtools_path:
+        return None
+
+    if sys.platform.startswith(('cygwin', 'win')):
+        subdir = 'win'
+    elif sys.platform == 'darwin':
+        subdir = 'mac'
+    elif sys.platform.startswith('linux'):
+        subdir = 'linux64'
+    else:
+        raise gclient_utils.Error('Unknown platform: ' + sys.platform)
+    return os.path.join(buildtools_path, subdir)
 
 
 def GetExeSuffix():
-  """Returns '' or '.exe' depending on how executables work on this platform."""
-  if sys.platform.startswith(('cygwin', 'win')):
-    return '.exe'
-  return ''
+    """Returns '' or '.exe' depending on how executables work on this platform."""
+    if sys.platform.startswith(('cygwin', 'win')):
+        return '.exe'
+    return ''
 
 
 def GetGClientPrimarySolutionName(gclient_root_dir_path):
-  """Returns the name of the primary solution in the .gclient file specified."""
-  gclient_config_file = os.path.join(gclient_root_dir_path, '.gclient')
-  gclient_config_contents = gclient_utils.FileRead(gclient_config_file)
-  env = {}
-  exec(gclient_config_contents, env)
-  solutions = env.get('solutions', [])
-  if solutions:
-    return solutions[0].get('name')
-  return None
+    """Returns the name of the primary solution in the .gclient file specified."""
+    gclient_config_file = os.path.join(gclient_root_dir_path, '.gclient')
+    gclient_config_contents = gclient_utils.FileRead(gclient_config_file)
+    env = {}
+    exec(gclient_config_contents, env)
+    solutions = env.get('solutions', [])
+    if solutions:
+        return solutions[0].get('name')
+    return None
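
For context, a small usage sketch of the helpers in this file; the printed
paths depend entirely on the local checkout.

import os

import gclient_paths

# Walk upwards from the current directory until a .gclient file is found.
root = gclient_paths.FindGclientRoot(os.getcwd())
print('gclient root:', root)

# Per-platform buildtools binaries (win / mac / linux64), if present.
bin_dir = gclient_paths.GetBuildtoolsPlatformBinaryPath()
if bin_dir:
    # e.g. the gn binary that Chromium checkouts place under buildtools.
    gn = os.path.join(bin_dir, 'gn' + gclient_paths.GetExeSuffix())
    print('gn binary:', gn)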

File diff is too large to display
+ 1388 - 1301
gclient_scm.py


File diff is too large to display
+ 484 - 471
gclient_utils.py


+ 395 - 367
gerrit_client.py

@@ -2,7 +2,6 @@
 # Copyright 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Simple client for the Gerrit REST API.
 
 Example usage:
@@ -24,376 +23,397 @@ __version__ = '0.1'
 
 
 def write_result(result, opt):
-  if opt.json_file:
-    with open(opt.json_file, 'w') as json_file:
-      json_file.write(json.dumps(result))
+    if opt.json_file:
+        with open(opt.json_file, 'w') as json_file:
+            json_file.write(json.dumps(result))
 
 
 @subcommand.usage('[args ...]')
 def CMDmovechanges(parser, args):
-  """Move changes to a different destination branch."""
-  parser.add_option('-p', '--param', dest='params', action='append',
-                    help='repeatable query parameter, format: -p key=value')
-  parser.add_option('--destination_branch', dest='destination_branch',
-                    help='where to move changes to')
-
-  (opt, args) = parser.parse_args(args)
-  assert opt.destination_branch, "--destination_branch not defined"
-  for p in opt.params:
-    assert '=' in p, '--param is key=value, not "%s"' % p
-  host = urllib.parse.urlparse(opt.host).netloc
-
-  limit = 100
-  while True:
-    result = gerrit_util.QueryChanges(
-        host,
-        list(tuple(p.split('=', 1)) for p in opt.params),
-        limit=limit,
-    )
-    for change in result:
-      gerrit_util.MoveChange(host, change['id'], opt.destination_branch)
-
-    if len(result) < limit:
-      break
-  logging.info("Done")
+    """Move changes to a different destination branch."""
+    parser.add_option('-p',
+                      '--param',
+                      dest='params',
+                      action='append',
+                      help='repeatable query parameter, format: -p key=value')
+    parser.add_option('--destination_branch',
+                      dest='destination_branch',
+                      help='where to move changes to')
+
+    (opt, args) = parser.parse_args(args)
+    assert opt.destination_branch, "--destination_branch not defined"
+    for p in opt.params:
+        assert '=' in p, '--param is key=value, not "%s"' % p
+    host = urllib.parse.urlparse(opt.host).netloc
+
+    limit = 100
+    while True:
+        result = gerrit_util.QueryChanges(
+            host,
+            list(tuple(p.split('=', 1)) for p in opt.params),
+            limit=limit,
+        )
+        for change in result:
+            gerrit_util.MoveChange(host, change['id'], opt.destination_branch)
+
+        if len(result) < limit:
+            break
+    logging.info("Done")
 
 
 @subcommand.usage('[args ...]')
 def CMDbranchinfo(parser, args):
-  """Get information on a gerrit branch."""
-  parser.add_option('--branch', dest='branch', help='branch name')
+    """Get information on a gerrit branch."""
+    parser.add_option('--branch', dest='branch', help='branch name')
 
-  (opt, args) = parser.parse_args(args)
-  host = urllib.parse.urlparse(opt.host).netloc
-  project = urllib.parse.quote_plus(opt.project)
-  branch = urllib.parse.quote_plus(opt.branch)
-  result = gerrit_util.GetGerritBranch(host, project, branch)
-  logging.info(result)
-  write_result(result, opt)
+    (opt, args) = parser.parse_args(args)
+    host = urllib.parse.urlparse(opt.host).netloc
+    project = urllib.parse.quote_plus(opt.project)
+    branch = urllib.parse.quote_plus(opt.branch)
+    result = gerrit_util.GetGerritBranch(host, project, branch)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDrawapi(parser, args):
-  """Call an arbitrary Gerrit REST API endpoint."""
-  parser.add_option('--path', dest='path', help='HTTP path of the API endpoint')
-  parser.add_option('--method', dest='method',
-                    help='HTTP method for the API (default: GET)')
-  parser.add_option('--body', dest='body', help='API JSON body contents')
-  parser.add_option('--accept_status',
-                    dest='accept_status',
-                    help='Comma-delimited list of status codes for success.')
-
-  (opt, args) = parser.parse_args(args)
-  assert opt.path, "--path not defined"
-
-  host = urllib.parse.urlparse(opt.host).netloc
-  kwargs = {}
-  if opt.method:
-    kwargs['reqtype'] = opt.method.upper()
-  if opt.body:
-    kwargs['body'] = json.loads(opt.body)
-  if opt.accept_status:
-    kwargs['accept_statuses'] = [int(x) for x in opt.accept_status.split(',')]
-  result = gerrit_util.CallGerritApi(host, opt.path, **kwargs)
-  logging.info(result)
-  write_result(result, opt)
+    """Call an arbitrary Gerrit REST API endpoint."""
+    parser.add_option('--path',
+                      dest='path',
+                      help='HTTP path of the API endpoint')
+    parser.add_option('--method',
+                      dest='method',
+                      help='HTTP method for the API (default: GET)')
+    parser.add_option('--body', dest='body', help='API JSON body contents')
+    parser.add_option('--accept_status',
+                      dest='accept_status',
+                      help='Comma-delimited list of status codes for success.')
+
+    (opt, args) = parser.parse_args(args)
+    assert opt.path, "--path not defined"
+
+    host = urllib.parse.urlparse(opt.host).netloc
+    kwargs = {}
+    if opt.method:
+        kwargs['reqtype'] = opt.method.upper()
+    if opt.body:
+        kwargs['body'] = json.loads(opt.body)
+    if opt.accept_status:
+        kwargs['accept_statuses'] = [
+            int(x) for x in opt.accept_status.split(',')
+        ]
+    result = gerrit_util.CallGerritApi(host, opt.path, **kwargs)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDbranch(parser, args):
-  """Create a branch in a gerrit project."""
-  parser.add_option('--branch', dest='branch', help='branch name')
-  parser.add_option('--commit', dest='commit', help='commit hash')
-  parser.add_option('--allow-existent-branch',
-                    action='store_true',
-                    help=('Accept that the branch alread exists as long as the'
-                          ' branch head points the given commit'))
-
-  (opt, args) = parser.parse_args(args)
-  assert opt.project, "--project not defined"
-  assert opt.branch, "--branch not defined"
-  assert opt.commit, "--commit not defined"
-
-  project = urllib.parse.quote_plus(opt.project)
-  host = urllib.parse.urlparse(opt.host).netloc
-  branch = urllib.parse.quote_plus(opt.branch)
-  result = gerrit_util.GetGerritBranch(host, project, branch)
-  if result:
-    if not opt.allow_existent_branch:
-      raise gerrit_util.GerritError(200, 'Branch already exists')
-    if result.get('revision') != opt.commit:
-      raise gerrit_util.GerritError(
-          200, ('Branch already exists but '
-                'the branch head is not at the given commit'))
-  else:
-    try:
-      result = gerrit_util.CreateGerritBranch(host, project, branch, opt.commit)
-    except gerrit_util.GerritError as e:
-      result = gerrit_util.GetGerritBranch(host, project, branch)
-      if not result:
-        raise e
-      # If reached here, we hit a real conflict error, because the
-      # branch just created is pointing a different commit.
-      if result.get('revision') != opt.commit:
-        raise gerrit_util.GerritError(
-            200, ('Conflict: branch was created but '
-                  'the branch head is not at the given commit'))
-  logging.info(result)
-  write_result(result, opt)
+    """Create a branch in a gerrit project."""
+    parser.add_option('--branch', dest='branch', help='branch name')
+    parser.add_option('--commit', dest='commit', help='commit hash')
+    parser.add_option(
+        '--allow-existent-branch',
+        action='store_true',
+        help=('Accept that the branch already exists as long as the'
+              ' branch head points to the given commit'))
+
+    (opt, args) = parser.parse_args(args)
+    assert opt.project, "--project not defined"
+    assert opt.branch, "--branch not defined"
+    assert opt.commit, "--commit not defined"
+
+    project = urllib.parse.quote_plus(opt.project)
+    host = urllib.parse.urlparse(opt.host).netloc
+    branch = urllib.parse.quote_plus(opt.branch)
+    result = gerrit_util.GetGerritBranch(host, project, branch)
+    if result:
+        if not opt.allow_existent_branch:
+            raise gerrit_util.GerritError(200, 'Branch already exists')
+        if result.get('revision') != opt.commit:
+            raise gerrit_util.GerritError(
+                200, ('Branch already exists but '
+                      'the branch head is not at the given commit'))
+    else:
+        try:
+            result = gerrit_util.CreateGerritBranch(host, project, branch,
+                                                    opt.commit)
+        except gerrit_util.GerritError as e:
+            result = gerrit_util.GetGerritBranch(host, project, branch)
+            if not result:
+                raise e
+            # If we reached here, we hit a real conflict error, because the
+            # branch just created points at a different commit.
+            if result.get('revision') != opt.commit:
+                raise gerrit_util.GerritError(
+                    200, ('Conflict: branch was created but '
+                          'the branch head is not at the given commit'))
+    logging.info(result)
+    write_result(result, opt)
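
The flow above boils down to "create the branch if it is missing, and tolerate
an existing branch only when it already points at the requested commit". A
stripped-down sketch of that pattern, with placeholder host, project, branch
and commit values:

import urllib.parse

import gerrit_util

# Placeholder values for illustration only.
host = 'chromium-review.googlesource.com'
project = urllib.parse.quote_plus('playground/demo-project')
branch = urllib.parse.quote_plus('demo-branch')
commit = 'a' * 40

existing = gerrit_util.GetGerritBranch(host, project, branch)
if existing:
    if existing.get('revision') != commit:
        raise RuntimeError('branch exists but points at another commit')
else:
    gerrit_util.CreateGerritBranch(host, project, branch, commit)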
 
 
 @subcommand.usage('[args ...]')
 def CMDtag(parser, args):
-  """Create a tag in a gerrit project."""
-  parser.add_option('--tag', dest='tag', help='tag name')
-  parser.add_option('--commit', dest='commit', help='commit hash')
+    """Create a tag in a gerrit project."""
+    parser.add_option('--tag', dest='tag', help='tag name')
+    parser.add_option('--commit', dest='commit', help='commit hash')
 
-  (opt, args) = parser.parse_args(args)
-  assert opt.project, "--project not defined"
-  assert opt.tag, "--tag not defined"
-  assert opt.commit, "--commit not defined"
+    (opt, args) = parser.parse_args(args)
+    assert opt.project, "--project not defined"
+    assert opt.tag, "--tag not defined"
+    assert opt.commit, "--commit not defined"
 
-  project = urllib.parse.quote_plus(opt.project)
-  host = urllib.parse.urlparse(opt.host).netloc
-  tag = urllib.parse.quote_plus(opt.tag)
-  result = gerrit_util.CreateGerritTag(host, project, tag, opt.commit)
-  logging.info(result)
-  write_result(result, opt)
+    project = urllib.parse.quote_plus(opt.project)
+    host = urllib.parse.urlparse(opt.host).netloc
+    tag = urllib.parse.quote_plus(opt.tag)
+    result = gerrit_util.CreateGerritTag(host, project, tag, opt.commit)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDhead(parser, args):
-  """Update which branch the project HEAD points to."""
-  parser.add_option('--branch', dest='branch', help='branch name')
+    """Update which branch the project HEAD points to."""
+    parser.add_option('--branch', dest='branch', help='branch name')
 
-  (opt, args) = parser.parse_args(args)
-  assert opt.project, "--project not defined"
-  assert opt.branch, "--branch not defined"
+    (opt, args) = parser.parse_args(args)
+    assert opt.project, "--project not defined"
+    assert opt.branch, "--branch not defined"
 
-  project = urllib.parse.quote_plus(opt.project)
-  host = urllib.parse.urlparse(opt.host).netloc
-  branch = urllib.parse.quote_plus(opt.branch)
-  result = gerrit_util.UpdateHead(host, project, branch)
-  logging.info(result)
-  write_result(result, opt)
+    project = urllib.parse.quote_plus(opt.project)
+    host = urllib.parse.urlparse(opt.host).netloc
+    branch = urllib.parse.quote_plus(opt.branch)
+    result = gerrit_util.UpdateHead(host, project, branch)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDheadinfo(parser, args):
-  """Retrieves the current HEAD of the project."""
+    """Retrieves the current HEAD of the project."""
 
-  (opt, args) = parser.parse_args(args)
-  assert opt.project, "--project not defined"
+    (opt, args) = parser.parse_args(args)
+    assert opt.project, "--project not defined"
 
-  project = urllib.parse.quote_plus(opt.project)
-  host = urllib.parse.urlparse(opt.host).netloc
-  result = gerrit_util.GetHead(host, project)
-  logging.info(result)
-  write_result(result, opt)
+    project = urllib.parse.quote_plus(opt.project)
+    host = urllib.parse.urlparse(opt.host).netloc
+    result = gerrit_util.GetHead(host, project)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDchanges(parser, args):
-  """Queries gerrit for matching changes."""
-  parser.add_option('-p',
-                    '--param',
-                    dest='params',
-                    action='append',
-                    default=[],
-                    help='repeatable query parameter, format: -p key=value')
-  parser.add_option('--query', help='raw gerrit search query string')
-  parser.add_option('-o', '--o-param', dest='o_params', action='append',
-                    help='gerrit output parameters, e.g. ALL_REVISIONS')
-  parser.add_option('--limit', dest='limit', type=int,
-                    help='maximum number of results to return')
-  parser.add_option('--start', dest='start', type=int,
-                    help='how many changes to skip '
-                         '(starting with the most recent)')
-
-  (opt, args) = parser.parse_args(args)
-  assert opt.params or opt.query, '--param or --query required'
-  for p in opt.params:
-    assert '=' in p, '--param is key=value, not "%s"' % p
-
-  result = gerrit_util.QueryChanges(
-      urllib.parse.urlparse(opt.host).netloc,
-      list(tuple(p.split('=', 1)) for p in opt.params),
-      first_param=opt.query,
-      start=opt.start,  # Default: None
-      limit=opt.limit,  # Default: None
-      o_params=opt.o_params,  # Default: None
-  )
-  logging.info('Change query returned %d changes.', len(result))
-  write_result(result, opt)
+    """Queries gerrit for matching changes."""
+    parser.add_option('-p',
+                      '--param',
+                      dest='params',
+                      action='append',
+                      default=[],
+                      help='repeatable query parameter, format: -p key=value')
+    parser.add_option('--query', help='raw gerrit search query string')
+    parser.add_option('-o',
+                      '--o-param',
+                      dest='o_params',
+                      action='append',
+                      help='gerrit output parameters, e.g. ALL_REVISIONS')
+    parser.add_option('--limit',
+                      dest='limit',
+                      type=int,
+                      help='maximum number of results to return')
+    parser.add_option('--start',
+                      dest='start',
+                      type=int,
+                      help='how many changes to skip '
+                      '(starting with the most recent)')
+
+    (opt, args) = parser.parse_args(args)
+    assert opt.params or opt.query, '--param or --query required'
+    for p in opt.params:
+        assert '=' in p, '--param is key=value, not "%s"' % p
+
+    result = gerrit_util.QueryChanges(
+        urllib.parse.urlparse(opt.host).netloc,
+        list(tuple(p.split('=', 1)) for p in opt.params),
+        first_param=opt.query,
+        start=opt.start,  # Default: None
+        limit=opt.limit,  # Default: None
+        o_params=opt.o_params,  # Default: None
+    )
+    logging.info('Change query returned %d changes.', len(result))
+    write_result(result, opt)
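
To show the underlying library call, a minimal sketch of querying Gerrit the
way CMDchanges does; the host and query parameters are examples only.

import gerrit_util

host = 'chromium-review.googlesource.com'
# Same call shape as CMDchanges above: a list of (key, value) query tuples.
changes = gerrit_util.QueryChanges(
    host,
    [('project', 'chromium/src'), ('status', 'open')],
    limit=10,
    o_params=['CURRENT_REVISION'],
)
print('%d changes returned' % len(changes))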
 
 
 @subcommand.usage('[args ...]')
 def CMDrelatedchanges(parser, args):
-  """Gets related changes for a given change and revision."""
-  parser.add_option('-c', '--change', type=str, help='change id')
-  parser.add_option('-r', '--revision', type=str, help='revision id')
+    """Gets related changes for a given change and revision."""
+    parser.add_option('-c', '--change', type=str, help='change id')
+    parser.add_option('-r', '--revision', type=str, help='revision id')
 
-  (opt, args) = parser.parse_args(args)
+    (opt, args) = parser.parse_args(args)
 
-  result = gerrit_util.GetRelatedChanges(
-      urllib.parse.urlparse(opt.host).netloc,
-      change=opt.change,
-      revision=opt.revision,
-  )
-  logging.info(result)
-  write_result(result, opt)
+    result = gerrit_util.GetRelatedChanges(
+        urllib.parse.urlparse(opt.host).netloc,
+        change=opt.change,
+        revision=opt.revision,
+    )
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDcreatechange(parser, args):
-  """Create a new change in gerrit."""
-  parser.add_option('-s', '--subject', help='subject for change')
-  parser.add_option('-b',
-                    '--branch',
-                    default='main',
-                    help='target branch for change')
-  parser.add_option(
-      '-p',
-      '--param',
-      dest='params',
-      action='append',
-      help='repeatable field value parameter, format: -p key=value')
-
-  parser.add_option('--cc',
-                    dest='cc_list',
-                    action='append',
-                    help='CC address to notify, format: --cc foo@example.com')
-
-  (opt, args) = parser.parse_args(args)
-  for p in opt.params:
-    assert '=' in p, '--param is key=value, not "%s"' % p
-
-  params = list(tuple(p.split('=', 1)) for p in opt.params)
-
-  if opt.cc_list:
-    params.append(('notify_details', {'CC': {'accounts': opt.cc_list}}))
-
-  result = gerrit_util.CreateChange(
-      urllib.parse.urlparse(opt.host).netloc,
-      opt.project,
-      branch=opt.branch,
-      subject=opt.subject,
-      params=params,
-  )
-  logging.info(result)
-  write_result(result, opt)
+    """Create a new change in gerrit."""
+    parser.add_option('-s', '--subject', help='subject for change')
+    parser.add_option('-b',
+                      '--branch',
+                      default='main',
+                      help='target branch for change')
+    parser.add_option(
+        '-p',
+        '--param',
+        dest='params',
+        action='append',
+        help='repeatable field value parameter, format: -p key=value')
+
+    parser.add_option('--cc',
+                      dest='cc_list',
+                      action='append',
+                      help='CC address to notify, format: --cc foo@example.com')
+
+    (opt, args) = parser.parse_args(args)
+    for p in opt.params:
+        assert '=' in p, '--param is key=value, not "%s"' % p
+
+    params = list(tuple(p.split('=', 1)) for p in opt.params)
+
+    if opt.cc_list:
+        params.append(('notify_details', {'CC': {'accounts': opt.cc_list}}))
+
+    result = gerrit_util.CreateChange(
+        urllib.parse.urlparse(opt.host).netloc,
+        opt.project,
+        branch=opt.branch,
+        subject=opt.subject,
+        params=params,
+    )
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDchangeedit(parser, args):
-  """Puts content of a file into a change edit."""
-  parser.add_option('-c', '--change', type=int, help='change number')
-  parser.add_option('--path', help='path for file')
-  parser.add_option('--file', help='file to place at |path|')
+    """Puts content of a file into a change edit."""
+    parser.add_option('-c', '--change', type=int, help='change number')
+    parser.add_option('--path', help='path for file')
+    parser.add_option('--file', help='file to place at |path|')
 
-  (opt, args) = parser.parse_args(args)
+    (opt, args) = parser.parse_args(args)
 
-  with open(opt.file) as f:
-    data = f.read()
-  result = gerrit_util.ChangeEdit(
-      urllib.parse.urlparse(opt.host).netloc, opt.change, opt.path, data)
-  logging.info(result)
-  write_result(result, opt)
+    with open(opt.file) as f:
+        data = f.read()
+    result = gerrit_util.ChangeEdit(
+        urllib.parse.urlparse(opt.host).netloc, opt.change, opt.path, data)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDpublishchangeedit(parser, args):
-  """Publish a Gerrit change edit."""
-  parser.add_option('-c', '--change', type=int, help='change number')
-  parser.add_option('--notify', help='whether to notify')
+    """Publish a Gerrit change edit."""
+    parser.add_option('-c', '--change', type=int, help='change number')
+    parser.add_option('--notify', help='whether to notify')
 
-  (opt, args) = parser.parse_args(args)
+    (opt, args) = parser.parse_args(args)
 
-  result = gerrit_util.PublishChangeEdit(
-      urllib.parse.urlparse(opt.host).netloc, opt.change, opt.notify)
-  logging.info(result)
-  write_result(result, opt)
+    result = gerrit_util.PublishChangeEdit(
+        urllib.parse.urlparse(opt.host).netloc, opt.change, opt.notify)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDsubmitchange(parser, args):
-  """Submit a Gerrit change."""
-  parser.add_option('-c', '--change', type=int, help='change number')
-  (opt, args) = parser.parse_args(args)
-  result = gerrit_util.SubmitChange(
-      urllib.parse.urlparse(opt.host).netloc, opt.change)
-  logging.info(result)
-  write_result(result, opt)
+    """Submit a Gerrit change."""
+    parser.add_option('-c', '--change', type=int, help='change number')
+    (opt, args) = parser.parse_args(args)
+    result = gerrit_util.SubmitChange(
+        urllib.parse.urlparse(opt.host).netloc, opt.change)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDchangesubmittedtogether(parser, args):
-  """Get all changes submitted with the given one."""
-  parser.add_option('-c', '--change', type=int, help='change number')
-  (opt, args) = parser.parse_args(args)
-  result = gerrit_util.GetChangesSubmittedTogether(
-      urllib.parse.urlparse(opt.host).netloc, opt.change)
-  logging.info(result)
-  write_result(result, opt)
+    """Get all changes submitted with the given one."""
+    parser.add_option('-c', '--change', type=int, help='change number')
+    (opt, args) = parser.parse_args(args)
+    result = gerrit_util.GetChangesSubmittedTogether(
+        urllib.parse.urlparse(opt.host).netloc, opt.change)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDgetcommitincludedin(parser, args):
-  """Retrieves the branches and tags for a given commit."""
-  parser.add_option('--commit', dest='commit', help='commit hash')
-  (opt, args) = parser.parse_args(args)
-  result = gerrit_util.GetCommitIncludedIn(
-      urllib.parse.urlparse(opt.host).netloc, opt.project, opt.commit)
-  logging.info(result)
-  write_result(result, opt)
+    """Retrieves the branches and tags for a given commit."""
+    parser.add_option('--commit', dest='commit', help='commit hash')
+    (opt, args) = parser.parse_args(args)
+    result = gerrit_util.GetCommitIncludedIn(
+        urllib.parse.urlparse(opt.host).netloc, opt.project, opt.commit)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDsetbotcommit(parser, args):
-  """Sets bot-commit+1 to a bot generated change."""
-  parser.add_option('-c', '--change', type=int, help='change number')
-  (opt, args) = parser.parse_args(args)
-  result = gerrit_util.SetReview(urllib.parse.urlparse(opt.host).netloc,
-                                 opt.change,
-                                 labels={'Bot-Commit': 1},
-                                 ready=True)
-  logging.info(result)
-  write_result(result, opt)
+    """Sets bot-commit+1 to a bot generated change."""
+    parser.add_option('-c', '--change', type=int, help='change number')
+    (opt, args) = parser.parse_args(args)
+    result = gerrit_util.SetReview(urllib.parse.urlparse(opt.host).netloc,
+                                   opt.change,
+                                   labels={'Bot-Commit': 1},
+                                   ready=True)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('[args ...]')
 def CMDsetlabel(parser, args):
-  """Sets a label to a specific value on a given change."""
-  parser.add_option('-c', '--change', type=int, help='change number')
-  parser.add_option('-l',
-                    '--label',
-                    nargs=2,
-                    metavar=('label_name', 'label_value'))
-  (opt, args) = parser.parse_args(args)
-  result = gerrit_util.SetReview(urllib.parse.urlparse(opt.host).netloc,
-                                 opt.change,
-                                 labels={opt.label[0]: opt.label[1]})
-  logging.info(result)
-  write_result(result, opt)
+    """Sets a label to a specific value on a given change."""
+    parser.add_option('-c', '--change', type=int, help='change number')
+    parser.add_option('-l',
+                      '--label',
+                      nargs=2,
+                      metavar=('label_name', 'label_value'))
+    (opt, args) = parser.parse_args(args)
+    result = gerrit_util.SetReview(urllib.parse.urlparse(opt.host).netloc,
+                                   opt.change,
+                                   labels={opt.label[0]: opt.label[1]})
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('')
 def CMDabandon(parser, args):
-  """Abandons a Gerrit change."""
-  parser.add_option('-c', '--change', type=int, help='change number')
-  parser.add_option('-m', '--message', default='', help='reason for abandoning')
+    """Abandons a Gerrit change."""
+    parser.add_option('-c', '--change', type=int, help='change number')
+    parser.add_option('-m',
+                      '--message',
+                      default='',
+                      help='reason for abandoning')
 
-  (opt, args) = parser.parse_args(args)
-  assert opt.change, "-c not defined"
-  result = gerrit_util.AbandonChange(
-      urllib.parse.urlparse(opt.host).netloc, opt.change, opt.message)
-  logging.info(result)
-  write_result(result, opt)
+    (opt, args) = parser.parse_args(args)
+    assert opt.change, "-c not defined"
+    result = gerrit_util.AbandonChange(
+        urllib.parse.urlparse(opt.host).netloc, opt.change, opt.message)
+    logging.info(result)
+    write_result(result, opt)
 
 
 @subcommand.usage('')
 def CMDmass_abandon(parser, args):
-  """Mass abandon changes
+    """Mass abandon changes
 
   Abandons CLs that match search criteria provided by user. Before any change is
   actually abandoned, user is presented with a list of CLs that will be affected
@@ -406,98 +426,106 @@ def CMDmass_abandon(parser, args):
   gerrit_client.py mass-abandon --host https://HOST -p 'message=testing'
   gerrit_client.py mass-abandon --host https://HOST -p 'is=wip' -p 'age=1y'
   """
-  parser.add_option('-p',
-                    '--param',
-                    dest='params',
-                    action='append',
-                    default=[],
-                    help='repeatable query parameter, format: -p key=value')
-  parser.add_option('-m', '--message', default='', help='reason for abandoning')
-  parser.add_option('-f',
-                    '--force',
-                    action='store_true',
-                    help='Don\'t prompt for confirmation')
-
-  opt, args = parser.parse_args(args)
-
-  for p in opt.params:
-    assert '=' in p, '--param is key=value, not "%s"' % p
-  search_query = list(tuple(p.split('=', 1)) for p in opt.params)
-  if not any(t for t in search_query if t[0] == 'owner'):
-    # owner should always be present when abandoning changes
-    search_query.append(('owner', 'me'))
-  search_query.append(('status', 'open'))
-  logging.info("Searching for: %s" % search_query)
-
-  host = urllib.parse.urlparse(opt.host).netloc
-
-  result = gerrit_util.QueryChanges(
-      host,
-      search_query,
-      # abandon at most 100 changes as not all Gerrit instances support
-      # unlimited results.
-      limit=100,
-  )
-  if len(result) == 0:
-    logging.warning("Nothing to abandon")
-    return
-
-  logging.warning("%s CLs match search query: " % len(result))
-  for change in result:
-    logging.warning("[ID: %d] %s" % (change['_number'], change['subject']))
-
-  if not opt.force:
-    q = input(
-        'Do you want to move forward with abandoning? [y to confirm] ').strip()
-    if q not in ['y', 'Y']:
-      logging.warning("Aborting...")
-      return
-
-  for change in result:
-    logging.warning("Abandoning: %s" % change['subject'])
-    gerrit_util.AbandonChange(host, change['id'], opt.message)
-
-  logging.warning("Done")
+    parser.add_option('-p',
+                      '--param',
+                      dest='params',
+                      action='append',
+                      default=[],
+                      help='repeatable query parameter, format: -p key=value')
+    parser.add_option('-m',
+                      '--message',
+                      default='',
+                      help='reason for abandoning')
+    parser.add_option('-f',
+                      '--force',
+                      action='store_true',
+                      help='Don\'t prompt for confirmation')
+
+    opt, args = parser.parse_args(args)
+
+    for p in opt.params:
+        assert '=' in p, '--param is key=value, not "%s"' % p
+    search_query = list(tuple(p.split('=', 1)) for p in opt.params)
+    if not any(t for t in search_query if t[0] == 'owner'):
+        # owner should always be present when abandoning changes
+        search_query.append(('owner', 'me'))
+    search_query.append(('status', 'open'))
+    logging.info("Searching for: %s" % search_query)
+
+    host = urllib.parse.urlparse(opt.host).netloc
+
+    result = gerrit_util.QueryChanges(
+        host,
+        search_query,
+        # abandon at most 100 changes as not all Gerrit instances support
+        # unlimited results.
+        limit=100,
+    )
+    if len(result) == 0:
+        logging.warning("Nothing to abandon")
+        return
+
+    logging.warning("%s CLs match search query: " % len(result))
+    for change in result:
+        logging.warning("[ID: %d] %s" % (change['_number'], change['subject']))
+
+    if not opt.force:
+        q = input('Do you want to move forward with abandoning? [y to confirm] '
+                  ).strip()
+        if q not in ['y', 'Y']:
+            logging.warning("Aborting...")
+            return
+
+    for change in result:
+        logging.warning("Abandoning: %s" % change['subject'])
+        gerrit_util.AbandonChange(host, change['id'], opt.message)
+
+    logging.warning("Done")
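
For reference, a minimal sketch of how the owner/status defaults above are folded into a user-supplied query (the -p value is hypothetical):

    raw_params = ['message=testing']
    search_query = list(tuple(p.split('=', 1)) for p in raw_params)
    if not any(t for t in search_query if t[0] == 'owner'):
        search_query.append(('owner', 'me'))  # only abandon your own CLs by default
    search_query.append(('status', 'open'))
    assert search_query == [('message', 'testing'), ('owner', 'me'),
                            ('status', 'open')]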
 
 
 class OptionParser(optparse.OptionParser):
-  """Creates the option parse and add --verbose support."""
-  def __init__(self, *args, **kwargs):
-    optparse.OptionParser.__init__(self, *args, version=__version__, **kwargs)
-    self.add_option(
-        '--verbose', action='count', default=0,
-        help='Use 2 times for more debugging info')
-    self.add_option('--host', dest='host', help='Url of host.')
-    self.add_option('--project', dest='project', help='project name')
-    self.add_option(
-        '--json_file', dest='json_file', help='output json filepath')
-
-  def parse_args(self, args=None, values=None):
-    options, args = optparse.OptionParser.parse_args(self, args, values)
-    # Host is always required
-    assert options.host, "--host not defined."
-    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
-    logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])
-    return options, args
+    """Creates the option parse and add --verbose support."""
+    def __init__(self, *args, **kwargs):
+        optparse.OptionParser.__init__(self,
+                                       *args,
+                                       version=__version__,
+                                       **kwargs)
+        self.add_option('--verbose',
+                        action='count',
+                        default=0,
+                        help='Use 2 times for more debugging info')
+        self.add_option('--host', dest='host', help='Url of host.')
+        self.add_option('--project', dest='project', help='project name')
+        self.add_option('--json_file',
+                        dest='json_file',
+                        help='output json filepath')
+
+    def parse_args(self, args=None, values=None):
+        options, args = optparse.OptionParser.parse_args(self, args, values)
+        # Host is always required
+        assert options.host, "--host not defined."
+        levels = [logging.WARNING, logging.INFO, logging.DEBUG]
+        logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])
+        return options, args
 
 
 def main(argv):
-  if sys.hexversion < 0x02060000:
-    print('\nYour python version %s is unsupported, please upgrade.\n'
-          % (sys.version.split(' ', 1)[0],),
-          file=sys.stderr)
-    return 2
-  dispatcher = subcommand.CommandDispatcher(__name__)
-  return dispatcher.execute(OptionParser(), argv)
+    if sys.hexversion < 0x02060000:
+        print('\nYour python version %s is unsupported, please upgrade.\n' %
+              (sys.version.split(' ', 1)[0], ),
+              file=sys.stderr)
+        return 2
+    dispatcher = subcommand.CommandDispatcher(__name__)
+    return dispatcher.execute(OptionParser(), argv)
 
 
 if __name__ == '__main__':
-  # These affect sys.stdout so do it outside of main() to simplify mocks in
-  # unit testing.
-  fix_encoding.fix_encoding()
-  setup_color.init()
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    # These affect sys.stdout so do it outside of main() to simplify mocks in
+    # unit testing.
+    fix_encoding.fix_encoding()
+    setup_color.init()
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)

The diff for this file is too large to display

+ 641 - 622
gerrit_util.py


+ 807 - 758
git_cache.py

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """A git command for managing a local cache of git repositories."""
 
 import contextlib
@@ -35,19 +34,25 @@ GIT_CACHE_CORRUPT_MESSAGE = 'WARNING: The Git cache is corrupt.'
 GSUTIL_CP_SEMAPHORE = threading.Semaphore(2)
 
 try:
-  # pylint: disable=undefined-variable
-  WinErr = WindowsError
+    # pylint: disable=undefined-variable
+    WinErr = WindowsError
 except NameError:
-  class WinErr(Exception):
-    pass
+
+    class WinErr(Exception):
+        pass
+
 
 class ClobberNeeded(Exception):
-  pass
+    pass
 
 
-def exponential_backoff_retry(fn, excs=(Exception,), name=None, count=10,
-                              sleep_time=0.25, printerr=None):
-  """Executes |fn| up to |count| times, backing off exponentially.
+def exponential_backoff_retry(fn,
+                              excs=(Exception, ),
+                              name=None,
+                              count=10,
+                              sleep_time=0.25,
+                              printerr=None):
+    """Executes |fn| up to |count| times, backing off exponentially.
 
   Args:
     fn (callable): The function to execute. If this raises a handled
@@ -66,818 +71,862 @@ def exponential_backoff_retry(fn, excs=(Exception,), name=None, count=10,
 
   Returns: The return value of the successful fn.
   """
-  printerr = printerr or logging.warning
-  for i in range(count):
-    try:
-      return fn()
-    except excs as e:
-      if (i+1) >= count:
-        raise
+    printerr = printerr or logging.warning
+    for i in range(count):
+        try:
+            return fn()
+        except excs as e:
+            if (i + 1) >= count:
+                raise
 
-      printerr('Retrying %s in %.2f second(s) (%d / %d attempts): %s' % (
-          (name or 'operation'), sleep_time, (i+1), count, e))
-      time.sleep(sleep_time)
-      sleep_time *= 2
+            printerr('Retrying %s in %.2f second(s) (%d / %d attempts): %s' %
+                     ((name or 'operation'), sleep_time, (i + 1), count, e))
+            time.sleep(sleep_time)
+            sleep_time *= 2
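
A minimal usage sketch of the retry helper above, assuming it is importable from git_cache; the flaky callable and its failure mode are invented for illustration:

    attempts = []

    def flaky():
        # Fails twice with a transient error, then succeeds.
        attempts.append(1)
        if len(attempts) < 3:
            raise OSError('transient failure')
        return 'ok'

    # Sleeps 0.25s, then 0.5s, between the handled OSError failures.
    result = exponential_backoff_retry(flaky, excs=(OSError,), name='flaky op')
    assert result == 'ok' and len(attempts) == 3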
 
 
 class Mirror(object):
 
-  git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
-  gsutil_exe = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
-  cachepath_lock = threading.Lock()
+    git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
+    gsutil_exe = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+                              'gsutil.py')
+    cachepath_lock = threading.Lock()
 
-  UNSET_CACHEPATH = object()
+    UNSET_CACHEPATH = object()
 
-  # Used for tests
-  _GIT_CONFIG_LOCATION = []
+    # Used for tests
+    _GIT_CONFIG_LOCATION = []
 
-  @staticmethod
-  def parse_fetch_spec(spec):
-    """Parses and canonicalizes a fetch spec.
+    @staticmethod
+    def parse_fetch_spec(spec):
+        """Parses and canonicalizes a fetch spec.
 
     Returns (fetchspec, value_regex), where value_regex can be used
     with 'git config --replace-all'.
     """
-    parts = spec.split(':', 1)
-    src = parts[0].lstrip('+').rstrip('/')
-    if not src.startswith('refs/'):
-      src = 'refs/heads/%s' % src
-    dest = parts[1].rstrip('/') if len(parts) > 1 else src
-    regex = r'\+%s:.*' % src.replace('*', r'\*')
-    return ('+%s:%s' % (src, dest), regex)
-
-  def __init__(self, url, refs=None, commits=None, print_func=None):
-    self.url = url
-    self.fetch_specs = {self.parse_fetch_spec(ref) for ref in (refs or [])}
-    self.fetch_commits = set(commits or [])
-    self.basedir = self.UrlToCacheDir(url)
-    self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
-    if print_func:
-      self.print = self.print_without_file
-      self.print_func = print_func
-    else:
-      self.print = print
-
-  def print_without_file(self, message, **_kwargs):
-    self.print_func(message)
-
-  @contextlib.contextmanager
-  def print_duration_of(self, what):
-    start = time.time()
-    try:
-      yield
-    finally:
-      self.print('%s took %.1f minutes' % (what, (time.time() - start) / 60.0))
-
-  @property
-  def bootstrap_bucket(self):
-    b = os.getenv('OVERRIDE_BOOTSTRAP_BUCKET')
-    if b:
-      return b
-    u = urllib.parse.urlparse(self.url)
-    if u.netloc == 'chromium.googlesource.com':
-      return 'chromium-git-cache'
-    # Not recognized.
-    return None
-
-  @property
-  def _gs_path(self):
-    return 'gs://%s/v2/%s' % (self.bootstrap_bucket, self.basedir)
-
-  @classmethod
-  def FromPath(cls, path):
-    return cls(cls.CacheDirToUrl(path))
-
-  @staticmethod
-  def UrlToCacheDir(url):
-    """Convert a git url to a normalized form for the cache dir path."""
-    if os.path.isdir(url):
-      # Ignore the drive letter in Windows
-      url = os.path.splitdrive(url)[1]
-      return url.replace('-', '--').replace(os.sep, '-')
-
-    parsed = urllib.parse.urlparse(url)
-    norm_url = parsed.netloc + parsed.path
-    if norm_url.endswith('.git'):
-      norm_url = norm_url[:-len('.git')]
-
-    # Use the same dir for authenticated URLs and unauthenticated URLs.
-    norm_url = norm_url.replace('googlesource.com/a/', 'googlesource.com/')
-
-    return norm_url.replace('-', '--').replace('/', '-').lower()
-
-  @staticmethod
-  def CacheDirToUrl(path):
-    """Convert a cache dir path to its corresponding url."""
-    netpath = re.sub(r'\b-\b', '/', os.path.basename(path)).replace('--', '-')
-    return 'https://%s' % netpath
-
-  @classmethod
-  def SetCachePath(cls, cachepath):
-    with cls.cachepath_lock:
-      setattr(cls, 'cachepath', cachepath)
-
-  @classmethod
-  def GetCachePath(cls):
-    with cls.cachepath_lock:
-      if not hasattr(cls, 'cachepath'):
+        parts = spec.split(':', 1)
+        src = parts[0].lstrip('+').rstrip('/')
+        if not src.startswith('refs/'):
+            src = 'refs/heads/%s' % src
+        dest = parts[1].rstrip('/') if len(parts) > 1 else src
+        regex = r'\+%s:.*' % src.replace('*', r'\*')
+        return ('+%s:%s' % (src, dest), regex)
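
Two illustrative inputs and the canonical forms the helper above produces (worked out by hand from the logic; assumes Mirror is imported from git_cache):

    # A bare branch name is expanded to a refs/heads/ refspec.
    assert Mirror.parse_fetch_spec('main') == (
        '+refs/heads/main:refs/heads/main', r'\+refs/heads/main:.*')
    # A full ref keeps its destination; '*' is escaped in the value regex
    # so it is safe to pass to 'git config --replace-all'.
    assert Mirror.parse_fetch_spec('refs/branch-heads/*') == (
        '+refs/branch-heads/*:refs/branch-heads/*', r'\+refs/branch-heads/\*:.*')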
+
+    def __init__(self, url, refs=None, commits=None, print_func=None):
+        self.url = url
+        self.fetch_specs = {self.parse_fetch_spec(ref) for ref in (refs or [])}
+        self.fetch_commits = set(commits or [])
+        self.basedir = self.UrlToCacheDir(url)
+        self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
+        if print_func:
+            self.print = self.print_without_file
+            self.print_func = print_func
+        else:
+            self.print = print
+
+    def print_without_file(self, message, **_kwargs):
+        self.print_func(message)
+
+    @contextlib.contextmanager
+    def print_duration_of(self, what):
+        start = time.time()
+        try:
+            yield
+        finally:
+            self.print('%s took %.1f minutes' % (what,
+                                                 (time.time() - start) / 60.0))
+
+    @property
+    def bootstrap_bucket(self):
+        b = os.getenv('OVERRIDE_BOOTSTRAP_BUCKET')
+        if b:
+            return b
+        u = urllib.parse.urlparse(self.url)
+        if u.netloc == 'chromium.googlesource.com':
+            return 'chromium-git-cache'
+        # Not recognized.
+        return None
+
+    @property
+    def _gs_path(self):
+        return 'gs://%s/v2/%s' % (self.bootstrap_bucket, self.basedir)
+
+    @classmethod
+    def FromPath(cls, path):
+        return cls(cls.CacheDirToUrl(path))
+
+    @staticmethod
+    def UrlToCacheDir(url):
+        """Convert a git url to a normalized form for the cache dir path."""
+        if os.path.isdir(url):
+            # Ignore the drive letter in Windows
+            url = os.path.splitdrive(url)[1]
+            return url.replace('-', '--').replace(os.sep, '-')
+
+        parsed = urllib.parse.urlparse(url)
+        norm_url = parsed.netloc + parsed.path
+        if norm_url.endswith('.git'):
+            norm_url = norm_url[:-len('.git')]
+
+        # Use the same dir for authenticated URLs and unauthenticated URLs.
+        norm_url = norm_url.replace('googlesource.com/a/', 'googlesource.com/')
+
+        return norm_url.replace('-', '--').replace('/', '-').lower()
+
+    @staticmethod
+    def CacheDirToUrl(path):
+        """Convert a cache dir path to its corresponding url."""
+        netpath = re.sub(r'\b-\b', '/',
+                         os.path.basename(path)).replace('--', '-')
+        return 'https://%s' % netpath
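
For example, the normalization above maps the authenticated and unauthenticated forms of the same repo URL to one cache directory, and CacheDirToUrl recovers a canonical URL from that directory name (values traced by hand from the code; assumes Mirror is imported from git_cache):

    assert Mirror.UrlToCacheDir(
        'https://chromium.googlesource.com/chromium/src.git') == \
        'chromium.googlesource.com-chromium-src'
    assert Mirror.UrlToCacheDir(
        'https://chromium.googlesource.com/a/chromium/src') == \
        'chromium.googlesource.com-chromium-src'
    assert Mirror.CacheDirToUrl(
        '/cache/chromium.googlesource.com-chromium-src') == \
        'https://chromium.googlesource.com/chromium/src'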
+
+    @classmethod
+    def SetCachePath(cls, cachepath):
+        with cls.cachepath_lock:
+            setattr(cls, 'cachepath', cachepath)
+
+    @classmethod
+    def GetCachePath(cls):
+        with cls.cachepath_lock:
+            if not hasattr(cls, 'cachepath'):
+                try:
+                    cachepath = subprocess.check_output(
+                        [cls.git_exe, 'config'] + cls._GIT_CONFIG_LOCATION +
+                        ['cache.cachepath']).decode('utf-8', 'ignore').strip()
+                except subprocess.CalledProcessError:
+                    cachepath = os.environ.get('GIT_CACHE_PATH',
+                                               cls.UNSET_CACHEPATH)
+                setattr(cls, 'cachepath', cachepath)
+
+            ret = getattr(cls, 'cachepath')
+            if ret is cls.UNSET_CACHEPATH:
+                raise RuntimeError('No cache.cachepath git configuration or '
+                                   '$GIT_CACHE_PATH is set.')
+            return ret
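
A sketch of the lookup above: an explicit SetCachePath wins for the current process, otherwise git's cache.cachepath setting is read, otherwise $GIT_CACHE_PATH, and with none of those set GetCachePath raises RuntimeError (paths here are hypothetical):

    import os
    os.environ['GIT_CACHE_PATH'] = '/tmp/git-cache'  # fallback if git config has no cache.cachepath
    Mirror.SetCachePath('/b/git-cache')              # explicit override, takes precedence
    assert Mirror.GetCachePath() == '/b/git-cache'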
+
+    @staticmethod
+    def _GetMostRecentCacheDirectory(ls_out_set):
+        ready_file_pattern = re.compile(r'.*/(\d+).ready$')
+        ready_dirs = []
+
+        for name in ls_out_set:
+            m = ready_file_pattern.match(name)
+            # Given <path>/<number>.ready,
+            # we are interested in <path>/<number> directory
+            if m and (name[:-len('.ready')] + '/') in ls_out_set:
+                ready_dirs.append((int(m.group(1)), name[:-len('.ready')]))
+
+        if not ready_dirs:
+            return None
+
+        return max(ready_dirs)[1]
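
An illustrative listing and the generation the helper above selects: only directories with a matching .ready marker are candidates, and the numerically largest wins (bucket paths are made up):

    ls_out_set = {
        'gs://chromium-git-cache/v2/repo/111/',
        'gs://chromium-git-cache/v2/repo/111.ready',
        'gs://chromium-git-cache/v2/repo/120/',
        'gs://chromium-git-cache/v2/repo/120.ready',
        'gs://chromium-git-cache/v2/repo/222/',  # no .ready yet: still uploading
    }
    assert Mirror._GetMostRecentCacheDirectory(ls_out_set) == \
        'gs://chromium-git-cache/v2/repo/120'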
+
+    def Rename(self, src, dst):
+        # This is somehow racy on Windows.
+        # Catching OSError because WindowsError isn't portable and
+        # pylint complains.
+        exponential_backoff_retry(lambda: os.rename(src, dst),
+                                  excs=(OSError, ),
+                                  name='rename [%s] => [%s]' % (src, dst),
+                                  printerr=self.print)
+
+    def RunGit(self, cmd, print_stdout=True, **kwargs):
+        """Run git in a subprocess."""
+        cwd = kwargs.setdefault('cwd', self.mirror_path)
+        if "--git-dir" not in cmd:
+            cmd = ['--git-dir', os.path.abspath(cwd)] + cmd
+
+        kwargs.setdefault('print_stdout', False)
+        if print_stdout:
+            kwargs.setdefault('filter_fn', self.print)
+        env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
+        env.setdefault('GIT_ASKPASS', 'true')
+        env.setdefault('SSH_ASKPASS', 'true')
+        self.print('running "git %s" in "%s"' % (' '.join(cmd), cwd))
+        gclient_utils.CheckCallAndFilter([self.git_exe] + cmd, **kwargs)
+
+    def config(self, reset_fetch_config=False):
+        if reset_fetch_config:
+            try:
+                self.RunGit(['config', '--unset-all', 'remote.origin.fetch'])
+            except subprocess.CalledProcessError as e:
+                # If exit code was 5, it means we attempted to unset a config
+                # that didn't exist. Ignore it.
+                if e.returncode != 5:
+                    raise
+
+        # Don't run git-gc in a daemon.  Bad things can happen if it gets
+        # killed.
         try:
-          cachepath = subprocess.check_output(
-              [cls.git_exe, 'config'] +
-              cls._GIT_CONFIG_LOCATION +
-              ['cache.cachepath']).decode('utf-8', 'ignore').strip()
+            self.RunGit(['config', 'gc.autodetach', '0'])
         except subprocess.CalledProcessError:
-          cachepath = os.environ.get('GIT_CACHE_PATH', cls.UNSET_CACHEPATH)
-        setattr(cls, 'cachepath', cachepath)
-
-      ret = getattr(cls, 'cachepath')
-      if ret is cls.UNSET_CACHEPATH:
-        raise RuntimeError('No cache.cachepath git configuration or '
-                           '$GIT_CACHE_PATH is set.')
-      return ret
-
-  @staticmethod
-  def _GetMostRecentCacheDirectory(ls_out_set):
-    ready_file_pattern = re.compile(r'.*/(\d+).ready$')
-    ready_dirs = []
-
-    for name in ls_out_set:
-      m = ready_file_pattern.match(name)
-      # Given <path>/<number>.ready,
-      # we are interested in <path>/<number> directory
-      if m and (name[:-len('.ready')] + '/') in ls_out_set:
-        ready_dirs.append((int(m.group(1)), name[:-len('.ready')]))
-
-    if not ready_dirs:
-      return None
-
-    return max(ready_dirs)[1]
-
-  def Rename(self, src, dst):
-    # This is somehow racy on Windows.
-    # Catching OSError because WindowsError isn't portable and
-    # pylint complains.
-    exponential_backoff_retry(
-        lambda: os.rename(src, dst),
-        excs=(OSError,),
-        name='rename [%s] => [%s]' % (src, dst),
-        printerr=self.print)
-
-  def RunGit(self, cmd, print_stdout=True, **kwargs):
-    """Run git in a subprocess."""
-    cwd = kwargs.setdefault('cwd', self.mirror_path)
-    if "--git-dir" not in cmd:
-      cmd = ['--git-dir', os.path.abspath(cwd)] + cmd
-
-    kwargs.setdefault('print_stdout', False)
-    if print_stdout:
-      kwargs.setdefault('filter_fn', self.print)
-    env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
-    env.setdefault('GIT_ASKPASS', 'true')
-    env.setdefault('SSH_ASKPASS', 'true')
-    self.print('running "git %s" in "%s"' % (' '.join(cmd), cwd))
-    gclient_utils.CheckCallAndFilter([self.git_exe] + cmd, **kwargs)
-
-  def config(self, reset_fetch_config=False):
-    if reset_fetch_config:
-      try:
-        self.RunGit(['config', '--unset-all', 'remote.origin.fetch'])
-      except subprocess.CalledProcessError as e:
-        # If exit code was 5, it means we attempted to unset a config that
-        # didn't exist. Ignore it.
-        if e.returncode != 5:
-          raise
-
-    # Don't run git-gc in a daemon.  Bad things can happen if it gets killed.
-    try:
-      self.RunGit(['config', 'gc.autodetach', '0'])
-    except subprocess.CalledProcessError:
-      # Hard error, need to clobber.
-      raise ClobberNeeded()
-
-    # Don't combine pack files into one big pack file.  It's really slow for
-    # repositories, and there's no way to track progress and make sure it's
-    # not stuck.
-    if self.supported_project():
-      self.RunGit(['config', 'gc.autopacklimit', '0'])
-
-    # Allocate more RAM for cache-ing delta chains, for better performance
-    # of "Resolving deltas".
-    self.RunGit([
-        'config', 'core.deltaBaseCacheLimit',
-        gclient_utils.DefaultDeltaBaseCacheLimit()
-    ])
-
-    self.RunGit(['config', 'remote.origin.url', self.url])
-    self.RunGit([
-        'config', '--replace-all', 'remote.origin.fetch',
-        '+refs/heads/*:refs/heads/*', r'\+refs/heads/\*:.*'
-    ])
-    for spec, value_regex in self.fetch_specs:
-      self.RunGit(
-          ['config', '--replace-all', 'remote.origin.fetch', spec, value_regex])
-
-  def bootstrap_repo(self, directory):
-    """Bootstrap the repo from Google Storage if possible.
+            # Hard error, need to clobber.
+            raise ClobberNeeded()
+
+        # Don't combine pack files into one big pack file.  It's really slow for
+        # repositories, and there's no way to track progress and make sure it's
+        # not stuck.
+        if self.supported_project():
+            self.RunGit(['config', 'gc.autopacklimit', '0'])
+
+        # Allocate more RAM for cache-ing delta chains, for better performance
+        # of "Resolving deltas".
+        self.RunGit([
+            'config', 'core.deltaBaseCacheLimit',
+            gclient_utils.DefaultDeltaBaseCacheLimit()
+        ])
+
+        self.RunGit(['config', 'remote.origin.url', self.url])
+        self.RunGit([
+            'config', '--replace-all', 'remote.origin.fetch',
+            '+refs/heads/*:refs/heads/*', r'\+refs/heads/\*:.*'
+        ])
+        for spec, value_regex in self.fetch_specs:
+            self.RunGit([
+                'config', '--replace-all', 'remote.origin.fetch', spec,
+                value_regex
+            ])
+
+    def bootstrap_repo(self, directory):
+        """Bootstrap the repo from Google Storage if possible.
 
     More apt-ly named bootstrap_repo_from_cloud_if_possible_else_do_nothing().
     """
-    if not self.bootstrap_bucket:
-      return False
+        if not self.bootstrap_bucket:
+            return False
 
-    gsutil = Gsutil(self.gsutil_exe, boto_path=None)
+        gsutil = Gsutil(self.gsutil_exe, boto_path=None)
 
-    # Get the most recent version of the directory.
-    # This is determined from the most recent version of a .ready file.
-    # The .ready file is only uploaded when an entire directory has been
-    # uploaded to GS.
-    _, ls_out, ls_err = gsutil.check_call('ls', self._gs_path)
-    ls_out_set = set(ls_out.strip().splitlines())
-    latest_dir = self._GetMostRecentCacheDirectory(ls_out_set)
+        # Get the most recent version of the directory.
+        # This is determined from the most recent version of a .ready file.
+        # The .ready file is only uploaded when an entire directory has been
+        # uploaded to GS.
+        _, ls_out, ls_err = gsutil.check_call('ls', self._gs_path)
+        ls_out_set = set(ls_out.strip().splitlines())
+        latest_dir = self._GetMostRecentCacheDirectory(ls_out_set)
 
-    if not latest_dir:
-      self.print('No bootstrap file for %s found in %s, stderr:\n  %s' %
-                 (self.mirror_path, self.bootstrap_bucket,
-                '  '.join((ls_err or '').splitlines(True))))
-      return False
+        if not latest_dir:
+            self.print('No bootstrap file for %s found in %s, stderr:\n  %s' %
+                       (self.mirror_path, self.bootstrap_bucket, '  '.join(
+                           (ls_err or '').splitlines(True))))
+            return False
 
-    try:
-      # create new temporary directory locally
-      tempdir = tempfile.mkdtemp(prefix='_cache_tmp', dir=self.GetCachePath())
-      self.RunGit(['init', '-b', 'main', '--bare'], cwd=tempdir)
-      self.print('Downloading files in %s/* into %s.' %
-                 (latest_dir, tempdir))
-      with self.print_duration_of('download'):
-        with GSUTIL_CP_SEMAPHORE:
-          code = gsutil.call('-m', 'cp', '-r', latest_dir + "/*",
-                             tempdir)
-      if code:
-        return False
-      # A quick validation that all references are valid.
-      self.RunGit(['for-each-ref'], print_stdout=False, cwd=tempdir)
-    except Exception as e:
-      self.print('Encountered error: %s' % str(e), file=sys.stderr)
-      gclient_utils.rmtree(tempdir)
-      return False
-    # delete the old directory
-    if os.path.exists(directory):
-      gclient_utils.rmtree(directory)
-    self.Rename(tempdir, directory)
-    return True
-
-  def contains_revision(self, revision):
-    if not self.exists():
-      return False
+        try:
+            # create new temporary directory locally
+            tempdir = tempfile.mkdtemp(prefix='_cache_tmp',
+                                       dir=self.GetCachePath())
+            self.RunGit(['init', '-b', 'main', '--bare'], cwd=tempdir)
+            self.print('Downloading files in %s/* into %s.' %
+                       (latest_dir, tempdir))
+            with self.print_duration_of('download'):
+                with GSUTIL_CP_SEMAPHORE:
+                    code = gsutil.call('-m', 'cp', '-r', latest_dir + "/*",
+                                       tempdir)
+            if code:
+                return False
+            # A quick validation that all references are valid.
+            self.RunGit(['for-each-ref'], print_stdout=False, cwd=tempdir)
+        except Exception as e:
+            self.print('Encountered error: %s' % str(e), file=sys.stderr)
+            gclient_utils.rmtree(tempdir)
+            return False
+        # delete the old directory
+        if os.path.exists(directory):
+            gclient_utils.rmtree(directory)
+        self.Rename(tempdir, directory)
+        return True
+
+    def contains_revision(self, revision):
+        if not self.exists():
+            return False
+
+        if sys.platform.startswith('win'):
+            # Windows .bat scripts use ^ as escape sequence, which means we have
+            # to escape it with itself for every .bat invocation.
+            needle = '%s^^^^{commit}' % revision
+        else:
+            needle = '%s^{commit}' % revision
+        try:
+            # cat-file exits with 0 on success, that is git object of given hash
+            # was found.
+            self.RunGit(['cat-file', '-e', needle])
+            return True
+        except subprocess.CalledProcessError:
+            self.print('Commit with hash "%s" not found' % revision,
+                       file=sys.stderr)
+            return False
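
A quick sketch of the needle construction above (the hash is made up); on Windows the value passes through depot_tools' git.bat, and each layer of .bat processing strips one level of ^ escaping, hence the quadrupled caret:

    revision = 'deadbeef'
    needle_posix = '%s^{commit}' % revision    # deadbeef^{commit}
    needle_win = '%s^^^^{commit}' % revision   # .bat layers reduce ^^^^ to ^ before git sees it
    assert needle_posix == 'deadbeef^{commit}'
    assert needle_win == 'deadbeef^^^^{commit}'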
 
-    if sys.platform.startswith('win'):
-      # Windows .bat scripts use ^ as escape sequence, which means we have to
-      # escape it with itself for every .bat invocation.
-      needle = '%s^^^^{commit}' % revision
-    else:
-      needle = '%s^{commit}' % revision
-    try:
-      # cat-file exits with 0 on success, that is git object of given hash was
-      # found.
-      self.RunGit(['cat-file', '-e', needle])
-      return True
-    except subprocess.CalledProcessError:
-      self.print('Commit with hash "%s" not found' % revision, file=sys.stderr)
-      return False
-
-  def exists(self):
-    return os.path.isfile(os.path.join(self.mirror_path, 'config'))
-
-  def supported_project(self):
-    """Returns true if this repo is known to have a bootstrap zip file."""
-    u = urllib.parse.urlparse(self.url)
-    return u.netloc in [
-        'chromium.googlesource.com',
-        'chrome-internal.googlesource.com']
-
-  def _preserve_fetchspec(self):
-    """Read and preserve remote.origin.fetch from an existing mirror.
+    def exists(self):
+        return os.path.isfile(os.path.join(self.mirror_path, 'config'))
+
+    def supported_project(self):
+        """Returns true if this repo is known to have a bootstrap zip file."""
+        u = urllib.parse.urlparse(self.url)
+        return u.netloc in [
+            'chromium.googlesource.com', 'chrome-internal.googlesource.com'
+        ]
+
+    def _preserve_fetchspec(self):
+        """Read and preserve remote.origin.fetch from an existing mirror.
 
     This modifies self.fetch_specs.
     """
-    if not self.exists():
-      return
-    try:
-      config_fetchspecs = subprocess.check_output(
-          [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
-          cwd=self.mirror_path).decode('utf-8', 'ignore')
-      for fetchspec in config_fetchspecs.splitlines():
-        self.fetch_specs.add(self.parse_fetch_spec(fetchspec))
-    except subprocess.CalledProcessError:
-      logging.warning(
-          'Tried and failed to preserve remote.origin.fetch from the '
-          'existing cache directory.  You may need to manually edit '
-          '%s and "git cache fetch" again.' %
-          os.path.join(self.mirror_path, 'config'))
-
-  def _ensure_bootstrapped(
-      self, depth, bootstrap, reset_fetch_config, force=False):
-    pack_dir = os.path.join(self.mirror_path, 'objects', 'pack')
-    pack_files = []
-    if os.path.isdir(pack_dir):
-      pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')]
-      self.print('%s has %d .pack files, re-bootstrapping if >%d or ==0' %
-                (self.mirror_path, len(pack_files), GC_AUTOPACKLIMIT))
-
-    # master->main branch migration left the cache in some builders to have its
-    # HEAD still pointing to refs/heads/master. This causes bot_update to fail.
-    # If in this state, delete the cache and force bootstrap.
-    try:
-      with open(os.path.join(self.mirror_path, 'HEAD')) as f:
-        head_ref = f.read()
-    except FileNotFoundError:
-      head_ref = ''
-
-    # Check only when HEAD points to master.
-    if 'master' in head_ref:
-      # Some repos could still have master so verify if the ref exists first.
-      show_ref_master_cmd = subprocess.run(
-          [Mirror.git_exe, 'show-ref', '--verify', 'refs/heads/master'],
-          cwd=self.mirror_path)
-
-      if show_ref_master_cmd.returncode != 0:
-        # Remove mirror
-        gclient_utils.rmtree(self.mirror_path)
-
-        # force bootstrap
-        force = True
-
-    should_bootstrap = (force or
-                        not self.exists() or
-                        len(pack_files) > GC_AUTOPACKLIMIT or
-                        len(pack_files) == 0)
-
-    if not should_bootstrap:
-      if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')):
-        logging.warning(
-            'Shallow fetch requested, but repo cache already exists.')
-      return
-
-    if not self.exists():
-      if os.path.exists(self.mirror_path):
-        # If the mirror path exists but self.exists() returns false, we're
-        # in an unexpected state. Nuke the previous mirror directory and
-        # start fresh.
-        gclient_utils.rmtree(self.mirror_path)
-      os.mkdir(self.mirror_path)
-    elif not reset_fetch_config:
-      # Re-bootstrapping an existing mirror; preserve existing fetch spec.
-      self._preserve_fetchspec()
-
-    bootstrapped = (not depth and bootstrap and
-                    self.bootstrap_repo(self.mirror_path))
-
-    if not bootstrapped:
-      if not self.exists() or not self.supported_project():
-        # Bootstrap failed due to:
-        # 1. No previous cache.
-        # 2. Project doesn't have a bootstrap folder.
-        # Start with a bare git dir.
-        self.RunGit(['init', '--bare'])
-        # Set appropriate symbolic-ref
-        remote_info = exponential_backoff_retry(lambda: subprocess.check_output(
+        if not self.exists():
+            return
+        try:
+            config_fetchspecs = subprocess.check_output(
+                [self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
+                cwd=self.mirror_path).decode('utf-8', 'ignore')
+            for fetchspec in config_fetchspecs.splitlines():
+                self.fetch_specs.add(self.parse_fetch_spec(fetchspec))
+        except subprocess.CalledProcessError:
+            logging.warning(
+                'Tried and failed to preserve remote.origin.fetch from the '
+                'existing cache directory.  You may need to manually edit '
+                '%s and "git cache fetch" again.' %
+                os.path.join(self.mirror_path, 'config'))
+
+    def _ensure_bootstrapped(self,
+                             depth,
+                             bootstrap,
+                             reset_fetch_config,
+                             force=False):
+        pack_dir = os.path.join(self.mirror_path, 'objects', 'pack')
+        pack_files = []
+        if os.path.isdir(pack_dir):
+            pack_files = [
+                f for f in os.listdir(pack_dir) if f.endswith('.pack')
+            ]
+            self.print('%s has %d .pack files, re-bootstrapping if >%d or ==0' %
+                       (self.mirror_path, len(pack_files), GC_AUTOPACKLIMIT))
+
+        # master->main branch migration left the cache in some builders to have
+        # its HEAD still pointing to refs/heads/master. This causes bot_update
+        # to fail. If in this state, delete the cache and force bootstrap.
+        try:
+            with open(os.path.join(self.mirror_path, 'HEAD')) as f:
+                head_ref = f.read()
+        except FileNotFoundError:
+            head_ref = ''
+
+        # Check only when HEAD points to master.
+        if 'master' in head_ref:
+            # Some repos could still have master so verify if the ref exists
+            # first.
+            show_ref_master_cmd = subprocess.run(
+                [Mirror.git_exe, 'show-ref', '--verify', 'refs/heads/master'],
+                cwd=self.mirror_path)
+
+            if show_ref_master_cmd.returncode != 0:
+                # Remove mirror
+                gclient_utils.rmtree(self.mirror_path)
+
+                # force bootstrap
+                force = True
+
+        should_bootstrap = (force or not self.exists()
+                            or len(pack_files) > GC_AUTOPACKLIMIT
+                            or len(pack_files) == 0)
+
+        if not should_bootstrap:
+            if depth and os.path.exists(
+                    os.path.join(self.mirror_path, 'shallow')):
+                logging.warning(
+                    'Shallow fetch requested, but repo cache already exists.')
+            return
+
+        if not self.exists():
+            if os.path.exists(self.mirror_path):
+                # If the mirror path exists but self.exists() returns false,
+                # we're in an unexpected state. Nuke the previous mirror
+                # directory and start fresh.
+                gclient_utils.rmtree(self.mirror_path)
+            os.mkdir(self.mirror_path)
+        elif not reset_fetch_config:
+            # Re-bootstrapping an existing mirror; preserve existing fetch spec.
+            self._preserve_fetchspec()
+
+        bootstrapped = (not depth and bootstrap
+                        and self.bootstrap_repo(self.mirror_path))
+
+        if not bootstrapped:
+            if not self.exists() or not self.supported_project():
+                # Bootstrap failed due to:
+                # 1. No previous cache.
+                # 2. Project doesn't have a bootstrap folder.
+                # Start with a bare git dir.
+                self.RunGit(['init', '--bare'])
+                # Set appropriate symbolic-ref
+                remote_info = exponential_backoff_retry(
+                    lambda: subprocess.check_output(
+                        [
+                            self.git_exe, '--git-dir',
+                            os.path.abspath(self.mirror_path), 'remote', 'show',
+                            self.url
+                        ],
+                        cwd=self.mirror_path).decode('utf-8', 'ignore').strip())
+                default_branch_regexp = re.compile(r'HEAD branch: (.*)$')
+                m = default_branch_regexp.search(remote_info, re.MULTILINE)
+                if m:
+                    self.RunGit(
+                        ['symbolic-ref', 'HEAD', 'refs/heads/' + m.groups()[0]])
+            else:
+                # Bootstrap failed, previous cache exists; warn and continue.
+                logging.warning(
+                    'Git cache has a lot of pack files (%d). Tried to '
+                    're-bootstrap but failed. Continuing with non-optimized '
+                    'repository.' % len(pack_files))
+
+    def _fetch(self,
+               verbose,
+               depth,
+               no_fetch_tags,
+               reset_fetch_config,
+               prune=True):
+        self.config(reset_fetch_config)
+
+        fetch_cmd = ['fetch']
+        if verbose:
+            fetch_cmd.extend(['-v', '--progress'])
+        if depth:
+            fetch_cmd.extend(['--depth', str(depth)])
+        if no_fetch_tags:
+            fetch_cmd.append('--no-tags')
+        if prune:
+            fetch_cmd.append('--prune')
+        fetch_cmd.append('origin')
+
+        fetch_specs = subprocess.check_output(
             [
                 self.git_exe, '--git-dir',
-                os.path.abspath(self.mirror_path), 'remote', 'show', self.url
+                os.path.abspath(self.mirror_path), 'config', '--get-all',
+                'remote.origin.fetch'
             ],
-            cwd=self.mirror_path).decode('utf-8', 'ignore').strip())
-        default_branch_regexp = re.compile(r'HEAD branch: (.*)$')
-        m = default_branch_regexp.search(remote_info, re.MULTILINE)
-        if m:
-          self.RunGit(['symbolic-ref', 'HEAD', 'refs/heads/' + m.groups()[0]])
-      else:
-        # Bootstrap failed, previous cache exists; warn and continue.
-        logging.warning(
-            'Git cache has a lot of pack files (%d). Tried to re-bootstrap '
-            'but failed. Continuing with non-optimized repository.' %
-            len(pack_files))
-
-  def _fetch(self,
-             verbose,
-             depth,
-             no_fetch_tags,
-             reset_fetch_config,
-             prune=True):
-    self.config(reset_fetch_config)
-
-    fetch_cmd = ['fetch']
-    if verbose:
-      fetch_cmd.extend(['-v', '--progress'])
-    if depth:
-      fetch_cmd.extend(['--depth', str(depth)])
-    if no_fetch_tags:
-      fetch_cmd.append('--no-tags')
-    if prune:
-      fetch_cmd.append('--prune')
-    fetch_cmd.append('origin')
-
-    fetch_specs = subprocess.check_output([
-        self.git_exe, '--git-dir',
-        os.path.abspath(self.mirror_path), 'config', '--get-all',
-        'remote.origin.fetch'
-    ],
-                                          cwd=self.mirror_path).decode(
-                                              'utf-8',
-                                              'ignore').strip().splitlines()
-    for spec in fetch_specs:
-      try:
-        self.print('Fetching %s' % spec)
-        with self.print_duration_of('fetch %s' % spec):
-          self.RunGit(fetch_cmd + [spec], retry=True)
-      except subprocess.CalledProcessError:
-        if spec == '+refs/heads/*:refs/heads/*':
-          raise ClobberNeeded()  # Corrupted cache.
-        logging.warning('Fetch of %s failed' % spec)
-    for commit in self.fetch_commits:
-      self.print('Fetching %s' % commit)
-      try:
-        with self.print_duration_of('fetch %s' % commit):
-          self.RunGit(['fetch', 'origin', commit], retry=True)
-      except subprocess.CalledProcessError:
-        logging.warning('Fetch of %s failed' % commit)
-
-  def populate(self,
-               depth=None,
-               no_fetch_tags=False,
-               shallow=False,
-               bootstrap=False,
-               verbose=False,
-               lock_timeout=0,
-               reset_fetch_config=False):
-    assert self.GetCachePath()
-    if shallow and not depth:
-      depth = 10000
-    gclient_utils.safe_makedirs(self.GetCachePath())
-
-    with lockfile.lock(self.mirror_path, lock_timeout):
-      try:
-        self._ensure_bootstrapped(depth, bootstrap, reset_fetch_config)
-        self._fetch(verbose, depth, no_fetch_tags, reset_fetch_config)
-      except ClobberNeeded:
-        # This is a major failure, we need to clean and force a bootstrap.
-        gclient_utils.rmtree(self.mirror_path)
-        self.print(GIT_CACHE_CORRUPT_MESSAGE)
-        self._ensure_bootstrapped(depth,
-                                  bootstrap,
-                                  reset_fetch_config,
-                                  force=True)
-        self._fetch(verbose, depth, no_fetch_tags, reset_fetch_config)
-
-  def update_bootstrap(self, prune=False, gc_aggressive=False):
-    # NOTE: There have been cases where repos were being recursively uploaded
-    # to google storage.
-    # E.g. `<host_url>-<repo>/<gen_number>/<host_url>-<repo>/` in GS and
-    # <host_url>-<repo>/<host_url>-<repo>/ on the bot.
-    # Check for recursed files on the bot here and remove them if found
-    # before we upload to GS.
-    # See crbug.com/1370443; keep this check until root cause is found.
-    recursed_dir = os.path.join(self.mirror_path,
-                                self.mirror_path.split(os.path.sep)[-1])
-    if os.path.exists(recursed_dir):
-      self.print('Deleting unexpected directory: %s' % recursed_dir)
-      gclient_utils.rmtree(recursed_dir)
-
-    # The folder is <git number>
-    gen_number = subprocess.check_output([self.git_exe, 'number'],
-                                         cwd=self.mirror_path).decode(
-                                             'utf-8', 'ignore').strip()
-    gsutil = Gsutil(path=self.gsutil_exe, boto_path=None)
-
-    dest_prefix = '%s/%s' % (self._gs_path, gen_number)
-
-    # ls_out lists contents in the format: gs://blah/blah/123...
-    self.print('running "gsutil ls %s":' % self._gs_path)
-    ls_code, ls_out, ls_error = gsutil.check_call_with_retries(
-        'ls', self._gs_path)
-    if ls_code != 0:
-      self.print(ls_error)
-    else:
-      self.print(ls_out)
-
-    # Check to see if folder already exists in gs
-    ls_out_set = set(ls_out.strip().splitlines())
-    if (dest_prefix + '/' in ls_out_set and
-        dest_prefix + '.ready' in ls_out_set):
-      print('Cache %s already exists.' % dest_prefix)
-      return
-
-    # Reduce the number of individual files to download & write on disk.
-    self.RunGit(['pack-refs', '--all'])
-
-    # Run Garbage Collect to compress packfile.
-    gc_args = ['gc', '--prune=all']
-    if gc_aggressive:
-      # The default "gc --aggressive" is often too aggressive for some machines,
-      # since it attempts to create as many threads as there are CPU cores,
-      # while not limiting per-thread memory usage, which puts too much pressure
-      # on RAM on high-core machines, causing them to thrash. Using lower-level
-      # commands gives more control over those settings.
-
-      # This might not be strictly necessary, but it's fast and is normally run
-      # by 'gc --aggressive', so it shouldn't hurt.
-      self.RunGit(['reflog', 'expire', '--all'])
-
-      # These are the default repack settings for 'gc --aggressive'.
-      gc_args = ['repack', '-d', '-l', '-f', '--depth=50', '--window=250', '-A',
-                 '--unpack-unreachable=all']
-      # A 1G memory limit seems to provide comparable pack results as the
-      # default, even for our largest repos, while preventing runaway memory (at
-      # least on current Chromium builders which have about 4G RAM per core).
-      gc_args.append('--window-memory=1g')
-      # NOTE: It might also be possible to avoid thrashing with a larger window
-      # (e.g. "--window-memory=2g") by limiting the number of threads created
-      # (e.g. "--threads=[cores/2]"). Some limited testing didn't show much
-      # difference in outcomes on our current repos, but it might be worth
-      # trying if the repos grow much larger and the packs don't seem to be
-      # getting compressed enough.
-    self.RunGit(gc_args)
-
-    self.print('running "gsutil -m rsync -r -d %s %s"' %
-               (self.mirror_path, dest_prefix))
-    gsutil.call('-m', 'rsync', '-r', '-d', self.mirror_path, dest_prefix)
-
-    # Create .ready file and upload
-    _, ready_file_name =  tempfile.mkstemp(suffix='.ready')
-    try:
-      self.print('running "gsutil cp %s %s.ready"' %
-                 (ready_file_name, dest_prefix))
-      gsutil.call('cp', ready_file_name, '%s.ready' % (dest_prefix))
-    finally:
-      os.remove(ready_file_name)
-
-    # remove all other directory/.ready files in the same gs_path
-    # except for the directory/.ready file previously created
-    # which can be used for bootstrapping while the current one is
-    # being uploaded
-    if not prune:
-      return
-    prev_dest_prefix = self._GetMostRecentCacheDirectory(ls_out_set)
-    if not prev_dest_prefix:
-      return
-    for path in ls_out_set:
-      if path in (prev_dest_prefix + '/', prev_dest_prefix + '.ready'):
-        continue
-      if path.endswith('.ready'):
-        gsutil.call('rm', path)
-        continue
-      gsutil.call('-m', 'rm', '-r', path)
-
-
-  @staticmethod
-  def DeleteTmpPackFiles(path):
-    pack_dir = os.path.join(path, 'objects', 'pack')
-    if not os.path.isdir(pack_dir):
-      return
-    pack_files = [f for f in os.listdir(pack_dir) if
-                  f.startswith('.tmp-') or f.startswith('tmp_pack_')]
-    for f in pack_files:
-      f = os.path.join(pack_dir, f)
-      try:
-        os.remove(f)
-        logging.warning('Deleted stale temporary pack file %s' % f)
-      except OSError:
-        logging.warning('Unable to delete temporary pack file %s' % f)
+            cwd=self.mirror_path).decode('utf-8',
+                                         'ignore').strip().splitlines()
+        for spec in fetch_specs:
+            try:
+                self.print('Fetching %s' % spec)
+                with self.print_duration_of('fetch %s' % spec):
+                    self.RunGit(fetch_cmd + [spec], retry=True)
+            except subprocess.CalledProcessError:
+                if spec == '+refs/heads/*:refs/heads/*':
+                    raise ClobberNeeded()  # Corrupted cache.
+                logging.warning('Fetch of %s failed' % spec)
+        for commit in self.fetch_commits:
+            self.print('Fetching %s' % commit)
+            try:
+                with self.print_duration_of('fetch %s' % commit):
+                    self.RunGit(['fetch', 'origin', commit], retry=True)
+            except subprocess.CalledProcessError:
+                logging.warning('Fetch of %s failed' % commit)
+
+    def populate(self,
+                 depth=None,
+                 no_fetch_tags=False,
+                 shallow=False,
+                 bootstrap=False,
+                 verbose=False,
+                 lock_timeout=0,
+                 reset_fetch_config=False):
+        assert self.GetCachePath()
+        if shallow and not depth:
+            depth = 10000
+        gclient_utils.safe_makedirs(self.GetCachePath())
+
+        with lockfile.lock(self.mirror_path, lock_timeout):
+            try:
+                self._ensure_bootstrapped(depth, bootstrap, reset_fetch_config)
+                self._fetch(verbose, depth, no_fetch_tags, reset_fetch_config)
+            except ClobberNeeded:
+                # This is a major failure, we need to clean and force a
+                # bootstrap.
+                gclient_utils.rmtree(self.mirror_path)
+                self.print(GIT_CACHE_CORRUPT_MESSAGE)
+                self._ensure_bootstrapped(depth,
+                                          bootstrap,
+                                          reset_fetch_config,
+                                          force=True)
+                self._fetch(verbose, depth, no_fetch_tags, reset_fetch_config)
+
+    def update_bootstrap(self, prune=False, gc_aggressive=False):
+        # NOTE: There have been cases where repos were being recursively
+        # uploaded to google storage. E.g.
+        # `<host_url>-<repo>/<gen_number>/<host_url>-<repo>/` in GS and
+        # <host_url>-<repo>/<host_url>-<repo>/ on the bot. Check for recursed
+        # files on the bot here and remove them if found before we upload to GS.
+        # See crbug.com/1370443; keep this check until root cause is found.
+        recursed_dir = os.path.join(self.mirror_path,
+                                    self.mirror_path.split(os.path.sep)[-1])
+        if os.path.exists(recursed_dir):
+            self.print('Deleting unexpected directory: %s' % recursed_dir)
+            gclient_utils.rmtree(recursed_dir)
+
+        # The folder is <git number>
+        gen_number = subprocess.check_output([self.git_exe, 'number'],
+                                             cwd=self.mirror_path).decode(
+                                                 'utf-8', 'ignore').strip()
+        gsutil = Gsutil(path=self.gsutil_exe, boto_path=None)
+
+        dest_prefix = '%s/%s' % (self._gs_path, gen_number)
+
+        # ls_out lists contents in the format: gs://blah/blah/123...
+        self.print('running "gsutil ls %s":' % self._gs_path)
+        ls_code, ls_out, ls_error = gsutil.check_call_with_retries(
+            'ls', self._gs_path)
+        if ls_code != 0:
+            self.print(ls_error)
+        else:
+            self.print(ls_out)
+
+        # Check to see if folder already exists in gs
+        ls_out_set = set(ls_out.strip().splitlines())
+        if (dest_prefix + '/' in ls_out_set
+                and dest_prefix + '.ready' in ls_out_set):
+            print('Cache %s already exists.' % dest_prefix)
+            return
+
+        # Reduce the number of individual files to download & write on disk.
+        self.RunGit(['pack-refs', '--all'])
+
+        # Run Garbage Collect to compress packfile.
+        gc_args = ['gc', '--prune=all']
+        if gc_aggressive:
+            # The default "gc --aggressive" is often too aggressive for some
+            # machines, since it attempts to create as many threads as there are
+            # CPU cores, while not limiting per-thread memory usage, which puts
+            # too much pressure on RAM on high-core machines, causing them to
+            # thrash. Using lower-level commands gives more control over those
+            # settings.
+
+            # This might not be strictly necessary, but it's fast and is
+            # normally run by 'gc --aggressive', so it shouldn't hurt.
+            self.RunGit(['reflog', 'expire', '--all'])
+
+            # These are the default repack settings for 'gc --aggressive'.
+            gc_args = [
+                'repack', '-d', '-l', '-f', '--depth=50', '--window=250', '-A',
+                '--unpack-unreachable=all'
+            ]
+            # A 1G memory limit seems to provide comparable pack results as the
+            # default, even for our largest repos, while preventing runaway
+            # memory (at least on current Chromium builders which have about 4G
+            # RAM per core).
+            gc_args.append('--window-memory=1g')
+            # NOTE: It might also be possible to avoid thrashing with a larger
+            # window (e.g. "--window-memory=2g") by limiting the number of
+            # threads created (e.g. "--threads=[cores/2]"). Some limited testing
+            # didn't show much difference in outcomes on our current repos, but
+            # it might be worth trying if the repos grow much larger and the
+            # packs don't seem to be getting compressed enough.
+        self.RunGit(gc_args)
+
+        self.print('running "gsutil -m rsync -r -d %s %s"' %
+                   (self.mirror_path, dest_prefix))
+        gsutil.call('-m', 'rsync', '-r', '-d', self.mirror_path, dest_prefix)
+
+        # Create .ready file and upload
+        _, ready_file_name = tempfile.mkstemp(suffix='.ready')
+        try:
+            self.print('running "gsutil cp %s %s.ready"' %
+                       (ready_file_name, dest_prefix))
+            gsutil.call('cp', ready_file_name, '%s.ready' % (dest_prefix))
+        finally:
+            os.remove(ready_file_name)
+
+        # Remove all other directory/.ready files in the same gs_path,
+        # except for the directory/.ready file previously created, which
+        # can be used for bootstrapping while the current one is being
+        # uploaded.
+        if not prune:
+            return
+        prev_dest_prefix = self._GetMostRecentCacheDirectory(ls_out_set)
+        if not prev_dest_prefix:
+            return
+        for path in ls_out_set:
+            if path in (prev_dest_prefix + '/', prev_dest_prefix + '.ready'):
+                continue
+            if path.endswith('.ready'):
+                gsutil.call('rm', path)
+                continue
+            gsutil.call('-m', 'rm', '-r', path)
+
+    @staticmethod
+    def DeleteTmpPackFiles(path):
+        pack_dir = os.path.join(path, 'objects', 'pack')
+        if not os.path.isdir(pack_dir):
+            return
+        pack_files = [
+            f for f in os.listdir(pack_dir)
+            if f.startswith('.tmp-') or f.startswith('tmp_pack_')
+        ]
+        for f in pack_files:
+            f = os.path.join(pack_dir, f)
+            try:
+                os.remove(f)
+                logging.warning('Deleted stale temporary pack file %s' % f)
+            except OSError:
+                logging.warning('Unable to delete temporary pack file %s' % f)
 
 
 @subcommand.usage('[url of repo to check for caching]')
 @metrics.collector.collect_metrics('git cache exists')
 def CMDexists(parser, args):
-  """Check to see if there already is a cache of the given repo."""
-  _, args = parser.parse_args(args)
-  if not len(args) == 1:
-    parser.error('git cache exists only takes exactly one repo url.')
-  url = args[0]
-  mirror = Mirror(url)
-  if mirror.exists():
-    print(mirror.mirror_path)
-    return 0
-  return 1
+    """Check to see if there already is a cache of the given repo."""
+    _, args = parser.parse_args(args)
+    if not len(args) == 1:
+        parser.error('git cache exists only takes exactly one repo url.')
+    url = args[0]
+    mirror = Mirror(url)
+    if mirror.exists():
+        print(mirror.mirror_path)
+        return 0
+    return 1
 
 
 @subcommand.usage('[url of repo to create a bootstrap zip file]')
 @metrics.collector.collect_metrics('git cache update-bootstrap')
 def CMDupdate_bootstrap(parser, args):
-  """Create and uploads a bootstrap tarball."""
-  # Lets just assert we can't do this on Windows.
-  if sys.platform.startswith('win'):
-    print('Sorry, update bootstrap will not work on Windows.', file=sys.stderr)
-    return 1
+    """Create and uploads a bootstrap tarball."""
+    # Lets just assert we can't do this on Windows.
+    if sys.platform.startswith('win'):
+        print('Sorry, update bootstrap will not work on Windows.',
+              file=sys.stderr)
+        return 1
+
+    parser.add_option('--skip-populate',
+                      action='store_true',
+                      help='Skips "populate" step if mirror already exists.')
+    parser.add_option('--gc-aggressive',
+                      action='store_true',
+                      help='Run aggressive repacking of the repo.')
+    parser.add_option('--prune',
+                      action='store_true',
+                      help='Prune all other cached bundles of the same repo.')
+
+    populate_args = args[:]
+    options, args = parser.parse_args(args)
+    url = args[0]
+    mirror = Mirror(url)
+    if not options.skip_populate or not mirror.exists():
+        CMDpopulate(parser, populate_args)
+    else:
+        print('Skipped populate step.')
 
-  parser.add_option('--skip-populate', action='store_true',
-                    help='Skips "populate" step if mirror already exists.')
-  parser.add_option('--gc-aggressive', action='store_true',
-                    help='Run aggressive repacking of the repo.')
-  parser.add_option('--prune', action='store_true',
-                    help='Prune all other cached bundles of the same repo.')
-
-  populate_args = args[:]
-  options, args = parser.parse_args(args)
-  url = args[0]
-  mirror = Mirror(url)
-  if not options.skip_populate or not mirror.exists():
-    CMDpopulate(parser, populate_args)
-  else:
-    print('Skipped populate step.')
-
-  # Get the repo directory.
-  _, args2 = parser.parse_args(args)
-  url = args2[0]
-  mirror = Mirror(url)
-  mirror.update_bootstrap(options.prune, options.gc_aggressive)
-  return 0
+    # Get the repo directory.
+    _, args2 = parser.parse_args(args)
+    url = args2[0]
+    mirror = Mirror(url)
+    mirror.update_bootstrap(options.prune, options.gc_aggressive)
+    return 0
 
 
 @subcommand.usage('[url of repo to add to or update in cache]')
 @metrics.collector.collect_metrics('git cache populate')
 def CMDpopulate(parser, args):
-  """Ensure that the cache has all up-to-date objects for the given repo."""
-  parser.add_option('--depth', type='int',
-                    help='Only cache DEPTH commits of history')
-  parser.add_option(
-      '--no-fetch-tags',
-      action='store_true',
-      help=('Don\'t fetch tags from the server. This can speed up '
-            'fetch considerably when there are many tags.'))
-  parser.add_option('--shallow', '-s', action='store_true',
-                    help='Only cache 10000 commits of history')
-  parser.add_option('--ref', action='append',
-                    help='Specify additional refs to be fetched')
-  parser.add_option('--commit', action='append',
-                    help='Specify additional commits to be fetched')
-  parser.add_option('--no_bootstrap', '--no-bootstrap',
-                    action='store_true',
-                    help='Don\'t bootstrap from Google Storage')
-  parser.add_option('--ignore_locks',
-                    '--ignore-locks',
-                    action='store_true',
-                    help='NOOP. This flag will be removed in the future.')
-  parser.add_option('--break-locks',
-                    action='store_true',
-                    help='Break any existing lock instead of just ignoring it')
-  parser.add_option('--reset-fetch-config', action='store_true', default=False,
-                    help='Reset the fetch config before populating the cache.')
-
-  options, args = parser.parse_args(args)
-  if not len(args) == 1:
-    parser.error('git cache populate only takes exactly one repo url.')
-  if options.ignore_locks:
-    print('ignore_locks is no longer used. Please remove its usage.')
-  if options.break_locks:
-    print('break_locks is no longer used. Please remove its usage.')
-  url = args[0]
-
-  mirror = Mirror(url, refs=options.ref, commits=options.commit)
-  kwargs = {
-      'no_fetch_tags': options.no_fetch_tags,
-      'verbose': options.verbose,
-      'shallow': options.shallow,
-      'bootstrap': not options.no_bootstrap,
-      'lock_timeout': options.timeout,
-      'reset_fetch_config': options.reset_fetch_config,
-  }
-  if options.depth:
-    kwargs['depth'] = options.depth
-  mirror.populate(**kwargs)
+    """Ensure that the cache has all up-to-date objects for the given repo."""
+    parser.add_option('--depth',
+                      type='int',
+                      help='Only cache DEPTH commits of history')
+    parser.add_option(
+        '--no-fetch-tags',
+        action='store_true',
+        help=('Don\'t fetch tags from the server. This can speed up '
+              'fetch considerably when there are many tags.'))
+    parser.add_option('--shallow',
+                      '-s',
+                      action='store_true',
+                      help='Only cache 10000 commits of history')
+    parser.add_option('--ref',
+                      action='append',
+                      help='Specify additional refs to be fetched')
+    parser.add_option('--commit',
+                      action='append',
+                      help='Specify additional commits to be fetched')
+    parser.add_option('--no_bootstrap',
+                      '--no-bootstrap',
+                      action='store_true',
+                      help='Don\'t bootstrap from Google Storage')
+    parser.add_option('--ignore_locks',
+                      '--ignore-locks',
+                      action='store_true',
+                      help='NOOP. This flag will be removed in the future.')
+    parser.add_option(
+        '--break-locks',
+        action='store_true',
+        help='Break any existing lock instead of just ignoring it')
+    parser.add_option(
+        '--reset-fetch-config',
+        action='store_true',
+        default=False,
+        help='Reset the fetch config before populating the cache.')
+
+    options, args = parser.parse_args(args)
+    if not len(args) == 1:
+        parser.error('git cache populate only takes exactly one repo url.')
+    if options.ignore_locks:
+        print('ignore_locks is no longer used. Please remove its usage.')
+    if options.break_locks:
+        print('break_locks is no longer used. Please remove its usage.')
+    url = args[0]
+
+    mirror = Mirror(url, refs=options.ref, commits=options.commit)
+    kwargs = {
+        'no_fetch_tags': options.no_fetch_tags,
+        'verbose': options.verbose,
+        'shallow': options.shallow,
+        'bootstrap': not options.no_bootstrap,
+        'lock_timeout': options.timeout,
+        'reset_fetch_config': options.reset_fetch_config,
+    }
+    if options.depth:
+        kwargs['depth'] = options.depth
+    mirror.populate(**kwargs)
 
 
 @subcommand.usage('Fetch new commits into cache and current checkout')
 @metrics.collector.collect_metrics('git cache fetch')
 def CMDfetch(parser, args):
-  """Update mirror, and fetch in cwd."""
-  parser.add_option('--all', action='store_true', help='Fetch all remotes')
-  parser.add_option('--no_bootstrap', '--no-bootstrap',
-                    action='store_true',
-                    help='Don\'t (re)bootstrap from Google Storage')
-  parser.add_option(
-      '--no-fetch-tags',
-      action='store_true',
-      help=('Don\'t fetch tags from the server. This can speed up '
-            'fetch considerably when there are many tags.'))
-  options, args = parser.parse_args(args)
-
-  # Figure out which remotes to fetch.  This mimics the behavior of regular
-  # 'git fetch'.  Note that in the case of "stacked" or "pipelined" branches,
-  # this will NOT try to traverse up the branching structure to find the
-  # ultimate remote to update.
-  remotes = []
-  if options.all:
-    assert not args, 'fatal: fetch --all does not take a repository argument'
-    remotes = subprocess.check_output([Mirror.git_exe, 'remote'])
-    remotes = remotes.decode('utf-8', 'ignore').splitlines()
-  elif args:
-    remotes = args
-  else:
-    current_branch = subprocess.check_output(
-        [Mirror.git_exe, 'rev-parse', '--abbrev-ref', 'HEAD'])
-    current_branch = current_branch.decode('utf-8', 'ignore').strip()
-    if current_branch != 'HEAD':
-      upstream = subprocess.check_output(
-          [Mirror.git_exe, 'config', 'branch.%s.remote' % current_branch])
-      upstream = upstream.decode('utf-8', 'ignore').strip()
-      if upstream and upstream != '.':
-        remotes = [upstream]
-  if not remotes:
-    remotes = ['origin']
-
-  cachepath = Mirror.GetCachePath()
-  git_dir = os.path.abspath(subprocess.check_output(
-      [Mirror.git_exe, 'rev-parse', '--git-dir']).decode('utf-8', 'ignore'))
-  git_dir = os.path.abspath(git_dir)
-  if git_dir.startswith(cachepath):
-    mirror = Mirror.FromPath(git_dir)
-    mirror.populate(
-        bootstrap=not options.no_bootstrap,
-        no_fetch_tags=options.no_fetch_tags,
-        lock_timeout=options.timeout)
+    """Update mirror, and fetch in cwd."""
+    parser.add_option('--all', action='store_true', help='Fetch all remotes')
+    parser.add_option('--no_bootstrap',
+                      '--no-bootstrap',
+                      action='store_true',
+                      help='Don\'t (re)bootstrap from Google Storage')
+    parser.add_option(
+        '--no-fetch-tags',
+        action='store_true',
+        help=('Don\'t fetch tags from the server. This can speed up '
+              'fetch considerably when there are many tags.'))
+    options, args = parser.parse_args(args)
+
+    # Figure out which remotes to fetch.  This mimics the behavior of regular
+    # 'git fetch'.  Note that in the case of "stacked" or "pipelined" branches,
+    # this will NOT try to traverse up the branching structure to find the
+    # ultimate remote to update.
+    remotes = []
+    if options.all:
+        assert not args, 'fatal: fetch --all does not take repository argument'
+        remotes = subprocess.check_output([Mirror.git_exe, 'remote'])
+        remotes = remotes.decode('utf-8', 'ignore').splitlines()
+    elif args:
+        remotes = args
+    else:
+        current_branch = subprocess.check_output(
+            [Mirror.git_exe, 'rev-parse', '--abbrev-ref', 'HEAD'])
+        current_branch = current_branch.decode('utf-8', 'ignore').strip()
+        if current_branch != 'HEAD':
+            upstream = subprocess.check_output(
+                [Mirror.git_exe, 'config',
+                 'branch.%s.remote' % current_branch])
+            upstream = upstream.decode('utf-8', 'ignore').strip()
+            if upstream and upstream != '.':
+                remotes = [upstream]
+    if not remotes:
+        remotes = ['origin']
+
+    cachepath = Mirror.GetCachePath()
+    git_dir = os.path.abspath(
+        subprocess.check_output([Mirror.git_exe, 'rev-parse',
+                                 '--git-dir']).decode('utf-8', 'ignore'))
+    git_dir = os.path.abspath(git_dir)
+    if git_dir.startswith(cachepath):
+        mirror = Mirror.FromPath(git_dir)
+        mirror.populate(bootstrap=not options.no_bootstrap,
+                        no_fetch_tags=options.no_fetch_tags,
+                        lock_timeout=options.timeout)
+        return 0
+    for remote in remotes:
+        remote_url = subprocess.check_output(
+            [Mirror.git_exe, 'config',
+             'remote.%s.url' % remote])
+        remote_url = remote_url.decode('utf-8', 'ignore').strip()
+        if remote_url.startswith(cachepath):
+            mirror = Mirror.FromPath(remote_url)
+            mirror.print = lambda *args: None
+            print('Updating git cache...')
+            mirror.populate(bootstrap=not options.no_bootstrap,
+                            no_fetch_tags=options.no_fetch_tags,
+                            lock_timeout=options.timeout)
+        subprocess.check_call([Mirror.git_exe, 'fetch', remote])
     return 0
-  for remote in remotes:
-    remote_url = subprocess.check_output(
-        [Mirror.git_exe, 'config', 'remote.%s.url' % remote])
-    remote_url = remote_url.decode('utf-8', 'ignore').strip()
-    if remote_url.startswith(cachepath):
-      mirror = Mirror.FromPath(remote_url)
-      mirror.print = lambda *args: None
-      print('Updating git cache...')
-      mirror.populate(
-          bootstrap=not options.no_bootstrap,
-          no_fetch_tags=options.no_fetch_tags,
-          lock_timeout=options.timeout)
-    subprocess.check_call([Mirror.git_exe, 'fetch', remote])
-  return 0
 
 
 @subcommand.usage('do not use - it is a noop.')
 @metrics.collector.collect_metrics('git cache unlock')
 def CMDunlock(parser, args):
-  """This command does nothing."""
-  print('This command does nothing and will be removed in the future.')
+    """This command does nothing."""
+    print('This command does nothing and will be removed in the future.')
 
 
 class OptionParser(optparse.OptionParser):
-  """Wrapper class for OptionParser to handle global options."""
-
-  def __init__(self, *args, **kwargs):
-    optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs)
-    self.add_option('-c', '--cache-dir',
-                    help=(
-                      'Path to the directory containing the caches. Normally '
-                      'deduced from git config cache.cachepath or '
-                      '$GIT_CACHE_PATH.'))
-    self.add_option('-v', '--verbose', action='count', default=1,
-                    help='Increase verbosity (can be passed multiple times)')
-    self.add_option('-q', '--quiet', action='store_true',
-                    help='Suppress all extraneous output')
-    self.add_option('--timeout', type='int', default=0,
-                    help='Timeout for acquiring cache lock, in seconds')
-
-  def parse_args(self, args=None, values=None):
-    # Create an optparse.Values object that will store only the actual passed
-    # options, without the defaults.
-    actual_options = optparse.Values()
-    _, args = optparse.OptionParser.parse_args(self, args, actual_options)
-    # Create an optparse.Values object with the default options.
-    options = optparse.Values(self.get_default_values().__dict__)
-    # Update it with the options passed by the user.
-    options._update_careful(actual_options.__dict__)
-    # Store the options passed by the user in an _actual_options attribute.
-    # We store only the keys, and not the values, since the values can contain
-    # arbitrary information, which might be PII.
-    metrics.collector.add('arguments', list(actual_options.__dict__.keys()))
-
-    if options.quiet:
-      options.verbose = 0
-
-    levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
-    logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])
+    """Wrapper class for OptionParser to handle global options."""
+    def __init__(self, *args, **kwargs):
+        optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs)
+        self.add_option(
+            '-c',
+            '--cache-dir',
+            help=('Path to the directory containing the caches. Normally '
+                  'deduced from git config cache.cachepath or '
+                  '$GIT_CACHE_PATH.'))
+        self.add_option(
+            '-v',
+            '--verbose',
+            action='count',
+            default=1,
+            help='Increase verbosity (can be passed multiple times)')
+        self.add_option('-q',
+                        '--quiet',
+                        action='store_true',
+                        help='Suppress all extraneous output')
+        self.add_option('--timeout',
+                        type='int',
+                        default=0,
+                        help='Timeout for acquiring cache lock, in seconds')
+
+    def parse_args(self, args=None, values=None):
+        # Create an optparse.Values object that will store only the actual
+        # passed options, without the defaults.
+        actual_options = optparse.Values()
+        _, args = optparse.OptionParser.parse_args(self, args, actual_options)
+        # Create an optparse.Values object with the default options.
+        options = optparse.Values(self.get_default_values().__dict__)
+        # Update it with the options passed by the user.
+        options._update_careful(actual_options.__dict__)
+        # Store the options passed by the user in an _actual_options attribute.
+        # We store only the keys, and not the values, since the values can
+        # contain arbitrary information, which might be PII.
+        metrics.collector.add('arguments', list(actual_options.__dict__.keys()))
+
+        if options.quiet:
+            options.verbose = 0
+
+        levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
+        logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])
 
-    try:
-      global_cache_dir = Mirror.GetCachePath()
-    except RuntimeError:
-      global_cache_dir = None
-    if options.cache_dir:
-      if global_cache_dir and (
-          os.path.abspath(options.cache_dir) !=
-          os.path.abspath(global_cache_dir)):
-        logging.warning('Overriding globally-configured cache directory.')
-      Mirror.SetCachePath(options.cache_dir)
+        try:
+            global_cache_dir = Mirror.GetCachePath()
+        except RuntimeError:
+            global_cache_dir = None
+        if options.cache_dir:
+            if global_cache_dir and (os.path.abspath(options.cache_dir) !=
+                                     os.path.abspath(global_cache_dir)):
+                logging.warning(
+                    'Overriding globally-configured cache directory.')
+            Mirror.SetCachePath(options.cache_dir)
 
-    return options, args
+        return options, args
 
 
 def main(argv):
-  dispatcher = subcommand.CommandDispatcher(__name__)
-  return dispatcher.execute(OptionParser(), argv)
+    dispatcher = subcommand.CommandDispatcher(__name__)
+    return dispatcher.execute(OptionParser(), argv)
 
 
 if __name__ == '__main__':
-  try:
-    with metrics.collector.print_notice_and_exit():
-      sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        with metrics.collector.print_notice_and_exit():
+            sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)

File diff content is too large to display
+ 431 - 413
git_cl.py


File diff content is too large to display
+ 449 - 443
git_common.py


+ 34 - 35
git_dates.py

@@ -1,14 +1,13 @@
 # Copyright 2016 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Utility module for dealing with Git timestamps."""
 
 import datetime
 
 
 def timestamp_offset_to_datetime(timestamp, offset):
-  """Converts a timestamp + offset into a datetime.datetime.
+    """Converts a timestamp + offset into a datetime.datetime.
 
   Useful for dealing with the output of porcelain commands, which provide times
   as timestamp and offset strings.
@@ -20,43 +19,43 @@ def timestamp_offset_to_datetime(timestamp, offset):
   Returns:
     A tz-aware datetime.datetime for this timestamp.
   """
-  timestamp = int(timestamp)
-  tz = FixedOffsetTZ.from_offset_string(offset)
-  return datetime.datetime.fromtimestamp(timestamp, tz)
+    timestamp = int(timestamp)
+    tz = FixedOffsetTZ.from_offset_string(offset)
+    return datetime.datetime.fromtimestamp(timestamp, tz)
 
 
 def datetime_string(dt):
-  """Converts a tz-aware datetime.datetime into a string in git format."""
-  return dt.strftime('%Y-%m-%d %H:%M:%S %z')
+    """Converts a tz-aware datetime.datetime into a string in git format."""
+    return dt.strftime('%Y-%m-%d %H:%M:%S %z')
 
 
 # Adapted from: https://docs.python.org/2/library/datetime.html#tzinfo-objects
 class FixedOffsetTZ(datetime.tzinfo):
-  def __init__(self, offset, name):
-    datetime.tzinfo.__init__(self)
-    self.__offset = offset
-    self.__name = name
-
-  def __repr__(self):  # pragma: no cover
-    return '{}({!r}, {!r})'.format(type(self).__name__, self.__offset,
-                                   self.__name)
-
-  @classmethod
-  def from_offset_string(cls, offset):
-    try:
-      hours = int(offset[:-2])
-      minutes = int(offset[-2:])
-    except ValueError:
-      return cls(datetime.timedelta(0), 'UTC')
-
-    delta = datetime.timedelta(hours=hours, minutes=minutes)
-    return cls(delta, offset)
-
-  def utcoffset(self, dt):
-    return self.__offset
-
-  def tzname(self, dt):
-    return self.__name
-
-  def dst(self, dt):
-    return datetime.timedelta(0)
+    def __init__(self, offset, name):
+        datetime.tzinfo.__init__(self)
+        self.__offset = offset
+        self.__name = name
+
+    def __repr__(self):  # pragma: no cover
+        return '{}({!r}, {!r})'.format(
+            type(self).__name__, self.__offset, self.__name)
+
+    @classmethod
+    def from_offset_string(cls, offset):
+        try:
+            hours = int(offset[:-2])
+            minutes = int(offset[-2:])
+        except ValueError:
+            return cls(datetime.timedelta(0), 'UTC')
+
+        delta = datetime.timedelta(hours=hours, minutes=minutes)
+        return cls(delta, offset)
+
+    def utcoffset(self, dt):
+        return self.__offset
+
+    def tzname(self, dt):
+        return self.__name
+
+    def dst(self, dt):
+        return datetime.timedelta(0)

+ 21 - 20
git_drover.py

@@ -5,7 +5,6 @@
 
 import argparse
 
-
 _HELP_MESSAGE = """\
 git drover has been deprecated in favor of cherry-picking using Gerrit.
 Try it, it's faster!
@@ -23,24 +22,26 @@ https://www.chromium.org/developers/how-tos/get-the-code/multiple-working-direct
 
 
 def main():
-  parser = argparse.ArgumentParser(description=_HELP_MESSAGE)
-  parser.add_argument(
-      '--branch',
-      default='BRANCH',
-      metavar='BRANCH',
-      type=str,
-      help='the name of the branch to which to cherry-pick; e.g. 1234')
-  parser.add_argument(
-      '--cherry-pick',
-      default='HASH_OF_THE_COMMIT_TO_CHERRY_PICK',
-      metavar='HASH_OF_THE_COMMIT_TO_CHERRY_PICK',
-      type=str,
-      help=('the change to cherry-pick; this can be any string '
-            'that unambiguosly refers to a revision not involving HEAD'))
-  options, _ = parser.parse_known_args()
-
-  print(_HELP_MESSAGE.format(
-      branch=options.branch, cherry_pick=options.cherry_pick))
+    parser = argparse.ArgumentParser(description=_HELP_MESSAGE)
+    parser.add_argument(
+        '--branch',
+        default='BRANCH',
+        metavar='BRANCH',
+        type=str,
+        help='the name of the branch to which to cherry-pick; e.g. 1234')
+    parser.add_argument(
+        '--cherry-pick',
+        default='HASH_OF_THE_COMMIT_TO_CHERRY_PICK',
+        metavar='HASH_OF_THE_COMMIT_TO_CHERRY_PICK',
+        type=str,
+        help=('the change to cherry-pick; this can be any string '
+              'that unambiguously refers to a revision not involving HEAD'))
+    options, _ = parser.parse_known_args()
+
+    print(
+        _HELP_MESSAGE.format(branch=options.branch,
+                             cherry_pick=options.cherry_pick))
+
 
 if __name__ == '__main__':
-  main()
+    main()

+ 36 - 32
git_find_releases.py

@@ -19,47 +19,51 @@ import git_common as git
 
 
 def GetNameForCommit(sha1):
-  name = re.sub(r'~.*$', '', git.run('name-rev', '--tags', '--name-only', sha1))
-  if name == 'undefined':
-    name = git.run(
-        'name-rev', '--refs', 'remotes/branch-heads/*', '--name-only',
-        sha1) + ' [untagged]'
-  return name
+    name = re.sub(r'~.*$', '', git.run('name-rev', '--tags', '--name-only',
+                                       sha1))
+    if name == 'undefined':
+        name = git.run('name-rev', '--refs', 'remotes/branch-heads/*',
+                       '--name-only', sha1) + ' [untagged]'
+    return name
 
 
 def GetMergesForCommit(sha1):
-  return [c.split()[0] for c in
-          git.run('log', '--oneline', '-F', '--all', '--no-abbrev', '--grep',
-                  'cherry picked from commit %s' % sha1).splitlines()]
+    return [
+        c.split()[0]
+        for c in git.run('log', '--oneline', '-F', '--all', '--no-abbrev',
+                         '--grep', 'cherry picked from commit %s' %
+                         sha1).splitlines()
+    ]
 
 
 def main(args):
-  parser = optparse.OptionParser(usage=sys.modules[__name__].__doc__)
-  _, args = parser.parse_args(args)
+    parser = optparse.OptionParser(usage=sys.modules[__name__].__doc__)
+    _, args = parser.parse_args(args)
 
-  if len(args) == 0:
-    parser.error('Need at least one commit.')
+    if len(args) == 0:
+        parser.error('Need at least one commit.')
 
-  for arg in args:
-    commit_name = GetNameForCommit(arg)
-    if not commit_name:
-      print('%s not found' % arg)
-      return 1
-    print('commit %s was:' % arg)
-    print('  initially in ' + commit_name)
-    merges = GetMergesForCommit(arg)
-    for merge in merges:
-      print('  merged to ' + GetNameForCommit(merge) + ' (as ' + merge + ')')
-    if not merges:
-      print('No merges found. If this seems wrong, be sure that you did:')
-      print('  git fetch origin && gclient sync --with_branch_heads')
+    for arg in args:
+        commit_name = GetNameForCommit(arg)
+        if not commit_name:
+            print('%s not found' % arg)
+            return 1
+        print('commit %s was:' % arg)
+        print('  initially in ' + commit_name)
+        merges = GetMergesForCommit(arg)
+        for merge in merges:
+            print('  merged to ' + GetNameForCommit(merge) + ' (as ' + merge +
+                  ')')
+        if not merges:
+            print('No merges found. If this seems wrong, be sure that you did:')
+            print('  git fetch origin && gclient sync --with_branch_heads')
 
-  return 0
+    return 0
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)

+ 168 - 161
git_footers.py

@@ -12,49 +12,48 @@ from collections import defaultdict
 
 import git_common as git
 
-
 FOOTER_PATTERN = re.compile(r'^\s*([\w-]+): *(.*)$')
 CHROME_COMMIT_POSITION_PATTERN = re.compile(r'^([\w/\-\.]+)@{#(\d+)}$')
 FOOTER_KEY_BLOCKLIST = set(['http', 'https'])
 
 
 def normalize_name(header):
-  return '-'.join([ word.title() for word in header.strip().split('-') ])
+    return '-'.join([word.title() for word in header.strip().split('-')])
 
 
 def parse_footer(line):
-  """Returns footer's (key, value) if footer is valid, else None."""
-  match = FOOTER_PATTERN.match(line)
-  if match and match.group(1) not in FOOTER_KEY_BLOCKLIST:
-    return (match.group(1), match.group(2))
-  return None
+    """Returns footer's (key, value) if footer is valid, else None."""
+    match = FOOTER_PATTERN.match(line)
+    if match and match.group(1) not in FOOTER_KEY_BLOCKLIST:
+        return (match.group(1), match.group(2))
+    return None
 
 
 def parse_footers(message):
-  """Parses a git commit message into a multimap of footers."""
-  _, _, parsed_footers = split_footers(message)
-  footer_map = defaultdict(list)
-  if parsed_footers:
-    # Read footers from bottom to top, because latter takes precedense,
-    # and we want it to be first in the multimap value.
-    for (k, v) in reversed(parsed_footers):
-      footer_map[normalize_name(k)].append(v.strip())
-  return footer_map
+    """Parses a git commit message into a multimap of footers."""
+    _, _, parsed_footers = split_footers(message)
+    footer_map = defaultdict(list)
+    if parsed_footers:
+        # Read footers from bottom to top, because the latter takes
+        # precedence, and we want it to be first in the multimap value.
+        for (k, v) in reversed(parsed_footers):
+            footer_map[normalize_name(k)].append(v.strip())
+    return footer_map
 
 
 def matches_footer_key(line, key):
-  """Returns whether line is a valid footer whose key matches a given one.
+    """Returns whether line is a valid footer whose key matches a given one.
 
   Keys are compared in normalized form.
   """
-  r = parse_footer(line)
-  if r is None:
-    return False
-  return normalize_name(r[0]) == normalize_name(key)
+    r = parse_footer(line)
+    if r is None:
+        return False
+    return normalize_name(r[0]) == normalize_name(key)
 
 
 def split_footers(message):
-  """Returns (non_footer_lines, footer_lines, parsed footers).
+    """Returns (non_footer_lines, footer_lines, parsed footers).
 
   Guarantees that:
     (non_footer_lines + footer_lines) ~= message.splitlines(), with at
@@ -63,57 +62,59 @@ def split_footers(message):
       There could be fewer parsed_footers than footer lines if some lines in
       last paragraph are malformed.
   """
-  message_lines = list(message.rstrip().splitlines())
-  footer_lines = []
-  maybe_footer_lines = []
-  for line in reversed(message_lines):
-    if line == '' or line.isspace():
-      break
-
-    if parse_footer(line):
-      footer_lines.extend(maybe_footer_lines)
-      maybe_footer_lines = []
-      footer_lines.append(line)
-    else:
-      # We only want to include malformed lines if they are preceded by
-      # well-formed lines. So keep them in holding until we see a well-formed
-      # line (case above).
-      maybe_footer_lines.append(line)
-  else:
-    # The whole description was consisting of footers,
-    # which means those aren't footers.
+    message_lines = list(message.rstrip().splitlines())
     footer_lines = []
+    maybe_footer_lines = []
+    for line in reversed(message_lines):
+        if line == '' or line.isspace():
+            break
+
+        if parse_footer(line):
+            footer_lines.extend(maybe_footer_lines)
+            maybe_footer_lines = []
+            footer_lines.append(line)
+        else:
+            # We only want to include malformed lines if they are preceded by
+            # well-formed lines. So keep them in holding until we see a
+            # well-formed line (case above).
+            maybe_footer_lines.append(line)
+    else:
+        # The whole description consisted of footers,
+        # which means those aren't footers.
+        footer_lines = []
 
-  footer_lines.reverse()
-  footers = [footer for footer in map(parse_footer, footer_lines) if footer]
-  if not footers:
-    return message_lines, [], []
-  if maybe_footer_lines:
-    # If some malformed lines were left over, add a newline to split them
-    # from the well-formed ones.
-    return message_lines[:-len(footer_lines)] + [''], footer_lines, footers
-  return message_lines[:-len(footer_lines)], footer_lines, footers
+    footer_lines.reverse()
+    footers = [footer for footer in map(parse_footer, footer_lines) if footer]
+    if not footers:
+        return message_lines, [], []
+    if maybe_footer_lines:
+        # If some malformed lines were left over, add a newline to split them
+        # from the well-formed ones.
+        return message_lines[:-len(footer_lines)] + [''], footer_lines, footers
+    return message_lines[:-len(footer_lines)], footer_lines, footers
 
 
 def get_footer_change_id(message):
-  """Returns a list of Gerrit's ChangeId from given commit message."""
-  return parse_footers(message).get(normalize_name('Change-Id'), [])
+    """Returns a list of Gerrit's ChangeId from given commit message."""
+    return parse_footers(message).get(normalize_name('Change-Id'), [])
 
 
 def add_footer_change_id(message, change_id):
-  """Returns message with Change-ID footer in it.
+    """Returns message with Change-ID footer in it.
 
   Assumes that Change-Id is not yet in footers, which is then inserted at
   earliest footer line which is after all of these footers:
     Bug|Issue|Test|Feature.
   """
-  assert 'Change-Id' not in parse_footers(message)
-  return add_footer(message, 'Change-Id', change_id,
-                    after_keys=['Bug', 'Issue', 'Test', 'Feature'])
+    assert 'Change-Id' not in parse_footers(message)
+    return add_footer(message,
+                      'Change-Id',
+                      change_id,
+                      after_keys=['Bug', 'Issue', 'Test', 'Feature'])
 
 
 def add_footer(message, key, value, after_keys=None, before_keys=None):
-  """Returns a message with given footer appended.
+    """Returns a message with given footer appended.
 
   If after_keys and before_keys are both None (default), appends footer last.
   If after_keys is provided and matches footers already present, inserts footer
@@ -127,66 +128,69 @@ def add_footer(message, key, value, after_keys=None, before_keys=None):
       after_keys=['Bug', 'Issue']
   the new footer will be inserted between Bug and Verified-By existing footers.
   """
-  assert key == normalize_name(key), 'Use normalized key'
-  new_footer = '%s: %s' % (key, value)
-  if not FOOTER_PATTERN.match(new_footer):
-    raise ValueError('Invalid footer %r' % new_footer)
-
-  top_lines, footer_lines, _ = split_footers(message)
-  if not footer_lines:
-    if not top_lines or top_lines[-1] != '':
-      top_lines.append('')
-    footer_lines = [new_footer]
-  else:
-    after_keys = set(map(normalize_name, after_keys or []))
-    after_indices = [
-        footer_lines.index(x) for x in footer_lines for k in after_keys
-        if matches_footer_key(x, k)]
-    before_keys = set(map(normalize_name, before_keys or []))
-    before_indices = [
-        footer_lines.index(x) for x in footer_lines for k in before_keys
-        if matches_footer_key(x, k)]
-    if after_indices:
-      # after_keys takes precedence, even if there's a conflict.
-      insert_idx = max(after_indices) + 1
-    elif before_indices:
-      insert_idx = min(before_indices)
+    assert key == normalize_name(key), 'Use normalized key'
+    new_footer = '%s: %s' % (key, value)
+    if not FOOTER_PATTERN.match(new_footer):
+        raise ValueError('Invalid footer %r' % new_footer)
+
+    top_lines, footer_lines, _ = split_footers(message)
+    if not footer_lines:
+        if not top_lines or top_lines[-1] != '':
+            top_lines.append('')
+        footer_lines = [new_footer]
     else:
-      insert_idx = len(footer_lines)
-    footer_lines.insert(insert_idx, new_footer)
-  return '\n'.join(top_lines + footer_lines)
+        after_keys = set(map(normalize_name, after_keys or []))
+        after_indices = [
+            footer_lines.index(x) for x in footer_lines for k in after_keys
+            if matches_footer_key(x, k)
+        ]
+        before_keys = set(map(normalize_name, before_keys or []))
+        before_indices = [
+            footer_lines.index(x) for x in footer_lines for k in before_keys
+            if matches_footer_key(x, k)
+        ]
+        if after_indices:
+            # after_keys takes precedence, even if there's a conflict.
+            insert_idx = max(after_indices) + 1
+        elif before_indices:
+            insert_idx = min(before_indices)
+        else:
+            insert_idx = len(footer_lines)
+        footer_lines.insert(insert_idx, new_footer)
+    return '\n'.join(top_lines + footer_lines)
 
 
 def remove_footer(message, key):
-  """Returns a message with all instances of given footer removed."""
-  key = normalize_name(key)
-  top_lines, footer_lines, _ = split_footers(message)
-  if not footer_lines:
-    return message
-  new_footer_lines = []
-  for line in footer_lines:
-    try:
-      f = normalize_name(parse_footer(line)[0])
-      if f != key:
-        new_footer_lines.append(line)
-    except TypeError:
-      # If the footer doesn't parse (i.e. is malformed), just let it carry over.
-      new_footer_lines.append(line)
-  return '\n'.join(top_lines + new_footer_lines)
+    """Returns a message with all instances of given footer removed."""
+    key = normalize_name(key)
+    top_lines, footer_lines, _ = split_footers(message)
+    if not footer_lines:
+        return message
+    new_footer_lines = []
+    for line in footer_lines:
+        try:
+            f = normalize_name(parse_footer(line)[0])
+            if f != key:
+                new_footer_lines.append(line)
+        except TypeError:
+            # If the footer doesn't parse (i.e. is malformed), just let it carry
+            # over.
+            new_footer_lines.append(line)
+    return '\n'.join(top_lines + new_footer_lines)
 
 
 def get_unique(footers, key):
-  key = normalize_name(key)
-  values = footers[key]
-  assert len(values) <= 1, 'Multiple %s footers' % key
-  if values:
-    return values[0]
+    key = normalize_name(key)
+    values = footers[key]
+    assert len(values) <= 1, 'Multiple %s footers' % key
+    if values:
+        return values[0]
 
-  return None
+    return None
 
 
 def get_position(footers):
-  """Get the commit position from the footers multimap using a heuristic.
+    """Get the commit position from the footers multimap using a heuristic.
 
   Returns:
     A tuple of the branch and the position on that branch. For example,
@@ -196,65 +200,68 @@ def get_position(footers):
     would give the return value ('refs/heads/main', 292272).
   """
 
-  position = get_unique(footers, 'Cr-Commit-Position')
-  if position:
-    match = CHROME_COMMIT_POSITION_PATTERN.match(position)
-    assert match, 'Invalid Cr-Commit-Position value: %s' % position
-    return (match.group(1), match.group(2))
+    position = get_unique(footers, 'Cr-Commit-Position')
+    if position:
+        match = CHROME_COMMIT_POSITION_PATTERN.match(position)
+        assert match, 'Invalid Cr-Commit-Position value: %s' % position
+        return (match.group(1), match.group(2))
 
-  raise ValueError('Unable to infer commit position from footers')
+    raise ValueError('Unable to infer commit position from footers')
 
 
 def main(args):
-  parser = argparse.ArgumentParser(
-    formatter_class=argparse.ArgumentDefaultsHelpFormatter
-  )
-  parser.add_argument('ref', nargs='?', help='Git ref to retrieve footers from.'
-                      ' Omit to parse stdin.')
-
-  g = parser.add_mutually_exclusive_group()
-  g.add_argument('--key', metavar='KEY',
-                 help='Get all values for the given footer name, one per '
-                 'line (case insensitive)')
-  g.add_argument('--position', action='store_true')
-  g.add_argument('--position-ref', action='store_true')
-  g.add_argument('--position-num', action='store_true')
-  g.add_argument('--json', help='filename to dump JSON serialized footers to.')
-
-  opts = parser.parse_args(args)
-
-  if opts.ref:
-    message = git.run('log', '-1', '--format=%B', opts.ref)
-  else:
-    message = sys.stdin.read()
-
-  footers = parse_footers(message)
-
-  if opts.key:
-    for v in footers.get(normalize_name(opts.key), []):
-      print(v)
-  elif opts.position:
-    pos = get_position(footers)
-    print('%s@{#%s}' % (pos[0], pos[1] or '?'))
-  elif opts.position_ref:
-    print(get_position(footers)[0])
-  elif opts.position_num:
-    pos = get_position(footers)
-    assert pos[1], 'No valid position for commit'
-    print(pos[1])
-  elif opts.json:
-    with open(opts.json, 'w') as f:
-      json.dump(footers, f)
-  else:
-    for k in footers.keys():
-      for v in footers[k]:
-        print('%s: %s' % (k, v))
-  return 0
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument('ref',
+                        nargs='?',
+                        help='Git ref to retrieve footers from.'
+                        ' Omit to parse stdin.')
+
+    g = parser.add_mutually_exclusive_group()
+    g.add_argument('--key',
+                   metavar='KEY',
+                   help='Get all values for the given footer name, one per '
+                   'line (case insensitive)')
+    g.add_argument('--position', action='store_true')
+    g.add_argument('--position-ref', action='store_true')
+    g.add_argument('--position-num', action='store_true')
+    g.add_argument('--json',
+                   help='filename to dump JSON serialized footers to.')
+
+    opts = parser.parse_args(args)
+
+    if opts.ref:
+        message = git.run('log', '-1', '--format=%B', opts.ref)
+    else:
+        message = sys.stdin.read()
+
+    footers = parse_footers(message)
+
+    if opts.key:
+        for v in footers.get(normalize_name(opts.key), []):
+            print(v)
+    elif opts.position:
+        pos = get_position(footers)
+        print('%s@{#%s}' % (pos[0], pos[1] or '?'))
+    elif opts.position_ref:
+        print(get_position(footers)[0])
+    elif opts.position_num:
+        pos = get_position(footers)
+        assert pos[1], 'No valid position for commit'
+        print(pos[1])
+    elif opts.json:
+        with open(opts.json, 'w') as f:
+            json.dump(footers, f)
+    else:
+        for k in footers.keys():
+            for v in footers[k]:
+                print('%s: %s' % (k, v))
+    return 0
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)

+ 16 - 16
git_freezer.py

@@ -12,28 +12,28 @@ from git_common import freeze, thaw
 
 
 def CMDfreeze(parser, args):
-  """Freeze a branch's changes, excluding unstaged gitlinks changes."""
-  parser.parse_args(args)
-  return freeze()
+    """Freeze a branch's changes, excluding unstaged gitlinks changes."""
+    parser.parse_args(args)
+    return freeze()
 
 
 def CMDthaw(parser, args):
-  """Returns a frozen branch to the state before it was frozen."""
-  parser.parse_args(args)
-  return thaw()
+    """Returns a frozen branch to the state before it was frozen."""
+    parser.parse_args(args)
+    return thaw()
 
 
 def main(args):
-  dispatcher = subcommand.CommandDispatcher(__name__)
-  ret = dispatcher.execute(optparse.OptionParser(), args)
-  if ret:
-    print(ret)
-  return 0
+    dispatcher = subcommand.CommandDispatcher(__name__)
+    ret = dispatcher.execute(optparse.OptionParser(), args)
+    if ret:
+        print(ret)
+    return 0
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)

+ 295 - 281
git_hyper_blame.py

@@ -2,7 +2,6 @@
 # Copyright 2016 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Wrapper around git blame that ignores certain commits.
 """
 
@@ -17,135 +16,133 @@ import git_common
 import git_dates
 import setup_color
 
-
 logging.getLogger().setLevel(logging.INFO)
 
-
 DEFAULT_IGNORE_FILE_NAME = '.git-blame-ignore-revs'
 
 
 class Commit(object):
-  """Info about a commit."""
-  def __init__(self, commithash):
-    self.commithash = commithash
-    self.author = None
-    self.author_mail = None
-    self.author_time = None
-    self.author_tz = None
-    self.committer = None
-    self.committer_mail = None
-    self.committer_time = None
-    self.committer_tz = None
-    self.summary = None
-    self.boundary = None
-    self.previous = None
-    self.filename = None
-
-  def __repr__(self):  # pragma: no cover
-    return '<Commit %s>' % self.commithash
+    """Info about a commit."""
+    def __init__(self, commithash):
+        self.commithash = commithash
+        self.author = None
+        self.author_mail = None
+        self.author_time = None
+        self.author_tz = None
+        self.committer = None
+        self.committer_mail = None
+        self.committer_time = None
+        self.committer_tz = None
+        self.summary = None
+        self.boundary = None
+        self.previous = None
+        self.filename = None
+
+    def __repr__(self):  # pragma: no cover
+        return '<Commit %s>' % self.commithash
 
 
 BlameLine = collections.namedtuple(
-    'BlameLine',
-    'commit context lineno_then lineno_now modified')
+    'BlameLine', 'commit context lineno_then lineno_now modified')
 
 
 def parse_blame(blameoutput):
-  """Parses the output of git blame -p into a data structure."""
-  lines = blameoutput.split('\n')
-  i = 0
-  commits = {}
-
-  while i < len(lines):
-    # Read a commit line and parse it.
-    line = lines[i]
-    i += 1
-    if not line.strip():
-      continue
-    commitline = line.split()
-    commithash = commitline[0]
-    lineno_then = int(commitline[1])
-    lineno_now = int(commitline[2])
-
-    try:
-      commit = commits[commithash]
-    except KeyError:
-      commit = Commit(commithash)
-      commits[commithash] = commit
+    """Parses the output of git blame -p into a data structure."""
+    lines = blameoutput.split('\n')
+    i = 0
+    commits = {}
 
-    # Read commit details until we find a context line.
     while i < len(lines):
-      line = lines[i]
-      i += 1
-      if line.startswith('\t'):
-        break
-
-      try:
-        key, value = line.split(' ', 1)
-      except ValueError:
-        key = line
-        value = True
-      setattr(commit, key.replace('-', '_'), value)
-
-    context = line[1:]
-
-    yield BlameLine(commit, context, lineno_then, lineno_now, False)
+        # Read a commit line and parse it.
+        line = lines[i]
+        i += 1
+        if not line.strip():
+            continue
+        commitline = line.split()
+        commithash = commitline[0]
+        lineno_then = int(commitline[1])
+        lineno_now = int(commitline[2])
+
+        try:
+            commit = commits[commithash]
+        except KeyError:
+            commit = Commit(commithash)
+            commits[commithash] = commit
+
+        # Read commit details until we find a context line.
+        while i < len(lines):
+            line = lines[i]
+            i += 1
+            if line.startswith('\t'):
+                break
+
+            try:
+                key, value = line.split(' ', 1)
+            except ValueError:
+                key = line
+                value = True
+            setattr(commit, key.replace('-', '_'), value)
+
+        context = line[1:]
+
+        yield BlameLine(commit, context, lineno_then, lineno_now, False)
 
 
 def print_table(outbuf, table, align):
-  """Print a 2D rectangular array, aligning columns with spaces.
+    """Print a 2D rectangular array, aligning columns with spaces.
 
   Args:
     align: string of 'l' and 'r', designating whether each column is left- or
            right-aligned.
   """
-  if len(table) == 0:
-    return
-
-  colwidths = None
-  for row in table:
-    if colwidths is None:
-      colwidths = [len(x) for x in row]
-    else:
-      colwidths = [max(colwidths[i], len(x)) for i, x in enumerate(row)]
-
-  for row in table:
-    cells = []
-    for i, cell in enumerate(row):
-      padding = ' ' * (colwidths[i] - len(cell))
-      if align[i] == 'r':
-        cell = padding + cell
-      elif i < len(row) - 1:
-        # Do not pad the final column if left-aligned.
-        cell += padding
-      cells.append(cell.encode('utf-8', 'replace'))
-    try:
-      outbuf.write(b' '.join(cells) + b'\n')
-    except IOError:  # pragma: no cover
-      # Can happen on Windows if the pipe is closed early.
-      pass
+    if len(table) == 0:
+        return
+
+    colwidths = None
+    for row in table:
+        if colwidths is None:
+            colwidths = [len(x) for x in row]
+        else:
+            colwidths = [max(colwidths[i], len(x)) for i, x in enumerate(row)]
+
+    for row in table:
+        cells = []
+        for i, cell in enumerate(row):
+            padding = ' ' * (colwidths[i] - len(cell))
+            if align[i] == 'r':
+                cell = padding + cell
+            elif i < len(row) - 1:
+                # Do not pad the final column if left-aligned.
+                cell += padding
+            cells.append(cell.encode('utf-8', 'replace'))
+        try:
+            outbuf.write(b' '.join(cells) + b'\n')
+        except IOError:  # pragma: no cover
+            # Can happen on Windows if the pipe is closed early.
+            pass
 
 
 def pretty_print(outbuf, parsedblame, show_filenames=False):
-  """Pretty-prints the output of parse_blame."""
-  table = []
-  for line in parsedblame:
-    author_time = git_dates.timestamp_offset_to_datetime(
-        line.commit.author_time, line.commit.author_tz)
-    row = [line.commit.commithash[:8],
-           '(' + line.commit.author,
-           git_dates.datetime_string(author_time),
-           str(line.lineno_now) + ('*' if line.modified else '') + ')',
-           line.context]
-    if show_filenames:
-      row.insert(1, line.commit.filename)
-    table.append(row)
-  print_table(outbuf, table, align='llllrl' if show_filenames else 'lllrl')
+    """Pretty-prints the output of parse_blame."""
+    table = []
+    for line in parsedblame:
+        author_time = git_dates.timestamp_offset_to_datetime(
+            line.commit.author_time, line.commit.author_tz)
+        row = [
+            line.commit.commithash[:8], '(' + line.commit.author,
+            git_dates.datetime_string(author_time),
+            str(line.lineno_now) + ('*' if line.modified else '') + ')',
+            line.context
+        ]
+        if show_filenames:
+            row.insert(1, line.commit.filename)
+        table.append(row)
+    print_table(outbuf, table, align='llllrl' if show_filenames else 'lllrl')
 
 
 def get_parsed_blame(filename, revision='HEAD'):
-  blame = git_common.blame(filename, revision=revision, porcelain=True)
-  return list(parse_blame(blame))
+    blame = git_common.blame(filename, revision=revision, porcelain=True)
+    return list(parse_blame(blame))
 
 
 # Map from (oldrev, newrev) to hunk list (caching the results of git diff, but
@@ -156,41 +153,41 @@ diff_hunks_cache = {}
 
 
 def cache_diff_hunks(oldrev, newrev):
-  def parse_start_length(s):
-    # Chop the '-' or '+'.
-    s = s[1:]
-    # Length is optional (defaults to 1).
-    try:
-      start, length = s.split(',')
-    except ValueError:
-      start = s
-      length = 1
-    return int(start), int(length)
+    def parse_start_length(s):
+        # Chop the '-' or '+'.
+        s = s[1:]
+        # Length is optional (defaults to 1).
+        try:
+            start, length = s.split(',')
+        except ValueError:
+            start = s
+            length = 1
+        return int(start), int(length)
 
-  try:
-    return diff_hunks_cache[(oldrev, newrev)]
-  except KeyError:
-    pass
+    try:
+        return diff_hunks_cache[(oldrev, newrev)]
+    except KeyError:
+        pass
 
-  # Use -U0 to get the smallest possible hunks.
-  diff = git_common.diff(oldrev, newrev, '-U0')
+    # Use -U0 to get the smallest possible hunks.
+    diff = git_common.diff(oldrev, newrev, '-U0')
 
-  # Get all the hunks.
-  hunks = []
-  for line in diff.split('\n'):
-    if not line.startswith('@@'):
-      continue
-    ranges = line.split(' ', 3)[1:3]
-    ranges = tuple(parse_start_length(r) for r in ranges)
-    hunks.append(ranges)
+    # Get all the hunks.
+    hunks = []
+    for line in diff.split('\n'):
+        if not line.startswith('@@'):
+            continue
+        ranges = line.split(' ', 3)[1:3]
+        ranges = tuple(parse_start_length(r) for r in ranges)
+        hunks.append(ranges)
 
-  diff_hunks_cache[(oldrev, newrev)] = hunks
-  return hunks
+    diff_hunks_cache[(oldrev, newrev)] = hunks
+    return hunks
 
 
 def approx_lineno_across_revs(filename, newfilename, revision, newrevision,
                               lineno):
-  """Computes the approximate movement of a line number between two revisions.
+    """Computes the approximate movement of a line number between two revisions.
 
   Consider line |lineno| in |filename| at |revision|. This function computes the
   line number of that line in |newfilename| at |newrevision|. This is
@@ -206,183 +203,200 @@ def approx_lineno_across_revs(filename, newfilename, revision, newrevision,
   Returns:
     Line number within |newfilename| at |newrevision|.
   """
-  # This doesn't work that well if there are a lot of line changes within the
-  # hunk (demonstrated by GitHyperBlameLineMotionTest.testIntraHunkLineMotion).
-  # A fuzzy heuristic that takes the text of the new line and tries to find a
-  # deleted line within the hunk that mostly matches the new line could help.
+    # This doesn't work that well if there are a lot of line changes within the
+    # hunk (demonstrated by
+    # GitHyperBlameLineMotionTest.testIntraHunkLineMotion). A fuzzy heuristic
+    # that takes the text of the new line and tries to find a deleted line
+    # within the hunk that mostly matches the new line could help.
 
-  # Use the <revision>:<filename> syntax to diff between two blobs. This is the
-  # only way to diff a file that has been renamed.
-  old = '%s:%s' % (revision, filename)
-  new = '%s:%s' % (newrevision, newfilename)
-  hunks = cache_diff_hunks(old, new)
+    # Use the <revision>:<filename> syntax to diff between two blobs. This is
+    # the only way to diff a file that has been renamed.
+    old = '%s:%s' % (revision, filename)
+    new = '%s:%s' % (newrevision, newfilename)
+    hunks = cache_diff_hunks(old, new)
 
-  cumulative_offset = 0
+    cumulative_offset = 0
 
-  # Find the hunk containing lineno (if any).
-  for (oldstart, oldlength), (newstart, newlength) in hunks:
-    cumulative_offset += newlength - oldlength
+    # Find the hunk containing lineno (if any).
+    for (oldstart, oldlength), (newstart, newlength) in hunks:
+        cumulative_offset += newlength - oldlength
 
-    if lineno >= oldstart + oldlength:
-      # Not there yet.
-      continue
+        if lineno >= oldstart + oldlength:
+            # Not there yet.
+            continue
 
-    if lineno < oldstart:
-      # Gone too far.
-      break
+        if lineno < oldstart:
+            # Gone too far.
+            break
 
-    # lineno is in [oldstart, oldlength] at revision; [newstart, newlength] at
-    # newrevision.
+        # lineno is in [oldstart, oldlength] at revision; [newstart, newlength]
+        # at newrevision.
 
-    # If newlength == 0, newstart will be the line before the deleted hunk.
-    # Since the line must have been deleted, just return that as the nearest
-    # line in the new file. Caution: newstart can be 0 in this case.
-    if newlength == 0:
-      return max(1, newstart)
+        # If newlength == 0, newstart will be the line before the deleted hunk.
+        # Since the line must have been deleted, just return that as the nearest
+        # line in the new file. Caution: newstart can be 0 in this case.
+        if newlength == 0:
+            return max(1, newstart)
 
-    newend = newstart + newlength - 1
+        newend = newstart + newlength - 1
 
-    # Move lineno based on the amount the entire hunk shifted.
-    lineno = lineno + newstart - oldstart
-    # Constrain the output within the range [newstart, newend].
-    return min(newend, max(newstart, lineno))
+        # Move lineno based on the amount the entire hunk shifted.
+        lineno = lineno + newstart - oldstart
+        # Constrain the output within the range [newstart, newend].
+        return min(newend, max(newstart, lineno))
 
-  # Wasn't in a hunk. Figure out the line motion based on the difference in
-  # length between the hunks seen so far.
-  return lineno + cumulative_offset
+    # Wasn't in a hunk. Figure out the line motion based on the difference in
+    # length between the hunks seen so far.
+    return lineno + cumulative_offset
 
 
 def hyper_blame(outbuf, ignored, filename, revision):
-  # Map from commit to parsed blame from that commit.
-  blame_from = {}
-  filename = os.path.normpath(filename)
+    # Map from commit to parsed blame from that commit.
+    blame_from = {}
+    filename = os.path.normpath(filename)
+
+    def cache_blame_from(filename, commithash):
+        try:
+            return blame_from[commithash]
+        except KeyError:
+            parsed = get_parsed_blame(filename, commithash)
+            blame_from[commithash] = parsed
+            return parsed
 
-  def cache_blame_from(filename, commithash):
     try:
-      return blame_from[commithash]
-    except KeyError:
-      parsed = get_parsed_blame(filename, commithash)
-      blame_from[commithash] = parsed
-      return parsed
-
-  try:
-    parsed = cache_blame_from(filename, git_common.hash_one(revision))
-  except subprocess2.CalledProcessError as e:
-    sys.stderr.write(e.stderr.decode())
-    return e.returncode
-
-  new_parsed = []
-
-  # We don't show filenames in blame output unless we have to.
-  show_filenames = False
-
-  for line in parsed:
-    # If a line references an ignored commit, blame that commit's parent
-    # repeatedly until we find a non-ignored commit.
-    while line.commit.commithash in ignored:
-      if line.commit.previous is None:
-        # You can't ignore the commit that added this file.
-        break
-
-      previouscommit, previousfilename = line.commit.previous.split(' ', 1)
-      parent_blame = cache_blame_from(previousfilename, previouscommit)
-
-      if len(parent_blame) == 0:
-        # The previous version of this file was empty, therefore, you can't
-        # ignore this commit.
-        break
-
-      # line.lineno_then is the line number in question at line.commit. We need
-      # to translate that line number so that it refers to the position of the
-      # same line on previouscommit.
-      lineno_previous = approx_lineno_across_revs(
-          line.commit.filename, previousfilename, line.commit.commithash,
-          previouscommit, line.lineno_then)
-      logging.debug('ignore commit %s on line p%d/t%d/n%d',
-                    line.commit.commithash, lineno_previous, line.lineno_then,
-                    line.lineno_now)
-
-      # Get the line at lineno_previous in the parent commit.
-      assert 1 <= lineno_previous <= len(parent_blame)
-      newline = parent_blame[lineno_previous - 1]
+        parsed = cache_blame_from(filename, git_common.hash_one(revision))
+    except subprocess2.CalledProcessError as e:
+        sys.stderr.write(e.stderr.decode())
+        return e.returncode
+
+    new_parsed = []
+
+    # We don't show filenames in blame output unless we have to.
+    show_filenames = False
+
+    for line in parsed:
+        # If a line references an ignored commit, blame that commit's parent
+        # repeatedly until we find a non-ignored commit.
+        while line.commit.commithash in ignored:
+            if line.commit.previous is None:
+                # You can't ignore the commit that added this file.
+                break
+
+            previouscommit, previousfilename = line.commit.previous.split(
+                ' ', 1)
+            parent_blame = cache_blame_from(previousfilename, previouscommit)
+
+            if len(parent_blame) == 0:
+                # The previous version of this file was empty, therefore, you
+                # can't ignore this commit.
+                break
+
+            # line.lineno_then is the line number in question at line.commit. We
+            # need to translate that line number so that it refers to the
+            # position of the same line on previouscommit.
+            lineno_previous = approx_lineno_across_revs(line.commit.filename,
+                                                        previousfilename,
+                                                        line.commit.commithash,
+                                                        previouscommit,
+                                                        line.lineno_then)
+            logging.debug('ignore commit %s on line p%d/t%d/n%d',
+                          line.commit.commithash, lineno_previous,
+                          line.lineno_then, line.lineno_now)
+
+            # Get the line at lineno_previous in the parent commit.
+            assert 1 <= lineno_previous <= len(parent_blame)
+            newline = parent_blame[lineno_previous - 1]
+
+            # Replace the commit and lineno_then, but not the lineno_now or
+            # context.
+            line = BlameLine(newline.commit, line.context, newline.lineno_then,
+                             line.lineno_now, True)
+            logging.debug('    replacing with %r', line)
+
+        # If any line has a different filename to the file's current name, turn
+        # on filename display for the entire blame output. Use normpath to make
+        # variable consistent across platforms.
+        if os.path.normpath(line.commit.filename) != filename:
+            show_filenames = True
+
+        new_parsed.append(line)
+
+    pretty_print(outbuf, new_parsed, show_filenames=show_filenames)
+
+    return 0
 
-      # Replace the commit and lineno_then, but not the lineno_now or context.
-      line = BlameLine(newline.commit, line.context, newline.lineno_then,
-                       line.lineno_now, True)
-      logging.debug('    replacing with %r', line)
 
-    # If any line has a different filename to the file's current name, turn on
-    # filename display for the entire blame output.
-    # Use normpath to make variable consistent across platforms.
-    if os.path.normpath(line.commit.filename) != filename:
-      show_filenames = True
+def parse_ignore_file(ignore_file):
+    for line in ignore_file:
+        line = line.split('#', 1)[0].strip()
+        if line:
+            yield line
 
-    new_parsed.append(line)
 
-  pretty_print(outbuf, new_parsed, show_filenames=show_filenames)
+def main(args, outbuf):
+    parser = argparse.ArgumentParser(
+        prog='git hyper-blame',
+        description='git blame with support for ignoring certain commits.')
+    parser.add_argument('-i',
+                        metavar='REVISION',
+                        action='append',
+                        dest='ignored',
+                        default=[],
+                        help='a revision to ignore')
+    parser.add_argument('--ignore-file',
+                        metavar='FILE',
+                        dest='ignore_file',
+                        help='a file containing a list of revisions to ignore')
+    parser.add_argument(
+        '--no-default-ignores',
+        dest='no_default_ignores',
+        action='store_true',
+        help='Do not ignore commits from .git-blame-ignore-revs.')
+    parser.add_argument('revision',
+                        nargs='?',
+                        default='HEAD',
+                        metavar='REVISION',
+                        help='revision to look at')
+    parser.add_argument('filename', metavar='FILE', help='filename to blame')
+
+    args = parser.parse_args(args)
+    try:
+        repo_root = git_common.repo_root()
+    except subprocess2.CalledProcessError as e:
+        sys.stderr.write(e.stderr.decode())
+        return e.returncode
 
-  return 0
+    # Make filename relative to the repository root, and cd to the root dir (so
+    # all filenames throughout this script are relative to the root).
+    filename = os.path.relpath(args.filename, repo_root)
+    os.chdir(repo_root)
 
+    # Normalize filename so we can compare it to other filenames git gives us.
+    filename = os.path.normpath(filename)
+    filename = os.path.normcase(filename)
 
-def parse_ignore_file(ignore_file):
-  for line in ignore_file:
-    line = line.split('#', 1)[0].strip()
-    if line:
-      yield line
+    ignored_list = list(args.ignored)
+    if not args.no_default_ignores and os.path.exists(DEFAULT_IGNORE_FILE_NAME):
+        with open(DEFAULT_IGNORE_FILE_NAME) as ignore_file:
+            ignored_list.extend(parse_ignore_file(ignore_file))
 
+    if args.ignore_file:
+        with open(args.ignore_file) as ignore_file:
+            ignored_list.extend(parse_ignore_file(ignore_file))
 
-def main(args, outbuf):
-  parser = argparse.ArgumentParser(
-      prog='git hyper-blame',
-      description='git blame with support for ignoring certain commits.')
-  parser.add_argument('-i', metavar='REVISION', action='append', dest='ignored',
-                      default=[], help='a revision to ignore')
-  parser.add_argument('--ignore-file', metavar='FILE', dest='ignore_file',
-                      help='a file containing a list of revisions to ignore')
-  parser.add_argument('--no-default-ignores', dest='no_default_ignores',
-                      action='store_true',
-                      help='Do not ignore commits from .git-blame-ignore-revs.')
-  parser.add_argument('revision', nargs='?', default='HEAD', metavar='REVISION',
-                      help='revision to look at')
-  parser.add_argument('filename', metavar='FILE', help='filename to blame')
-
-  args = parser.parse_args(args)
-  try:
-    repo_root = git_common.repo_root()
-  except subprocess2.CalledProcessError as e:
-    sys.stderr.write(e.stderr.decode())
-    return e.returncode
-
-  # Make filename relative to the repository root, and cd to the root dir (so
-  # all filenames throughout this script are relative to the root).
-  filename = os.path.relpath(args.filename, repo_root)
-  os.chdir(repo_root)
-
-  # Normalize filename so we can compare it to other filenames git gives us.
-  filename = os.path.normpath(filename)
-  filename = os.path.normcase(filename)
-
-  ignored_list = list(args.ignored)
-  if not args.no_default_ignores and os.path.exists(DEFAULT_IGNORE_FILE_NAME):
-    with open(DEFAULT_IGNORE_FILE_NAME) as ignore_file:
-      ignored_list.extend(parse_ignore_file(ignore_file))
-
-  if args.ignore_file:
-    with open(args.ignore_file) as ignore_file:
-      ignored_list.extend(parse_ignore_file(ignore_file))
-
-  ignored = set()
-  for c in ignored_list:
-    try:
-      ignored.add(git_common.hash_one(c))
-    except subprocess2.CalledProcessError as e:
-      # Custom warning string (the message from git-rev-parse is inappropriate).
-      sys.stderr.write('warning: unknown revision \'%s\'.\n' % c)
+    ignored = set()
+    for c in ignored_list:
+        try:
+            ignored.add(git_common.hash_one(c))
+        except subprocess2.CalledProcessError as e:
+            # Custom warning string (the message from git-rev-parse is
+            # inappropriate).
+            sys.stderr.write('warning: unknown revision \'%s\'.\n' % c)
 
-  return hyper_blame(outbuf, ignored, filename, args.revision)
+    return hyper_blame(outbuf, ignored, filename, args.revision)
 
 
 if __name__ == '__main__':  # pragma: no cover
-  setup_color.init()
-  with git_common.less() as less_input:
-    sys.exit(main(sys.argv[1:], less_input))
+    setup_color.init()
+    with git_common.less() as less_input:
+        sys.exit(main(sys.argv[1:], less_input))
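
As a side note, the following is a minimal standalone sketch of the hunk-offset line mapping that approx_lineno_across_revs above performs; the hunk tuple and the line number in the assert are made up purely for illustration.

# Hunks are ((oldstart, oldlength), (newstart, newlength)) pairs, as
# produced by cache_diff_hunks above.
def map_lineno(hunks, lineno):
    cumulative_offset = 0
    for (oldstart, oldlength), (newstart, newlength) in hunks:
        cumulative_offset += newlength - oldlength
        if lineno >= oldstart + oldlength:
            continue  # Hunk ends before lineno; keep accumulating the offset.
        if lineno < oldstart:
            break  # Hunk starts after lineno; no hunk contains it.
        if newlength == 0:
            return max(1, newstart)  # The line was deleted in the new file.
        # Shift by the hunk's displacement and clamp into the new hunk.
        newend = newstart + newlength - 1
        return min(newend, max(newstart, lineno + newstart - oldstart))
    return lineno + cumulative_offset

# Two lines inserted after old line 10 push old line 30 down to line 32.
assert map_lineno([((10, 0), (11, 2))], 30) == 32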

git_map.py (+103 -102)

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 usage: git map [-h] [--help] [<args>]
 
@@ -26,7 +25,6 @@ import subprocess2
 
 from third_party import colorama
 
-
 RESET = colorama.Fore.RESET + colorama.Back.RESET + colorama.Style.RESET_ALL
 BRIGHT = colorama.Style.BRIGHT
 
@@ -41,119 +39,122 @@ YELLOW = colorama.Fore.YELLOW
 
 
 def _print_help(outbuf):
-  names = {
-    'Cyan': CYAN,
-    'Green': GREEN,
-    'Magenta': MAGENTA,
-    'Red': RED,
-    'White': WHITE,
-    'Blue background': BLUE_BACK,
-  }
-  msg = ''
-  for line in __doc__.splitlines():
-    for name, color in names.items():
-      if name in line:
-        msg += line.replace('* ' + name, color + '* ' + name + RESET) + '\n'
-        break
-    else:
-      msg += line + '\n'
-  outbuf.write(msg.encode('utf-8', 'replace'))
+    names = {
+        'Cyan': CYAN,
+        'Green': GREEN,
+        'Magenta': MAGENTA,
+        'Red': RED,
+        'White': WHITE,
+        'Blue background': BLUE_BACK,
+    }
+    msg = ''
+    for line in __doc__.splitlines():
+        for name, color in names.items():
+            if name in line:
+                msg += line.replace('* ' + name,
+                                    color + '* ' + name + RESET) + '\n'
+                break
+        else:
+            msg += line + '\n'
+    outbuf.write(msg.encode('utf-8', 'replace'))
 
 
 def _color_branch(branch, all_branches, all_tags, current):
-  if branch in (current, 'HEAD -> ' + current):
-    color = CYAN
-    current = None
-  elif branch in all_branches:
-    color = GREEN
-    all_branches.remove(branch)
-  elif branch in all_tags:
-    color = MAGENTA
-  elif branch.startswith('tag: '):
-    color = MAGENTA
-    branch = branch[len('tag: '):]
-  else:
-    color = RED
-  return color + branch + RESET
+    if branch in (current, 'HEAD -> ' + current):
+        color = CYAN
+        current = None
+    elif branch in all_branches:
+        color = GREEN
+        all_branches.remove(branch)
+    elif branch in all_tags:
+        color = MAGENTA
+    elif branch.startswith('tag: '):
+        color = MAGENTA
+        branch = branch[len('tag: '):]
+    else:
+        color = RED
+    return color + branch + RESET
 
 
 def _color_branch_list(branch_list, all_branches, all_tags, current):
-  if not branch_list:
-    return ''
-  colored_branches = (GREEN + ', ').join(
-      _color_branch(branch, all_branches, all_tags, current)
-      for branch in branch_list if branch != 'HEAD')
-  return (GREEN + '(' + colored_branches + GREEN + ') ' + RESET)
+    if not branch_list:
+        return ''
+    colored_branches = (GREEN + ', ').join(
+        _color_branch(branch, all_branches, all_tags, current)
+        for branch in branch_list if branch != 'HEAD')
+    return (GREEN + '(' + colored_branches + GREEN + ') ' + RESET)
 
 
 def _parse_log_line(line):
-  graph, branch_list, commit_date, subject = (
-      line.decode('utf-8', 'replace').strip().split('\x00'))
-  branch_list = [] if not branch_list else branch_list.split(', ')
-  commit = graph.split()[-1]
-  graph = graph[:-len(commit)]
-  return graph, commit, branch_list, commit_date, subject
+    graph, branch_list, commit_date, subject = (line.decode(
+        'utf-8', 'replace').strip().split('\x00'))
+    branch_list = [] if not branch_list else branch_list.split(', ')
+    commit = graph.split()[-1]
+    graph = graph[:-len(commit)]
+    return graph, commit, branch_list, commit_date, subject
 
 
 def main(argv, outbuf):
-  if '-h' in argv or '--help' in argv:
-    _print_help(outbuf)
+    if '-h' in argv or '--help' in argv:
+        _print_help(outbuf)
+        return 0
+
+    map_extra = git_common.get_config_list('depot_tools.map_extra')
+    cmd = [
+        git_common.GIT_EXE, 'log',
+        git_common.root(), '--graph', '--branches', '--tags', '--color=always',
+        '--date=short', '--pretty=format:%H%x00%D%x00%cd%x00%s'
+    ] + map_extra + argv
+
+    log_proc = subprocess2.Popen(cmd, stdout=subprocess2.PIPE, shell=False)
+
+    current = git_common.current_branch()
+    all_tags = set(git_common.tags())
+    all_branches = set(git_common.branches())
+    if current in all_branches:
+        all_branches.remove(current)
+
+    merge_base_map = {}
+    for branch in all_branches:
+        merge_base = git_common.get_or_create_merge_base(branch)
+        if merge_base:
+            merge_base_map.setdefault(merge_base, set()).add(branch)
+
+    for merge_base, branches in merge_base_map.items():
+        merge_base_map[merge_base] = ', '.join(branches)
+
+    try:
+        for line in log_proc.stdout:
+            if b'\x00' not in line:
+                outbuf.write(line)
+                continue
+
+            graph, commit, branch_list, commit_date, subject = _parse_log_line(
+                line)
+
+            if 'HEAD' in branch_list:
+                graph = graph.replace('*', BLUE_BACK + '*')
+
+            line = '{graph}{commit}\t{branches}{date} ~ {subject}'.format(
+                graph=graph,
+                commit=BRIGHT_RED + commit[:10] + RESET,
+                branches=_color_branch_list(branch_list, all_branches, all_tags,
+                                            current),
+                date=YELLOW + commit_date + RESET,
+                subject=subject)
+
+            if commit in merge_base_map:
+                line += '    <({})'.format(WHITE + merge_base_map[commit] +
+                                           RESET)
+
+            line += os.linesep
+            outbuf.write(line.encode('utf-8', 'replace'))
+    except (BrokenPipeError, KeyboardInterrupt):
+        pass
     return 0
 
-  map_extra = git_common.get_config_list('depot_tools.map_extra')
-  cmd = [
-      git_common.GIT_EXE, 'log', git_common.root(),
-      '--graph', '--branches', '--tags', '--color=always', '--date=short',
-      '--pretty=format:%H%x00%D%x00%cd%x00%s'
-  ] + map_extra + argv
-
-  log_proc = subprocess2.Popen(cmd, stdout=subprocess2.PIPE, shell=False)
-
-  current = git_common.current_branch()
-  all_tags = set(git_common.tags())
-  all_branches = set(git_common.branches())
-  if current in all_branches:
-    all_branches.remove(current)
-
-  merge_base_map = {}
-  for branch in all_branches:
-    merge_base = git_common.get_or_create_merge_base(branch)
-    if merge_base:
-      merge_base_map.setdefault(merge_base, set()).add(branch)
-
-  for merge_base, branches in merge_base_map.items():
-    merge_base_map[merge_base] = ', '.join(branches)
-
-  try:
-    for line in log_proc.stdout:
-      if b'\x00' not in line:
-        outbuf.write(line)
-        continue
-
-      graph, commit, branch_list, commit_date, subject = _parse_log_line(line)
-
-      if 'HEAD' in branch_list:
-        graph = graph.replace('*', BLUE_BACK + '*')
-
-      line = '{graph}{commit}\t{branches}{date} ~ {subject}'.format(
-          graph=graph,
-          commit=BRIGHT_RED + commit[:10] + RESET,
-          branches=_color_branch_list(
-              branch_list, all_branches, all_tags, current),
-          date=YELLOW + commit_date + RESET,
-          subject=subject)
-
-      if commit in merge_base_map:
-        line += '    <({})'.format(WHITE + merge_base_map[commit] + RESET)
-
-      line += os.linesep
-      outbuf.write(line.encode('utf-8', 'replace'))
-  except (BrokenPipeError, KeyboardInterrupt):
-    pass
-  return 0
-
 
 if __name__ == '__main__':
-  setup_color.init()
-  with git_common.less() as less_input:
-    sys.exit(main(sys.argv[1:], less_input))
+    setup_color.init()
+    with git_common.less() as less_input:
+        sys.exit(main(sys.argv[1:], less_input))
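
For context, a small sketch of the NUL-delimited record that the --pretty=format:%H%x00%D%x00%cd%x00%s invocation above emits and how _parse_log_line splits it; the hash, refs and subject below are placeholders, not real data.

# One log record: graph prefix + hash, decorations, date and subject,
# separated by NUL bytes (a shortened hash is used only for readability).
sample = b'* deadbeefcafe\x00HEAD -> main, origin/main\x002024-01-01\x00Fix a thing'

graph_and_hash, refs, date, subject = (
    sample.decode('utf-8', 'replace').strip().split('\x00'))
commit = graph_and_hash.split()[-1]    # 'deadbeefcafe'
graph = graph_and_hash[:-len(commit)]  # '* ' -- the graph drawing prefix
branch_list = refs.split(', ') if refs else []

assert commit == 'deadbeefcafe'
assert branch_list == ['HEAD -> main', 'origin/main']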

git_map_branches.py (+328 -314)

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Print dependency tree of branches in local repo.
 
 Example:
@@ -42,349 +41,364 @@ DEFAULT_SEPARATOR = ' ' * 4
 
 
 class OutputManager(object):
-  """Manages a number of OutputLines and formats them into aligned columns."""
-
-  def __init__(self):
-    self.lines = []
-    self.nocolor = False
-    self.max_column_lengths = []
-    self.num_columns = None
+    """Manages a number of OutputLines and formats them into aligned columns."""
+    def __init__(self):
+        self.lines = []
+        self.nocolor = False
+        self.max_column_lengths = []
+        self.num_columns = None
 
-  def append(self, line):
-    # All lines must have the same number of columns.
-    if not self.num_columns:
-      self.num_columns = len(line.columns)
-      self.max_column_lengths = [0] * self.num_columns
-    assert self.num_columns == len(line.columns)
+    def append(self, line):
+        # All lines must have the same number of columns.
+        if not self.num_columns:
+            self.num_columns = len(line.columns)
+            self.max_column_lengths = [0] * self.num_columns
+        assert self.num_columns == len(line.columns)
 
-    if self.nocolor:
-      line.colors = [''] * self.num_columns
+        if self.nocolor:
+            line.colors = [''] * self.num_columns
 
-    self.lines.append(line)
+        self.lines.append(line)
 
-    # Update maximum column lengths.
-    for i, col in enumerate(line.columns):
-      self.max_column_lengths[i] = max(self.max_column_lengths[i], len(col))
+        # Update maximum column lengths.
+        for i, col in enumerate(line.columns):
+            self.max_column_lengths[i] = max(self.max_column_lengths[i],
+                                             len(col))
 
-  def merge(self, other):
-    for line in other.lines:
-      self.append(line)
+    def merge(self, other):
+        for line in other.lines:
+            self.append(line)
 
-  def as_formatted_string(self):
-    return '\n'.join(
-        l.as_padded_string(self.max_column_lengths) for l in self.lines)
+    def as_formatted_string(self):
+        return '\n'.join(
+            l.as_padded_string(self.max_column_lengths) for l in self.lines)
 
 
 class OutputLine(object):
-  """A single line of data.
+    """A single line of data.
 
   This consists of an equal number of columns, colors and separators."""
-
-  def __init__(self):
-    self.columns = []
-    self.separators = []
-    self.colors = []
-
-  def append(self, data, separator=DEFAULT_SEPARATOR, color=Fore.WHITE):
-    self.columns.append(data)
-    self.separators.append(separator)
-    self.colors.append(color)
-
-  def as_padded_string(self, max_column_lengths):
-    """"Returns the data as a string with each column padded to
+    def __init__(self):
+        self.columns = []
+        self.separators = []
+        self.colors = []
+
+    def append(self, data, separator=DEFAULT_SEPARATOR, color=Fore.WHITE):
+        self.columns.append(data)
+        self.separators.append(separator)
+        self.colors.append(color)
+
+    def as_padded_string(self, max_column_lengths):
+        """"Returns the data as a string with each column padded to
     |max_column_lengths|."""
-    output_string = ''
-    for i, (color, data, separator) in enumerate(
-        zip(self.colors, self.columns, self.separators)):
-      if max_column_lengths[i] == 0:
-        continue
+        output_string = ''
+        for i, (color, data, separator) in enumerate(
+                zip(self.colors, self.columns, self.separators)):
+            if max_column_lengths[i] == 0:
+                continue
 
-      padding = (max_column_lengths[i] - len(data)) * ' '
-      output_string += color + data + padding + separator
+            padding = (max_column_lengths[i] - len(data)) * ' '
+            output_string += color + data + padding + separator
 
-    return output_string.rstrip()
+        return output_string.rstrip()
 
 
 class BranchMapper(object):
-  """A class which constructs output representing the tree's branch structure.
+    """A class which constructs output representing the tree's branch structure.
 
   Attributes:
     __branches_info: a map of branches to their BranchesInfo objects which
       consist of the branch hash, upstream and ahead/behind status.
     __gone_branches: a set of upstreams which are not fetchable by git"""
+    def __init__(self):
+        self.verbosity = 0
+        self.maxjobs = 0
+        self.show_subject = False
+        self.hide_dormant = False
+        self.output = OutputManager()
+        self.__gone_branches = set()
+        self.__branches_info = None
+        self.__parent_map = collections.defaultdict(list)
+        self.__current_branch = None
+        self.__current_hash = None
+        self.__tag_set = None
+        self.__status_info = {}
+
+    def start(self):
+        self.__branches_info = get_branches_info(
+            include_tracking_status=self.verbosity >= 1)
+        if (self.verbosity >= 2):
+            # Avoid heavy import unless necessary.
+            from git_cl import get_cl_statuses, color_for_status, Changelist
+
+            change_cls = [
+                Changelist(branchref='refs/heads/' + b)
+                for b in self.__branches_info.keys() if b
+            ]
+            status_info = get_cl_statuses(change_cls,
+                                          fine_grained=self.verbosity > 2,
+                                          max_processes=self.maxjobs)
+
+            # This is a blocking get which waits for the remote CL status to be
+            # retrieved.
+            for cl, status in status_info:
+                self.__status_info[cl.GetBranch()] = (cl.GetIssueURL(
+                    short=True), color_for_status(status), status)
+
+        roots = set()
+
+        # A map of parents to a list of their children.
+        for branch, branch_info in self.__branches_info.items():
+            if not branch_info:
+                continue
+
+            parent = branch_info.upstream
+            if self.__check_cycle(branch):
+                continue
+            if not self.__branches_info[parent]:
+                branch_upstream = upstream(branch)
+                # If git can't find the upstream, mark the upstream as gone.
+                if branch_upstream:
+                    parent = branch_upstream
+                else:
+                    self.__gone_branches.add(parent)
+                # A parent that isn't in the branches info is a root.
+                roots.add(parent)
+
+            self.__parent_map[parent].append(branch)
+
+        self.__current_branch = current_branch()
+        self.__current_hash = hash_one('HEAD', short=True)
+        self.__tag_set = tags()
+
+        if roots:
+            for root in sorted(roots):
+                self.__append_branch(root, self.output)
+        else:
+            no_branches = OutputLine()
+            no_branches.append('No User Branches')
+            self.output.append(no_branches)
+
+    def __check_cycle(self, branch):
+        # Maximum length of the cycle is `num_branches`. This limit avoids
+        # running into a cycle which does *not* contain `branch`.
+        num_branches = len(self.__branches_info)
+        cycle = [branch]
+        while len(cycle) < num_branches and self.__branches_info[cycle[-1]]:
+            parent = self.__branches_info[cycle[-1]].upstream
+            cycle.append(parent)
+            if parent == branch:
+                print('Warning: Detected cycle in branches: {}'.format(
+                    ' -> '.join(cycle)),
+                      file=sys.stderr)
+                return True
+        return False
+
+    def __is_invalid_parent(self, parent):
+        return not parent or parent in self.__gone_branches
+
+    def __color_for_branch(self, branch, branch_hash):
+        if branch.startswith('origin/'):
+            color = Fore.RED
+        elif branch.startswith('branch-heads'):
+            color = Fore.BLUE
+        elif self.__is_invalid_parent(branch) or branch in self.__tag_set:
+            color = Fore.MAGENTA
+        elif self.__current_hash.startswith(branch_hash):
+            color = Fore.CYAN
+        else:
+            color = Fore.GREEN
 
-  def __init__(self):
-    self.verbosity = 0
-    self.maxjobs = 0
-    self.show_subject = False
-    self.hide_dormant = False
-    self.output = OutputManager()
-    self.__gone_branches = set()
-    self.__branches_info = None
-    self.__parent_map = collections.defaultdict(list)
-    self.__current_branch = None
-    self.__current_hash = None
-    self.__tag_set = None
-    self.__status_info = {}
-
-  def start(self):
-    self.__branches_info = get_branches_info(
-        include_tracking_status=self.verbosity >= 1)
-    if (self.verbosity >= 2):
-      # Avoid heavy import unless necessary.
-      from git_cl import get_cl_statuses, color_for_status, Changelist
-
-      change_cls = [Changelist(branchref='refs/heads/'+b)
-                    for b in self.__branches_info.keys() if b]
-      status_info = get_cl_statuses(change_cls,
-                                    fine_grained=self.verbosity > 2,
-                                    max_processes=self.maxjobs)
-
-      # This is a blocking get which waits for the remote CL status to be
-      # retrieved.
-      for cl, status in status_info:
-        self.__status_info[cl.GetBranch()] = (cl.GetIssueURL(short=True),
-                                              color_for_status(status), status)
-
-    roots = set()
-
-    # A map of parents to a list of their children.
-    for branch, branch_info in self.__branches_info.items():
-      if not branch_info:
-        continue
-
-      parent = branch_info.upstream
-      if self.__check_cycle(branch):
-        continue
-      if not self.__branches_info[parent]:
-        branch_upstream = upstream(branch)
-        # If git can't find the upstream, mark the upstream as gone.
-        if branch_upstream:
-          parent = branch_upstream
+        if branch_hash and self.__current_hash.startswith(branch_hash):
+            color += Style.BRIGHT
         else:
-          self.__gone_branches.add(parent)
-        # A parent that isn't in the branches info is a root.
-        roots.add(parent)
-
-      self.__parent_map[parent].append(branch)
-
-    self.__current_branch = current_branch()
-    self.__current_hash = hash_one('HEAD', short=True)
-    self.__tag_set = tags()
-
-    if roots:
-      for root in sorted(roots):
-        self.__append_branch(root, self.output)
-    else:
-      no_branches = OutputLine()
-      no_branches.append('No User Branches')
-      self.output.append(no_branches)
-
-  def __check_cycle(self, branch):
-    # Maximum length of the cycle is `num_branches`. This limit avoids running
-    # into a cycle which does *not* contain `branch`.
-    num_branches = len(self.__branches_info)
-    cycle = [branch]
-    while len(cycle) < num_branches and self.__branches_info[cycle[-1]]:
-      parent = self.__branches_info[cycle[-1]].upstream
-      cycle.append(parent)
-      if parent == branch:
-        print('Warning: Detected cycle in branches: {}'.format(
-            ' -> '.join(cycle)), file=sys.stderr)
-        return True
-    return False
-
-  def __is_invalid_parent(self, parent):
-    return not parent or parent in self.__gone_branches
-
-  def __color_for_branch(self, branch, branch_hash):
-    if branch.startswith('origin/'):
-      color = Fore.RED
-    elif branch.startswith('branch-heads'):
-      color = Fore.BLUE
-    elif self.__is_invalid_parent(branch) or branch in self.__tag_set:
-      color = Fore.MAGENTA
-    elif self.__current_hash.startswith(branch_hash):
-      color = Fore.CYAN
-    else:
-      color = Fore.GREEN
-
-    if branch_hash and self.__current_hash.startswith(branch_hash):
-      color += Style.BRIGHT
-    else:
-      color += Style.NORMAL
-
-    return color
-
-  def __is_dormant_branch(self, branch):
-    if '/' in branch:
-      return False
-
-    is_dormant = run('config',
-                     '--get',
-                     'branch.{}.dormant'.format(branch),
-                     accepted_retcodes=[0, 1])
-    return is_dormant == 'true'
-
-  def __append_branch(self, branch, output, depth=0):
-    """Recurses through the tree structure and appends an OutputLine to the
+            color += Style.NORMAL
+
+        return color
+
+    def __is_dormant_branch(self, branch):
+        if '/' in branch:
+            return False
+
+        is_dormant = run('config',
+                         '--get',
+                         'branch.{}.dormant'.format(branch),
+                         accepted_retcodes=[0, 1])
+        return is_dormant == 'true'
+
+    def __append_branch(self, branch, output, depth=0):
+        """Recurses through the tree structure and appends an OutputLine to the
     OutputManager for each branch."""
-    child_output = OutputManager()
-    for child in sorted(self.__parent_map.pop(branch, ())):
-      self.__append_branch(child, child_output, depth=depth + 1)
-
-    is_dormant_branch = self.__is_dormant_branch(branch)
-    if self.hide_dormant and is_dormant_branch and not child_output.lines:
-      return
-
-    branch_info = self.__branches_info[branch]
-    if branch_info:
-      branch_hash = branch_info.hash
-    else:
-      try:
-        branch_hash = hash_one(branch, short=True)
-      except subprocess2.CalledProcessError:
-        branch_hash = None
-
-    line = OutputLine()
-
-    # The branch name with appropriate indentation.
-    suffix = ''
-    if branch == self.__current_branch or (
-        self.__current_branch == 'HEAD' and branch == self.__current_hash):
-      suffix = ' *'
-    branch_string = branch
-    if branch in self.__gone_branches:
-      branch_string = '{%s:GONE}' % branch
-    if not branch:
-      branch_string = '{NO_UPSTREAM}'
-    main_string = '  ' * depth + branch_string + suffix
-    line.append(
-        main_string,
-        color=self.__color_for_branch(branch, branch_hash))
-
-    # The branch hash.
-    if self.verbosity >= 2:
-      line.append(branch_hash or '', separator=' ', color=Fore.RED)
-
-    # The branch tracking status.
-    if self.verbosity >= 1:
-      commits_string = ''
-      behind_string = ''
-      front_separator = ''
-      center_separator = ''
-      back_separator = ''
-      if branch_info and not self.__is_invalid_parent(branch_info.upstream):
-        behind = branch_info.behind
-        commits = branch_info.commits
-
-        if commits:
-          commits_string = '%d commit' % commits
-          commits_string += 's' if commits > 1 else ' '
-        if behind:
-          behind_string = 'behind %d' % behind
-
-        if commits or behind:
-          front_separator = '['
-          back_separator = ']'
-
-        if commits and behind:
-          center_separator = '|'
-
-      line.append(front_separator, separator=' ')
-      line.append(commits_string, separator=' ', color=Fore.MAGENTA)
-      line.append(center_separator, separator=' ')
-      line.append(behind_string, separator=' ', color=Fore.MAGENTA)
-      line.append(back_separator)
-
-    if self.verbosity >= 4:
-      line.append(' (dormant)' if is_dormant_branch else '          ',
-                  separator='  ',
-                  color=Fore.RED)
-
-    # The Rietveld issue associated with the branch.
-    if self.verbosity >= 2:
-      (url, color, status) = ('', '', '') if self.__is_invalid_parent(branch) \
-          else self.__status_info[branch]
-      if self.verbosity > 2:
-        line.append('{} ({})'.format(url, status) if url else '', color=color)
-      else:
-        line.append(url or '', color=color)
-
-    # The subject of the most recent commit on the branch.
-    if self.show_subject:
-      if not self.__is_invalid_parent(branch):
-        line.append(run('log', '-n1', '--format=%s', branch, '--'))
-      else:
-        line.append('')
-
-    output.append(line)
-
-    output.merge(child_output)
+        child_output = OutputManager()
+        for child in sorted(self.__parent_map.pop(branch, ())):
+            self.__append_branch(child, child_output, depth=depth + 1)
+
+        is_dormant_branch = self.__is_dormant_branch(branch)
+        if self.hide_dormant and is_dormant_branch and not child_output.lines:
+            return
+
+        branch_info = self.__branches_info[branch]
+        if branch_info:
+            branch_hash = branch_info.hash
+        else:
+            try:
+                branch_hash = hash_one(branch, short=True)
+            except subprocess2.CalledProcessError:
+                branch_hash = None
+
+        line = OutputLine()
+
+        # The branch name with appropriate indentation.
+        suffix = ''
+        if branch == self.__current_branch or (self.__current_branch == 'HEAD'
+                                               and branch
+                                               == self.__current_hash):
+            suffix = ' *'
+        branch_string = branch
+        if branch in self.__gone_branches:
+            branch_string = '{%s:GONE}' % branch
+        if not branch:
+            branch_string = '{NO_UPSTREAM}'
+        main_string = '  ' * depth + branch_string + suffix
+        line.append(main_string,
+                    color=self.__color_for_branch(branch, branch_hash))
+
+        # The branch hash.
+        if self.verbosity >= 2:
+            line.append(branch_hash or '', separator=' ', color=Fore.RED)
+
+        # The branch tracking status.
+        if self.verbosity >= 1:
+            commits_string = ''
+            behind_string = ''
+            front_separator = ''
+            center_separator = ''
+            back_separator = ''
+            if branch_info and not self.__is_invalid_parent(
+                    branch_info.upstream):
+                behind = branch_info.behind
+                commits = branch_info.commits
+
+                if commits:
+                    commits_string = '%d commit' % commits
+                    commits_string += 's' if commits > 1 else ' '
+                if behind:
+                    behind_string = 'behind %d' % behind
+
+                if commits or behind:
+                    front_separator = '['
+                    back_separator = ']'
+
+                if commits and behind:
+                    center_separator = '|'
+
+            line.append(front_separator, separator=' ')
+            line.append(commits_string, separator=' ', color=Fore.MAGENTA)
+            line.append(center_separator, separator=' ')
+            line.append(behind_string, separator=' ', color=Fore.MAGENTA)
+            line.append(back_separator)
+
+        if self.verbosity >= 4:
+            line.append(' (dormant)' if is_dormant_branch else '          ',
+                        separator='  ',
+                        color=Fore.RED)
+
+        # The Rietveld issue associated with the branch.
+        if self.verbosity >= 2:
+            (url, color,
+             status) = (('', '', '') if self.__is_invalid_parent(branch) else
+                        self.__status_info[branch])
+            if self.verbosity > 2:
+                line.append('{} ({})'.format(url, status) if url else '',
+                            color=color)
+            else:
+                line.append(url or '', color=color)
+
+        # The subject of the most recent commit on the branch.
+        if self.show_subject:
+            if not self.__is_invalid_parent(branch):
+                line.append(run('log', '-n1', '--format=%s', branch, '--'))
+            else:
+                line.append('')
+
+        output.append(line)
+
+        output.merge(child_output)
 
 
 def print_desc():
-  for line in __doc__.splitlines():
-    starpos = line.find('* ')
-    if starpos == -1 or '-' not in line:
-      print(line)
-    else:
-      _, color, rest = line.split(None, 2)
-      outline = line[:starpos+1]
-      outline += getattr(Fore, color.upper()) + " " + color + " " + Fore.RESET
-      outline += rest
-      print(outline)
-  print('')
+    for line in __doc__.splitlines():
+        starpos = line.find('* ')
+        if starpos == -1 or '-' not in line:
+            print(line)
+        else:
+            _, color, rest = line.split(None, 2)
+            outline = line[:starpos + 1]
+            outline += getattr(Fore,
+                               color.upper()) + " " + color + " " + Fore.RESET
+            outline += rest
+            print(outline)
+    print('')
+
 
 @metrics.collector.collect_metrics('git map-branches')
 def main(argv):
-  setup_color.init()
-  if get_git_version() < MIN_UPSTREAM_TRACK_GIT_VERSION:
-    print(
-        'This tool will not show all tracking information for git version '
-        'earlier than ' +
-        '.'.join(str(x) for x in MIN_UPSTREAM_TRACK_GIT_VERSION) +
-        '. Please consider upgrading.', file=sys.stderr)
-
-  if '-h' in argv:
-    print_desc()
-
-  parser = argparse.ArgumentParser()
-  parser.add_argument('-v',
-                      action='count',
-                      default=0,
-                      help=('Pass once to show tracking info, '
-                            'twice for hash and review url, '
-                            'thrice for review status, '
-                            'four times to mark dormant branches'))
-  parser.add_argument('--no-color', action='store_true', dest='nocolor',
-                      help='Turn off colors.')
-  parser.add_argument(
-      '-j', '--maxjobs', action='store', type=int,
-      help='The number of jobs to use when retrieving review status')
-  parser.add_argument('--show-subject', action='store_true',
-                      dest='show_subject', help='Show the commit subject.')
-  parser.add_argument('--hide-dormant',
-                      action='store_true',
-                      dest='hide_dormant',
-                      help='Hides dormant branches.')
-
-  opts = parser.parse_args(argv)
-
-  mapper = BranchMapper()
-  mapper.verbosity = opts.v
-  mapper.output.nocolor = opts.nocolor
-  mapper.maxjobs = opts.maxjobs
-  mapper.show_subject = opts.show_subject
-  mapper.hide_dormant = opts.hide_dormant
-  mapper.start()
-  print(mapper.output.as_formatted_string())
-  return 0
+    setup_color.init()
+    if get_git_version() < MIN_UPSTREAM_TRACK_GIT_VERSION:
+        print(
+            'This tool will not show all tracking information for git version '
+            'earlier than ' +
+            '.'.join(str(x) for x in MIN_UPSTREAM_TRACK_GIT_VERSION) +
+            '. Please consider upgrading.',
+            file=sys.stderr)
+
+    if '-h' in argv:
+        print_desc()
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-v',
+                        action='count',
+                        default=0,
+                        help=('Pass once to show tracking info, '
+                              'twice for hash and review url, '
+                              'thrice for review status, '
+                              'four times to mark dormant branches'))
+    parser.add_argument('--no-color',
+                        action='store_true',
+                        dest='nocolor',
+                        help='Turn off colors.')
+    parser.add_argument(
+        '-j',
+        '--maxjobs',
+        action='store',
+        type=int,
+        help='The number of jobs to use when retrieving review status')
+    parser.add_argument('--show-subject',
+                        action='store_true',
+                        dest='show_subject',
+                        help='Show the commit subject.')
+    parser.add_argument('--hide-dormant',
+                        action='store_true',
+                        dest='hide_dormant',
+                        help='Hides dormant branches.')
+
+    opts = parser.parse_args(argv)
+
+    mapper = BranchMapper()
+    mapper.verbosity = opts.v
+    mapper.output.nocolor = opts.nocolor
+    mapper.maxjobs = opts.maxjobs
+    mapper.show_subject = opts.show_subject
+    mapper.hide_dormant = opts.hide_dormant
+    mapper.start()
+    print(mapper.output.as_formatted_string())
+    return 0
+
 
 if __name__ == '__main__':
-  try:
-    with metrics.collector.print_notice_and_exit():
-      sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        with metrics.collector.print_notice_and_exit():
+            sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
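
A minimal standalone illustration of the alignment scheme that OutputLine.as_padded_string implements above: pad each column to the widest entry seen for it, append its separator, and right-strip the assembled line. The branch rows below are hypothetical.

rows = [
    ['main *', '[2 commits]'],
    ['fix-indent', '[behind 3]'],
]
# Column widths are the maxima tracked by OutputManager.append.
widths = [max(len(row[i]) for row in rows) for i in range(len(rows[0]))]
for row in rows:
    print('    '.join(col.ljust(widths[i])
                      for i, col in enumerate(row)).rstrip())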

git_mark_merge_base.py (+40 -40)

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 Explicitly set/remove/print the merge-base for the current branch.
 
@@ -21,51 +20,52 @@ from git_common import get_or_create_merge_base, hash_one, upstream
 
 
 def main(argv):
-  parser = argparse.ArgumentParser(
-    description=__doc__.strip().splitlines()[0],
-    epilog=' '.join(__doc__.strip().splitlines()[1:]))
-  g = parser.add_mutually_exclusive_group()
-  g.add_argument(
-    'merge_base', nargs='?',
-    help='The new hash to use as the merge base for the current branch'
-  )
-  g.add_argument('--delete', '-d', action='store_true',
-                 help='Remove the set mark.')
-  opts = parser.parse_args(argv)
+    parser = argparse.ArgumentParser(
+        description=__doc__.strip().splitlines()[0],
+        epilog=' '.join(__doc__.strip().splitlines()[1:]))
+    g = parser.add_mutually_exclusive_group()
+    g.add_argument(
+        'merge_base',
+        nargs='?',
+        help='The new hash to use as the merge base for the current branch')
+    g.add_argument('--delete',
+                   '-d',
+                   action='store_true',
+                   help='Remove the set mark.')
+    opts = parser.parse_args(argv)
 
-  cur = current_branch()
+    cur = current_branch()
 
-  if opts.delete:
-    try:
-      remove_merge_base(cur)
-    except CalledProcessError:
-      print('No merge base currently exists for %s.' % cur)
-    return 0
+    if opts.delete:
+        try:
+            remove_merge_base(cur)
+        except CalledProcessError:
+            print('No merge base currently exists for %s.' % cur)
+        return 0
 
-  if opts.merge_base:
-    try:
-      opts.merge_base = hash_one(opts.merge_base)
-    except CalledProcessError:
-      print(
-          'fatal: could not resolve %s as a commit' % opts.merge_base,
-          file=sys.stderr)
-      return 1
+    if opts.merge_base:
+        try:
+            opts.merge_base = hash_one(opts.merge_base)
+        except CalledProcessError:
+            print('fatal: could not resolve %s as a commit' % opts.merge_base,
+                  file=sys.stderr)
+            return 1
 
-    manual_merge_base(cur, opts.merge_base, upstream(cur))
+        manual_merge_base(cur, opts.merge_base, upstream(cur))
 
-  ret = 0
-  actual = get_or_create_merge_base(cur)
-  if opts.merge_base and opts.merge_base != actual:
-    ret = 1
-    print("Invalid merge_base %s" % opts.merge_base)
+    ret = 0
+    actual = get_or_create_merge_base(cur)
+    if opts.merge_base and opts.merge_base != actual:
+        ret = 1
+        print("Invalid merge_base %s" % opts.merge_base)
 
-  print("merge_base(%s): %s" % (cur, actual))
-  return ret
+    print("merge_base(%s): %s" % (cur, actual))
+    return ret
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)

git_migrate_default_branch.py (+71 -70)

@@ -15,88 +15,89 @@ import urllib.parse
 
 
 def GetGerritProject(remote_url):
-  """Returns Gerrit project name based on remote git URL."""
-  if remote_url is None:
-    raise RuntimeError('can\'t detect Gerrit project.')
-  project = urllib.parse.urlparse(remote_url).path.strip('/')
-  if project.endswith('.git'):
-    project = project[:-len('.git')]
-  # *.googlesource.com hosts ensure that Git/Gerrit projects don't start with
-  # 'a/' prefix, because 'a/' prefix is used to force authentication in
-  # gitiles/git-over-https protocol. E.g.,
-  # https://chromium.googlesource.com/a/v8/v8 refers to the same repo/project
-  # as
-  # https://chromium.googlesource.com/v8/v8
-  if project.startswith('a/'):
-    project = project[len('a/'):]
-  return project
+    """Returns Gerrit project name based on remote git URL."""
+    if remote_url is None:
+        raise RuntimeError('can\'t detect Gerrit project.')
+    project = urllib.parse.urlparse(remote_url).path.strip('/')
+    if project.endswith('.git'):
+        project = project[:-len('.git')]
+    # *.googlesource.com hosts ensure that Git/Gerrit projects don't start with
+    # 'a/' prefix, because 'a/' prefix is used to force authentication in
+    # gitiles/git-over-https protocol. E.g.,
+    # https://chromium.googlesource.com/a/v8/v8 refers to the same repo/project
+    # as
+    # https://chromium.googlesource.com/v8/v8
+    if project.startswith('a/'):
+        project = project[len('a/'):]
+    return project
 
 
 def GetGerritHost(git_host):
-  parts = git_host.split('.')
-  parts[0] = parts[0] + '-review'
-  return '.'.join(parts)
+    parts = git_host.split('.')
+    parts[0] = parts[0] + '-review'
+    return '.'.join(parts)
 
 
 def main():
-  remote = git_common.run('remote')
-  # Use first remote as source of truth
-  remote = remote.split("\n")[0]
-  if not remote:
-    raise RuntimeError('Could not find any remote')
-  url = scm.GIT.GetConfig(git_common.repo_root(), 'remote.%s.url' % remote)
-  host = urllib.parse.urlparse(url).netloc
-  if not host:
-    raise RuntimeError('Could not find remote host')
+    remote = git_common.run('remote')
+    # Use first remote as source of truth
+    remote = remote.split("\n")[0]
+    if not remote:
+        raise RuntimeError('Could not find any remote')
+    url = scm.GIT.GetConfig(git_common.repo_root(), 'remote.%s.url' % remote)
+    host = urllib.parse.urlparse(url).netloc
+    if not host:
+        raise RuntimeError('Could not find remote host')
 
-  project_head = gerrit_util.GetProjectHead(GetGerritHost(host),
-                                            GetGerritProject(url))
-  if project_head != 'refs/heads/main':
-    raise RuntimeError("The repository is not migrated yet.")
+    project_head = gerrit_util.GetProjectHead(GetGerritHost(host),
+                                              GetGerritProject(url))
+    if project_head != 'refs/heads/main':
+        raise RuntimeError("The repository is not migrated yet.")
 
-  # User may have set to fetch only old default branch. Ensure fetch is tracking
-  # main too.
-  git_common.run('config', '--unset-all',
-                 'remote.origin.fetch', 'refs/heads/*')
-  git_common.run('config', '--add',
-                 'remote.origin.fetch', '+refs/heads/*:refs/remotes/origin/*')
-  logging.info("Running fetch...")
-  git_common.run('fetch', remote)
-  logging.info("Updating remote HEAD...")
-  git_common.run('remote', 'set-head', '-a', remote)
+    # User may have set to fetch only old default branch. Ensure fetch is
+    # tracking main too.
+    git_common.run('config', '--unset-all', 'remote.origin.fetch',
+                   'refs/heads/*')
+    git_common.run('config', '--add', 'remote.origin.fetch',
+                   '+refs/heads/*:refs/remotes/origin/*')
+    logging.info("Running fetch...")
+    git_common.run('fetch', remote)
+    logging.info("Updating remote HEAD...")
+    git_common.run('remote', 'set-head', '-a', remote)
 
-  branches = git_common.get_branches_info(True)
-
-  if 'master' in branches:
-    logging.info("Migrating master branch...")
-    if 'main' in branches:
-      logging.info('You already have master and main branch, consider removing '
-                   'master manually:\n'
-                   ' $ git branch -d master\n')
-    else:
-      git_common.run('branch', '-m', 'master', 'main')
     branches = git_common.get_branches_info(True)
 
-  for name in branches:
-    branch = branches[name]
-    if not branch:
-      continue
+    if 'master' in branches:
+        logging.info("Migrating master branch...")
+        if 'main' in branches:
+            logging.info(
+                'You already have master and main branch, consider removing '
+                'master manually:\n'
+                ' $ git branch -d master\n')
+        else:
+            git_common.run('branch', '-m', 'master', 'main')
+        branches = git_common.get_branches_info(True)
+
+    for name in branches:
+        branch = branches[name]
+        if not branch:
+            continue
 
-    if 'master' in branch.upstream:
-      logging.info("Migrating %s branch..." % name)
-      new_upstream = branch.upstream.replace('master', 'main')
-      git_common.run('branch', '--set-upstream-to', new_upstream, name)
-      git_common.remove_merge_base(name)
+        if 'master' in branch.upstream:
+            logging.info("Migrating %s branch..." % name)
+            new_upstream = branch.upstream.replace('master', 'main')
+            git_common.run('branch', '--set-upstream-to', new_upstream, name)
+            git_common.remove_merge_base(name)
 
 
 if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  logging.basicConfig(level=logging.INFO)
-  with metrics.collector.print_notice_and_exit():
-    try:
-      logging.info("Starting migration")
-      main()
-      logging.info("Migration completed")
-    except RuntimeError as e:
-      logging.error("Error %s" % str(e))
-      sys.exit(1)
+    fix_encoding.fix_encoding()
+    logging.basicConfig(level=logging.INFO)
+    with metrics.collector.print_notice_and_exit():
+        try:
+            logging.info("Starting migration")
+            main()
+            logging.info("Migration completed")
+        except RuntimeError as e:
+            logging.error("Error %s" % str(e))
+            sys.exit(1)
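
For orientation, the config and fetch steps above reduce to four plain git invocations. A minimal standalone sketch of that sequence, assuming the remote is literally named 'origin' (as the script's config keys do); this is illustrative only and not part of the change itself:

import subprocess

def migrate_fetch_config():
    # Stop restricting fetches to the old default branch; track all heads again.
    # check=False: `git config --unset-all` exits non-zero if nothing matched.
    subprocess.run(['git', 'config', '--unset-all', 'remote.origin.fetch',
                    'refs/heads/*'], check=False)
    subprocess.run(['git', 'config', '--add', 'remote.origin.fetch',
                    '+refs/heads/*:refs/remotes/origin/*'], check=True)
    # Refresh remote-tracking refs and re-detect the remote HEAD (now main).
    subprocess.run(['git', 'fetch', 'origin'], check=True)
    subprocess.run(['git', 'remote', 'set-head', '-a', 'origin'], check=True)

Only after these steps does the script rename master to main and re-point local upstreams.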

+ 45 - 43
git_nav_downstream.py

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 Checks out a downstream branch from the currently checked out branch. If there
 is more than one downstream branch, then this script will prompt you to select
@@ -19,50 +18,53 @@ import metrics
 
 @metrics.collector.collect_metrics('git nav-downstream')
 def main(args):
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--pick',
-                      help=(
-                          'The number to pick if this command would '
-                          'prompt'))
-  opts = parser.parse_args(args)
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--pick',
+                        help=('The number to pick if this command would '
+                              'prompt'))
+    opts = parser.parse_args(args)
+
+    upfn = upstream
+    cur = current_branch()
+    if cur == 'HEAD':
+
+        def _upfn(b):
+            parent = upstream(b)
+            if parent:
+                return hash_one(parent)
 
-  upfn = upstream
-  cur = current_branch()
-  if cur == 'HEAD':
-    def _upfn(b):
-      parent = upstream(b)
-      if parent:
-        return hash_one(parent)
-    upfn = _upfn
-    cur = hash_one(cur)
-  downstreams = [b for b in branches() if upfn(b) == cur]
-  if not downstreams:
-    print("No downstream branches")
-    return 1
+        upfn = _upfn
+        cur = hash_one(cur)
+    downstreams = [b for b in branches() if upfn(b) == cur]
+    if not downstreams:
+        print("No downstream branches")
+        return 1
 
-  if len(downstreams) == 1:
-    run('checkout', downstreams[0], stdout=sys.stdout, stderr=sys.stderr)
-  else:
-    high = len(downstreams) - 1
-    while True:
-      print("Please select a downstream branch")
-      for i, b in enumerate(downstreams):
-        print("  %d. %s" % (i, b))
-      prompt = "Selection (0-%d)[0]: " % high
-      r = opts.pick
-      if r:
-        print(prompt + r)
-      else:
-        r = gclient_utils.AskForData(prompt).strip() or '0'
-      if not r.isdigit() or (0 > int(r) > high):
-        print("Invalid choice.")
-      else:
-        run('checkout', downstreams[int(r)], stdout=sys.stdout,
-            stderr=sys.stderr)
-        break
-  return 0
+    if len(downstreams) == 1:
+        run('checkout', downstreams[0], stdout=sys.stdout, stderr=sys.stderr)
+    else:
+        high = len(downstreams) - 1
+        while True:
+            print("Please select a downstream branch")
+            for i, b in enumerate(downstreams):
+                print("  %d. %s" % (i, b))
+            prompt = "Selection (0-%d)[0]: " % high
+            r = opts.pick
+            if r:
+                print(prompt + r)
+            else:
+                r = gclient_utils.AskForData(prompt).strip() or '0'
+            if not r.isdigit() or (0 > int(r) > high):
+                print("Invalid choice.")
+            else:
+                run('checkout',
+                    downstreams[int(r)],
+                    stdout=sys.stdout,
+                    stderr=sys.stderr)
+                break
+    return 0
 
 
 if __name__ == '__main__':
-  with metrics.collector.print_notice_and_exit():
-    sys.exit(main(sys.argv[1:]))
+    with metrics.collector.print_notice_and_exit():
+        sys.exit(main(sys.argv[1:]))
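
The heart of the tool is the single list comprehension that keeps only the branches whose upstream is the checked-out branch. A toy sketch of that filter over a hard-coded branch-to-upstream map (the branch names here are made up):

def downstreams_of(current, upstream_map):
    """Return branches whose configured upstream is `current`."""
    return [b for b, up in upstream_map.items() if up == current]

# Hypothetical layout: feature-a and feature-b both track main.
upstream_map = {'feature-a': 'main', 'feature-b': 'main', 'fixup': 'feature-a'}
assert downstreams_of('main', upstream_map) == ['feature-a', 'feature-b']
assert downstreams_of('feature-a', upstream_map) == ['fixup']

With no match the script prints "No downstream branches" and returns 1; with exactly one it checks it out directly; otherwise it falls into the numbered prompt shown above.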

+ 68 - 60
git_new_branch.py

@@ -13,70 +13,78 @@ import git_common
 import subprocess2
 
 
-def create_new_branch(
-    branch_name, upstream_current=False, upstream=None, inject_current=False):
-  upstream = upstream or git_common.root()
-  try:
-    if inject_current:
-      below = git_common.current_branch()
-      if below is None:
-        raise Exception('no current branch')
-      above = git_common.upstream(below)
-      if above is None:
-        raise Exception('branch %s has no upstream' % (below))
-      git_common.run('checkout', '--track', above, '-b', branch_name)
-      git_common.run('branch', '--set-upstream-to', branch_name, below)
-    elif upstream_current:
-      git_common.run('checkout', '--track', '-b', branch_name)
-    else:
-      if upstream in git_common.tags():
-        # TODO(iannucci): ensure that basis_ref is an ancestor of HEAD?
-        git_common.run(
-            'checkout', '--no-track', '-b', branch_name,
-            git_common.hash_one(upstream))
-        git_common.set_config('branch.%s.remote' % branch_name, '.')
-        git_common.set_config('branch.%s.merge' % branch_name, upstream)
-      else:
-        # TODO(iannucci): Detect unclean workdir then stash+pop if we need to
-        # teleport to a conflicting portion of history?
-        git_common.run('checkout', '--track', upstream, '-b', branch_name)
-    git_common.get_or_create_merge_base(branch_name)
-  except subprocess2.CalledProcessError as cpe:
-    sys.stdout.write(cpe.stdout.decode('utf-8', 'replace'))
-    sys.stderr.write(cpe.stderr.decode('utf-8', 'replace'))
-    return 1
-  sys.stderr.write('Switched to branch %s.\n' % branch_name)
-  return 0
+def create_new_branch(branch_name,
+                      upstream_current=False,
+                      upstream=None,
+                      inject_current=False):
+    upstream = upstream or git_common.root()
+    try:
+        if inject_current:
+            below = git_common.current_branch()
+            if below is None:
+                raise Exception('no current branch')
+            above = git_common.upstream(below)
+            if above is None:
+                raise Exception('branch %s has no upstream' % (below))
+            git_common.run('checkout', '--track', above, '-b', branch_name)
+            git_common.run('branch', '--set-upstream-to', branch_name, below)
+        elif upstream_current:
+            git_common.run('checkout', '--track', '-b', branch_name)
+        else:
+            if upstream in git_common.tags():
+                # TODO(iannucci): ensure that basis_ref is an ancestor of HEAD?
+                git_common.run('checkout', '--no-track', '-b', branch_name,
+                               git_common.hash_one(upstream))
+                git_common.set_config('branch.%s.remote' % branch_name, '.')
+                git_common.set_config('branch.%s.merge' % branch_name, upstream)
+            else:
+                # TODO(iannucci): Detect unclean workdir then stash+pop if we
+                # need to teleport to a conflicting portion of history?
+                git_common.run('checkout', '--track', upstream, '-b',
+                               branch_name)
+        git_common.get_or_create_merge_base(branch_name)
+    except subprocess2.CalledProcessError as cpe:
+        sys.stdout.write(cpe.stdout.decode('utf-8', 'replace'))
+        sys.stderr.write(cpe.stderr.decode('utf-8', 'replace'))
+        return 1
+    sys.stderr.write('Switched to branch %s.\n' % branch_name)
+    return 0
+
 
 def main(args):
-  parser = argparse.ArgumentParser(
-    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
-    description=__doc__,
-  )
-  parser.add_argument('branch_name')
-  g = parser.add_mutually_exclusive_group()
-  g.add_argument('--upstream-current', '--upstream_current',
-                 action='store_true',
-                 help='set upstream branch to current branch.')
-  g.add_argument('--upstream', metavar='REF',
-                 help='upstream branch (or tag) to track.')
-  g.add_argument('--inject-current', '--inject_current',
-                 action='store_true',
-                 help='new branch adopts current branch\'s upstream,' +
-                 ' and new branch becomes current branch\'s upstream.')
-  g.add_argument('--lkgr', action='store_const', const='lkgr', dest='upstream',
-                 help='set basis ref for new branch to lkgr.')
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+        description=__doc__,
+    )
+    parser.add_argument('branch_name')
+    g = parser.add_mutually_exclusive_group()
+    g.add_argument('--upstream-current',
+                   '--upstream_current',
+                   action='store_true',
+                   help='set upstream branch to current branch.')
+    g.add_argument('--upstream',
+                   metavar='REF',
+                   help='upstream branch (or tag) to track.')
+    g.add_argument('--inject-current',
+                   '--inject_current',
+                   action='store_true',
+                   help='new branch adopts current branch\'s upstream,' +
+                   ' and new branch becomes current branch\'s upstream.')
+    g.add_argument('--lkgr',
+                   action='store_const',
+                   const='lkgr',
+                   dest='upstream',
+                   help='set basis ref for new branch to lkgr.')
 
-  opts = parser.parse_args(args)
+    opts = parser.parse_args(args)
 
-  return create_new_branch(
-      opts.branch_name, opts.upstream_current, opts.upstream,
-      opts.inject_current)
+    return create_new_branch(opts.branch_name, opts.upstream_current,
+                             opts.upstream, opts.inject_current)
 
 
 if __name__ == '__main__':  # pragma: no cover
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
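
Because create_new_branch() is a plain function, the same behaviour is reachable from Python as well as from `git new-branch`. A hedged usage sketch, assuming depot_tools is on sys.path and the process is running inside a git checkout; the branch names are hypothetical:

import sys

import git_new_branch

# Track the configured root ref (the default when no upstream is given).
rc = git_new_branch.create_new_branch('my-feature')
# Stack a second branch on top of whatever is now checked out.
rc = rc or git_new_branch.create_new_branch('my-followup', upstream_current=True)
sys.exit(rc)

The return value mirrors the CLI: 0 on success, 1 if the underlying git commands failed.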

+ 187 - 166
git_number.py

@@ -2,7 +2,6 @@
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Usage: %prog [options] [<commitref>]*
 
 If no <commitref>'s are supplied, it defaults to HEAD.
@@ -51,18 +50,18 @@ POOL_KIND = 'procs'
 
 
 def pathlify(hash_prefix):
-  """Converts a binary object hash prefix into a posix path, one folder per
+    """Converts a binary object hash prefix into a posix path, one folder per
   byte.
 
   >>> pathlify('\xDE\xAD')
   'de/ad'
   """
-  return '/'.join('%02x' % b for b in hash_prefix)
+    return '/'.join('%02x' % b for b in hash_prefix)
 
 
 @git.memoize_one(threadsafe=False)
 def get_number_tree(prefix_bytes):
-  """Returns a dictionary of the git-number registry specified by
+    """Returns a dictionary of the git-number registry specified by
   |prefix_bytes|.
 
   This is in the form of {<full binary ref>: <gen num> ...}
@@ -70,36 +69,37 @@ def get_number_tree(prefix_bytes):
   >>> get_number_tree('\x83\xb4')
   {'\x83\xb4\xe3\xe4W\xf9J*\x8f/c\x16\xecD\xd1\x04\x8b\xa9qz': 169, ...}
   """
-  ref = '%s:%s' % (REF, pathlify(prefix_bytes))
+    ref = '%s:%s' % (REF, pathlify(prefix_bytes))
 
-  try:
-    raw = git.run('cat-file', 'blob', ref, autostrip=False, decode=False)
-    return dict(struct.unpack_from(CHUNK_FMT, raw, i * CHUNK_SIZE)
-                for i in range(len(raw) // CHUNK_SIZE))
-  except subprocess2.CalledProcessError:
-    return {}
+    try:
+        raw = git.run('cat-file', 'blob', ref, autostrip=False, decode=False)
+        return dict(
+            struct.unpack_from(CHUNK_FMT, raw, i * CHUNK_SIZE)
+            for i in range(len(raw) // CHUNK_SIZE))
+    except subprocess2.CalledProcessError:
+        return {}
 
 
 @git.memoize_one(threadsafe=False)
 def get_num(commit_hash):
-  """Returns the generation number for a commit.
+    """Returns the generation number for a commit.
 
   Returns None if the generation number for this commit hasn't been calculated
   yet (see load_generation_numbers()).
   """
-  return get_number_tree(commit_hash[:PREFIX_LEN]).get(commit_hash)
+    return get_number_tree(commit_hash[:PREFIX_LEN]).get(commit_hash)
 
 
 def clear_caches(on_disk=False):
-  """Clears in-process caches for e.g. unit testing."""
-  get_number_tree.clear()
-  get_num.clear()
-  if on_disk:
-    git.run('update-ref', '-d', REF)
+    """Clears in-process caches for e.g. unit testing."""
+    get_number_tree.clear()
+    get_num.clear()
+    if on_disk:
+        git.run('update-ref', '-d', REF)
 
 
 def intern_number_tree(tree):
-  """Transforms a number tree (in the form returned by |get_number_tree|) into
+    """Transforms a number tree (in the form returned by |get_number_tree|) into
   a git blob.
 
   Returns the git blob id as hex-encoded string.
@@ -108,88 +108,95 @@ def intern_number_tree(tree):
   >>> intern_number_tree(d)
   'c552317aa95ca8c3f6aae3357a4be299fbcb25ce'
   """
-  with tempfile.TemporaryFile() as f:
-    for k, v in sorted(tree.items()):
-      f.write(struct.pack(CHUNK_FMT, k, v))
-    f.seek(0)
-    return git.intern_f(f)
+    with tempfile.TemporaryFile() as f:
+        for k, v in sorted(tree.items()):
+            f.write(struct.pack(CHUNK_FMT, k, v))
+        f.seek(0)
+        return git.intern_f(f)
 
 
 def leaf_map_fn(pre_tree):
-  """Converts a prefix and number tree into a git index line."""
-  pre, tree = pre_tree
-  return '100644 blob %s\t%s\0' % (intern_number_tree(tree), pathlify(pre))
+    """Converts a prefix and number tree into a git index line."""
+    pre, tree = pre_tree
+    return '100644 blob %s\t%s\0' % (intern_number_tree(tree), pathlify(pre))
 
 
 def finalize(targets):
-  """Saves all cache data to the git repository.
+    """Saves all cache data to the git repository.
 
   After calculating the generation number for |targets|, call finalize() to
   save all the work to the git repository.
 
   This in particular saves the trees referred to by DIRTY_TREES.
   """
-  if not DIRTY_TREES:
-    return
-
-  msg = 'git-number Added %s numbers' % sum(DIRTY_TREES.values())
-
-  idx = os.path.join(git.run('rev-parse', '--git-dir'), 'number.idx')
-  env = os.environ.copy()
-  env['GIT_INDEX_FILE'] = str(idx)
-
-  progress_message = 'Finalizing: (%%(count)d/%d)' % len(DIRTY_TREES)
-  with git.ProgressPrinter(progress_message) as inc:
-    git.run('read-tree', REF, env=env)
-
-    prefixes_trees = ((p, get_number_tree(p)) for p in sorted(DIRTY_TREES))
-    updater = subprocess2.Popen(['git', 'update-index', '-z', '--index-info'],
-                                stdin=subprocess2.PIPE, env=env)
-
-    with git.ScopedPool(kind=POOL_KIND) as leaf_pool:
-      for item in leaf_pool.imap(leaf_map_fn, prefixes_trees):
-        updater.stdin.write(item.encode())
-        inc()
-
-    updater.stdin.close()
-    updater.wait()
-    assert updater.returncode == 0
-
-    tree_id = git.run('write-tree', env=env)
-    commit_cmd = [
-        # Git user.name and/or user.email may not be configured, so specifying
-        # them explicitly. They are not used, but required by Git.
-        '-c', 'user.name=%s' % AUTHOR_NAME,
-        '-c', 'user.email=%s' % AUTHOR_EMAIL,
-        'commit-tree',
-        '-m', msg,
-        '-p'] + git.hash_multi(REF)
-    for t in targets:
-      commit_cmd.extend(['-p', binascii.hexlify(t).decode()])
-    commit_cmd.append(tree_id)
-    commit_hash = git.run(*commit_cmd)
-    git.run('update-ref', REF, commit_hash)
-  DIRTY_TREES.clear()
+    if not DIRTY_TREES:
+        return
+
+    msg = 'git-number Added %s numbers' % sum(DIRTY_TREES.values())
+
+    idx = os.path.join(git.run('rev-parse', '--git-dir'), 'number.idx')
+    env = os.environ.copy()
+    env['GIT_INDEX_FILE'] = str(idx)
+
+    progress_message = 'Finalizing: (%%(count)d/%d)' % len(DIRTY_TREES)
+    with git.ProgressPrinter(progress_message) as inc:
+        git.run('read-tree', REF, env=env)
+
+        prefixes_trees = ((p, get_number_tree(p)) for p in sorted(DIRTY_TREES))
+        updater = subprocess2.Popen(
+            ['git', 'update-index', '-z', '--index-info'],
+            stdin=subprocess2.PIPE,
+            env=env)
+
+        with git.ScopedPool(kind=POOL_KIND) as leaf_pool:
+            for item in leaf_pool.imap(leaf_map_fn, prefixes_trees):
+                updater.stdin.write(item.encode())
+                inc()
+
+        updater.stdin.close()
+        updater.wait()
+        assert updater.returncode == 0
+
+        tree_id = git.run('write-tree', env=env)
+        commit_cmd = [
+            # Git user.name and/or user.email may not be configured, so
+            # specifying them explicitly. They are not used, but required by
+            # Git.
+            '-c',
+            'user.name=%s' % AUTHOR_NAME,
+            '-c',
+            'user.email=%s' % AUTHOR_EMAIL,
+            'commit-tree',
+            '-m',
+            msg,
+            '-p'
+        ] + git.hash_multi(REF)
+        for t in targets:
+            commit_cmd.extend(['-p', binascii.hexlify(t).decode()])
+        commit_cmd.append(tree_id)
+        commit_hash = git.run(*commit_cmd)
+        git.run('update-ref', REF, commit_hash)
+    DIRTY_TREES.clear()
 
 
 def preload_tree(prefix):
-  """Returns the prefix and parsed tree object for the specified prefix."""
-  return prefix, get_number_tree(prefix)
+    """Returns the prefix and parsed tree object for the specified prefix."""
+    return prefix, get_number_tree(prefix)
 
 
 def all_prefixes(depth=PREFIX_LEN):
-  prefixes = [bytes([i]) for i in range(255)]
-  for x in prefixes:
-    # This isn't covered because PREFIX_LEN currently == 1
-    if depth > 1:  # pragma: no cover
-      for r in all_prefixes(depth - 1):
-        yield x + r
-    else:
-      yield x
+    prefixes = [bytes([i]) for i in range(255)]
+    for x in prefixes:
+        # This isn't covered because PREFIX_LEN currently == 1
+        if depth > 1:  # pragma: no cover
+            for r in all_prefixes(depth - 1):
+                yield x + r
+        else:
+            yield x
 
 
 def load_generation_numbers(targets):
-  """Populates the caches of get_num and get_number_tree so they contain
+    """Populates the caches of get_num and get_number_tree so they contain
   the results for |targets|.
 
   Loads cached numbers from disk, and calculates missing numbers if one or
@@ -198,95 +205,109 @@ def load_generation_numbers(targets):
   Args:
     targets - An iterable of binary-encoded full git commit hashes.
   """
-  # In case they pass us a generator, listify targets.
-  targets = list(targets)
-
-  if all(get_num(t) is not None for t in targets):
-    return
-
-  if git.tree(REF) is None:
-    empty = git.mktree({})
-    commit_hash = git.run(
-        # Git user.name and/or user.email may not be configured, so specifying
-        # them explicitly. They are not used, but required by Git.
-        '-c', 'user.name=%s' % AUTHOR_NAME,
-        '-c', 'user.email=%s' % AUTHOR_EMAIL,
-        'commit-tree',
-        '-m', 'Initial commit from git-number',
-        empty)
-    git.run('update-ref', REF, commit_hash)
-
-  with git.ScopedPool(kind=POOL_KIND) as pool:
-    preload_iter = pool.imap_unordered(preload_tree, all_prefixes())
-
-    rev_list = []
-
-    with git.ProgressPrinter('Loading commits: %(count)d') as inc:
-      # Curiously, buffering the list into memory seems to be the fastest
-      # approach in python (as opposed to iterating over the lines in the
-      # stdout as they're produced). GIL strikes again :/
-      cmd = [
-        'rev-list', '--topo-order', '--parents', '--reverse', '^' + REF,
-      ] + [binascii.hexlify(target).decode() for target in targets]
-      for line in git.run(*cmd).splitlines():
-        tokens = [binascii.unhexlify(token) for token in line.split()]
-        rev_list.append((tokens[0], tokens[1:]))
-        inc()
-
-    get_number_tree.update(preload_iter)
-
-  with git.ProgressPrinter('Counting: %%(count)d/%d' % len(rev_list)) as inc:
-    for commit_hash, pars in rev_list:
-      num = max(map(get_num, pars)) + 1 if pars else 0
-
-      prefix = commit_hash[:PREFIX_LEN]
-      get_number_tree(prefix)[commit_hash] = num
-      DIRTY_TREES[prefix] += 1
-      get_num.set(commit_hash, num)
-
-      inc()
+    # In case they pass us a generator, listify targets.
+    targets = list(targets)
+
+    if all(get_num(t) is not None for t in targets):
+        return
+
+    if git.tree(REF) is None:
+        empty = git.mktree({})
+        commit_hash = git.run(
+            # Git user.name and/or user.email may not be configured, so
+            # specifying them explicitly. They are not used, but required by
+            # Git.
+            '-c',
+            'user.name=%s' % AUTHOR_NAME,
+            '-c',
+            'user.email=%s' % AUTHOR_EMAIL,
+            'commit-tree',
+            '-m',
+            'Initial commit from git-number',
+            empty)
+        git.run('update-ref', REF, commit_hash)
+
+    with git.ScopedPool(kind=POOL_KIND) as pool:
+        preload_iter = pool.imap_unordered(preload_tree, all_prefixes())
+
+        rev_list = []
+
+        with git.ProgressPrinter('Loading commits: %(count)d') as inc:
+            # Curiously, buffering the list into memory seems to be the fastest
+            # approach in python (as opposed to iterating over the lines in the
+            # stdout as they're produced). GIL strikes again :/
+            cmd = [
+                'rev-list',
+                '--topo-order',
+                '--parents',
+                '--reverse',
+                '^' + REF,
+            ] + [binascii.hexlify(target).decode() for target in targets]
+            for line in git.run(*cmd).splitlines():
+                tokens = [binascii.unhexlify(token) for token in line.split()]
+                rev_list.append((tokens[0], tokens[1:]))
+                inc()
+
+        get_number_tree.update(preload_iter)
+
+    with git.ProgressPrinter('Counting: %%(count)d/%d' % len(rev_list)) as inc:
+        for commit_hash, pars in rev_list:
+            num = max(map(get_num, pars)) + 1 if pars else 0
+
+            prefix = commit_hash[:PREFIX_LEN]
+            get_number_tree(prefix)[commit_hash] = num
+            DIRTY_TREES[prefix] += 1
+            get_num.set(commit_hash, num)
+
+            inc()
 
 
 def main():  # pragma: no cover
-  parser = optparse.OptionParser(usage=sys.modules[__name__].__doc__)
-  parser.add_option('--no-cache', action='store_true',
-                    help='Do not actually cache anything we calculate.')
-  parser.add_option('--reset', action='store_true',
-                    help='Reset the generation number cache and quit.')
-  parser.add_option('-v', '--verbose', action='count', default=0,
-                    help='Be verbose. Use more times for more verbosity.')
-  opts, args = parser.parse_args()
-
-  levels = [logging.ERROR, logging.INFO, logging.DEBUG]
-  logging.basicConfig(level=levels[min(opts.verbose, len(levels) - 1)])
-
-  # 'git number' should only be used on bots.
-  if os.getenv('CHROME_HEADLESS') != '1':
-    logging.error("'git-number' is an infrastructure tool that is only "
-                  "intended to be used internally by bots. Developers should "
-                  "use the 'Cr-Commit-Position' value in the commit's message.")
-    return 1
-
-  if opts.reset:
-    clear_caches(on_disk=True)
-    return
-
-  try:
-    targets = git.parse_commitrefs(*(args or ['HEAD']))
-  except git.BadCommitRefException as e:
-    parser.error(e)
-
-  load_generation_numbers(targets)
-  if not opts.no_cache:
-    finalize(targets)
-
-  print('\n'.join(map(str, map(get_num, targets))))
-  return 0
+    parser = optparse.OptionParser(usage=sys.modules[__name__].__doc__)
+    parser.add_option('--no-cache',
+                      action='store_true',
+                      help='Do not actually cache anything we calculate.')
+    parser.add_option('--reset',
+                      action='store_true',
+                      help='Reset the generation number cache and quit.')
+    parser.add_option('-v',
+                      '--verbose',
+                      action='count',
+                      default=0,
+                      help='Be verbose. Use more times for more verbosity.')
+    opts, args = parser.parse_args()
+
+    levels = [logging.ERROR, logging.INFO, logging.DEBUG]
+    logging.basicConfig(level=levels[min(opts.verbose, len(levels) - 1)])
+
+    # 'git number' should only be used on bots.
+    if os.getenv('CHROME_HEADLESS') != '1':
+        logging.error(
+            "'git-number' is an infrastructure tool that is only "
+            "intended to be used internally by bots. Developers should "
+            "use the 'Cr-Commit-Position' value in the commit's message.")
+        return 1
+
+    if opts.reset:
+        clear_caches(on_disk=True)
+        return
+
+    try:
+        targets = git.parse_commitrefs(*(args or ['HEAD']))
+    except git.BadCommitRefException as e:
+        parser.error(e)
+
+    load_generation_numbers(targets)
+    if not opts.no_cache:
+        finalize(targets)
+
+    print('\n'.join(map(str, map(get_num, targets))))
+    return 0
 
 
 if __name__ == '__main__':  # pragma: no cover
-  try:
-    sys.exit(main())
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
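
The number being cached is simply the commit's distance from a root: 0 for a parentless commit, otherwise one more than the largest number among its parents, evaluated in topological order (the `max(map(get_num, pars)) + 1 if pars else 0` line above). A self-contained sketch of that recurrence over a toy history with invented commit names:

def generation_numbers(parents_by_commit):
    """parents_by_commit maps commit -> list of parents, already listed in
    topological (parents-before-children) order."""
    nums = {}
    for commit, parents in parents_by_commit.items():
        # Roots get 0; everything else is one past its highest parent.
        nums[commit] = max((nums[p] for p in parents), default=-1) + 1
    return nums

# Toy history: a root, a linear child, and a merge commit.
history = {'root': [], 'child': ['root'], 'merge': ['child', 'root']}
assert generation_numbers(history) == {'root': 0, 'child': 1, 'merge': 2}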

+ 309 - 289
git_rebase_update.py

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """
 Tool to update all branches to have the latest changes from their upstreams.
 """
@@ -19,185 +18,192 @@ from pprint import pformat
 
 import git_common as git
 
-
 STARTING_BRANCH_KEY = 'depot-tools.rebase-update.starting-branch'
 STARTING_WORKDIR_KEY = 'depot-tools.rebase-update.starting-workdir'
 
 
 def find_return_branch_workdir():
-  """Finds the branch and working directory which we should return to after
+    """Finds the branch and working directory which we should return to after
   rebase-update completes.
 
   These values may persist across multiple invocations of rebase-update, if
   rebase-update runs into a conflict mid-way.
   """
-  return_branch = git.get_config(STARTING_BRANCH_KEY)
-  workdir = git.get_config(STARTING_WORKDIR_KEY)
-  if not return_branch:
-    workdir = os.getcwd()
-    git.set_config(STARTING_WORKDIR_KEY, workdir)
-    return_branch = git.current_branch()
-    if return_branch != 'HEAD':
-      git.set_config(STARTING_BRANCH_KEY, return_branch)
+    return_branch = git.get_config(STARTING_BRANCH_KEY)
+    workdir = git.get_config(STARTING_WORKDIR_KEY)
+    if not return_branch:
+        workdir = os.getcwd()
+        git.set_config(STARTING_WORKDIR_KEY, workdir)
+        return_branch = git.current_branch()
+        if return_branch != 'HEAD':
+            git.set_config(STARTING_BRANCH_KEY, return_branch)
 
-  return return_branch, workdir
+    return return_branch, workdir
 
 
 def fetch_remotes(branch_tree):
-  """Fetches all remotes which are needed to update |branch_tree|."""
-  fetch_tags = False
-  remotes = set()
-  tag_set = git.tags()
-  fetchspec_map = {}
-  all_fetchspec_configs = git.get_config_regexp(r'^remote\..*\.fetch')
-  for fetchspec_config in all_fetchspec_configs:
-    key, _, fetchspec = fetchspec_config.partition(' ')
-    dest_spec = fetchspec.partition(':')[2]
-    remote_name = key.split('.')[1]
-    fetchspec_map[dest_spec] = remote_name
-  for parent in branch_tree.values():
-    if parent in tag_set:
-      fetch_tags = True
+    """Fetches all remotes which are needed to update |branch_tree|."""
+    fetch_tags = False
+    remotes = set()
+    tag_set = git.tags()
+    fetchspec_map = {}
+    all_fetchspec_configs = git.get_config_regexp(r'^remote\..*\.fetch')
+    for fetchspec_config in all_fetchspec_configs:
+        key, _, fetchspec = fetchspec_config.partition(' ')
+        dest_spec = fetchspec.partition(':')[2]
+        remote_name = key.split('.')[1]
+        fetchspec_map[dest_spec] = remote_name
+    for parent in branch_tree.values():
+        if parent in tag_set:
+            fetch_tags = True
+        else:
+            full_ref = git.run('rev-parse', '--symbolic-full-name', parent)
+            for dest_spec, remote_name in fetchspec_map.items():
+                if fnmatch(full_ref, dest_spec):
+                    remotes.add(remote_name)
+                    break
+
+    fetch_args = []
+    if fetch_tags:
+        # Need to fetch all because we don't know what remote the tag comes from
+        # :( TODO(iannucci): assert that the tags are in the remote fetch
+        # refspec
+        fetch_args = ['--all']
+    else:
+        fetch_args.append('--multiple')
+        fetch_args.extend(remotes)
+    # TODO(iannucci): Should we fetch git-svn?
+
+    if not fetch_args:  # pragma: no cover
+        print('Nothing to fetch.')
     else:
-      full_ref = git.run('rev-parse', '--symbolic-full-name', parent)
-      for dest_spec, remote_name in fetchspec_map.items():
-        if fnmatch(full_ref, dest_spec):
-          remotes.add(remote_name)
-          break
-
-  fetch_args = []
-  if fetch_tags:
-    # Need to fetch all because we don't know what remote the tag comes from :(
-    # TODO(iannucci): assert that the tags are in the remote fetch refspec
-    fetch_args = ['--all']
-  else:
-    fetch_args.append('--multiple')
-    fetch_args.extend(remotes)
-  # TODO(iannucci): Should we fetch git-svn?
-
-  if not fetch_args:  # pragma: no cover
-    print('Nothing to fetch.')
-  else:
-    git.run_with_stderr('fetch', *fetch_args, stdout=sys.stdout,
-                        stderr=sys.stderr)
+        git.run_with_stderr('fetch',
+                            *fetch_args,
+                            stdout=sys.stdout,
+                            stderr=sys.stderr)
 
 
 def remove_empty_branches(branch_tree):
-  tag_set = git.tags()
-  ensure_root_checkout = git.once(lambda: git.run('checkout', git.root()))
-
-  deletions = {}
-  reparents = {}
-  downstreams = collections.defaultdict(list)
-  for branch, parent in git.topo_iter(branch_tree, top_down=False):
-    if git.is_dormant(branch):
-      continue
-
-    downstreams[parent].append(branch)
-
-    # If branch and parent have the same tree, then branch has to be marked
-    # for deletion and its children and grand-children reparented to parent.
-    if git.hash_one(branch+":") == git.hash_one(parent+":"):
-      ensure_root_checkout()
-
-      logging.debug('branch %s merged to %s', branch, parent)
-
-      # Mark branch for deletion while remembering the ordering, then add all
-      # its children as grand-children of its parent and record reparenting
-      # information if necessary.
-      deletions[branch] = len(deletions)
-
-      for down in downstreams[branch]:
-        if down in deletions:
-          continue
-
-        # Record the new and old parent for down, or update such a record
-        # if it already exists. Keep track of the ordering so that reparenting
-        # happen in topological order.
-        downstreams[parent].append(down)
-        if down not in reparents:
-          reparents[down] = (len(reparents), parent, branch)
+    tag_set = git.tags()
+    ensure_root_checkout = git.once(lambda: git.run('checkout', git.root()))
+
+    deletions = {}
+    reparents = {}
+    downstreams = collections.defaultdict(list)
+    for branch, parent in git.topo_iter(branch_tree, top_down=False):
+        if git.is_dormant(branch):
+            continue
+
+        downstreams[parent].append(branch)
+
+        # If branch and parent have the same tree, then branch has to be marked
+        # for deletion and its children and grand-children reparented to parent.
+        if git.hash_one(branch + ":") == git.hash_one(parent + ":"):
+            ensure_root_checkout()
+
+            logging.debug('branch %s merged to %s', branch, parent)
+
+            # Mark branch for deletion while remembering the ordering, then add
+            # all its children as grand-children of its parent and record
+            # reparenting information if necessary.
+            deletions[branch] = len(deletions)
+
+            for down in downstreams[branch]:
+                if down in deletions:
+                    continue
+
+                # Record the new and old parent for down, or update such a
+                # record if it already exists. Keep track of the ordering so
+                # that reparenting happen in topological order.
+                downstreams[parent].append(down)
+                if down not in reparents:
+                    reparents[down] = (len(reparents), parent, branch)
+                else:
+                    order, _, old_parent = reparents[down]
+                    reparents[down] = (order, parent, old_parent)
+
+    # Apply all reparenting recorded, in order.
+    for branch, value in sorted(reparents.items(), key=lambda x: x[1][0]):
+        _, parent, old_parent = value
+        if parent in tag_set:
+            git.set_branch_config(branch, 'remote', '.')
+            git.set_branch_config(branch, 'merge', 'refs/tags/%s' % parent)
+            print('Reparented %s to track %s [tag] (was tracking %s)' %
+                  (branch, parent, old_parent))
         else:
-          order, _, old_parent = reparents[down]
-          reparents[down] = (order, parent, old_parent)
-
-  # Apply all reparenting recorded, in order.
-  for branch, value in sorted(reparents.items(), key=lambda x:x[1][0]):
-    _, parent, old_parent = value
-    if parent in tag_set:
-      git.set_branch_config(branch, 'remote', '.')
-      git.set_branch_config(branch, 'merge', 'refs/tags/%s' % parent)
-      print('Reparented %s to track %s [tag] (was tracking %s)' %
-            (branch, parent, old_parent))
-    else:
-      git.run('branch', '--set-upstream-to', parent, branch)
-      print('Reparented %s to track %s (was tracking %s)' % (branch, parent,
-                                                             old_parent))
+            git.run('branch', '--set-upstream-to', parent, branch)
+            print('Reparented %s to track %s (was tracking %s)' %
+                  (branch, parent, old_parent))
 
-  # Apply all deletions recorded, in order.
-  for branch, _ in sorted(deletions.items(), key=lambda x: x[1]):
-    print(git.run('branch', '-d', branch))
+    # Apply all deletions recorded, in order.
+    for branch, _ in sorted(deletions.items(), key=lambda x: x[1]):
+        print(git.run('branch', '-d', branch))
 
 
 def rebase_branch(branch, parent, start_hash):
-  logging.debug('considering %s(%s) -> %s(%s) : %s',
-                branch, git.hash_one(branch), parent, git.hash_one(parent),
-                start_hash)
-
-  # If parent has FROZEN commits, don't base branch on top of them. Instead,
-  # base branch on top of whatever commit is before them.
-  back_ups = 0
-  orig_parent = parent
-  while git.run('log', '-n1', '--format=%s',
-                parent, '--').startswith(git.FREEZE):
-    back_ups += 1
-    parent = git.run('rev-parse', parent+'~')
-
-  if back_ups:
-    logging.debug('Backed parent up by %d from %s to %s',
-                  back_ups, orig_parent, parent)
-
-  if git.hash_one(parent) != start_hash:
-    # Try a plain rebase first
-    print('Rebasing:', branch)
-    rebase_ret = git.rebase(parent, start_hash, branch, abort=True)
-    if not rebase_ret.success:
-      # TODO(iannucci): Find collapsible branches in a smarter way?
-      print("Failed! Attempting to squash", branch, "...", end=' ')
-      sys.stdout.flush()
-      squash_branch = branch+"_squash_attempt"
-      git.run('checkout', '-b', squash_branch)
-      git.squash_current_branch(merge_base=start_hash)
-
-      # Try to rebase the branch_squash_attempt branch to see if it's empty.
-      squash_ret = git.rebase(parent, start_hash, squash_branch, abort=True)
-      empty_rebase = git.hash_one(squash_branch) == git.hash_one(parent)
-      git.run('checkout', branch)
-      git.run('branch', '-D', squash_branch)
-      if squash_ret.success and empty_rebase:
-        print('Success!')
-        git.squash_current_branch(merge_base=start_hash)
-        git.rebase(parent, start_hash, branch)
-      else:
-        print("Failed!")
-        print()
-
-        # rebase and leave in mid-rebase state.
-        # This second rebase attempt should always fail in the same
-        # way that the first one does.  If it magically succeeds then
-        # something very strange has happened.
-        second_rebase_ret = git.rebase(parent, start_hash, branch)
-        if second_rebase_ret.success: # pragma: no cover
-          print("Second rebase succeeded unexpectedly!")
-          print("Please see: http://crbug.com/425696")
-          print("First rebased failed with:")
-          print(rebase_ret.stderr)
-        else:
-          print("Here's what git-rebase (squashed) had to say:")
-          print()
-          print(squash_ret.stdout)
-          print(squash_ret.stderr)
-          print(textwrap.dedent("""\
+    logging.debug('considering %s(%s) -> %s(%s) : %s', branch,
+                  git.hash_one(branch), parent, git.hash_one(parent),
+                  start_hash)
+
+    # If parent has FROZEN commits, don't base branch on top of them. Instead,
+    # base branch on top of whatever commit is before them.
+    back_ups = 0
+    orig_parent = parent
+    while git.run('log', '-n1', '--format=%s', parent,
+                  '--').startswith(git.FREEZE):
+        back_ups += 1
+        parent = git.run('rev-parse', parent + '~')
+
+    if back_ups:
+        logging.debug('Backed parent up by %d from %s to %s', back_ups,
+                      orig_parent, parent)
+
+    if git.hash_one(parent) != start_hash:
+        # Try a plain rebase first
+        print('Rebasing:', branch)
+        rebase_ret = git.rebase(parent, start_hash, branch, abort=True)
+        if not rebase_ret.success:
+            # TODO(iannucci): Find collapsible branches in a smarter way?
+            print("Failed! Attempting to squash", branch, "...", end=' ')
+            sys.stdout.flush()
+            squash_branch = branch + "_squash_attempt"
+            git.run('checkout', '-b', squash_branch)
+            git.squash_current_branch(merge_base=start_hash)
+
+            # Try to rebase the branch_squash_attempt branch to see if it's
+            # empty.
+            squash_ret = git.rebase(parent,
+                                    start_hash,
+                                    squash_branch,
+                                    abort=True)
+            empty_rebase = git.hash_one(squash_branch) == git.hash_one(parent)
+            git.run('checkout', branch)
+            git.run('branch', '-D', squash_branch)
+            if squash_ret.success and empty_rebase:
+                print('Success!')
+                git.squash_current_branch(merge_base=start_hash)
+                git.rebase(parent, start_hash, branch)
+            else:
+                print("Failed!")
+                print()
+
+                # rebase and leave in mid-rebase state.
+                # This second rebase attempt should always fail in the same
+                # way that the first one does.  If it magically succeeds then
+                # something very strange has happened.
+                second_rebase_ret = git.rebase(parent, start_hash, branch)
+                if second_rebase_ret.success:  # pragma: no cover
+                    print("Second rebase succeeded unexpectedly!")
+                    print("Please see: http://crbug.com/425696")
+                    print("First rebased failed with:")
+                    print(rebase_ret.stderr)
+                else:
+                    print("Here's what git-rebase (squashed) had to say:")
+                    print()
+                    print(squash_ret.stdout)
+                    print(squash_ret.stderr)
+                    print(
+                        textwrap.dedent("""\
           Squashing failed. You probably have a real merge conflict.
 
           Your working copy is in mid-rebase. Either:
@@ -208,147 +214,161 @@ def rebase_branch(branch, parent, start_hash):
 
           And then run `git rebase-update -n` to resume.
           """ % branch))
-          return False
-  else:
-    print('%s up-to-date' % branch)
+                    return False
+    else:
+        print('%s up-to-date' % branch)
 
-  git.remove_merge_base(branch)
-  git.get_or_create_merge_base(branch)
+    git.remove_merge_base(branch)
+    git.get_or_create_merge_base(branch)
 
-  return True
+    return True
 
 
 def main(args=None):
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--verbose', '-v', action='store_true')
-  parser.add_argument('--keep-going', '-k', action='store_true',
-                      help='Keep processing past failed rebases.')
-  parser.add_argument('--no_fetch', '--no-fetch', '-n',
-                      action='store_true',
-                      help='Skip fetching remotes.')
-  parser.add_argument(
-      '--current', action='store_true', help='Only rebase the current branch.')
-  parser.add_argument('branches', nargs='*',
-                      help='Branches to be rebased. All branches are assumed '
-                           'if none specified.')
-  parser.add_argument('--keep-empty', '-e', action='store_true',
-                      help='Do not automatically delete empty branches.')
-  opts = parser.parse_args(args)
-
-  if opts.verbose:  # pragma: no cover
-    logging.getLogger().setLevel(logging.DEBUG)
-
-  # TODO(iannucci): snapshot all branches somehow, so we can implement
-  #                 `git rebase-update --undo`.
-  #   * Perhaps just copy packed-refs + refs/ + logs/ to the side?
-  #     * commit them to a secret ref?
-  #       * Then we could view a summary of each run as a
-  #         `diff --stat` on that secret ref.
-
-  if git.in_rebase():
-    # TODO(iannucci): Be able to resume rebase with flags like --continue,
-    # etc.
-    print('Rebase in progress. Please complete the rebase before running '
-          '`git rebase-update`.')
-    return 1
-
-  return_branch, return_workdir = find_return_branch_workdir()
-  os.chdir(git.run('rev-parse', '--show-toplevel'))
-
-  if git.current_branch() == 'HEAD':
-    if git.run('status', '--porcelain', '--ignore-submodules=all'):
-      print('Cannot rebase-update with detached head + uncommitted changes.')
-      return 1
-  else:
-    git.freeze()  # just in case there are any local changes.
-
-  branches_to_rebase = set(opts.branches)
-  if opts.current:
-    branches_to_rebase.add(git.current_branch())
-
-  skipped, branch_tree = git.get_branch_tree(use_limit=not opts.current)
-  if branches_to_rebase:
-    skipped = set(skipped).intersection(branches_to_rebase)
-  for branch in skipped:
-    print('Skipping %s: No upstream specified' % branch)
-
-  if not opts.no_fetch:
-    fetch_remotes(branch_tree)
-
-  merge_base = {}
-  for branch, parent in branch_tree.items():
-    merge_base[branch] = git.get_or_create_merge_base(branch, parent)
-
-  logging.debug('branch_tree: %s' % pformat(branch_tree))
-  logging.debug('merge_base: %s' % pformat(merge_base))
-
-  retcode = 0
-  unrebased_branches = []
-  # Rebase each branch starting with the root-most branches and working
-  # towards the leaves.
-  for branch, parent in git.topo_iter(branch_tree):
-    # Only rebase specified branches, unless none specified.
-    if branches_to_rebase and branch not in branches_to_rebase:
-      continue
-    if git.is_dormant(branch):
-      print('Skipping dormant branch', branch)
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--verbose', '-v', action='store_true')
+    parser.add_argument('--keep-going',
+                        '-k',
+                        action='store_true',
+                        help='Keep processing past failed rebases.')
+    parser.add_argument('--no_fetch',
+                        '--no-fetch',
+                        '-n',
+                        action='store_true',
+                        help='Skip fetching remotes.')
+    parser.add_argument('--current',
+                        action='store_true',
+                        help='Only rebase the current branch.')
+    parser.add_argument('branches',
+                        nargs='*',
+                        help='Branches to be rebased. All branches are assumed '
+                        'if none specified.')
+    parser.add_argument('--keep-empty',
+                        '-e',
+                        action='store_true',
+                        help='Do not automatically delete empty branches.')
+    opts = parser.parse_args(args)
+
+    if opts.verbose:  # pragma: no cover
+        logging.getLogger().setLevel(logging.DEBUG)
+
+    # TODO(iannucci): snapshot all branches somehow, so we can implement
+    #                 `git rebase-update --undo`.
+    #   * Perhaps just copy packed-refs + refs/ + logs/ to the side?
+    #     * commit them to a secret ref?
+    #       * Then we could view a summary of each run as a
+    #         `diff --stat` on that secret ref.
+
+    if git.in_rebase():
+        # TODO(iannucci): Be able to resume rebase with flags like --continue,
+        # etc.
+        print('Rebase in progress. Please complete the rebase before running '
+              '`git rebase-update`.')
+        return 1
+
+    return_branch, return_workdir = find_return_branch_workdir()
+    os.chdir(git.run('rev-parse', '--show-toplevel'))
+
+    if git.current_branch() == 'HEAD':
+        if git.run('status', '--porcelain', '--ignore-submodules=all'):
+            print(
+                'Cannot rebase-update with detached head + uncommitted changes.'
+            )
+            return 1
     else:
-      ret = rebase_branch(branch, parent, merge_base[branch])
-      if not ret:
-        retcode = 1
-
-        if opts.keep_going:
-          print('--keep-going set, continuing with next branch.')
-          unrebased_branches.append(branch)
-          if git.in_rebase():
-            git.run_with_retcode('rebase', '--abort')
-          if git.in_rebase():  # pragma: no cover
-            print('Failed to abort rebase. Something is really wrong.')
-            break
+        git.freeze()  # just in case there are any local changes.
+
+    branches_to_rebase = set(opts.branches)
+    if opts.current:
+        branches_to_rebase.add(git.current_branch())
+
+    skipped, branch_tree = git.get_branch_tree(use_limit=not opts.current)
+    if branches_to_rebase:
+        skipped = set(skipped).intersection(branches_to_rebase)
+    for branch in skipped:
+        print('Skipping %s: No upstream specified' % branch)
+
+    if not opts.no_fetch:
+        fetch_remotes(branch_tree)
+
+    merge_base = {}
+    for branch, parent in branch_tree.items():
+        merge_base[branch] = git.get_or_create_merge_base(branch, parent)
+
+    logging.debug('branch_tree: %s' % pformat(branch_tree))
+    logging.debug('merge_base: %s' % pformat(merge_base))
+
+    retcode = 0
+    unrebased_branches = []
+    # Rebase each branch starting with the root-most branches and working
+    # towards the leaves.
+    for branch, parent in git.topo_iter(branch_tree):
+        # Only rebase specified branches, unless none specified.
+        if branches_to_rebase and branch not in branches_to_rebase:
+            continue
+        if git.is_dormant(branch):
+            print('Skipping dormant branch', branch)
         else:
-          break
-
-  if unrebased_branches:
-    print()
-    print('The following branches could not be cleanly rebased:')
-    for branch in unrebased_branches:
-      print('  %s' % branch)
-
-  if not retcode:
-    if not opts.keep_empty:
-      remove_empty_branches(branch_tree)
-
-    # return_branch may not be there any more.
-    if return_branch in git.branches(use_limit=False):
-      git.run('checkout', return_branch)
-      git.thaw()
-    else:
-      root_branch = git.root()
-      if return_branch != 'HEAD':
-        print("%s was merged with its parent, checking out %s instead." %
-              (git.unicode_repr(return_branch), git.unicode_repr(root_branch)))
-      git.run('checkout', root_branch)
-
-    # return_workdir may also not be there any more.
-    if return_workdir:
-      try:
-        os.chdir(return_workdir)
-      except OSError as e:
-        print(
-            "Unable to return to original workdir %r: %s" % (return_workdir, e))
-    git.set_config(STARTING_BRANCH_KEY, '')
-    git.set_config(STARTING_WORKDIR_KEY, '')
+            ret = rebase_branch(branch, parent, merge_base[branch])
+            if not ret:
+                retcode = 1
+
+                if opts.keep_going:
+                    print('--keep-going set, continuing with next branch.')
+                    unrebased_branches.append(branch)
+                    if git.in_rebase():
+                        git.run_with_retcode('rebase', '--abort')
+                    if git.in_rebase():  # pragma: no cover
+                        print(
+                            'Failed to abort rebase. Something is really wrong.'
+                        )
+                        break
+                else:
+                    break
+
+    if unrebased_branches:
+        print()
+        print('The following branches could not be cleanly rebased:')
+        for branch in unrebased_branches:
+            print('  %s' % branch)
+
+    if not retcode:
+        if not opts.keep_empty:
+            remove_empty_branches(branch_tree)
+
+        # return_branch may not be there any more.
+        if return_branch in git.branches(use_limit=False):
+            git.run('checkout', return_branch)
+            git.thaw()
+        else:
+            root_branch = git.root()
+            if return_branch != 'HEAD':
+                print(
+                    "%s was merged with its parent, checking out %s instead." %
+                    (git.unicode_repr(return_branch),
+                     git.unicode_repr(root_branch)))
+            git.run('checkout', root_branch)
+
+        # return_workdir may also not be there any more.
+        if return_workdir:
+            try:
+                os.chdir(return_workdir)
+            except OSError as e:
+                print("Unable to return to original workdir %r: %s" %
+                      (return_workdir, e))
+        git.set_config(STARTING_BRANCH_KEY, '')
+        git.set_config(STARTING_WORKDIR_KEY, '')
 
-    print()
-    print("Running `git gc --auto` - Ctrl-C to abort is OK.")
-    git.run('gc', '--auto')
+        print()
+        print("Running `git gc --auto` - Ctrl-C to abort is OK.")
+        git.run('gc', '--auto')
 
-  return retcode
+    return retcode
 
 
 if __name__ == '__main__':  # pragma: no cover
-  try:
-    sys.exit(main())
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
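
Rebasing proceeds parents-first, "starting with the root-most branches and working towards the leaves", which is what git_common's topo_iter supplies. A rough sketch of that ordering over a child-to-parent branch map (not the real topo_iter implementation, and the branch names are made up):

def topo_order(branch_tree):
    """Yield (branch, parent) pairs, parents before the branches that
    depend on them. branch_tree maps child branch -> parent branch."""
    pending = dict(branch_tree)
    while pending:
        # A branch is ready once its parent is no longer waiting to be emitted.
        ready = [b for b, p in sorted(pending.items()) if p not in pending]
        if not ready:
            raise ValueError('cycle in branch tree')
        for branch in ready:
            yield branch, pending.pop(branch)

tree = {'feature': 'main', 'fixup': 'feature', 'cleanup': 'main'}
assert [b for b, _ in topo_order(tree)] == ['cleanup', 'feature', 'fixup']

Processing in this order means a branch is only rebased after its parent has already been moved to its new position.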

+ 40 - 37
git_rename_branch.py

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Rename the current branch while maintaining correct dependencies."""
 
 import argparse
@@ -13,43 +12,47 @@ import subprocess2
 from git_common import current_branch, run, set_branch_config, branch_config
 from git_common import branch_config_map
 
+
 def main(args):
-  current = current_branch()
-  if current == 'HEAD':
-    current = None
-  old_name_help = 'The old branch to rename.'
-  if current:
-    old_name_help += ' (default %(default)r)'
-
-  parser = argparse.ArgumentParser()
-  parser.add_argument('old_name', nargs=('?' if current else 1),
-                      help=old_name_help, default=current)
-  parser.add_argument('new_name', help='The new branch name.')
-
-  opts = parser.parse_args(args)
-
-  # when nargs=1, we get a list :(
-  if isinstance(opts.old_name, list):
-    opts.old_name = opts.old_name[0]
-
-  try:
-    run('branch', '-m', opts.old_name, opts.new_name)
-
-    # update the downstreams
-    for branch, merge in branch_config_map('merge').items():
-      if merge == 'refs/heads/' + opts.old_name:
-        # Only care about local branches
-        if branch_config(branch, 'remote') == '.':
-          set_branch_config(branch, 'merge', 'refs/heads/' + opts.new_name)
-  except subprocess2.CalledProcessError as cpe:
-    sys.stderr.write(cpe.stderr.decode('utf-8', 'replace'))
-    return 1
-  return 0
+    current = current_branch()
+    if current == 'HEAD':
+        current = None
+    old_name_help = 'The old branch to rename.'
+    if current:
+        old_name_help += ' (default %(default)r)'
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument('old_name',
+                        nargs=('?' if current else 1),
+                        help=old_name_help,
+                        default=current)
+    parser.add_argument('new_name', help='The new branch name.')
+
+    opts = parser.parse_args(args)
+
+    # when nargs=1, we get a list :(
+    if isinstance(opts.old_name, list):
+        opts.old_name = opts.old_name[0]
+
+    try:
+        run('branch', '-m', opts.old_name, opts.new_name)
+
+        # update the downstreams
+        for branch, merge in branch_config_map('merge').items():
+            if merge == 'refs/heads/' + opts.old_name:
+                # Only care about local branches
+                if branch_config(branch, 'remote') == '.':
+                    set_branch_config(branch, 'merge',
+                                      'refs/heads/' + opts.new_name)
+    except subprocess2.CalledProcessError as cpe:
+        sys.stderr.write(cpe.stderr.decode('utf-8', 'replace'))
+        return 1
+    return 0
 
 
 if __name__ == '__main__':  # pragma: no cover
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
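
Beyond `git branch -m`, the script's real work is re-pointing downstream branches: any branch whose branch.<name>.remote is '.' and whose branch.<name>.merge names the old ref gets its merge config rewritten. The same bookkeeping as a small in-memory sketch (the config shape and branch names are illustrative):

def repoint_downstreams(config, old_name, new_name):
    """config maps branch -> {'remote': ..., 'merge': ...}."""
    old_ref, new_ref = 'refs/heads/' + old_name, 'refs/heads/' + new_name
    for settings in config.values():
        # Only local tracking relationships ('.') follow the rename.
        if settings.get('remote') == '.' and settings.get('merge') == old_ref:
            settings['merge'] = new_ref

config = {'child': {'remote': '.', 'merge': 'refs/heads/old'},
          'other': {'remote': 'origin', 'merge': 'refs/heads/old'}}
repoint_downstreams(config, 'old', 'new')
assert config['child']['merge'] == 'refs/heads/new'
assert config['other']['merge'] == 'refs/heads/old'  # remote-tracking: untouched

Branches tracking a real remote are deliberately left alone, since their upstream ref lives on the server and is unaffected by the local rename.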

+ 82 - 79
git_reparent_branch.py

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Change the upstream of the current branch."""
 
 import argparse
@@ -17,86 +16,90 @@ from git_common import get_branch_tree, topo_iter
 import git_rebase_update
 import metrics
 
+
 @metrics.collector.collect_metrics('git reparent-branch')
 def main(args):
-  root_ref = root()
-
-  parser = argparse.ArgumentParser()
-  g = parser.add_mutually_exclusive_group()
-  g.add_argument('new_parent', nargs='?',
-                 help='New parent branch (or tag) to reparent to.')
-  g.add_argument('--root', action='store_true',
-                 help='Reparent to the configured root branch (%s).' % root_ref)
-  g.add_argument('--lkgr', action='store_true',
-                 help='Reparent to the lkgr tag.')
-  opts = parser.parse_args(args)
-
-  # TODO(iannucci): Allow specification of the branch-to-reparent
-
-  branch = current_branch()
-
-  if opts.root:
-    new_parent = root_ref
-  elif opts.lkgr:
-    new_parent = 'lkgr'
-  else:
-    if not opts.new_parent:
-      parser.error('Must specify new parent somehow')
-    new_parent = opts.new_parent
-  cur_parent = upstream(branch)
-
-  if branch == 'HEAD' or not branch:
-    parser.error('Must be on the branch you want to reparent')
-  if new_parent == cur_parent:
-    parser.error('Cannot reparent a branch to its existing parent')
-
-  if not cur_parent:
-    msg = (
-      "Unable to determine %s@{upstream}.\n\nThis can happen if you didn't use "
-      "`git new-branch` to create the branch and haven't used "
-      "`git branch --set-upstream-to` to assign it one.\n\nPlease assign an "
-      "upstream branch and then run this command again."
-    )
-    print(msg % branch, file=sys.stderr)
-    return 1
-
-  mbase = get_or_create_merge_base(branch, cur_parent)
-
-  all_tags = tags()
-  if cur_parent in all_tags:
-    cur_parent += ' [tag]'
-
-  try:
-    run('show-ref', new_parent)
-  except subprocess2.CalledProcessError:
-    print('fatal: invalid reference: %s' % new_parent, file=sys.stderr)
-    return 1
-
-  if new_parent in all_tags:
-    print("Reparenting %s to track %s [tag] (was %s)" % (branch, new_parent,
-                                                         cur_parent))
-    set_branch_config(branch, 'remote', '.')
-    set_branch_config(branch, 'merge', new_parent)
-  else:
-    print("Reparenting %s to track %s (was %s)" % (branch, new_parent,
-                                                   cur_parent))
-    run('branch', '--set-upstream-to', new_parent, branch)
-
-  manual_merge_base(branch, mbase, new_parent)
-
-  # ONLY rebase-update the branch which moved (and dependants)
-  _, branch_tree = get_branch_tree()
-  branches = [branch]
-  for branch, parent in topo_iter(branch_tree):
-    if parent in branches:
-      branches.append(branch)
-  return git_rebase_update.main(['--no-fetch', '--keep-empty'] + branches)
+    root_ref = root()
+
+    parser = argparse.ArgumentParser()
+    g = parser.add_mutually_exclusive_group()
+    g.add_argument('new_parent',
+                   nargs='?',
+                   help='New parent branch (or tag) to reparent to.')
+    g.add_argument('--root',
+                   action='store_true',
+                   help='Reparent to the configured root branch (%s).' %
+                   root_ref)
+    g.add_argument('--lkgr',
+                   action='store_true',
+                   help='Reparent to the lkgr tag.')
+    opts = parser.parse_args(args)
+
+    # TODO(iannucci): Allow specification of the branch-to-reparent
+
+    branch = current_branch()
+
+    if opts.root:
+        new_parent = root_ref
+    elif opts.lkgr:
+        new_parent = 'lkgr'
+    else:
+        if not opts.new_parent:
+            parser.error('Must specify new parent somehow')
+        new_parent = opts.new_parent
+    cur_parent = upstream(branch)
+
+    if branch == 'HEAD' or not branch:
+        parser.error('Must be on the branch you want to reparent')
+    if new_parent == cur_parent:
+        parser.error('Cannot reparent a branch to its existing parent')
+
+    if not cur_parent:
+        msg = (
+            "Unable to determine %s@{upstream}.\n\nThis can happen if you "
+            "didn't use `git new-branch` to create the branch and haven't used "
+            "`git branch --set-upstream-to` to assign it one.\n\nPlease assign "
+            "an upstream branch and then run this command again.")
+        print(msg % branch, file=sys.stderr)
+        return 1
+
+    mbase = get_or_create_merge_base(branch, cur_parent)
+
+    all_tags = tags()
+    if cur_parent in all_tags:
+        cur_parent += ' [tag]'
+
+    try:
+        run('show-ref', new_parent)
+    except subprocess2.CalledProcessError:
+        print('fatal: invalid reference: %s' % new_parent, file=sys.stderr)
+        return 1
+
+    if new_parent in all_tags:
+        print("Reparenting %s to track %s [tag] (was %s)" %
+              (branch, new_parent, cur_parent))
+        set_branch_config(branch, 'remote', '.')
+        set_branch_config(branch, 'merge', new_parent)
+    else:
+        print("Reparenting %s to track %s (was %s)" %
+              (branch, new_parent, cur_parent))
+        run('branch', '--set-upstream-to', new_parent, branch)
+
+    manual_merge_base(branch, mbase, new_parent)
+
+    # ONLY rebase-update the branch which moved (and dependents)
+    _, branch_tree = get_branch_tree()
+    branches = [branch]
+    for branch, parent in topo_iter(branch_tree):
+        if parent in branches:
+            branches.append(branch)
+    return git_rebase_update.main(['--no-fetch', '--keep-empty'] + branches)
 
 
 if __name__ == '__main__':  # pragma: no cover
-  try:
-    with metrics.collector.print_notice_and_exit():
-      sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        with metrics.collector.print_notice_and_exit():
+            sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
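
The final loop above limits the rebase-update to the reparented branch plus
everything downstream of it. A small self-contained sketch of that selection,
assuming a children_of mapping in place of get_branch_tree()/topo_iter():

def branches_to_update(moved, children_of):
    # children_of: {branch: [direct downstream branches]} (hypothetical).
    selected = [moved]
    queue = list(children_of.get(moved, ()))
    while queue:
        branch = queue.pop(0)
        selected.append(branch)
        queue.extend(children_of.get(branch, ()))
    return selected

# branches_to_update('feature', {'feature': ['feature-ui']})
#   -> ['feature', 'feature-ui']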

+ 155 - 142
git_retry.py

@@ -2,8 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
-
 """Generic retry wrapper for Git operations.
 
 This is largely DEPRECATED in favor of the Infra Git wrapper:
@@ -22,63 +20,62 @@ from git_common import GIT_EXE, GIT_TRANSIENT_ERRORS_RE
 
 
 class TeeThread(threading.Thread):
+    def __init__(self, fd, out_fd, name):
+        super(TeeThread, self).__init__(name='git-retry.tee.%s' % (name, ))
+        self.data = None
+        self.fd = fd
+        self.out_fd = out_fd
 
-  def __init__(self, fd, out_fd, name):
-    super(TeeThread, self).__init__(name='git-retry.tee.%s' % (name,))
-    self.data = None
-    self.fd = fd
-    self.out_fd = out_fd
-
-  def run(self):
-    chunks = []
-    for line in self.fd:
-      line = line.decode('utf-8')
-      chunks.append(line)
-      self.out_fd.write(line)
-    self.data = ''.join(chunks)
+    def run(self):
+        chunks = []
+        for line in self.fd:
+            line = line.decode('utf-8')
+            chunks.append(line)
+            self.out_fd.write(line)
+        self.data = ''.join(chunks)
 
 
 class GitRetry(object):
 
-  logger = logging.getLogger('git-retry')
-  DEFAULT_DELAY_SECS = 3.0
-  DEFAULT_RETRY_COUNT = 5
-
-  def __init__(self, retry_count=None, delay=None, delay_factor=None):
-    self.retry_count = retry_count or self.DEFAULT_RETRY_COUNT
-    self.delay = max(delay, 0) if delay else 0
-    self.delay_factor = max(delay_factor, 0) if delay_factor else 0
-
-  def shouldRetry(self, stderr):
-    m = GIT_TRANSIENT_ERRORS_RE.search(stderr)
-    if not m:
-      return False
-    self.logger.info("Encountered known transient error: [%s]",
-                     stderr[m.start(): m.end()])
-    return True
-
-  @staticmethod
-  def execute(*args):
-    args = (GIT_EXE,) + args
-    proc = subprocess.Popen(
-        args,
-        stderr=subprocess.PIPE,
-    )
-    stderr_tee = TeeThread(proc.stderr, sys.stderr, 'stderr')
-
-    # Start our process. Collect/tee 'stdout' and 'stderr'.
-    stderr_tee.start()
-    try:
-      proc.wait()
-    except KeyboardInterrupt:
-      proc.kill()
-      raise
-    finally:
-      stderr_tee.join()
-    return proc.returncode, None, stderr_tee.data
-
-  def computeDelay(self, iteration):
-    """Returns: the delay (in seconds) for a given iteration
+    logger = logging.getLogger('git-retry')
+    DEFAULT_DELAY_SECS = 3.0
+    DEFAULT_RETRY_COUNT = 5
+
+    def __init__(self, retry_count=None, delay=None, delay_factor=None):
+        self.retry_count = retry_count or self.DEFAULT_RETRY_COUNT
+        self.delay = max(delay, 0) if delay else 0
+        self.delay_factor = max(delay_factor, 0) if delay_factor else 0
+
+    def shouldRetry(self, stderr):
+        m = GIT_TRANSIENT_ERRORS_RE.search(stderr)
+        if not m:
+            return False
+        self.logger.info("Encountered known transient error: [%s]",
+                         stderr[m.start():m.end()])
+        return True
+
+    @staticmethod
+    def execute(*args):
+        args = (GIT_EXE, ) + args
+        proc = subprocess.Popen(
+            args,
+            stderr=subprocess.PIPE,
+        )
+        stderr_tee = TeeThread(proc.stderr, sys.stderr, 'stderr')
+
+        # Start our process. Collect/tee 'stdout' and 'stderr'.
+        stderr_tee.start()
+        try:
+            proc.wait()
+        except KeyboardInterrupt:
+            proc.kill()
+            raise
+        finally:
+            stderr_tee.join()
+        return proc.returncode, None, stderr_tee.data
+
+    def computeDelay(self, iteration):
+        """Returns: the delay (in seconds) for a given iteration
 
     The first iteration has a delay of '0'.
 
@@ -86,97 +83,113 @@ class GitRetry(object):
       iteration: (int) The iteration index (starting with zero as the first
           iteration)
     """
-    if (not self.delay) or (iteration == 0):
-      return 0
-    if self.delay_factor == 0:
-      # Linear delay
-      return iteration * self.delay
-    # Exponential delay
-    return (self.delay_factor ** (iteration - 1)) * self.delay
-
-  def __call__(self, *args):
-    returncode = 0
-    for i in range(self.retry_count):
-      # If the previous run failed and a delay is configured, delay before the
-      # next run.
-      delay = self.computeDelay(i)
-      if delay > 0:
-        self.logger.info("Delaying for [%s second(s)] until next retry", delay)
-        time.sleep(delay)
-
-      self.logger.debug("Executing subprocess (%d/%d) with arguments: %s",
-                        (i+1), self.retry_count, args)
-      returncode, _, stderr = self.execute(*args)
-
-      self.logger.debug("Process terminated with return code: %d", returncode)
-      if returncode == 0:
-        break
-
-      if not self.shouldRetry(stderr):
-        self.logger.error("Process failure was not known to be transient; "
-                          "terminating with return code %d", returncode)
-        break
-    return returncode
+        if (not self.delay) or (iteration == 0):
+            return 0
+        if self.delay_factor == 0:
+            # Linear delay
+            return iteration * self.delay
+        # Exponential delay
+        return (self.delay_factor**(iteration - 1)) * self.delay
+
+    def __call__(self, *args):
+        returncode = 0
+        for i in range(self.retry_count):
+            # If the previous run failed and a delay is configured, delay before
+            # the next run.
+            delay = self.computeDelay(i)
+            if delay > 0:
+                self.logger.info("Delaying for [%s second(s)] until next retry",
+                                 delay)
+                time.sleep(delay)
+
+            self.logger.debug("Executing subprocess (%d/%d) with arguments: %s",
+                              (i + 1), self.retry_count, args)
+            returncode, _, stderr = self.execute(*args)
+
+            self.logger.debug("Process terminated with return code: %d",
+                              returncode)
+            if returncode == 0:
+                break
+
+            if not self.shouldRetry(stderr):
+                self.logger.error(
+                    "Process failure was not known to be transient; "
+                    "terminating with return code %d", returncode)
+                break
+        return returncode
 
 
 def main(args):
-  # If we're using the Infra Git wrapper, do nothing here.
-  # https://chromium.googlesource.com/infra/infra/+/HEAD/go/src/infra/tools/git
-  if 'INFRA_GIT_WRAPPER' in os.environ:
-    # Remove Git's execution path from PATH so that our call-through re-invokes
-    # the Git wrapper.
-    # See crbug.com/721450
-    env = os.environ.copy()
-    git_exec = subprocess.check_output([GIT_EXE, '--exec-path']).strip()
-    env['PATH'] = os.pathsep.join([
-      elem for elem in env.get('PATH', '').split(os.pathsep)
-      if elem != git_exec])
-    return subprocess.call([GIT_EXE] + args, env=env)
-
-  parser = optparse.OptionParser()
-  parser.disable_interspersed_args()
-  parser.add_option('-v', '--verbose',
-                    action='count', default=0,
-                    help="Increase verbosity; can be specified multiple times")
-  parser.add_option('-c', '--retry-count', metavar='COUNT',
-                    type=int, default=GitRetry.DEFAULT_RETRY_COUNT,
-                    help="Number of times to retry (default=%default)")
-  parser.add_option('-d', '--delay', metavar='SECONDS',
-                    type=float, default=GitRetry.DEFAULT_DELAY_SECS,
-                    help="Specifies the amount of time (in seconds) to wait "
-                         "between successive retries (default=%default). This "
-                         "can be zero.")
-  parser.add_option('-D', '--delay-factor', metavar='FACTOR',
-                    type=int, default=2,
-                    help="The exponential factor to apply to delays in between "
-                         "successive failures (default=%default). If this is "
-                         "zero, delays will increase linearly. Set this to "
-                         "one to have a constant (non-increasing) delay.")
-
-  opts, args = parser.parse_args(args)
-
-  # Configure logging verbosity
-  if opts.verbose == 0:
-    logging.getLogger().setLevel(logging.WARNING)
-  elif opts.verbose == 1:
-    logging.getLogger().setLevel(logging.INFO)
-  else:
-    logging.getLogger().setLevel(logging.DEBUG)
-
-  # Execute retries
-  retry = GitRetry(
-      retry_count=opts.retry_count,
-      delay=opts.delay,
-      delay_factor=opts.delay_factor,
-  )
-  return retry(*args)
+    # If we're using the Infra Git wrapper, do nothing here.
+    # https://chromium.googlesource.com/infra/infra/+/HEAD/go/src/infra/tools/git
+    if 'INFRA_GIT_WRAPPER' in os.environ:
+        # Remove Git's execution path from PATH so that our call-through
+        # re-invokes the Git wrapper. See crbug.com/721450
+        env = os.environ.copy()
+        git_exec = subprocess.check_output([GIT_EXE, '--exec-path']).strip()
+        env['PATH'] = os.pathsep.join([
+            elem for elem in env.get('PATH', '').split(os.pathsep)
+            if elem != git_exec
+        ])
+        return subprocess.call([GIT_EXE] + args, env=env)
+
+    parser = optparse.OptionParser()
+    parser.disable_interspersed_args()
+    parser.add_option(
+        '-v',
+        '--verbose',
+        action='count',
+        default=0,
+        help="Increase verbosity; can be specified multiple times")
+    parser.add_option('-c',
+                      '--retry-count',
+                      metavar='COUNT',
+                      type=int,
+                      default=GitRetry.DEFAULT_RETRY_COUNT,
+                      help="Number of times to retry (default=%default)")
+    parser.add_option('-d',
+                      '--delay',
+                      metavar='SECONDS',
+                      type=float,
+                      default=GitRetry.DEFAULT_DELAY_SECS,
+                      help="Specifies the amount of time (in seconds) to wait "
+                      "between successive retries (default=%default). This "
+                      "can be zero.")
+    parser.add_option(
+        '-D',
+        '--delay-factor',
+        metavar='FACTOR',
+        type=int,
+        default=2,
+        help="The exponential factor to apply to delays in between "
+        "successive failures (default=%default). If this is "
+        "zero, delays will increase linearly. Set this to "
+        "one to have a constant (non-increasing) delay.")
+
+    opts, args = parser.parse_args(args)
+
+    # Configure logging verbosity
+    if opts.verbose == 0:
+        logging.getLogger().setLevel(logging.WARNING)
+    elif opts.verbose == 1:
+        logging.getLogger().setLevel(logging.INFO)
+    else:
+        logging.getLogger().setLevel(logging.DEBUG)
+
+    # Execute retries
+    retry = GitRetry(
+        retry_count=opts.retry_count,
+        delay=opts.delay,
+        delay_factor=opts.delay_factor,
+    )
+    return retry(*args)
 
 
 if __name__ == '__main__':
-  logging.basicConfig()
-  logging.getLogger().setLevel(logging.WARNING)
-  try:
-    sys.exit(main(sys.argv[2:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    logging.basicConfig()
+    logging.getLogger().setLevel(logging.WARNING)
+    try:
+        sys.exit(main(sys.argv[2:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
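
computeDelay() above produces a linear schedule when delay_factor is 0 and an
exponential one otherwise, with no wait before the first attempt. A standalone
sketch of the same arithmetic, using the defaults from this file (3.0 seconds,
factor 2):

def compute_delay(iteration, delay=3.0, delay_factor=2):
    # No delay before the first attempt.
    if not delay or iteration == 0:
        return 0
    if delay_factor == 0:
        # Linear: 3.0, 6.0, 9.0, ...
        return iteration * delay
    # Exponential: 3.0, 6.0, 12.0, ...
    return (delay_factor ** (iteration - 1)) * delay

# [compute_delay(i) for i in range(5)] -> [0, 3.0, 6.0, 12.0, 24.0]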

+ 18 - 14
git_squash_branch.py

@@ -8,21 +8,25 @@ import sys
 
 import git_common
 
+
 def main(args):
-  parser = argparse.ArgumentParser()
-  parser.add_argument(
-      '-m', '--message', metavar='<msg>', default=None,
-      help='Use the given <msg> as the first line of the commit message.')
-  opts = parser.parse_args(args)
-  if git_common.is_dirty_git_tree('squash-branch'):
-    return 1
-  git_common.squash_current_branch(opts.message)
-  return 0
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '-m',
+        '--message',
+        metavar='<msg>',
+        default=None,
+        help='Use the given <msg> as the first line of the commit message.')
+    opts = parser.parse_args(args)
+    if git_common.is_dirty_git_tree('squash-branch'):
+        return 1
+    git_common.squash_current_branch(opts.message)
+    return 0
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)

+ 43 - 38
git_upstream_diff.py

@@ -10,53 +10,58 @@ import subprocess2
 
 import git_common as git
 
+
 def main(args):
-  default_args = git.get_config_list('depot-tools.upstream-diff.default-args')
-  args = default_args + args
+    default_args = git.get_config_list('depot-tools.upstream-diff.default-args')
+    args = default_args + args
 
-  current_branch = git.current_branch()
+    current_branch = git.current_branch()
 
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--wordwise', action='store_true', default=False,
-                      help=(
-                        'Print a colorized wordwise diff '
-                        'instead of line-wise diff'))
-  parser.add_argument('--branch', default=current_branch,
-                      help='Show changes from a different branch. Passing '
-                           '"HEAD" is the same as omitting this option (it '
-                           'diffs against the current branch)')
-  opts, extra_args = parser.parse_known_args(args)
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--wordwise',
+                        action='store_true',
+                        default=False,
+                        help=('Print a colorized wordwise diff '
+                              'instead of line-wise diff'))
+    parser.add_argument('--branch',
+                        default=current_branch,
+                        help='Show changes from a different branch. Passing '
+                        '"HEAD" is the same as omitting this option (it '
+                        'diffs against the current branch)')
+    opts, extra_args = parser.parse_known_args(args)
 
-  if opts.branch == 'HEAD':
-    opts.branch = current_branch
+    if opts.branch == 'HEAD':
+        opts.branch = current_branch
 
-  if not opts.branch or opts.branch == 'HEAD':
-    print('fatal: Cannot perform git-upstream-diff while not on a branch')
-    return 1
+    if not opts.branch or opts.branch == 'HEAD':
+        print('fatal: Cannot perform git-upstream-diff while not on a branch')
+        return 1
 
-  par = git.upstream(opts.branch)
-  if not par:
-    print('fatal: No upstream configured for branch \'%s\'' % opts.branch)
-    return 1
+    par = git.upstream(opts.branch)
+    if not par:
+        print('fatal: No upstream configured for branch \'%s\'' % opts.branch)
+        return 1
 
-  cmd = [git.GIT_EXE, '-c', 'core.quotePath=false',
-         'diff', '--patience', '-C', '-C']
-  if opts.wordwise:
-    cmd += ['--word-diff=color', r'--word-diff-regex=(\w+|[^[:space:]])']
-  cmd += [git.get_or_create_merge_base(opts.branch, par)]
-  # Only specify the end commit if it is not the current branch, this lets the
-  # diff include uncommitted changes when diffing the current branch.
-  if opts.branch != current_branch:
-    cmd += [opts.branch]
+    cmd = [
+        git.GIT_EXE, '-c', 'core.quotePath=false', 'diff', '--patience', '-C',
+        '-C'
+    ]
+    if opts.wordwise:
+        cmd += ['--word-diff=color', r'--word-diff-regex=(\w+|[^[:space:]])']
+    cmd += [git.get_or_create_merge_base(opts.branch, par)]
+    # Only specify the end commit if it is not the current branch; this lets
+    # the diff include uncommitted changes when diffing the current branch.
+    if opts.branch != current_branch:
+        cmd += [opts.branch]
 
-  cmd += extra_args
+    cmd += extra_args
 
-  return subprocess2.check_call(cmd)
+    return subprocess2.check_call(cmd)
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
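
The command assembled above always diffs from the merge base with the upstream;
the branch name is appended only when diffing a branch other than the current
one, so uncommitted changes are included for the current branch. A rough sketch
of the resulting invocation ('deadbeef' and 'feature' are placeholder values):

cmd = [
    'git', '-c', 'core.quotePath=false', 'diff', '--patience', '-C', '-C',
    # Added only with --wordwise:
    '--word-diff=color', r'--word-diff-regex=(\w+|[^[:space:]])',
    'deadbeef',  # merge base of the branch and its upstream
    'feature',   # omitted when diffing the current branch
]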

+ 47 - 45
gn.py

@@ -2,7 +2,6 @@
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """This script is a wrapper around the GN binary that is pulled from Google
 Cloud Storage when you sync Chrome. The binaries go into platform-specific
 subdirectories in the source tree.
@@ -21,57 +20,60 @@ import sys
 
 
 def PruneVirtualEnv():
-  # Set by VirtualEnv, no need to keep it.
-  os.environ.pop('VIRTUAL_ENV', None)
+    # Set by VirtualEnv, no need to keep it.
+    os.environ.pop('VIRTUAL_ENV', None)
 
-  # Set by VPython, if scripts want it back they have to set it explicitly.
-  os.environ.pop('PYTHONNOUSERSITE', None)
+    # Set by VPython, if scripts want it back they have to set it explicitly.
+    os.environ.pop('PYTHONNOUSERSITE', None)
 
-  # Look for "activate_this.py" in this path, which is installed by VirtualEnv.
-  # This mechanism is used by vpython as well to sanitize VirtualEnvs from
-  # $PATH.
-  os.environ['PATH'] = os.pathsep.join([
-    p for p in os.environ.get('PATH', '').split(os.pathsep)
-    if not os.path.isfile(os.path.join(p, 'activate_this.py'))
-  ])
+    # Look for "activate_this.py" in this path, which is installed by
+    # VirtualEnv. This mechanism is used by vpython as well to sanitize
+    # VirtualEnvs from $PATH.
+    os.environ['PATH'] = os.pathsep.join([
+        p for p in os.environ.get('PATH', '').split(os.pathsep)
+        if not os.path.isfile(os.path.join(p, 'activate_this.py'))
+    ])
 
 
 def main(args):
-  # Prune all evidence of VPython/VirtualEnv out of the environment. This means
-  # that we 'unwrap' vpython VirtualEnv path/env manipulation. Invocations of
-  # `python` from GN should never inherit the gn.py's own VirtualEnv. This also
-  # helps to ensure that generated ninja files do not reference python.exe from
-  # the VirtualEnv generated from depot_tools' own .vpython file (or lack
-  # thereof), but instead reference the default python from the PATH.
-  PruneVirtualEnv()
+    # Prune all evidence of VPython/VirtualEnv out of the environment. This
+    # means that we 'unwrap' vpython VirtualEnv path/env manipulation.
+    # Invocations of `python` from GN should never inherit the gn.py's own
+    # VirtualEnv. This also helps to ensure that generated ninja files do not
+    # reference python.exe from the VirtualEnv generated from depot_tools' own
+    # .vpython file (or lack thereof), but instead reference the default python
+    # from the PATH.
+    PruneVirtualEnv()
 
-  # Try in primary solution location first, with the gn binary having been
-  # downloaded by cipd in the projects DEPS.
-  primary_solution_path = gclient_paths.GetPrimarySolutionPath()
-  if primary_solution_path:
-    gn_path = os.path.join(primary_solution_path, 'third_party',
-                           'gn', 'gn' + gclient_paths.GetExeSuffix())
-    if os.path.exists(gn_path):
-      return subprocess.call([gn_path] + args[1:])
+    # Try in primary solution location first, with the gn binary having been
+    # downloaded by cipd in the project's DEPS.
+    primary_solution_path = gclient_paths.GetPrimarySolutionPath()
+    if primary_solution_path:
+        gn_path = os.path.join(primary_solution_path, 'third_party', 'gn',
+                               'gn' + gclient_paths.GetExeSuffix())
+        if os.path.exists(gn_path):
+            return subprocess.call([gn_path] + args[1:])
 
-  # Otherwise try the old .sha1 and download_from_google_storage locations
-  # inside of buildtools.
-  bin_path = gclient_paths.GetBuildtoolsPlatformBinaryPath()
-  if not bin_path:
-    print('gn.py: Could not find checkout in any parent of the current path.\n'
-          'This must be run inside a checkout.', file=sys.stderr)
-    return 1
-  gn_path = os.path.join(bin_path, 'gn' + gclient_paths.GetExeSuffix())
-  if not os.path.exists(gn_path):
-    print(
-        'gn.py: Could not find gn executable at: %s' % gn_path, file=sys.stderr)
-    return 2
-  return subprocess.call([gn_path] + args[1:])
+    # Otherwise try the old .sha1 and download_from_google_storage locations
+    # inside of buildtools.
+    bin_path = gclient_paths.GetBuildtoolsPlatformBinaryPath()
+    if not bin_path:
+        print(
+            'gn.py: Could not find checkout in any parent of the current '
+            'path.\nThis must be run inside a checkout.',
+            file=sys.stderr)
+        return 1
+    gn_path = os.path.join(bin_path, 'gn' + gclient_paths.GetExeSuffix())
+    if not os.path.exists(gn_path):
+        print('gn.py: Could not find gn executable at: %s' % gn_path,
+              file=sys.stderr)
+        return 2
+    return subprocess.call([gn_path] + args[1:])
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
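
main() above resolves the gn binary in two steps: first the cipd-managed copy
under the primary solution's third_party/gn, then the legacy buildtools
location. A condensed sketch of that lookup, with the two path arguments
standing in for the gclient_paths helpers:

import os

def find_gn(primary_solution_path, buildtools_bin_path, exe_suffix=''):
    # 1) Binary fetched by cipd via the project's DEPS.
    if primary_solution_path:
        gn = os.path.join(primary_solution_path, 'third_party', 'gn',
                          'gn' + exe_suffix)
        if os.path.exists(gn):
            return gn
    # 2) Older download_from_google_storage location under buildtools.
    if buildtools_bin_path:
        gn = os.path.join(buildtools_bin_path, 'gn' + exe_suffix)
        if os.path.exists(gn):
            return gn
    return None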

+ 222 - 219
gsutil.py

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Run a pinned gsutil."""
 
 from __future__ import print_function
@@ -22,7 +21,6 @@ import urllib.request
 
 import zipfile
 
-
 GSUTIL_URL = 'https://storage.googleapis.com/pub/'
 API_URL = 'https://www.googleapis.com/storage/v1/b/pub/o/'
 
@@ -41,273 +39,278 @@ LUCI_AUTH_SCOPES = [
 
 
 class InvalidGsutilError(Exception):
-  pass
+    pass
 
 
 def download_gsutil(version, target_dir):
-  """Downloads gsutil into the target_dir."""
-  filename = 'gsutil_%s.zip' % version
-  target_filename = os.path.join(target_dir, filename)
-
-  # Check if the target exists already.
-  if os.path.exists(target_filename):
-    md5_calc = hashlib.md5()
-    with open(target_filename, 'rb') as f:
-      while True:
-        buf = f.read(4096)
-        if not buf:
-          break
-        md5_calc.update(buf)
-    local_md5 = md5_calc.hexdigest()
-
-    metadata_url = '%s%s' % (API_URL, filename)
-    metadata = json.load(urllib.request.urlopen(metadata_url))
-    remote_md5 = base64.b64decode(metadata['md5Hash']).decode('utf-8')
-
-    if local_md5 == remote_md5:
-      return target_filename
-    os.remove(target_filename)
-
-  # Do the download.
-  url = '%s%s' % (GSUTIL_URL, filename)
-  u = urllib.request.urlopen(url)
-  with open(target_filename, 'wb') as f:
-    while True:
-      buf = u.read(4096)
-      if not buf:
-        break
-      f.write(buf)
-  return target_filename
+    """Downloads gsutil into the target_dir."""
+    filename = 'gsutil_%s.zip' % version
+    target_filename = os.path.join(target_dir, filename)
+
+    # Check if the target exists already.
+    if os.path.exists(target_filename):
+        md5_calc = hashlib.md5()
+        with open(target_filename, 'rb') as f:
+            while True:
+                buf = f.read(4096)
+                if not buf:
+                    break
+                md5_calc.update(buf)
+        local_md5 = md5_calc.hexdigest()
+
+        metadata_url = '%s%s' % (API_URL, filename)
+        metadata = json.load(urllib.request.urlopen(metadata_url))
+        remote_md5 = base64.b64decode(metadata['md5Hash']).decode('utf-8')
+
+        if local_md5 == remote_md5:
+            return target_filename
+        os.remove(target_filename)
+
+    # Do the download.
+    url = '%s%s' % (GSUTIL_URL, filename)
+    u = urllib.request.urlopen(url)
+    with open(target_filename, 'wb') as f:
+        while True:
+            buf = u.read(4096)
+            if not buf:
+                break
+            f.write(buf)
+    return target_filename
 
 
 @contextlib.contextmanager
 def temporary_directory(base):
-  tmpdir = tempfile.mkdtemp(prefix='t', dir=base)
-  try:
-    yield tmpdir
-  finally:
-    if os.path.isdir(tmpdir):
-      shutil.rmtree(tmpdir)
-
-
-def ensure_gsutil(version, target, clean):
-  bin_dir = os.path.join(target, 'gsutil_%s' % version)
-  gsutil_bin = os.path.join(bin_dir, 'gsutil', 'gsutil')
-  gsutil_flag = os.path.join(bin_dir, 'gsutil', 'install.flag')
-  # We assume that if gsutil_flag exists, then we have a good version
-  # of the gsutil package.
-  if not clean and os.path.isfile(gsutil_flag):
-    # Everything is awesome! we're all done here.
-    return gsutil_bin
-
-  if not os.path.exists(target):
-    try:
-      os.makedirs(target)
-    except FileExistsError:
-      # Another process is prepping workspace, so let's check if gsutil_bin is
-      # present.  If after several checks it's still not, continue with
-      # downloading gsutil.
-      delay = 2  # base delay, in seconds
-      for _ in range(3):  # make N attempts
-        # sleep first as it's not expected to have file ready just yet.
-        time.sleep(delay)
-        delay *= 1.5  # next delay increased by that factor
-        if os.path.isfile(gsutil_bin):
-          return gsutil_bin
-
-  with temporary_directory(target) as instance_dir:
-    # Clean up if we're redownloading a corrupted gsutil.
-    cleanup_path = os.path.join(instance_dir, 'clean')
+    tmpdir = tempfile.mkdtemp(prefix='t', dir=base)
     try:
-      os.rename(bin_dir, cleanup_path)
-    except (OSError, IOError):
-      cleanup_path = None
-    if cleanup_path:
-      shutil.rmtree(cleanup_path)
+        yield tmpdir
+    finally:
+        if os.path.isdir(tmpdir):
+            shutil.rmtree(tmpdir)
 
-    download_dir = os.path.join(instance_dir, 'd')
-    target_zip_filename = download_gsutil(version, instance_dir)
-    with zipfile.ZipFile(target_zip_filename, 'r') as target_zip:
-      target_zip.extractall(download_dir)
 
-    shutil.move(download_dir, bin_dir)
-    # Final check that the gsutil bin exists.  This should never fail.
-    if not os.path.isfile(gsutil_bin):
-      raise InvalidGsutilError()
-    # Drop a flag file.
-    with open(gsutil_flag, 'w') as f:
-      f.write('This flag file is dropped by gsutil.py')
+def ensure_gsutil(version, target, clean):
+    bin_dir = os.path.join(target, 'gsutil_%s' % version)
+    gsutil_bin = os.path.join(bin_dir, 'gsutil', 'gsutil')
+    gsutil_flag = os.path.join(bin_dir, 'gsutil', 'install.flag')
+    # We assume that if gsutil_flag exists, then we have a good version
+    # of the gsutil package.
+    if not clean and os.path.isfile(gsutil_flag):
+        # Everything is awesome! We're all done here.
+        return gsutil_bin
+
+    if not os.path.exists(target):
+        try:
+            os.makedirs(target)
+        except FileExistsError:
+            # Another process is prepping workspace, so let's check if
+            # gsutil_bin is present.  If after several checks it's still not,
+            # continue with downloading gsutil.
+            delay = 2  # base delay, in seconds
+            for _ in range(3):  # make N attempts
+                # Sleep first; the file is not expected to be ready just yet.
+                time.sleep(delay)
+                delay *= 1.5  # next delay increased by that factor
+                if os.path.isfile(gsutil_bin):
+                    return gsutil_bin
+
+    with temporary_directory(target) as instance_dir:
+        # Clean up if we're redownloading a corrupted gsutil.
+        cleanup_path = os.path.join(instance_dir, 'clean')
+        try:
+            os.rename(bin_dir, cleanup_path)
+        except (OSError, IOError):
+            cleanup_path = None
+        if cleanup_path:
+            shutil.rmtree(cleanup_path)
+
+        download_dir = os.path.join(instance_dir, 'd')
+        target_zip_filename = download_gsutil(version, instance_dir)
+        with zipfile.ZipFile(target_zip_filename, 'r') as target_zip:
+            target_zip.extractall(download_dir)
+
+        shutil.move(download_dir, bin_dir)
+        # Final check that the gsutil bin exists.  This should never fail.
+        if not os.path.isfile(gsutil_bin):
+            raise InvalidGsutilError()
+        # Drop a flag file.
+        with open(gsutil_flag, 'w') as f:
+            f.write('This flag file is dropped by gsutil.py')
 
-  return gsutil_bin
+    return gsutil_bin
 
 
 def _is_luci_context():
-  """Returns True if the script is run within luci-context"""
-  if os.getenv('SWARMING_HEADLESS') == '1':
-    return True
+    """Returns True if the script is run within luci-context"""
+    if os.getenv('SWARMING_HEADLESS') == '1':
+        return True
 
-  luci_context_env = os.getenv('LUCI_CONTEXT')
-  if not luci_context_env:
-    return False
+    luci_context_env = os.getenv('LUCI_CONTEXT')
+    if not luci_context_env:
+        return False
 
-  try:
-    with open(luci_context_env) as f:
-      luci_context_json = json.load(f)
-      return 'local_auth' in luci_context_json
-  except (ValueError, FileNotFoundError):
-    return False
+    try:
+        with open(luci_context_env) as f:
+            luci_context_json = json.load(f)
+            return 'local_auth' in luci_context_json
+    except (ValueError, FileNotFoundError):
+        return False
 
 
 def luci_context(cmd):
-  """Helper to call`luci-auth context`."""
-  p = _luci_auth_cmd('context', wrapped_cmds=cmd)
+    """Helper to call`luci-auth context`."""
+    p = _luci_auth_cmd('context', wrapped_cmds=cmd)
 
-  # If luci-auth is not logged in, fallback to normal execution.
-  if b'Not logged in.' in p.stderr:
-    return _run_subprocess(cmd, interactive=True)
+    # If luci-auth is not logged in, fall back to normal execution.
+    if b'Not logged in.' in p.stderr:
+        return _run_subprocess(cmd, interactive=True)
 
-  _print_subprocess_result(p)
-  return p
+    _print_subprocess_result(p)
+    return p
 
 
 def luci_login():
-  """Helper to run `luci-auth login`."""
-  # luci-auth requires interactive shell.
-  return _luci_auth_cmd('login', interactive=True)
+    """Helper to run `luci-auth login`."""
+    # luci-auth requires interactive shell.
+    return _luci_auth_cmd('login', interactive=True)
 
 
 def _luci_auth_cmd(luci_cmd, wrapped_cmds=None, interactive=False):
-  """Helper to call luci-auth command."""
-  cmd = ['luci-auth', luci_cmd, '-scopes', ' '.join(LUCI_AUTH_SCOPES)]
-  if wrapped_cmds:
-    cmd += ['--'] + wrapped_cmds
+    """Helper to call luci-auth command."""
+    cmd = ['luci-auth', luci_cmd, '-scopes', ' '.join(LUCI_AUTH_SCOPES)]
+    if wrapped_cmds:
+        cmd += ['--'] + wrapped_cmds
 
-  return _run_subprocess(cmd, interactive)
+    return _run_subprocess(cmd, interactive)
 
 
 def _run_subprocess(cmd, interactive=False, env=None):
-  """Wrapper to run the given command within a subprocess."""
-  kwargs = {'shell': IS_WINDOWS}
+    """Wrapper to run the given command within a subprocess."""
+    kwargs = {'shell': IS_WINDOWS}
 
-  if env:
-    kwargs['env'] = dict(os.environ, **env)
+    if env:
+        kwargs['env'] = dict(os.environ, **env)
 
-  if not interactive:
-    kwargs['stdout'] = subprocess.PIPE
-    kwargs['stderr'] = subprocess.PIPE
+    if not interactive:
+        kwargs['stdout'] = subprocess.PIPE
+        kwargs['stderr'] = subprocess.PIPE
 
-  return subprocess.run(cmd, **kwargs)
+    return subprocess.run(cmd, **kwargs)
 
 
 def _print_subprocess_result(p):
-  """Prints the subprocess result to stdout & stderr."""
-  if p.stdout:
-    sys.stdout.buffer.write(p.stdout)
+    """Prints the subprocess result to stdout & stderr."""
+    if p.stdout:
+        sys.stdout.buffer.write(p.stdout)
 
-  if p.stderr:
-    sys.stderr.buffer.write(p.stderr)
+    if p.stderr:
+        sys.stderr.buffer.write(p.stderr)
 
 
 def is_boto_present():
-  """Returns true if the .boto file is present in the default path."""
-  return os.getenv('BOTO_CONFIG') or os.getenv(
-      'AWS_CREDENTIAL_FILE') or os.path.isfile(
-          os.path.join(os.path.expanduser('~'), '.boto'))
+    """Returns true if the .boto file is present in the default path."""
+    return os.getenv('BOTO_CONFIG') or os.getenv(
+        'AWS_CREDENTIAL_FILE') or os.path.isfile(
+            os.path.join(os.path.expanduser('~'), '.boto'))
 
 
 def run_gsutil(target, args, clean=False):
-  # Redirect gsutil config calls to luci-auth.
-  if 'config' in args:
-    return luci_login().returncode
-
-  gsutil_bin = ensure_gsutil(VERSION, target, clean)
-  args_opt = ['-o', 'GSUtil:software_update_check_period=0']
-
-  if sys.platform == 'darwin':
-    # We are experiencing problems with multiprocessing on MacOS where gsutil.py
-    # may hang.
-    # This behavior is documented in gsutil codebase, and recommendation is to
-    # set GSUtil:parallel_process_count=1.
-    # https://github.com/GoogleCloudPlatform/gsutil/blob/06efc9dc23719fab4fd5fadb506d252bbd3fe0dd/gslib/command.py#L1331
-    # https://github.com/GoogleCloudPlatform/gsutil/issues/1100
-    args_opt.extend(['-o', 'GSUtil:parallel_process_count=1'])
-  if sys.platform == 'cygwin':
-    # This script requires Windows Python, so invoke with depot_tools'
-    # Python.
-    def winpath(path):
-      stdout = subprocess.check_output(['cygpath', '-w', path])
-      return stdout.strip().decode('utf-8', 'replace')
-    cmd = ['python.bat', winpath(__file__)]
-    cmd.extend(args)
-    sys.exit(subprocess.call(cmd))
-  assert sys.platform != 'cygwin'
-
-  cmd = [
-      'vpython3',
-      '-vpython-spec', os.path.join(THIS_DIR, 'gsutil.vpython3'),
-      '--',
-      gsutil_bin
-  ] + args_opt + args
-
-  # When .boto is present, try without additional wrappers and handle specific
-  # errors.
-  if is_boto_present():
-    p = _run_subprocess(cmd)
-
-    # Notify user that their .boto file might be outdated.
-    if b'Your credentials are invalid.' in p.stderr:
-      # Make sure this error message is visible when invoked by gclient runhooks
-      separator = '*' * 80
-      print('\n' + separator + '\n' +
-            'Warning: You might have an outdated .boto file. If this issue '
-            'persists after running `gsutil.py config`, try removing your '
-            '.boto, usually located in your home directory.\n' + separator +
-            '\n',
-            file=sys.stderr)
-
-    _print_subprocess_result(p)
-    return p.returncode
-
-  # Skip wrapping commands if luci-auth is already being
-  if _is_luci_context():
-    return _run_subprocess(cmd, interactive=True).returncode
-
-  # Wrap gsutil with luci-auth context.
-  return luci_context(cmd).returncode
+    # Redirect gsutil config calls to luci-auth.
+    if 'config' in args:
+        return luci_login().returncode
+
+    gsutil_bin = ensure_gsutil(VERSION, target, clean)
+    args_opt = ['-o', 'GSUtil:software_update_check_period=0']
+
+    if sys.platform == 'darwin':
+        # We are experiencing problems with multiprocessing on MacOS where
+        # gsutil.py may hang. This is documented in the gsutil codebase, and
+        # the recommendation is to set GSUtil:parallel_process_count=1.
+        # https://github.com/GoogleCloudPlatform/gsutil/blob/06efc9dc23719fab4fd5fadb506d252bbd3fe0dd/gslib/command.py#L1331
+        # https://github.com/GoogleCloudPlatform/gsutil/issues/1100
+        args_opt.extend(['-o', 'GSUtil:parallel_process_count=1'])
+    if sys.platform == 'cygwin':
+        # This script requires Windows Python, so invoke with depot_tools'
+        # Python.
+        def winpath(path):
+            stdout = subprocess.check_output(['cygpath', '-w', path])
+            return stdout.strip().decode('utf-8', 'replace')
+
+        cmd = ['python.bat', winpath(__file__)]
+        cmd.extend(args)
+        sys.exit(subprocess.call(cmd))
+    assert sys.platform != 'cygwin'
+
+    cmd = [
+        'vpython3', '-vpython-spec',
+        os.path.join(THIS_DIR, 'gsutil.vpython3'), '--', gsutil_bin
+    ] + args_opt + args
+
+    # When .boto is present, try without additional wrappers and handle specific
+    # errors.
+    if is_boto_present():
+        p = _run_subprocess(cmd)
+
+        # Notify user that their .boto file might be outdated.
+        if b'Your credentials are invalid.' in p.stderr:
+            # Make sure this error message is visible when invoked by gclient
+            # runhooks
+            separator = '*' * 80
+            print(
+                '\n' + separator + '\n' +
+                'Warning: You might have an outdated .boto file. If this issue '
+                'persists after running `gsutil.py config`, try removing your '
+                '.boto, usually located in your home directory.\n' + separator +
+                '\n',
+                file=sys.stderr)
+
+        _print_subprocess_result(p)
+        return p.returncode
+
+    # Skip wrapping the command if a luci-auth context is already active.
+    if _is_luci_context():
+        return _run_subprocess(cmd, interactive=True).returncode
+
+    # Wrap gsutil with luci-auth context.
+    return luci_context(cmd).returncode
 
 
 def parse_args():
-  bin_dir = os.environ.get('DEPOT_TOOLS_GSUTIL_BIN_DIR', DEFAULT_BIN_DIR)
-
-  # Help is disabled as it conflicts with gsutil -h, which controls headers.
-  parser = argparse.ArgumentParser(add_help=False)
-
-  parser.add_argument('--clean', action='store_true',
-      help='Clear any existing gsutil package, forcing a new download.')
-  parser.add_argument('--target', default=bin_dir,
-      help='The target directory to download/store a gsutil version in. '
-           '(default is %(default)s).')
-
-  # These two args exist for backwards-compatibility but are no-ops.
-  parser.add_argument('--force-version', default=VERSION,
-                      help='(deprecated, this flag has no effect)')
-  parser.add_argument('--fallback',
-                      help='(deprecated, this flag has no effect)')
-
-  parser.add_argument('args', nargs=argparse.REMAINDER)
-
-  args, extras = parser.parse_known_args()
-  if args.args and args.args[0] == '--':
-    args.args.pop(0)
-  if extras:
-    args.args = extras + args.args
-  return args
+    bin_dir = os.environ.get('DEPOT_TOOLS_GSUTIL_BIN_DIR', DEFAULT_BIN_DIR)
+
+    # Help is disabled as it conflicts with gsutil -h, which controls headers.
+    parser = argparse.ArgumentParser(add_help=False)
+
+    parser.add_argument(
+        '--clean',
+        action='store_true',
+        help='Clear any existing gsutil package, forcing a new download.')
+    parser.add_argument(
+        '--target',
+        default=bin_dir,
+        help='The target directory to download/store a gsutil version in. '
+        '(default is %(default)s).')
+
+    # These two args exist for backwards-compatibility but are no-ops.
+    parser.add_argument('--force-version',
+                        default=VERSION,
+                        help='(deprecated, this flag has no effect)')
+    parser.add_argument('--fallback',
+                        help='(deprecated, this flag has no effect)')
+
+    parser.add_argument('args', nargs=argparse.REMAINDER)
+
+    args, extras = parser.parse_known_args()
+    if args.args and args.args[0] == '--':
+        args.args.pop(0)
+    if extras:
+        args.args = extras + args.args
+    return args
 
 
 def main():
-  args = parse_args()
-  return run_gsutil(args.target, args.args, clean=args.clean)
+    args = parse_args()
+    return run_gsutil(args.target, args.args, clean=args.clean)
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
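
When the target directory already exists because another process is installing
gsutil, ensure_gsutil() above polls for the binary with sleeps of 2, 3 and 4.5
seconds before falling through to its own download. A minimal sketch of that
wait loop, with a generic predicate standing in for os.path.isfile(gsutil_bin):

import time

def wait_for(predicate, attempts=3, delay=2.0, factor=1.5):
    # Sleep first; the other process is unlikely to be finished immediately.
    for _ in range(attempts):
        time.sleep(delay)
        delay *= factor
        if predicate():
            return True
    return False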

+ 34 - 34
infra_to_superproject.py

@@ -23,44 +23,44 @@ import shutil
 
 
 def main(argv):
-  source = os.getcwd()
+    source = os.getcwd()
 
-  parser = argparse.ArgumentParser(description=__doc__.strip().splitlines()[0],
-                                   epilog=' '.join(
-                                       __doc__.strip().splitlines()[1:]))
-  parser.add_argument('-n',
-                      '--no-backup',
-                      action='store_true',
-                      help='NOT RECOMMENDED. Skips copying the current '
-                      'checkout (which can take up to ~15 min) to '
-                      'a backup before starting the migration.')
-  args = parser.parse_args(argv)
+    parser = argparse.ArgumentParser(
+        description=__doc__.strip().splitlines()[0],
+        epilog=' '.join(__doc__.strip().splitlines()[1:]))
+    parser.add_argument('-n',
+                        '--no-backup',
+                        action='store_true',
+                        help='NOT RECOMMENDED. Skips copying the current '
+                        'checkout (which can take up to ~15 min) to '
+                        'a backup before starting the migration.')
+    args = parser.parse_args(argv)
 
-  if not args.no_backup:
-    backup = source + '_backup'
-    print(f'Creating backup in {backup}')
-    print('May take up to ~15 minutes...')
-    shutil.copytree(source, backup, symlinks=True, dirs_exist_ok=True)
-    print('backup complete')
+    if not args.no_backup:
+        backup = source + '_backup'
+        print(f'Creating backup in {backup}')
+        print('May take up to ~15 minutes...')
+        shutil.copytree(source, backup, symlinks=True, dirs_exist_ok=True)
+        print('backup complete')
 
-  print(f'Deleting old {source}/.gclient file')
-  gclient_file = os.path.join(source, '.gclient')
-  with open(gclient_file, 'r') as file:
-    data = file.read()
-    internal = "infra_internal" in data
-  os.remove(gclient_file)
+    print(f'Deleting old {source}/.gclient file')
+    gclient_file = os.path.join(source, '.gclient')
+    with open(gclient_file, 'r') as file:
+        data = file.read()
+        internal = "infra_internal" in data
+    os.remove(gclient_file)
 
-  print('Migrating to infra/infra_superproject')
-  cmds = ['fetch', '--force']
-  if internal:
-    cmds.append('infra_internal')
-    print('including internal code in checkout')
-  else:
-    cmds.append('infra')
-  shell = sys.platform == 'win32'
-  fetch = subprocess.Popen(cmds, cwd=source, shell=shell)
-  fetch.wait()
+    print('Migrating to infra/infra_superproject')
+    cmds = ['fetch', '--force']
+    if internal:
+        cmds.append('infra_internal')
+        print('including internal code in checkout')
+    else:
+        cmds.append('infra')
+    shell = sys.platform == 'win32'
+    fetch = subprocess.Popen(cmds, cwd=source, shell=shell)
+    fetch.wait()
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+    sys.exit(main(sys.argv[1:]))

+ 0 - 1
isort

@@ -19,7 +19,6 @@
 from pathlib import Path
 import sys
 
-
 THIS_DIR = Path(__file__).resolve().parent
 RC_FILE = THIS_DIR / '.isort.cfg'
 

+ 75 - 74
lockfile.py

@@ -13,94 +13,95 @@ import time
 
 
 class LockError(Exception):
-  pass
+    pass
 
 
 if sys.platform.startswith('win'):
-  # Windows implementation
-  import win32imports
-
-  BYTES_TO_LOCK = 1
-
-  def _open_file(lockfile):
-    return win32imports.Handle(
-        win32imports.CreateFileW(
-            lockfile,  # lpFileName
-            win32imports.GENERIC_WRITE,  # dwDesiredAccess
-            0,  # dwShareMode=prevent others from opening file
-            None,  # lpSecurityAttributes
-            win32imports.CREATE_ALWAYS,  # dwCreationDisposition
-            win32imports.FILE_ATTRIBUTE_NORMAL,  # dwFlagsAndAttributes
-            None  # hTemplateFile
-        ))
-
-  def _close_file(handle):
-    # CloseHandle releases lock too.
-    win32imports.CloseHandle(handle)
-
-  def _lock_file(handle):
-    ret = win32imports.LockFileEx(
-        handle,  # hFile
-        win32imports.LOCKFILE_FAIL_IMMEDIATELY
-        | win32imports.LOCKFILE_EXCLUSIVE_LOCK,  # dwFlags
-        0,  #dwReserved
-        BYTES_TO_LOCK,  # nNumberOfBytesToLockLow
-        0,  # nNumberOfBytesToLockHigh
-        win32imports.Overlapped()  # lpOverlapped
-    )
-    # LockFileEx returns result as bool, which is converted into an integer
-    # (1 == successful; 0 == not successful)
-    if ret == 0:
-      error_code = win32imports.GetLastError()
-      raise OSError('Failed to lock handle (error code: %d).' % error_code)
+    # Windows implementation
+    import win32imports
+
+    BYTES_TO_LOCK = 1
+
+    def _open_file(lockfile):
+        return win32imports.Handle(
+            win32imports.CreateFileW(
+                lockfile,  # lpFileName
+                win32imports.GENERIC_WRITE,  # dwDesiredAccess
+                0,  # dwShareMode=prevent others from opening file
+                None,  # lpSecurityAttributes
+                win32imports.CREATE_ALWAYS,  # dwCreationDisposition
+                win32imports.FILE_ATTRIBUTE_NORMAL,  # dwFlagsAndAttributes
+                None  # hTemplateFile
+            ))
+
+    def _close_file(handle):
+        # CloseHandle releases lock too.
+        win32imports.CloseHandle(handle)
+
+    def _lock_file(handle):
+        ret = win32imports.LockFileEx(
+            handle,  # hFile
+            win32imports.LOCKFILE_FAIL_IMMEDIATELY
+            | win32imports.LOCKFILE_EXCLUSIVE_LOCK,  # dwFlags
+            0,  # dwReserved
+            BYTES_TO_LOCK,  # nNumberOfBytesToLockLow
+            0,  # nNumberOfBytesToLockHigh
+            win32imports.Overlapped()  # lpOverlapped
+        )
+        # LockFileEx returns result as bool, which is converted into an integer
+        # (1 == successful; 0 == not successful)
+        if ret == 0:
+            error_code = win32imports.GetLastError()
+            raise OSError('Failed to lock handle (error code: %d).' %
+                          error_code)
 else:
-  # Unix implementation
-  import fcntl
+    # Unix implementation
+    import fcntl
 
-  def _open_file(lockfile):
-    open_flags = (os.O_CREAT | os.O_WRONLY)
-    return os.open(lockfile, open_flags, 0o644)
+    def _open_file(lockfile):
+        open_flags = (os.O_CREAT | os.O_WRONLY)
+        return os.open(lockfile, open_flags, 0o644)
 
-  def _close_file(fd):
-    os.close(fd)
+    def _close_file(fd):
+        os.close(fd)
 
-  def _lock_file(fd):
-    fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+    def _lock_file(fd):
+        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
 
 
 def _try_lock(lockfile):
-  f = _open_file(lockfile)
-  try:
-    _lock_file(f)
-  except Exception:
-    _close_file(f)
-    raise
-  return lambda: _close_file(f)
+    f = _open_file(lockfile)
+    try:
+        _lock_file(f)
+    except Exception:
+        _close_file(f)
+        raise
+    return lambda: _close_file(f)
 
 
 def _lock(path, timeout=0):
-  """_lock returns function to release the lock if locking was successful.
+    """_lock returns function to release the lock if locking was successful.
 
   _lock also implements simple retry logic."""
-  elapsed = 0
-  while True:
-    try:
-      return _try_lock(path + '.locked')
-    except (OSError, IOError) as e:
-      if elapsed < timeout:
-        sleep_time = min(10, timeout - elapsed)
-        logging.info(
-            'Could not create git cache lockfile; '
-            'will retry after sleep(%d).', sleep_time)
-        elapsed += sleep_time
-        time.sleep(sleep_time)
-        continue
-      raise LockError("Error locking %s (err: %s)" % (path, str(e)))
+    elapsed = 0
+    while True:
+        try:
+            return _try_lock(path + '.locked')
+        except (OSError, IOError) as e:
+            if elapsed < timeout:
+                sleep_time = min(10, timeout - elapsed)
+                logging.info(
+                    'Could not create git cache lockfile; '
+                    'will retry after sleep(%d).', sleep_time)
+                elapsed += sleep_time
+                time.sleep(sleep_time)
+                continue
+            raise LockError("Error locking %s (err: %s)" % (path, str(e)))
 
 
 @contextlib.contextmanager
 def lock(path, timeout=0):
-  """Get exclusive lock to path.
+    """Get exclusive lock to path.
 
   Usage:
     import lockfile
@@ -109,8 +110,8 @@ def lock(path, timeout=0):
       pass
 
    """
-  release_fn = _lock(path, timeout)
-  try:
-    yield
-  finally:
-    release_fn()
+    release_fn = _lock(path, timeout)
+    try:
+        yield
+    finally:
+        release_fn()
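
The public entry point is the lock() context manager shown above; it locks
'<path>.locked' and retries for up to timeout seconds. A short usage sketch
(the cache path is illustrative):

import lockfile

try:
    with lockfile.lock('/path/to/cache/repo', timeout=30):
        pass  # exclusive access to the '.locked' file is held in this block
except lockfile.LockError as exc:
    print('could not take the lock: %s' % exc)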

+ 104 - 103
man/src/filter_demo_output.py

@@ -14,122 +14,123 @@ from xml.sax.saxutils import escape
 from io import StringIO
 
 if not os.path.exists('ansi2html'):
-  print('You must run ./make_docs.sh once before running this script.')
-  sys.exit(1)
+    print('You must run ./make_docs.sh once before running this script.')
+    sys.exit(1)
 
 # This dependency is pulled in by make_docs.sh
 # if it doesn't exist, run ./make_docs.sh first
 sys.path.insert(0, 'ansi2html')
 
-import ansi2html            # pylint: disable=import-error, W0611
+import ansi2html  # pylint: disable=import-error, W0611
 import ansi2html.converter  # pylint: disable=import-error, W0611
 
-def simpleXML(string):
-  BRIGHT = 1
-  DIM    = 2
-  NORMAL = 22
-  RESET  = 0
-  ESC_RE = re.compile('(\x1B\\[[^m]*?)m')
-
-  ret = StringIO()
-  boldstate = False
-
-  for tok in ESC_RE.split(string):
-    if not tok:
-      continue
-    if tok[0] == '\x1b':
-      codes = map(int, filter(bool, tok[2:].split(';')))
-      if not codes:
-        codes = [RESET]
-      for code in codes:
-        # only care about Bright
-        if code == BRIGHT and boldstate is False:
-          boldstate = True
-          ret.write('<emphasis role="strong">')
-        elif code in (DIM, NORMAL, RESET) and boldstate:
-          boldstate = False
-          ret.write('</emphasis>')
-    else:
-      ret.write(escape(tok))
-
-  if boldstate:
-    ret.write('</emphasis>')
 
-  return ret.getvalue()
+def simpleXML(string):
+    BRIGHT = 1
+    DIM = 2
+    NORMAL = 22
+    RESET = 0
+    ESC_RE = re.compile('(\x1B\\[[^m]*?)m')
+
+    ret = StringIO()
+    boldstate = False
+
+    for tok in ESC_RE.split(string):
+        if not tok:
+            continue
+        if tok[0] == '\x1b':
+            codes = map(int, filter(bool, tok[2:].split(';')))
+            if not codes:
+                codes = [RESET]
+            for code in codes:
+                # only care about Bright
+                if code == BRIGHT and boldstate is False:
+                    boldstate = True
+                    ret.write('<emphasis role="strong">')
+                elif code in (DIM, NORMAL, RESET) and boldstate:
+                    boldstate = False
+                    ret.write('</emphasis>')
+        else:
+            ret.write(escape(tok))
+
+    if boldstate:
+        ret.write('</emphasis>')
+
+    return ret.getvalue()
 
 
 def main():
-  backend = sys.argv[1]
-  output = sys.stdin.read().rstrip()
-
-  callout_re = re.compile(r'\x1b\[(\d+)c\n')
-  callouts = collections.defaultdict(int)
-  for i, line in enumerate(output.splitlines(True)):
-    m = callout_re.match(line)
-    if m:
-      callouts[i + int(m.group(1)) - len(callouts)] += 1
-
-  output = callout_re.sub('', output)
-
-  w = sys.stdout.write
-
-  comment_marker = '###COMMENT###'
-
-  callout_counter = 1
-  if backend == 'xhtml11':
-    preamble = (
-        '</p></div><div class="listingblock"><div class="content"><pre><code>'
-    )
-    postamble = '</code></pre></div></div><p><div class="paragraph">'
-    c = ansi2html.Ansi2HTMLConverter(inline=True, scheme='dracula')
-
-    in_code = False
-    body = c.convert(output, full=False)
-    for i, line in enumerate(body.splitlines()):
-      if line.startswith(comment_marker):
+    backend = sys.argv[1]
+    output = sys.stdin.read().rstrip()
+
+    callout_re = re.compile(r'\x1b\[(\d+)c\n')
+    callouts = collections.defaultdict(int)
+    for i, line in enumerate(output.splitlines(True)):
+        m = callout_re.match(line)
+        if m:
+            callouts[i + int(m.group(1)) - len(callouts)] += 1
+
+    output = callout_re.sub('', output)
+
+    w = sys.stdout.write
+
+    comment_marker = '###COMMENT###'
+
+    callout_counter = 1
+    if backend == 'xhtml11':
+        preamble = (
+            '</p></div><div class="listingblock"><div class="content"><pre><code>'
+        )
+        postamble = '</code></pre></div></div><p><div class="paragraph">'
+        c = ansi2html.Ansi2HTMLConverter(inline=True, scheme='dracula')
+
+        in_code = False
+        body = c.convert(output, full=False)
+        for i, line in enumerate(body.splitlines()):
+            if line.startswith(comment_marker):
+                if in_code:
+                    w(postamble)
+                    in_code = False
+                w(line[len(comment_marker):])
+            else:
+                if not in_code:
+                    w(preamble)
+                    in_code = True
+                ext = ''
+                for _ in range(callouts[i]):
+                    if not ext:
+                        ext += '</span>'
+                    ext += ' <b>&lt;%d&gt;</b>' % callout_counter
+                    callout_counter += 1
+                if ext:
+                    ext += '<span>'
+                w(line + ext + '\n')
         if in_code:
-          w(postamble)
-          in_code = False
-        w(line[len(comment_marker):])
-      else:
-        if not in_code:
-          w(preamble)
-          in_code = True
-        ext = ''
-        for _ in range(callouts[i]):
-          if not ext:
-            ext += '</span>'
-          ext += ' <b>&lt;%d&gt;</b>' % callout_counter
-          callout_counter += 1
-        if ext:
-          ext += '<span>'
-        w(line + ext + '\n')
-    if in_code:
-      w(postamble)
-  else:
-    preamble = '</simpara><literallayout class="monospaced">'
-    postamble = '</literallayout><simpara>'
-
-    in_code = False
-    body = simpleXML(output)
-    for i, line in enumerate(body.splitlines()):
-      if line.startswith(comment_marker):
+            w(postamble)
+    else:
+        preamble = '</simpara><literallayout class="monospaced">'
+        postamble = '</literallayout><simpara>'
+
+        in_code = False
+        body = simpleXML(output)
+        for i, line in enumerate(body.splitlines()):
+            if line.startswith(comment_marker):
+                if in_code:
+                    w(postamble)
+                    in_code = False
+                w(line[len(comment_marker):])
+            else:
+                if not in_code:
+                    w(preamble)
+                    in_code = True
+                ext = ''
+                for _ in range(callouts[i]):
+                    ext += '  <emphasis role="strong">(%d)</emphasis>' % callout_counter
+                    callout_counter += 1
+                w(line + ext + '\n')
         if in_code:
-          w(postamble)
-          in_code = False
-        w(line[len(comment_marker):])
-      else:
-        if not in_code:
-          w(preamble)
-          in_code = True
-        ext = ''
-        for _ in range(callouts[i]):
-          ext += '  <emphasis role="strong">(%d)</emphasis>' % callout_counter
-          callout_counter += 1
-        w(line + ext + '\n')
-    if in_code:
-      w(postamble)
+            w(postamble)
 
 
 if __name__ == '__main__':
-  main()
+    main()
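
For context on the helper in this file: simpleXML() scans the captured demo output for ANSI escape sequences and wraps each bright/bold span in a DocBook <emphasis> element, XML-escaping everything else. A minimal usage sketch, with an invented sample string and assuming simpleXML is in scope (e.g. run inside man/src/filter_demo_output.py):

# Hypothetical usage sketch of simpleXML().
# '\x1b[1m' turns the bright (bold) attribute on; '\x1b[0m' resets it.
sample = 'plain \x1b[1mbold & bright\x1b[0m plain again'
print(simpleXML(sample))
# Prints (the ampersand is XML-escaped by escape()):
# plain <emphasis role="strong">bold &amp; bright</emphasis> plain again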

+ 0 - 2
metadata/.style.yapf

@@ -1,2 +0,0 @@
-[style]
-based_on_style = pep8

+ 1 - 2
metadata/fields/field_types.py

@@ -58,8 +58,7 @@ class MetadataField:
         Raises: NotImplementedError if called. This method must be
                 overridden with the actual validation of the field.
         """
-        raise NotImplementedError(
-            f"{self._name} field validation not defined.")
+        raise NotImplementedError(f"{self._name} field validation not defined.")
 
 
 class FreeformTextField(MetadataField):

+ 2 - 4
metadata/tests/dependency_metadata_test.py

@@ -52,8 +52,7 @@ class DependencyValidationTest(unittest.TestCase):
         dependency.add_entry(known_fields.LICENSE_FILE.get_name(), "LICENSE")
         dependency.add_entry(known_fields.LICENSE.get_name(), "Public Domain")
         dependency.add_entry(known_fields.VERSION.get_name(), "1.0.0")
-        dependency.add_entry(known_fields.NAME.get_name(),
-                             "Test missing field")
+        dependency.add_entry(known_fields.NAME.get_name(), "Test missing field")
         # Leave URL field unspecified.
 
         results = dependency.validate(
@@ -70,8 +69,7 @@ class DependencyValidationTest(unittest.TestCase):
         dependency = dm.DependencyMetadata()
         dependency.add_entry(known_fields.URL.get_name(),
                              "https://www.example.com")
-        dependency.add_entry(known_fields.NAME.get_name(),
-                             "Test invalid field")
+        dependency.add_entry(known_fields.NAME.get_name(), "Test invalid field")
         dependency.add_entry(known_fields.VERSION.get_name(), "1.0.0")
         dependency.add_entry(known_fields.LICENSE_FILE.get_name(), "LICENSE")
         dependency.add_entry(known_fields.LICENSE.get_name(), "Public domain")

+ 2 - 2
metadata/tests/validate_test.py

@@ -21,8 +21,8 @@ import metadata.validate
 _SOURCE_FILE_DIR = os.path.join(_THIS_DIR, "data")
 _VALID_METADATA_FILEPATH = os.path.join(_THIS_DIR, "data",
                                         "README.chromium.test.multi-valid")
-_INVALID_METADATA_FILEPATH = os.path.join(
-    _THIS_DIR, "data", "README.chromium.test.multi-invalid")
+_INVALID_METADATA_FILEPATH = os.path.join(_THIS_DIR, "data",
+                                          "README.chromium.test.multi-invalid")
 
 
 class ValidateContentTest(unittest.TestCase):

+ 1 - 2
metadata/validate.py

@@ -47,8 +47,7 @@ def validate_content(content: str, source_file_dir: str,
     return results
 
 
-def _construct_file_read_error(filepath: str,
-                               cause: str) -> vr.ValidationError:
+def _construct_file_read_error(filepath: str, cause: str) -> vr.ValidationError:
     """Helper function to create a validation error for a
     file reading issue.
     """

+ 2 - 3
metadata/validation_result.py

@@ -7,9 +7,8 @@ import textwrap
 from typing import Dict, List, Union
 
 _CHROMIUM_METADATA_PRESCRIPT = "Third party metadata issue:"
-_CHROMIUM_METADATA_POSTSCRIPT = (
-    "Check //third_party/README.chromium.template "
-    "for details.")
+_CHROMIUM_METADATA_POSTSCRIPT = ("Check //third_party/README.chromium.template "
+                                 "for details.")
 
 
 class ValidationResult:

+ 273 - 271
metrics.py

@@ -20,7 +20,6 @@ import gclient_utils
 import metrics_utils
 import subprocess2
 
-
 DEPOT_TOOLS = os.path.dirname(os.path.abspath(__file__))
 CONFIG_FILE = os.path.join(DEPOT_TOOLS, 'metrics.cfg')
 UPLOAD_SCRIPT = os.path.join(DEPOT_TOOLS, 'upload_metrics.py')
@@ -32,294 +31,297 @@ DEPOT_TOOLS_ENV = ['DOGFOOD_STACKED_CHANGES']
 
 INVALID_CONFIG_WARNING = (
     'WARNING: Your metrics.cfg file was invalid or nonexistent. A new one will '
-    'be created.'
-)
+    'be created.')
 PERMISSION_DENIED_WARNING = (
     'Could not write the metrics collection config:\n\t%s\n'
-    'Metrics collection will be disabled.'
-)
+    'Metrics collection will be disabled.')
 
 
 class _Config(object):
-  def __init__(self):
-    self._initialized = False
-    self._config = {}
-
-  def _ensure_initialized(self):
-    if self._initialized:
-      return
-
-    # Metrics collection is disabled, so don't collect any metrics.
-    if not metrics_utils.COLLECT_METRICS:
-      self._config = {
-        'is-googler': False,
-        'countdown': 0,
-        'opt-in': False,
-        'version': metrics_utils.CURRENT_VERSION,
-      }
-      self._initialized = True
-      return
-
-    # We are running on a bot. Ignore config and collect metrics.
-    if metrics_utils.REPORT_BUILD:
-      self._config = {
-        'is-googler': True,
-        'countdown': 0,
-        'opt-in': True,
-        'version': metrics_utils.CURRENT_VERSION,
-      }
-      self._initialized = True
-      return
-
-    try:
-      config = json.loads(gclient_utils.FileRead(CONFIG_FILE))
-    except (IOError, ValueError):
-      config = {}
-
-    self._config = config.copy()
-
-    if 'is-googler' not in self._config:
-      # /should-upload is only accessible from Google IPs, so we only need to
-      # check if we can reach the page. An external developer would get access
-      # denied.
-      try:
-        req = urllib.request.urlopen(metrics_utils.APP_URL + '/should-upload')
-        self._config['is-googler'] = req.getcode() == 200
-      except (urllib.request.URLError, urllib.request.HTTPError):
-        self._config['is-googler'] = False
-
-    # Make sure the config variables we need are present, and initialize them to
-    # safe values otherwise.
-    self._config.setdefault('countdown', DEFAULT_COUNTDOWN)
-    self._config.setdefault('opt-in', None)
-    self._config.setdefault('version', metrics_utils.CURRENT_VERSION)
-
-    if config != self._config:
-      print(INVALID_CONFIG_WARNING, file=sys.stderr)
-      self._write_config()
-
-    self._initialized = True
-
-  def _write_config(self):
-    try:
-      gclient_utils.FileWrite(CONFIG_FILE, json.dumps(self._config))
-    except IOError as e:
-      print(PERMISSION_DENIED_WARNING % e, file=sys.stderr)
-      self._config['opt-in'] = False
-
-  @property
-  def version(self):
-    self._ensure_initialized()
-    return self._config['version']
-
-  @property
-  def is_googler(self):
-    self._ensure_initialized()
-    return self._config['is-googler']
-
-  @property
-  def opted_in(self):
-    self._ensure_initialized()
-    return self._config['opt-in']
-
-  @opted_in.setter
-  def opted_in(self, value):
-    self._ensure_initialized()
-    self._config['opt-in'] = value
-    self._config['version'] = metrics_utils.CURRENT_VERSION
-    self._write_config()
-
-  @property
-  def countdown(self):
-    self._ensure_initialized()
-    return self._config['countdown']
-
-  @property
-  def should_collect_metrics(self):
-    # Don't report metrics if user is not a Googler.
-    if not self.is_googler:
-      return False
-    # Don't report metrics if user has opted out.
-    if self.opted_in is False:
-      return False
-    # Don't report metrics if countdown hasn't reached 0.
-    if self.opted_in is None and self.countdown > 0:
-      return False
-    return True
-
-  def decrease_countdown(self):
-    self._ensure_initialized()
-    if self.countdown == 0:
-      return
-    self._config['countdown'] -= 1
-    if self.countdown == 0:
-      self._config['version'] = metrics_utils.CURRENT_VERSION
-    self._write_config()
-
-  def reset_config(self):
-    # Only reset countdown if we're already collecting metrics.
-    if self.should_collect_metrics:
-      self._ensure_initialized()
-      self._config['countdown'] = DEFAULT_COUNTDOWN
-      self._config['opt-in'] = None
+    def __init__(self):
+        self._initialized = False
+        self._config = {}
+
+    def _ensure_initialized(self):
+        if self._initialized:
+            return
+
+        # Metrics collection is disabled, so don't collect any metrics.
+        if not metrics_utils.COLLECT_METRICS:
+            self._config = {
+                'is-googler': False,
+                'countdown': 0,
+                'opt-in': False,
+                'version': metrics_utils.CURRENT_VERSION,
+            }
+            self._initialized = True
+            return
+
+        # We are running on a bot. Ignore config and collect metrics.
+        if metrics_utils.REPORT_BUILD:
+            self._config = {
+                'is-googler': True,
+                'countdown': 0,
+                'opt-in': True,
+                'version': metrics_utils.CURRENT_VERSION,
+            }
+            self._initialized = True
+            return
+
+        try:
+            config = json.loads(gclient_utils.FileRead(CONFIG_FILE))
+        except (IOError, ValueError):
+            config = {}
+
+        self._config = config.copy()
+
+        if 'is-googler' not in self._config:
+            # /should-upload is only accessible from Google IPs, so we only need
+            # to check if we can reach the page. An external developer would get
+            # access denied.
+            try:
+                req = urllib.request.urlopen(metrics_utils.APP_URL +
+                                             '/should-upload')
+                self._config['is-googler'] = req.getcode() == 200
+            except (urllib.request.URLError, urllib.request.HTTPError):
+                self._config['is-googler'] = False
+
+        # Make sure the config variables we need are present, and initialize
+        # them to safe values otherwise.
+        self._config.setdefault('countdown', DEFAULT_COUNTDOWN)
+        self._config.setdefault('opt-in', None)
+        self._config.setdefault('version', metrics_utils.CURRENT_VERSION)
+
+        if config != self._config:
+            print(INVALID_CONFIG_WARNING, file=sys.stderr)
+            self._write_config()
+
+        self._initialized = True
+
+    def _write_config(self):
+        try:
+            gclient_utils.FileWrite(CONFIG_FILE, json.dumps(self._config))
+        except IOError as e:
+            print(PERMISSION_DENIED_WARNING % e, file=sys.stderr)
+            self._config['opt-in'] = False
+
+    @property
+    def version(self):
+        self._ensure_initialized()
+        return self._config['version']
+
+    @property
+    def is_googler(self):
+        self._ensure_initialized()
+        return self._config['is-googler']
+
+    @property
+    def opted_in(self):
+        self._ensure_initialized()
+        return self._config['opt-in']
+
+    @opted_in.setter
+    def opted_in(self, value):
+        self._ensure_initialized()
+        self._config['opt-in'] = value
+        self._config['version'] = metrics_utils.CURRENT_VERSION
+        self._write_config()
+
+    @property
+    def countdown(self):
+        self._ensure_initialized()
+        return self._config['countdown']
+
+    @property
+    def should_collect_metrics(self):
+        # Don't report metrics if user is not a Googler.
+        if not self.is_googler:
+            return False
+        # Don't report metrics if user has opted out.
+        if self.opted_in is False:
+            return False
+        # Don't report metrics if countdown hasn't reached 0.
+        if self.opted_in is None and self.countdown > 0:
+            return False
+        return True
+
+    def decrease_countdown(self):
+        self._ensure_initialized()
+        if self.countdown == 0:
+            return
+        self._config['countdown'] -= 1
+        if self.countdown == 0:
+            self._config['version'] = metrics_utils.CURRENT_VERSION
+        self._write_config()
+
+    def reset_config(self):
+        # Only reset countdown if we're already collecting metrics.
+        if self.should_collect_metrics:
+            self._ensure_initialized()
+            self._config['countdown'] = DEFAULT_COUNTDOWN
+            self._config['opt-in'] = None
 
 
 class MetricsCollector(object):
-  def __init__(self):
-    self._metrics_lock = threading.Lock()
-    self._reported_metrics = {}
-    self._config = _Config()
-    self._collecting_metrics = False
-    self._collect_custom_metrics = True
-
-  @property
-  def config(self):
-    return self._config
-
-  @property
-  def collecting_metrics(self):
-    return self._collecting_metrics
-
-  def add(self, name, value):
-    if self._collect_custom_metrics:
-      with self._metrics_lock:
-        self._reported_metrics[name] = value
-
-  def add_repeated(self, name, value):
-    if self._collect_custom_metrics:
-      with self._metrics_lock:
-        self._reported_metrics.setdefault(name, []).append(value)
-
-  @contextlib.contextmanager
-  def pause_metrics_collection(self):
-    collect_custom_metrics = self._collect_custom_metrics
-    self._collect_custom_metrics = False
-    try:
-      yield
-    finally:
-      self._collect_custom_metrics = collect_custom_metrics
-
-  def _upload_metrics_data(self):
-    """Upload the metrics data to the AppEngine app."""
-    p = subprocess2.Popen(['vpython3', UPLOAD_SCRIPT], stdin=subprocess2.PIPE)
-    # We invoke a subprocess, and use stdin.write instead of communicate(),
-    # so that we are able to return immediately, leaving the upload running in
-    # the background.
-    p.stdin.write(json.dumps(self._reported_metrics).encode('utf-8'))
-    # ... but if we're running on a bot, wait until upload has completed.
-    if metrics_utils.REPORT_BUILD:
-      p.communicate()
-
-  def _collect_metrics(self, func, command_name, *args, **kwargs):
-    # If we're already collecting metrics, just execute the function.
-    # e.g. git-cl split invokes git-cl upload several times to upload each
-    # split CL.
-    if self.collecting_metrics:
-      # Don't collect metrics for this function.
-      # e.g. Don't record the arguments git-cl split passes to git-cl upload.
-      with self.pause_metrics_collection():
-        return func(*args, **kwargs)
-
-    self._collecting_metrics = True
-    self.add('metrics_version', metrics_utils.CURRENT_VERSION)
-    self.add('command', command_name)
-    for env in DEPOT_TOOLS_ENV:
-      if env in os.environ:
-        self.add_repeated('env_vars', {
-            'name': env,
-            'value': os.environ.get(env)
-        })
-
-    try:
-      start = time.time()
-      result = func(*args, **kwargs)
-      exception = None
-    # pylint: disable=bare-except
-    except:
-      exception = sys.exc_info()
-    finally:
-      self.add('execution_time', time.time() - start)
-
-    exit_code = metrics_utils.return_code_from_exception(exception)
-    self.add('exit_code', exit_code)
-
-    # Add metrics regarding environment information.
-    self.add('timestamp', int(time.time()))
-    self.add('python_version', metrics_utils.get_python_version())
-    self.add('host_os', gclient_utils.GetOperatingSystem())
-    self.add('host_arch', detect_host_arch.HostArch())
-
-    depot_tools_age = metrics_utils.get_repo_timestamp(DEPOT_TOOLS)
-    if depot_tools_age is not None:
-      self.add('depot_tools_age', int(depot_tools_age))
-
-    git_version = metrics_utils.get_git_version()
-    if git_version:
-      self.add('git_version', git_version)
-
-    bot_metrics = metrics_utils.get_bot_metrics()
-    if bot_metrics:
-      self.add('bot_metrics', bot_metrics)
-
-    self._upload_metrics_data()
-    if exception:
-      gclient_utils.reraise(exception[0], exception[1], exception[2])
-    return result
-
-  def collect_metrics(self, command_name):
-    """A decorator used to collect metrics over the life of a function.
+    def __init__(self):
+        self._metrics_lock = threading.Lock()
+        self._reported_metrics = {}
+        self._config = _Config()
+        self._collecting_metrics = False
+        self._collect_custom_metrics = True
+
+    @property
+    def config(self):
+        return self._config
+
+    @property
+    def collecting_metrics(self):
+        return self._collecting_metrics
+
+    def add(self, name, value):
+        if self._collect_custom_metrics:
+            with self._metrics_lock:
+                self._reported_metrics[name] = value
+
+    def add_repeated(self, name, value):
+        if self._collect_custom_metrics:
+            with self._metrics_lock:
+                self._reported_metrics.setdefault(name, []).append(value)
+
+    @contextlib.contextmanager
+    def pause_metrics_collection(self):
+        collect_custom_metrics = self._collect_custom_metrics
+        self._collect_custom_metrics = False
+        try:
+            yield
+        finally:
+            self._collect_custom_metrics = collect_custom_metrics
+
+    def _upload_metrics_data(self):
+        """Upload the metrics data to the AppEngine app."""
+        p = subprocess2.Popen(['vpython3', UPLOAD_SCRIPT],
+                              stdin=subprocess2.PIPE)
+        # We invoke a subprocess, and use stdin.write instead of communicate(),
+        # so that we are able to return immediately, leaving the upload running
+        # in the background.
+        p.stdin.write(json.dumps(self._reported_metrics).encode('utf-8'))
+        # ... but if we're running on a bot, wait until upload has completed.
+        if metrics_utils.REPORT_BUILD:
+            p.communicate()
+
+    def _collect_metrics(self, func, command_name, *args, **kwargs):
+        # If we're already collecting metrics, just execute the function.
+        # e.g. git-cl split invokes git-cl upload several times to upload each
+        # split CL.
+        if self.collecting_metrics:
+            # Don't collect metrics for this function.
+            # e.g. Don't record the arguments git-cl split passes to git-cl
+            # upload.
+            with self.pause_metrics_collection():
+                return func(*args, **kwargs)
+
+        self._collecting_metrics = True
+        self.add('metrics_version', metrics_utils.CURRENT_VERSION)
+        self.add('command', command_name)
+        for env in DEPOT_TOOLS_ENV:
+            if env in os.environ:
+                self.add_repeated('env_vars', {
+                    'name': env,
+                    'value': os.environ.get(env)
+                })
+
+        try:
+            start = time.time()
+            result = func(*args, **kwargs)
+            exception = None
+        # pylint: disable=bare-except
+        except:
+            exception = sys.exc_info()
+        finally:
+            self.add('execution_time', time.time() - start)
+
+        exit_code = metrics_utils.return_code_from_exception(exception)
+        self.add('exit_code', exit_code)
+
+        # Add metrics regarding environment information.
+        self.add('timestamp', int(time.time()))
+        self.add('python_version', metrics_utils.get_python_version())
+        self.add('host_os', gclient_utils.GetOperatingSystem())
+        self.add('host_arch', detect_host_arch.HostArch())
+
+        depot_tools_age = metrics_utils.get_repo_timestamp(DEPOT_TOOLS)
+        if depot_tools_age is not None:
+            self.add('depot_tools_age', int(depot_tools_age))
+
+        git_version = metrics_utils.get_git_version()
+        if git_version:
+            self.add('git_version', git_version)
+
+        bot_metrics = metrics_utils.get_bot_metrics()
+        if bot_metrics:
+            self.add('bot_metrics', bot_metrics)
+
+        self._upload_metrics_data()
+        if exception:
+            gclient_utils.reraise(exception[0], exception[1], exception[2])
+        return result
+
+    def collect_metrics(self, command_name):
+        """A decorator used to collect metrics over the life of a function.
 
     This decorator executes the function and collects metrics about the system
     environment and the function performance.
     """
-    def _decorator(func):
-      if not self.config.should_collect_metrics:
-        return func
-      # Needed to preserve the __name__ and __doc__ attributes of func.
-      @functools.wraps(func)
-      def _inner(*args, **kwargs):
-        return self._collect_metrics(func, command_name, *args, **kwargs)
-      return _inner
-    return _decorator
-
-  @contextlib.contextmanager
-  def print_notice_and_exit(self):
-    """A context manager used to print the notice and terminate execution.
+        def _decorator(func):
+            if not self.config.should_collect_metrics:
+                return func
+            # Needed to preserve the __name__ and __doc__ attributes of func.
+            @functools.wraps(func)
+            def _inner(*args, **kwargs):
+                return self._collect_metrics(func, command_name, *args,
+                                             **kwargs)
+
+            return _inner
+
+        return _decorator
+
+    @contextlib.contextmanager
+    def print_notice_and_exit(self):
+        """A context manager used to print the notice and terminate execution.
 
     This decorator executes the function and prints the monitoring notice if
     necessary. If an exception is raised, we will catch it, and print it before
     printing the metrics collection notice.
     This will call sys.exit() with an appropriate exit code to ensure the notice
     is the last thing printed."""
-    # Needed to preserve the __name__ and __doc__ attributes of func.
-    try:
-      yield
-      exception = None
-    # pylint: disable=bare-except
-    except:
-      exception = sys.exc_info()
-
-    # Print the exception before the metrics notice, so that the notice is
-    # clearly visible even if gclient fails.
-    if exception:
-      if isinstance(exception[1], KeyboardInterrupt):
-        sys.stderr.write('Interrupted\n')
-      elif not isinstance(exception[1], SystemExit):
-        traceback.print_exception(*exception)
-
-    # Check if the version has changed
-    if (self.config.is_googler
-        and self.config.opted_in is not False
-        and self.config.version != metrics_utils.CURRENT_VERSION):
-      metrics_utils.print_version_change(self.config.version)
-      self.config.reset_config()
-
-    # Print the notice
-    if self.config.is_googler and self.config.opted_in is None:
-      metrics_utils.print_notice(self.config.countdown)
-      self.config.decrease_countdown()
-
-    sys.exit(metrics_utils.return_code_from_exception(exception))
+        # Needed to preserve the __name__ and __doc__ attributes of func.
+        try:
+            yield
+            exception = None
+        # pylint: disable=bare-except
+        except:
+            exception = sys.exc_info()
+
+        # Print the exception before the metrics notice, so that the notice is
+        # clearly visible even if gclient fails.
+        if exception:
+            if isinstance(exception[1], KeyboardInterrupt):
+                sys.stderr.write('Interrupted\n')
+            elif not isinstance(exception[1], SystemExit):
+                traceback.print_exception(*exception)
+
+        # Check if the version has changed
+        if (self.config.is_googler and self.config.opted_in is not False
+                and self.config.version != metrics_utils.CURRENT_VERSION):
+            metrics_utils.print_version_change(self.config.version)
+            self.config.reset_config()
+
+        # Print the notice
+        if self.config.is_googler and self.config.opted_in is None:
+            metrics_utils.print_notice(self.config.countdown)
+            self.config.decrease_countdown()
+
+        sys.exit(metrics_utils.return_code_from_exception(exception))
 
 
 collector = MetricsCollector()
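
A small sketch of how a depot_tools command typically hooks into this module through the module-level collector; the command name and function below are invented for illustration:

# Hypothetical command wired into metrics collection.
import sys

import metrics


@metrics.collector.collect_metrics('my-command')
def main(argv):
    # Real work would happen here; the decorator records execution time,
    # exit code and environment details around this call.
    return 0


if __name__ == '__main__':
    # print_notice_and_exit() prints the opt-in notice (or countdown) after
    # the command finishes, then exits with the appropriate return code.
    with metrics.collector.print_notice_and_exit():
        sys.exit(main(sys.argv[1:]))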

+ 198 - 223
metrics_utils.py

@@ -12,7 +12,6 @@ import subprocess2
 import sys
 import urllib.parse
 
-
 # Current version of metrics recording.
 # When we add new metrics, the version number will be increased, we display the
 # user what has changed, and ask the user to agree again.
@@ -21,223 +20,198 @@ CURRENT_VERSION = 2
 APP_URL = 'https://cit-cli-metrics.appspot.com'
 
 REPORT_BUILD = os.getenv('DEPOT_TOOLS_REPORT_BUILD')
-COLLECT_METRICS = (
-    os.getenv('DEPOT_TOOLS_COLLECT_METRICS') != '0'
-    and os.getenv('DEPOT_TOOLS_METRICS') != '0')
+COLLECT_METRICS = (os.getenv('DEPOT_TOOLS_COLLECT_METRICS') != '0'
+                   and os.getenv('DEPOT_TOOLS_METRICS') != '0')
 
 SYNC_STATUS_SUCCESS = 'SYNC_STATUS_SUCCESS'
 SYNC_STATUS_FAILURE = 'SYNC_STATUS_FAILURE'
 
 
 def get_notice_countdown_header(countdown):
-  if countdown == 0:
-    yield '     METRICS COLLECTION IS TAKING PLACE'
-  else:
-    yield '  METRICS COLLECTION WILL START IN %d EXECUTIONS' % countdown
+    if countdown == 0:
+        yield '     METRICS COLLECTION IS TAKING PLACE'
+    else:
+        yield '  METRICS COLLECTION WILL START IN %d EXECUTIONS' % countdown
+
 
 def get_notice_version_change_header():
-  yield '       WE ARE COLLECTING ADDITIONAL METRICS'
-  yield ''
-  yield ' Please review the changes and opt-in again.'
+    yield '       WE ARE COLLECTING ADDITIONAL METRICS'
+    yield ''
+    yield ' Please review the changes and opt-in again.'
+
 
 def get_notice_footer():
-  yield 'To suppress this message opt in or out using:'
-  yield '$ gclient metrics [--opt-in] [--opt-out]'
-  yield 'For more information please see metrics.README.md'
-  yield 'in your depot_tools checkout or visit'
-  yield 'https://bit.ly/3MpLAYM.'
+    yield 'To suppress this message opt in or out using:'
+    yield '$ gclient metrics [--opt-in] [--opt-out]'
+    yield 'For more information please see metrics.README.md'
+    yield 'in your depot_tools checkout or visit'
+    yield 'https://bit.ly/3MpLAYM.'
+
 
 def get_change_notice(version):
-  if version == 0:
-    return [] # No changes for version 0
-
-  if version == 1:
-    return [
-      'We want to collect the Git version.',
-      'We want to collect information about the HTTP',
-      'requests that depot_tools makes, and the git and',
-      'cipd commands it executes.',
-      '',
-      'We only collect known strings to make sure we',
-      'don\'t record PII.',
-    ]
-
-  if version == 2:
-    return [
-      'We will start collecting metrics from bots.',
-      'There are no changes for developers.',
-      'If the DEPOT_TOOLS_REPORT_BUILD environment variable is set,',
-      'we will report information about the current build',
-      '(e.g. buildbucket project, bucket, builder and build id),',
-      'and authenticate to the metrics collection server.',
-      'This information will only be recorded for requests',
-      'authenticated as bot service accounts.',
-    ]
+    if version == 0:
+        return []  # No changes for version 0
+
+    if version == 1:
+        return [
+            'We want to collect the Git version.',
+            'We want to collect information about the HTTP',
+            'requests that depot_tools makes, and the git and',
+            'cipd commands it executes.',
+            '',
+            'We only collect known strings to make sure we',
+            'don\'t record PII.',
+        ]
+
+    if version == 2:
+        return [
+            'We will start collecting metrics from bots.',
+            'There are no changes for developers.',
+            'If the DEPOT_TOOLS_REPORT_BUILD environment variable is set,',
+            'we will report information about the current build',
+            '(e.g. buildbucket project, bucket, builder and build id),',
+            'and authenticate to the metrics collection server.',
+            'This information will only be recorded for requests',
+            'authenticated as bot service accounts.',
+        ]
 
 
 KNOWN_PROJECT_URLS = {
-  'https://chrome-internal.googlesource.com/chrome/ios_internal',
-  'https://chrome-internal.googlesource.com/infra/infra_internal',
-  'https://chromium.googlesource.com/breakpad/breakpad',
-  'https://chromium.googlesource.com/chromium/src',
-  'https://chromium.googlesource.com/chromium/tools/depot_tools',
-  'https://chromium.googlesource.com/crashpad/crashpad',
-  'https://chromium.googlesource.com/external/gyp',
-  'https://chromium.googlesource.com/external/naclports',
-  'https://chromium.googlesource.com/infra/goma/client',
-  'https://chromium.googlesource.com/infra/infra',
-  'https://chromium.googlesource.com/native_client/',
-  'https://chromium.googlesource.com/syzygy',
-  'https://chromium.googlesource.com/v8/v8',
-  'https://dart.googlesource.com/sdk',
-  'https://pdfium.googlesource.com/pdfium',
-  'https://skia.googlesource.com/buildbot',
-  'https://skia.googlesource.com/skia',
-  'https://webrtc.googlesource.com/src',
+    'https://chrome-internal.googlesource.com/chrome/ios_internal',
+    'https://chrome-internal.googlesource.com/infra/infra_internal',
+    'https://chromium.googlesource.com/breakpad/breakpad',
+    'https://chromium.googlesource.com/chromium/src',
+    'https://chromium.googlesource.com/chromium/tools/depot_tools',
+    'https://chromium.googlesource.com/crashpad/crashpad',
+    'https://chromium.googlesource.com/external/gyp',
+    'https://chromium.googlesource.com/external/naclports',
+    'https://chromium.googlesource.com/infra/goma/client',
+    'https://chromium.googlesource.com/infra/infra',
+    'https://chromium.googlesource.com/native_client/',
+    'https://chromium.googlesource.com/syzygy',
+    'https://chromium.googlesource.com/v8/v8',
+    'https://dart.googlesource.com/sdk',
+    'https://pdfium.googlesource.com/pdfium',
+    'https://skia.googlesource.com/buildbot',
+    'https://skia.googlesource.com/skia',
+    'https://webrtc.googlesource.com/src',
 }
 
 KNOWN_HTTP_HOSTS = {
-  'chrome-internal-review.googlesource.com',
-  'chromium-review.googlesource.com',
-  'dart-review.googlesource.com',
-  'eu1-mirror-chromium-review.googlesource.com',
-  'pdfium-review.googlesource.com',
-  'skia-review.googlesource.com',
-  'us1-mirror-chromium-review.googlesource.com',
-  'us2-mirror-chromium-review.googlesource.com',
-  'us3-mirror-chromium-review.googlesource.com',
-  'webrtc-review.googlesource.com',
+    'chrome-internal-review.googlesource.com',
+    'chromium-review.googlesource.com',
+    'dart-review.googlesource.com',
+    'eu1-mirror-chromium-review.googlesource.com',
+    'pdfium-review.googlesource.com',
+    'skia-review.googlesource.com',
+    'us1-mirror-chromium-review.googlesource.com',
+    'us2-mirror-chromium-review.googlesource.com',
+    'us3-mirror-chromium-review.googlesource.com',
+    'webrtc-review.googlesource.com',
 }
 
 KNOWN_HTTP_METHODS = {
-  'DELETE',
-  'GET',
-  'PATCH',
-  'POST',
-  'PUT',
+    'DELETE',
+    'GET',
+    'PATCH',
+    'POST',
+    'PUT',
 }
 
 KNOWN_HTTP_PATHS = {
-  'accounts':
-      re.compile(r'(/a)?/accounts/.*'),
-  'changes':
-      re.compile(r'(/a)?/changes/([^/]+)?$'),
-  'changes/abandon':
-      re.compile(r'(/a)?/changes/.*/abandon'),
-  'changes/comments':
-      re.compile(r'(/a)?/changes/.*/comments'),
-  'changes/detail':
-      re.compile(r'(/a)?/changes/.*/detail'),
-  'changes/edit':
-      re.compile(r'(/a)?/changes/.*/edit'),
-  'changes/message':
-      re.compile(r'(/a)?/changes/.*/message'),
-  'changes/restore':
-      re.compile(r'(/a)?/changes/.*/restore'),
-  'changes/reviewers':
-      re.compile(r'(/a)?/changes/.*/reviewers/.*'),
-  'changes/revisions/commit':
-      re.compile(r'(/a)?/changes/.*/revisions/.*/commit'),
-  'changes/revisions/review':
-      re.compile(r'(/a)?/changes/.*/revisions/.*/review'),
-  'changes/submit':
-      re.compile(r'(/a)?/changes/.*/submit'),
-  'projects/branches':
-      re.compile(r'(/a)?/projects/.*/branches/.*'),
+    'accounts': re.compile(r'(/a)?/accounts/.*'),
+    'changes': re.compile(r'(/a)?/changes/([^/]+)?$'),
+    'changes/abandon': re.compile(r'(/a)?/changes/.*/abandon'),
+    'changes/comments': re.compile(r'(/a)?/changes/.*/comments'),
+    'changes/detail': re.compile(r'(/a)?/changes/.*/detail'),
+    'changes/edit': re.compile(r'(/a)?/changes/.*/edit'),
+    'changes/message': re.compile(r'(/a)?/changes/.*/message'),
+    'changes/restore': re.compile(r'(/a)?/changes/.*/restore'),
+    'changes/reviewers': re.compile(r'(/a)?/changes/.*/reviewers/.*'),
+    'changes/revisions/commit':
+    re.compile(r'(/a)?/changes/.*/revisions/.*/commit'),
+    'changes/revisions/review':
+    re.compile(r'(/a)?/changes/.*/revisions/.*/review'),
+    'changes/submit': re.compile(r'(/a)?/changes/.*/submit'),
+    'projects/branches': re.compile(r'(/a)?/projects/.*/branches/.*'),
 }
 
 KNOWN_HTTP_ARGS = {
-  'ALL_REVISIONS',
-  'CURRENT_COMMIT',
-  'CURRENT_REVISION',
-  'DETAILED_ACCOUNTS',
-  'LABELS',
+    'ALL_REVISIONS',
+    'CURRENT_COMMIT',
+    'CURRENT_REVISION',
+    'DETAILED_ACCOUNTS',
+    'LABELS',
 }
 
-GIT_VERSION_RE = re.compile(
-  r'git version (\d)\.(\d{0,2})\.(\d{0,2})'
-)
+GIT_VERSION_RE = re.compile(r'git version (\d)\.(\d{0,2})\.(\d{0,2})')
 
 KNOWN_SUBCOMMAND_ARGS = {
-  'cc',
-  'hashtag',
-  'l=Auto-Submit+1',
-  'l=Code-Review+1',
-  'l=Code-Review+2',
-  'l=Commit-Queue+1',
-  'l=Commit-Queue+2',
-  'label',
-  'm',
-  'notify=ALL',
-  'notify=NONE',
-  'private',
-  'r',
-  'ready',
-  'topic',
-  'wip'
+    'cc', 'hashtag', 'l=Auto-Submit+1', 'l=Code-Review+1', 'l=Code-Review+2',
+    'l=Commit-Queue+1', 'l=Commit-Queue+2', 'label', 'm', 'notify=ALL',
+    'notify=NONE', 'private', 'r', 'ready', 'topic', 'wip'
 }
 
 
 def get_python_version():
-  """Return the python version in the major.minor.micro format."""
-  return '{v.major}.{v.minor}.{v.micro}'.format(v=sys.version_info)
+    """Return the python version in the major.minor.micro format."""
+    return '{v.major}.{v.minor}.{v.micro}'.format(v=sys.version_info)
 
 
 def get_git_version():
-  """Return the Git version in the major.minor.micro format."""
-  p = subprocess2.Popen(
-      ['git', '--version'],
-      stdout=subprocess2.PIPE, stderr=subprocess2.PIPE)
-  stdout, _ = p.communicate()
-  match = GIT_VERSION_RE.match(stdout.decode('utf-8'))
-  if not match:
-    return None
-  return '%s.%s.%s' % match.groups()
+    """Return the Git version in the major.minor.micro format."""
+    p = subprocess2.Popen(['git', '--version'],
+                          stdout=subprocess2.PIPE,
+                          stderr=subprocess2.PIPE)
+    stdout, _ = p.communicate()
+    match = GIT_VERSION_RE.match(stdout.decode('utf-8'))
+    if not match:
+        return None
+    return '%s.%s.%s' % match.groups()
 
 
 def get_bot_metrics():
-  try:
-    project, bucket, builder, build = REPORT_BUILD.split('/')
-    return {
-      'build_id': int(build),
-      'builder': {
-        'project': project,
-        'bucket': bucket,
-        'builder': builder,
-      },
-    }
-  except (AttributeError, ValueError):
-    return None
-
+    try:
+        project, bucket, builder, build = REPORT_BUILD.split('/')
+        return {
+            'build_id': int(build),
+            'builder': {
+                'project': project,
+                'bucket': bucket,
+                'builder': builder,
+            },
+        }
+    except (AttributeError, ValueError):
+        return None
 
 
 def return_code_from_exception(exception):
-  """Returns the exit code that would result of raising the exception."""
-  if exception is None:
-    return 0
-  e = exception[1]
-  if isinstance(e, KeyboardInterrupt):
-    return 130
-  if isinstance(e, SystemExit):
-    return e.code
-  return 1
+    """Returns the exit code that would result of raising the exception."""
+    if exception is None:
+        return 0
+    e = exception[1]
+    if isinstance(e, KeyboardInterrupt):
+        return 130
+    if isinstance(e, SystemExit):
+        return e.code
+    return 1
 
 
 def extract_known_subcommand_args(args):
-  """Extract the known arguments from the passed list of args."""
-  known_args = []
-  for arg in args:
-    if arg in KNOWN_SUBCOMMAND_ARGS:
-      known_args.append(arg)
-    else:
-      arg = arg.split('=')[0]
-      if arg in KNOWN_SUBCOMMAND_ARGS:
-        known_args.append(arg)
-  return sorted(known_args)
+    """Extract the known arguments from the passed list of args."""
+    known_args = []
+    for arg in args:
+        if arg in KNOWN_SUBCOMMAND_ARGS:
+            known_args.append(arg)
+        else:
+            arg = arg.split('=')[0]
+            if arg in KNOWN_SUBCOMMAND_ARGS:
+                known_args.append(arg)
+    return sorted(known_args)
 
 
 def extract_http_metrics(request_uri, method, status, response_time):
-  """Extract metrics from the request URI.
+    """Extract metrics from the request URI.
 
   Extracts the host, path, and arguments from the request URI, and returns them
   along with the method, status and response time.
@@ -253,81 +227,82 @@ def extract_http_metrics(request_uri, method, status, response_time):
   The regex defined in KNOWN_HTTP_PATH_RES are checked against the path, and
   those that match will be returned.
   """
-  http_metrics = {
-    'status': status,
-    'response_time': response_time,
-  }
+    http_metrics = {
+        'status': status,
+        'response_time': response_time,
+    }
 
-  if method in KNOWN_HTTP_METHODS:
-    http_metrics['method'] = method
+    if method in KNOWN_HTTP_METHODS:
+        http_metrics['method'] = method
 
-  parsed_url = urllib.parse.urlparse(request_uri)
+    parsed_url = urllib.parse.urlparse(request_uri)
 
-  if parsed_url.netloc in KNOWN_HTTP_HOSTS:
-    http_metrics['host'] = parsed_url.netloc
+    if parsed_url.netloc in KNOWN_HTTP_HOSTS:
+        http_metrics['host'] = parsed_url.netloc
 
-  for name, path_re in KNOWN_HTTP_PATHS.items():
-    if path_re.match(parsed_url.path):
-      http_metrics['path'] = name
-      break
+    for name, path_re in KNOWN_HTTP_PATHS.items():
+        if path_re.match(parsed_url.path):
+            http_metrics['path'] = name
+            break
 
-  parsed_query = urllib.parse.parse_qs(parsed_url.query)
+    parsed_query = urllib.parse.parse_qs(parsed_url.query)
 
-  # Collect o-parameters from the request.
-  args = [
-    arg for arg in parsed_query.get('o', [])
-    if arg in KNOWN_HTTP_ARGS
-  ]
-  if args:
-    http_metrics['arguments'] = args
+    # Collect o-parameters from the request.
+    args = [arg for arg in parsed_query.get('o', []) if arg in KNOWN_HTTP_ARGS]
+    if args:
+        http_metrics['arguments'] = args
 
-  return http_metrics
+    return http_metrics
 
 
 def get_repo_timestamp(path_to_repo):
-  """Get an approximate timestamp for the upstream of |path_to_repo|.
+    """Get an approximate timestamp for the upstream of |path_to_repo|.
 
   Returns the top two bits of the timestamp of the HEAD for the upstream of the
   branch path_to_repo is checked out at.
   """
-  # Get the upstream for the current branch. If we're not in a branch, fallback
-  # to HEAD.
-  try:
-    upstream = scm.GIT.GetUpstreamBranch(path_to_repo) or 'HEAD'
-  except subprocess2.CalledProcessError:
-    upstream = 'HEAD'
+    # Get the upstream for the current branch. If we're not in a branch,
+    # fallback to HEAD.
+    try:
+        upstream = scm.GIT.GetUpstreamBranch(path_to_repo) or 'HEAD'
+    except subprocess2.CalledProcessError:
+        upstream = 'HEAD'
 
-  # Get the timestamp of the HEAD for the upstream of the current branch.
-  p = subprocess2.Popen(
-      ['git', '-C', path_to_repo, 'log', '-n1', upstream, '--format=%at'],
-      stdout=subprocess2.PIPE, stderr=subprocess2.PIPE)
-  stdout, _ = p.communicate()
+    # Get the timestamp of the HEAD for the upstream of the current branch.
+    p = subprocess2.Popen(
+        ['git', '-C', path_to_repo, 'log', '-n1', upstream, '--format=%at'],
+        stdout=subprocess2.PIPE,
+        stderr=subprocess2.PIPE)
+    stdout, _ = p.communicate()
 
-  # If there was an error, give up.
-  if p.returncode != 0:
-    return None
+    # If there was an error, give up.
+    if p.returncode != 0:
+        return None
+
+    return stdout.strip()
 
-  return stdout.strip()
 
 def print_boxed_text(out, min_width, lines):
-  [EW, NS, SE, SW, NE, NW] = list('=|++++')
-  width = max(min_width, max(len(line) for line in lines))
-  out(SE + EW * (width + 2) + SW + '\n')
-  for line in lines:
-    out('%s %-*s %s\n' % (NS, width, line, NS))
-  out(NE + EW * (width + 2) + NW + '\n')
+    [EW, NS, SE, SW, NE, NW] = list('=|++++')
+    width = max(min_width, max(len(line) for line in lines))
+    out(SE + EW * (width + 2) + SW + '\n')
+    for line in lines:
+        out('%s %-*s %s\n' % (NS, width, line, NS))
+    out(NE + EW * (width + 2) + NW + '\n')
+
 
 def print_notice(countdown):
-  """Print a notice to let the user know the status of metrics collection."""
-  lines = list(get_notice_countdown_header(countdown))
-  lines.append('')
-  lines += list(get_notice_footer())
-  print_boxed_text(sys.stderr.write, 49, lines)
+    """Print a notice to let the user know the status of metrics collection."""
+    lines = list(get_notice_countdown_header(countdown))
+    lines.append('')
+    lines += list(get_notice_footer())
+    print_boxed_text(sys.stderr.write, 49, lines)
+
 
 def print_version_change(config_version):
-  """Print a notice to let the user know we are collecting more metrics."""
-  lines = list(get_notice_version_change_header())
-  for version in range(config_version + 1, CURRENT_VERSION + 1):
-    lines.append('')
-    lines += get_change_notice(version)
-  print_boxed_text(sys.stderr.write, 49, lines)
+    """Print a notice to let the user know we are collecting more metrics."""
+    lines = list(get_notice_version_change_header())
+    for version in range(config_version + 1, CURRENT_VERSION + 1):
+        lines.append('')
+        lines += get_change_notice(version)
+    print_boxed_text(sys.stderr.write, 49, lines)
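
To illustrate the allowlist-based reporting in this module, a sketch of extract_http_metrics() on a typical Gerrit request; the URL and timing values are made up:

# Illustrative call; only values on the known host/method/path/argument
# lists are included in the returned metrics.
import metrics_utils

http_metrics = metrics_utils.extract_http_metrics(
    request_uri=('https://chromium-review.googlesource.com'
                 '/changes/12345/detail?o=LABELS'),
    method='GET',
    status=200,
    response_time=0.42)
# Roughly:
# {'status': 200, 'response_time': 0.42, 'method': 'GET',
#  'host': 'chromium-review.googlesource.com', 'path': 'changes/detail',
#  'arguments': ['LABELS']}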

+ 926 - 878
my_activity.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Get stats about your activity.
 
 Example:
@@ -57,259 +56,271 @@ import gclient_utils
 import gerrit_util
 
 if sys.version_info.major == 2:
-  logging.critical(
-      'Python 2 is not supported. Run my_activity.py using vpython3.')
-
+    logging.critical(
+        'Python 2 is not supported. Run my_activity.py using vpython3.')
 
 try:
-  import dateutil  # pylint: disable=import-error
-  import dateutil.parser
-  from dateutil.relativedelta import relativedelta
+    import dateutil  # pylint: disable=import-error
+    import dateutil.parser
+    from dateutil.relativedelta import relativedelta
 except ImportError:
-  logging.error('python-dateutil package required')
-  sys.exit(1)
+    logging.error('python-dateutil package required')
+    sys.exit(1)
 
 
 class DefaultFormatter(Formatter):
-  def __init__(self, default = ''):
-    super(DefaultFormatter, self).__init__()
-    self.default = default
+    def __init__(self, default=''):
+        super(DefaultFormatter, self).__init__()
+        self.default = default
 
-  def get_value(self, key, args, kwargs):
-    if isinstance(key, str) and key not in kwargs:
-      return self.default
-    return Formatter.get_value(self, key, args, kwargs)
+    def get_value(self, key, args, kwargs):
+        if isinstance(key, str) and key not in kwargs:
+            return self.default
+        return Formatter.get_value(self, key, args, kwargs)
 
 
 gerrit_instances = [
-  {
-    'url': 'android-review.googlesource.com',
-    'shorturl': 'r.android.com',
-    'short_url_protocol': 'https',
-  },
-  {
-    'url': 'gerrit-review.googlesource.com',
-  },
-  {
-    'url': 'chrome-internal-review.googlesource.com',
-    'shorturl': 'crrev.com/i',
-    'short_url_protocol': 'https',
-  },
-  {
-    'url': 'chromium-review.googlesource.com',
-    'shorturl': 'crrev.com/c',
-    'short_url_protocol': 'https',
-  },
-  {
-    'url': 'dawn-review.googlesource.com',
-  },
-  {
-    'url': 'pdfium-review.googlesource.com',
-  },
-  {
-    'url': 'skia-review.googlesource.com',
-  },
-  {
-    'url': 'review.coreboot.org',
-  },
+    {
+        'url': 'android-review.googlesource.com',
+        'shorturl': 'r.android.com',
+        'short_url_protocol': 'https',
+    },
+    {
+        'url': 'gerrit-review.googlesource.com',
+    },
+    {
+        'url': 'chrome-internal-review.googlesource.com',
+        'shorturl': 'crrev.com/i',
+        'short_url_protocol': 'https',
+    },
+    {
+        'url': 'chromium-review.googlesource.com',
+        'shorturl': 'crrev.com/c',
+        'short_url_protocol': 'https',
+    },
+    {
+        'url': 'dawn-review.googlesource.com',
+    },
+    {
+        'url': 'pdfium-review.googlesource.com',
+    },
+    {
+        'url': 'skia-review.googlesource.com',
+    },
+    {
+        'url': 'review.coreboot.org',
+    },
 ]
 
 monorail_projects = {
-  'angleproject': {
-    'shorturl': 'anglebug.com',
-    'short_url_protocol': 'http',
-  },
-  'chromium': {
-    'shorturl': 'crbug.com',
-    'short_url_protocol': 'https',
-  },
-  'dawn': {},
-  'google-breakpad': {},
-  'gyp': {},
-  'pdfium': {
-    'shorturl': 'crbug.com/pdfium',
-    'short_url_protocol': 'https',
-  },
-  'skia': {},
-  'tint': {},
-  'v8': {
-    'shorturl': 'crbug.com/v8',
-    'short_url_protocol': 'https',
-  },
+    'angleproject': {
+        'shorturl': 'anglebug.com',
+        'short_url_protocol': 'http',
+    },
+    'chromium': {
+        'shorturl': 'crbug.com',
+        'short_url_protocol': 'https',
+    },
+    'dawn': {},
+    'google-breakpad': {},
+    'gyp': {},
+    'pdfium': {
+        'shorturl': 'crbug.com/pdfium',
+        'short_url_protocol': 'https',
+    },
+    'skia': {},
+    'tint': {},
+    'v8': {
+        'shorturl': 'crbug.com/v8',
+        'short_url_protocol': 'https',
+    },
 }
 
+
 def username(email):
-  """Keeps the username of an email address."""
-  return email and email.split('@', 1)[0]
+    """Keeps the username of an email address."""
+    return email and email.split('@', 1)[0]
 
 
 def datetime_to_midnight(date):
-  return date - timedelta(hours=date.hour, minutes=date.minute,
-                          seconds=date.second, microseconds=date.microsecond)
+    return date - timedelta(hours=date.hour,
+                            minutes=date.minute,
+                            seconds=date.second,
+                            microseconds=date.microsecond)
 
 
 def get_quarter_of(date):
-  begin = (datetime_to_midnight(date) -
-           relativedelta(months=(date.month - 1) % 3, days=(date.day - 1)))
-  return begin, begin + relativedelta(months=3)
+    begin = (datetime_to_midnight(date) -
+             relativedelta(months=(date.month - 1) % 3, days=(date.day - 1)))
+    return begin, begin + relativedelta(months=3)
 
 
 def get_year_of(date):
-  begin = (datetime_to_midnight(date) -
-           relativedelta(months=(date.month - 1), days=(date.day - 1)))
-  return begin, begin + relativedelta(years=1)
+    begin = (datetime_to_midnight(date) -
+             relativedelta(months=(date.month - 1), days=(date.day - 1)))
+    return begin, begin + relativedelta(years=1)
 
 
 def get_week_of(date):
-  begin = (datetime_to_midnight(date) - timedelta(days=date.weekday()))
-  return begin, begin + timedelta(days=7)
+    begin = (datetime_to_midnight(date) - timedelta(days=date.weekday()))
+    return begin, begin + timedelta(days=7)
 
 
 def get_yes_or_no(msg):
-  while True:
-    response = gclient_utils.AskForData(msg + ' yes/no [no] ')
-    if response in ('y', 'yes'):
-      return True
+    while True:
+        response = gclient_utils.AskForData(msg + ' yes/no [no] ')
+        if response in ('y', 'yes'):
+            return True
 
-    if not response or response in ('n', 'no'):
-      return False
+        if not response or response in ('n', 'no'):
+            return False
 
 
 def datetime_from_gerrit(date_string):
-  return datetime.strptime(date_string, '%Y-%m-%d %H:%M:%S.%f000')
+    return datetime.strptime(date_string, '%Y-%m-%d %H:%M:%S.%f000')
 
 
 def datetime_from_monorail(date_string):
-  return datetime.strptime(date_string, '%Y-%m-%dT%H:%M:%S')
+    return datetime.strptime(date_string, '%Y-%m-%dT%H:%M:%S')
+
 
 def extract_bug_numbers_from_description(issue):
-  # Getting the description for REST Gerrit
-  revision = issue['revisions'][issue['current_revision']]
-  description = revision['commit']['message']
-
-  bugs = []
-  # Handle both "Bug: 99999" and "BUG=99999" bug notations
-  # Multiple bugs can be noted on a single line or in multiple ones.
-  matches = re.findall(
-      r'^(BUG=|(Bug|Fixed):\s*)((((?:[a-zA-Z0-9-]+:)?\d+)(,\s?)?)+)',
-      description, flags=re.IGNORECASE | re.MULTILINE)
-  if matches:
-    for match in matches:
-      bugs.extend(match[2].replace(' ', '').split(','))
-    # Add default chromium: prefix if none specified.
-    bugs = [bug if ':' in bug else 'chromium:%s' % bug for bug in bugs]
-
-  return sorted(set(bugs))
+    # Getting the description for REST Gerrit
+    revision = issue['revisions'][issue['current_revision']]
+    description = revision['commit']['message']
+
+    bugs = []
+    # Handle both "Bug: 99999" and "BUG=99999" bug notations
+    # Multiple bugs can be noted on a single line or in multiple ones.
+    matches = re.findall(
+        r'^(BUG=|(Bug|Fixed):\s*)((((?:[a-zA-Z0-9-]+:)?\d+)(,\s?)?)+)',
+        description,
+        flags=re.IGNORECASE | re.MULTILINE)
+    if matches:
+        for match in matches:
+            bugs.extend(match[2].replace(' ', '').split(','))
+        # Add default chromium: prefix if none specified.
+        bugs = [bug if ':' in bug else 'chromium:%s' % bug for bug in bugs]
+
+    return sorted(set(bugs))
+
 
 class MyActivity(object):
-  def __init__(self, options):
-    self.options = options
-    self.modified_after = options.begin
-    self.modified_before = options.end
-    self.user = options.user
-    self.changes = []
-    self.reviews = []
-    self.issues = []
-    self.referenced_issues = []
-    self.google_code_auth_token = None
-    self.access_errors = set()
-    self.skip_servers = (options.skip_servers.split(','))
-
-  def show_progress(self, how='.'):
-    if sys.stdout.isatty():
-      sys.stdout.write(how)
-      sys.stdout.flush()
-
-  def gerrit_changes_over_rest(self, instance, filters):
-    # Convert the "key:value" filter to a list of (key, value) pairs.
-    req = list(f.split(':', 1) for f in filters)
-    try:
-      # Instantiate the generator to force all the requests now and catch the
-      # errors here.
-      return list(gerrit_util.GenerateAllChanges(instance['url'], req,
-          o_params=['MESSAGES', 'LABELS', 'DETAILED_ACCOUNTS',
-                    'CURRENT_REVISION', 'CURRENT_COMMIT']))
-    except gerrit_util.GerritError as e:
-      error_message = 'Looking up %r: %s' % (instance['url'], e)
-      if error_message not in self.access_errors:
-        self.access_errors.add(error_message)
-      return []
-
-  def gerrit_search(self, instance, owner=None, reviewer=None):
-    if instance['url'] in self.skip_servers:
-      return []
-    max_age = datetime.today() - self.modified_after
-    filters = ['-age:%ss' % (max_age.days * 24 * 3600 + max_age.seconds)]
-    if owner:
-      assert not reviewer
-      filters.append('owner:%s' % owner)
-    else:
-      filters.extend(('-owner:%s' % reviewer, 'reviewer:%s' % reviewer))
-    # TODO(cjhopman): Should abandoned changes be filtered out when
-    # merged_only is not enabled?
-    if self.options.merged_only:
-      filters.append('status:merged')
-
-    issues = self.gerrit_changes_over_rest(instance, filters)
-    self.show_progress()
-    issues = [self.process_gerrit_issue(instance, issue)
-              for issue in issues]
-
-    issues = filter(self.filter_issue, issues)
-    issues = sorted(issues, key=lambda i: i['modified'], reverse=True)
-
-    return issues
-
-  def process_gerrit_issue(self, instance, issue):
-    ret = {}
-    if self.options.deltas:
-      ret['delta'] = DefaultFormatter().format(
-          '+{insertions},-{deletions}',
-          **issue)
-    ret['status'] = issue['status']
-    if 'shorturl' in instance:
-      protocol = instance.get('short_url_protocol', 'http')
-      url = instance['shorturl']
-    else:
-      protocol = 'https'
-      url = instance['url']
-    ret['review_url'] = '%s://%s/%s' % (protocol, url, issue['_number'])
-
-    ret['header'] = issue['subject']
-    ret['owner'] = issue['owner'].get('email', '')
-    ret['author'] = ret['owner']
-    ret['created'] = datetime_from_gerrit(issue['created'])
-    ret['modified'] = datetime_from_gerrit(issue['updated'])
-    if 'messages' in issue:
-      ret['replies'] = self.process_gerrit_issue_replies(issue['messages'])
-    else:
-      ret['replies'] = []
-    ret['reviewers'] = set(r['author'] for r in ret['replies'])
-    ret['reviewers'].discard(ret['author'])
-    ret['bugs'] = extract_bug_numbers_from_description(issue)
-    return ret
-
-  @staticmethod
-  def process_gerrit_issue_replies(replies):
-    ret = []
-    replies = filter(lambda r: 'author' in r and 'email' in r['author'],
-        replies)
-    for reply in replies:
-      ret.append({
-        'author': reply['author']['email'],
-        'created': datetime_from_gerrit(reply['date']),
-        'content': reply['message'],
-      })
-    return ret
-
-  def monorail_get_auth_http(self):
-    # Manually use a long timeout (10m); for some users who have a
-    # long history on the issue tracker, the default timeout would
-    # otherwise be reached.
-    return auth.Authenticator().authorize(httplib2.Http(timeout=600))
-
-  def filter_modified_monorail_issue(self, issue):
-    """Precisely checks if an issue has been modified in the time range.
+    def __init__(self, options):
+        self.options = options
+        self.modified_after = options.begin
+        self.modified_before = options.end
+        self.user = options.user
+        self.changes = []
+        self.reviews = []
+        self.issues = []
+        self.referenced_issues = []
+        self.google_code_auth_token = None
+        self.access_errors = set()
+        self.skip_servers = (options.skip_servers.split(','))
+
+    def show_progress(self, how='.'):
+        if sys.stdout.isatty():
+            sys.stdout.write(how)
+            sys.stdout.flush()
+
+    def gerrit_changes_over_rest(self, instance, filters):
+        # Convert the "key:value" filter to a list of (key, value) pairs.
+        req = list(f.split(':', 1) for f in filters)
+        try:
+            # Instantiate the generator to force all the requests now and catch
+            # the errors here.
+            return list(
+                gerrit_util.GenerateAllChanges(instance['url'],
+                                               req,
+                                               o_params=[
+                                                   'MESSAGES', 'LABELS',
+                                                   'DETAILED_ACCOUNTS',
+                                                   'CURRENT_REVISION',
+                                                   'CURRENT_COMMIT'
+                                               ]))
+        except gerrit_util.GerritError as e:
+            error_message = 'Looking up %r: %s' % (instance['url'], e)
+            if error_message not in self.access_errors:
+                self.access_errors.add(error_message)
+            return []
+
+    def gerrit_search(self, instance, owner=None, reviewer=None):
+        if instance['url'] in self.skip_servers:
+            return []
+        max_age = datetime.today() - self.modified_after
+        filters = ['-age:%ss' % (max_age.days * 24 * 3600 + max_age.seconds)]
+        if owner:
+            assert not reviewer
+            filters.append('owner:%s' % owner)
+        else:
+            filters.extend(('-owner:%s' % reviewer, 'reviewer:%s' % reviewer))
+        # TODO(cjhopman): Should abandoned changes be filtered out when
+        # merged_only is not enabled?
+        if self.options.merged_only:
+            filters.append('status:merged')
+
+        issues = self.gerrit_changes_over_rest(instance, filters)
+        self.show_progress()
+        issues = [
+            self.process_gerrit_issue(instance, issue) for issue in issues
+        ]
+
+        issues = filter(self.filter_issue, issues)
+        issues = sorted(issues, key=lambda i: i['modified'], reverse=True)
+
+        return issues
+
+    def process_gerrit_issue(self, instance, issue):
+        ret = {}
+        if self.options.deltas:
+            ret['delta'] = DefaultFormatter().format(
+                '+{insertions},-{deletions}', **issue)
+        ret['status'] = issue['status']
+        if 'shorturl' in instance:
+            protocol = instance.get('short_url_protocol', 'http')
+            url = instance['shorturl']
+        else:
+            protocol = 'https'
+            url = instance['url']
+        ret['review_url'] = '%s://%s/%s' % (protocol, url, issue['_number'])
+
+        ret['header'] = issue['subject']
+        ret['owner'] = issue['owner'].get('email', '')
+        ret['author'] = ret['owner']
+        ret['created'] = datetime_from_gerrit(issue['created'])
+        ret['modified'] = datetime_from_gerrit(issue['updated'])
+        if 'messages' in issue:
+            ret['replies'] = self.process_gerrit_issue_replies(
+                issue['messages'])
+        else:
+            ret['replies'] = []
+        ret['reviewers'] = set(r['author'] for r in ret['replies'])
+        ret['reviewers'].discard(ret['author'])
+        ret['bugs'] = extract_bug_numbers_from_description(issue)
+        return ret
+
+    @staticmethod
+    def process_gerrit_issue_replies(replies):
+        ret = []
+        replies = filter(lambda r: 'author' in r and 'email' in r['author'],
+                         replies)
+        for reply in replies:
+            ret.append({
+                'author': reply['author']['email'],
+                'created': datetime_from_gerrit(reply['date']),
+                'content': reply['message'],
+            })
+        return ret
+
+    def monorail_get_auth_http(self):
+        # Manually use a long timeout (10m); for some users who have a
+        # long history on the issue tracker, the default timeout would
+        # otherwise be reached.
+        return auth.Authenticator().authorize(httplib2.Http(timeout=600))
+
+    def filter_modified_monorail_issue(self, issue):
+        """Precisely checks if an issue has been modified in the time range.
 
     This fetches all issue comments to check if the issue has been modified in
     the time range specified by user. This is needed because monorail only
@@ -324,682 +335,719 @@ class MyActivity(object):
     Returns:
       Passed issue if modified, None otherwise.
     """
-    http = self.monorail_get_auth_http()
-    project, issue_id = issue['uid'].split(':')
-    url = ('https://monorail-prod.appspot.com/_ah/api/monorail/v1/projects'
-           '/%s/issues/%s/comments?maxResults=10000') % (project, issue_id)
-    _, body = http.request(url)
-    self.show_progress()
-    content = json.loads(body)
-    if not content:
-      logging.error('Unable to parse %s response from monorail.', project)
-      return issue
-
-    for item in content.get('items', []):
-      comment_published = datetime_from_monorail(item['published'])
-      if self.filter_modified(comment_published):
-        return issue
-
-    return None
-
-  def monorail_query_issues(self, project, query):
-    http = self.monorail_get_auth_http()
-    url = ('https://monorail-prod.appspot.com/_ah/api/monorail/v1/projects'
-           '/%s/issues') % project
-    query_data = urllib.parse.urlencode(query)
-    url = url + '?' + query_data
-    _, body = http.request(url)
-    self.show_progress()
-    content = json.loads(body)
-    if not content:
-      logging.error('Unable to parse %s response from monorail.', project)
-      return []
-
-    issues = []
-    project_config = monorail_projects.get(project, {})
-    for item in content.get('items', []):
-      if project_config.get('shorturl'):
-        protocol = project_config.get('short_url_protocol', 'http')
-        item_url = '%s://%s/%d' % (
-            protocol, project_config['shorturl'], item['id'])
-      else:
-        item_url = 'https://bugs.chromium.org/p/%s/issues/detail?id=%d' % (
-            project, item['id'])
-      issue = {
-        'uid': '%s:%s' % (project, item['id']),
-        'header': item['title'],
-        'created': datetime_from_monorail(item['published']),
-        'modified': datetime_from_monorail(item['updated']),
-        'author': item['author']['name'],
-        'url': item_url,
-        'comments': [],
-        'status': item['status'],
-        'labels': [],
-        'components': []
-      }
-      if 'owner' in item:
-        issue['owner'] = item['owner']['name']
-      else:
-        issue['owner'] = 'None'
-      if 'labels' in item:
-        issue['labels'] = item['labels']
-      if 'components' in item:
-        issue['components'] = item['components']
-      issues.append(issue)
-
-    return issues
-
-  def monorail_issue_search(self, project):
-    epoch = datetime.utcfromtimestamp(0)
-    # Defaults to @chromium.org email if one wasn't given via -u.
-    user_str = (self.options.email if self.options.email.find('@') >= 0
-                else '%s@chromium.org' % self.user)
-
-    issues = self.monorail_query_issues(project, {
-      'maxResults': 10000,
-      'q': user_str,
-      'publishedMax': '%d' % (self.modified_before - epoch).total_seconds(),
-      'updatedMin': '%d' % (self.modified_after - epoch).total_seconds(),
-    })
-
-    if self.options.completed_issues:
-      return [
-          issue for issue in issues
-          if (self.match(issue['owner']) and
-              issue['status'].lower() in ('verified', 'fixed'))
-      ]
-
-    return [
-        issue for issue in issues
-        if user_str in (issue['author'], issue['owner'])]
-
-  def monorail_get_issues(self, project, issue_ids):
-    return self.monorail_query_issues(project, {
-      'maxResults': 10000,
-      'q': 'id:%s' % ','.join(issue_ids)
-    })
-
-  def print_heading(self, heading):
-    print()
-    print(self.options.output_format_heading.format(heading=heading))
-
-  def match(self, author):
-    if '@' in self.user:
-      return author == self.user
-    return author.startswith(self.user + '@')
-
-  def print_change(self, change):
-    activity = len([
-        reply
-        for reply in change['replies']
-        if self.match(reply['author'])
-    ])
-    optional_values = {
-        'created': change['created'].date().isoformat(),
-        'modified': change['modified'].date().isoformat(),
-        'reviewers': ', '.join(change['reviewers']),
-        'status': change['status'],
-        'activity': activity,
-    }
-    if self.options.deltas:
-      optional_values['delta'] = change['delta']
-
-    self.print_generic(self.options.output_format,
-                       self.options.output_format_changes,
-                       change['header'],
-                       change['review_url'],
-                       change['author'],
-                       change['created'],
-                       change['modified'],
-                       optional_values)
-
-  def print_issue(self, issue):
-    optional_values = {
-        'created': issue['created'].date().isoformat(),
-        'modified': issue['modified'].date().isoformat(),
-        'owner': issue['owner'],
-        'status': issue['status'],
-    }
-    self.print_generic(self.options.output_format,
-                       self.options.output_format_issues,
-                       issue['header'],
-                       issue['url'],
-                       issue['author'],
-                       issue['created'],
-                       issue['modified'],
-                       optional_values)
-
-  def print_review(self, review):
-    activity = len([
-        reply
-        for reply in review['replies']
-        if self.match(reply['author'])
-    ])
-    optional_values = {
-        'created': review['created'].date().isoformat(),
-        'modified': review['modified'].date().isoformat(),
-        'status': review['status'],
-        'activity': activity,
-    }
-    if self.options.deltas:
-      optional_values['delta'] = review['delta']
-
-    self.print_generic(self.options.output_format,
-                       self.options.output_format_reviews,
-                       review['header'],
-                       review['review_url'],
-                       review['author'],
-                       review['created'],
-                       review['modified'],
-                       optional_values)
-
-  @staticmethod
-  def print_generic(default_fmt, specific_fmt,
-                    title, url, author, created, modified,
-                    optional_values=None):
-    output_format = specific_fmt if specific_fmt is not None else default_fmt
-    values = {
-        'title': title,
-        'url': url,
-        'author': author,
-        'created': created,
-        'modified': modified,
-    }
-    if optional_values is not None:
-      values.update(optional_values)
-    print(DefaultFormatter().format(output_format, **values))
-
-
-  def filter_issue(self, issue, should_filter_by_user=True):
-    def maybe_filter_username(email):
-      return not should_filter_by_user or username(email) == self.user
-    if (maybe_filter_username(issue['author']) and
-        self.filter_modified(issue['created'])):
-      return True
-    if (maybe_filter_username(issue['owner']) and
-        (self.filter_modified(issue['created']) or
-         self.filter_modified(issue['modified']))):
-      return True
-    for reply in issue['replies']:
-      if self.filter_modified(reply['created']):
-        if not should_filter_by_user:
-          break
-        if (username(reply['author']) == self.user
-            or (self.user + '@') in reply['content']):
-          break
-    else:
-      return False
-    return True
-
-  def filter_modified(self, modified):
-    return self.modified_after < modified < self.modified_before
-
-  def auth_for_changes(self):
-    # TODO(cjhopman): Move authentication check for getting changes here.
-    pass
-
-  def auth_for_reviews(self):
-    # Reviews use all the same instances as changes so no authentication is
-    # required.
-    pass
-
-  def get_changes(self):
-    num_instances = len(gerrit_instances)
-    with contextlib.closing(ThreadPool(num_instances)) as pool:
-      gerrit_changes = pool.map_async(
-          lambda instance: self.gerrit_search(instance, owner=self.user),
-          gerrit_instances)
-      gerrit_changes = itertools.chain.from_iterable(gerrit_changes.get())
-      self.changes = list(gerrit_changes)
-
-  def print_changes(self):
-    if self.changes:
-      self.print_heading('Changes')
-      for change in self.changes:
-        self.print_change(change)
-
-  def print_access_errors(self):
-    if self.access_errors:
-      logging.error('Access Errors:')
-      for error in self.access_errors:
-        logging.error(error.rstrip())
-
-  def get_reviews(self):
-    num_instances = len(gerrit_instances)
-    with contextlib.closing(ThreadPool(num_instances)) as pool:
-      gerrit_reviews = pool.map_async(
-          lambda instance: self.gerrit_search(instance, reviewer=self.user),
-          gerrit_instances)
-      gerrit_reviews = itertools.chain.from_iterable(gerrit_reviews.get())
-      self.reviews = list(gerrit_reviews)
-
-  def print_reviews(self):
-    if self.reviews:
-      self.print_heading('Reviews')
-      for review in self.reviews:
-        self.print_review(review)
-
-  def get_issues(self):
-    with contextlib.closing(ThreadPool(len(monorail_projects))) as pool:
-      monorail_issues = pool.map(
-          self.monorail_issue_search, monorail_projects.keys())
-      monorail_issues = list(itertools.chain.from_iterable(monorail_issues))
-
-    if not monorail_issues:
-      return
-
-    with contextlib.closing(ThreadPool(len(monorail_issues))) as pool:
-      filtered_issues = pool.map(
-          self.filter_modified_monorail_issue, monorail_issues)
-      self.issues = [issue for issue in filtered_issues if issue]
-
-  def get_referenced_issues(self):
-    if not self.issues:
-      self.get_issues()
-
-    if not self.changes:
-      self.get_changes()
-
-    referenced_issue_uids = set(itertools.chain.from_iterable(
-      change['bugs'] for change in self.changes))
-    fetched_issue_uids = set(issue['uid'] for issue in self.issues)
-    missing_issue_uids = referenced_issue_uids - fetched_issue_uids
-
-    missing_issues_by_project = collections.defaultdict(list)
-    for issue_uid in missing_issue_uids:
-      project, issue_id = issue_uid.split(':')
-      missing_issues_by_project[project].append(issue_id)
-
-    for project, issue_ids in missing_issues_by_project.items():
-      self.referenced_issues += self.monorail_get_issues(project, issue_ids)
-
-  def print_issues(self):
-    if self.issues:
-      self.print_heading('Issues')
-      for issue in self.issues:
-        self.print_issue(issue)
-
-  def print_changes_by_issue(self, skip_empty_own):
-    if not self.issues or not self.changes:
-      return
-
-    self.print_heading('Changes by referenced issue(s)')
-    issues = {issue['uid']: issue for issue in self.issues}
-    ref_issues = {issue['uid']: issue for issue in self.referenced_issues}
-    changes_by_issue_uid = collections.defaultdict(list)
-    changes_by_ref_issue_uid = collections.defaultdict(list)
-    changes_without_issue = []
-    for change in self.changes:
-      added = False
-      for issue_uid in change['bugs']:
-        if issue_uid in issues:
-          changes_by_issue_uid[issue_uid].append(change)
-          added = True
-        if issue_uid in ref_issues:
-          changes_by_ref_issue_uid[issue_uid].append(change)
-          added = True
-      if not added:
-        changes_without_issue.append(change)
-
-    # Changes referencing own issues.
-    for issue_uid in issues:
-      if changes_by_issue_uid[issue_uid] or not skip_empty_own:
-        self.print_issue(issues[issue_uid])
-      if changes_by_issue_uid[issue_uid]:
-        print()
-        for change in changes_by_issue_uid[issue_uid]:
-          print('    ', end='')  # this prints no newline
-          self.print_change(change)
+        http = self.monorail_get_auth_http()
+        project, issue_id = issue['uid'].split(':')
+        url = ('https://monorail-prod.appspot.com/_ah/api/monorail/v1/projects'
+               '/%s/issues/%s/comments?maxResults=10000') % (project, issue_id)
+        _, body = http.request(url)
+        self.show_progress()
+        content = json.loads(body)
+        if not content:
+            logging.error('Unable to parse %s response from monorail.', project)
+            return issue
+
+        for item in content.get('items', []):
+            comment_published = datetime_from_monorail(item['published'])
+            if self.filter_modified(comment_published):
+                return issue
+
+        return None
+
+    def monorail_query_issues(self, project, query):
+        http = self.monorail_get_auth_http()
+        url = ('https://monorail-prod.appspot.com/_ah/api/monorail/v1/projects'
+               '/%s/issues') % project
+        query_data = urllib.parse.urlencode(query)
+        url = url + '?' + query_data
+        _, body = http.request(url)
+        self.show_progress()
+        content = json.loads(body)
+        if not content:
+            logging.error('Unable to parse %s response from monorail.', project)
+            return []
+
+        issues = []
+        project_config = monorail_projects.get(project, {})
+        for item in content.get('items', []):
+            if project_config.get('shorturl'):
+                protocol = project_config.get('short_url_protocol', 'http')
+                item_url = '%s://%s/%d' % (protocol, project_config['shorturl'],
+                                           item['id'])
+            else:
+                item_url = (
+                    'https://bugs.chromium.org/p/%s/issues/detail?id=%d' %
+                    (project, item['id']))
+            issue = {
+                'uid': '%s:%s' % (project, item['id']),
+                'header': item['title'],
+                'created': datetime_from_monorail(item['published']),
+                'modified': datetime_from_monorail(item['updated']),
+                'author': item['author']['name'],
+                'url': item_url,
+                'comments': [],
+                'status': item['status'],
+                'labels': [],
+                'components': []
+            }
+            if 'owner' in item:
+                issue['owner'] = item['owner']['name']
+            else:
+                issue['owner'] = 'None'
+            if 'labels' in item:
+                issue['labels'] = item['labels']
+            if 'components' in item:
+                issue['components'] = item['components']
+            issues.append(issue)
+
+        return issues
+
+    def monorail_issue_search(self, project):
+        epoch = datetime.utcfromtimestamp(0)
+        # Defaults to @chromium.org email if one wasn't given via -u.
+        user_str = (self.options.email if self.options.email.find('@') >= 0 else
+                    '%s@chromium.org' % self.user)
+
+        issues = self.monorail_query_issues(
+            project, {
+                'maxResults':
+                10000,
+                'q':
+                user_str,
+                'publishedMax':
+                '%d' % (self.modified_before - epoch).total_seconds(),
+                'updatedMin':
+                '%d' % (self.modified_after - epoch).total_seconds(),
+            })
+
+        if self.options.completed_issues:
+            return [
+                issue for issue in issues
+                if (self.match(issue['owner']) and issue['status'].lower() in (
+                    'verified', 'fixed'))
+            ]
+
+        return [
+            issue for issue in issues
+            if user_str in (issue['author'], issue['owner'])
+        ]
+
+    def monorail_get_issues(self, project, issue_ids):
+        return self.monorail_query_issues(project, {
+            'maxResults': 10000,
+            'q': 'id:%s' % ','.join(issue_ids)
+        })
+
+    def print_heading(self, heading):
         print()
-
-    # Changes referencing others' issues.
-    for issue_uid in ref_issues:
-      assert changes_by_ref_issue_uid[issue_uid]
-      self.print_issue(ref_issues[issue_uid])
-      for change in changes_by_ref_issue_uid[issue_uid]:
-        print('', end=' ')  # this prints one space, but no newline
-        self.print_change(change)
-
-    # Changes referencing no issues.
-    if changes_without_issue:
-      print(self.options.output_format_no_url.format(title='Other changes'))
-      for change in changes_without_issue:
-        print('', end=' ')  # this prints one space, but no newline
-        self.print_change(change)
-
-  def print_activity(self):
-    self.print_changes()
-    self.print_reviews()
-    self.print_issues()
-
-  def dump_json(self, ignore_keys=None):
-    if ignore_keys is None:
-      ignore_keys = ['replies']
-
-    def format_for_json_dump(in_array):
-      output = {}
-      for item in in_array:
-        url = item.get('url') or item.get('review_url')
-        if not url:
-          raise Exception('Dumped item %s does not specify url' % item)
-        output[url] = dict(
-            (k, v) for k,v in item.items() if k not in ignore_keys)
-      return output
-
-    class PythonObjectEncoder(json.JSONEncoder):
-      def default(self, o):  # pylint: disable=method-hidden
-        if isinstance(o, datetime):
-          return o.isoformat()
-        if isinstance(o, set):
-          return list(o)
-        return json.JSONEncoder.default(self, o)
-
-    output = {
-      'reviews': format_for_json_dump(self.reviews),
-      'changes': format_for_json_dump(self.changes),
-      'issues': format_for_json_dump(self.issues)
-    }
-    print(json.dumps(output, indent=2, cls=PythonObjectEncoder))
+        print(self.options.output_format_heading.format(heading=heading))
+
+    def match(self, author):
+        if '@' in self.user:
+            return author == self.user
+        return author.startswith(self.user + '@')
+
+    def print_change(self, change):
+        activity = len([
+            reply for reply in change['replies'] if self.match(reply['author'])
+        ])
+        optional_values = {
+            'created': change['created'].date().isoformat(),
+            'modified': change['modified'].date().isoformat(),
+            'reviewers': ', '.join(change['reviewers']),
+            'status': change['status'],
+            'activity': activity,
+        }
+        if self.options.deltas:
+            optional_values['delta'] = change['delta']
+
+        self.print_generic(self.options.output_format,
+                           self.options.output_format_changes, change['header'],
+                           change['review_url'], change['author'],
+                           change['created'], change['modified'],
+                           optional_values)
+
+    def print_issue(self, issue):
+        optional_values = {
+            'created': issue['created'].date().isoformat(),
+            'modified': issue['modified'].date().isoformat(),
+            'owner': issue['owner'],
+            'status': issue['status'],
+        }
+        self.print_generic(self.options.output_format,
+                           self.options.output_format_issues, issue['header'],
+                           issue['url'], issue['author'], issue['created'],
+                           issue['modified'], optional_values)
+
+    def print_review(self, review):
+        activity = len([
+            reply for reply in review['replies'] if self.match(reply['author'])
+        ])
+        optional_values = {
+            'created': review['created'].date().isoformat(),
+            'modified': review['modified'].date().isoformat(),
+            'status': review['status'],
+            'activity': activity,
+        }
+        if self.options.deltas:
+            optional_values['delta'] = review['delta']
+
+        self.print_generic(self.options.output_format,
+                           self.options.output_format_reviews, review['header'],
+                           review['review_url'], review['author'],
+                           review['created'], review['modified'],
+                           optional_values)
+
+    @staticmethod
+    def print_generic(default_fmt,
+                      specific_fmt,
+                      title,
+                      url,
+                      author,
+                      created,
+                      modified,
+                      optional_values=None):
+        output_format = (specific_fmt
+                         if specific_fmt is not None else default_fmt)
+        values = {
+            'title': title,
+            'url': url,
+            'author': author,
+            'created': created,
+            'modified': modified,
+        }
+        if optional_values is not None:
+            values.update(optional_values)
+        print(DefaultFormatter().format(output_format, **values))
+
+    def filter_issue(self, issue, should_filter_by_user=True):
+        def maybe_filter_username(email):
+            return not should_filter_by_user or username(email) == self.user
+
+        if (maybe_filter_username(issue['author'])
+                and self.filter_modified(issue['created'])):
+            return True
+        if (maybe_filter_username(issue['owner'])
+                and (self.filter_modified(issue['created'])
+                     or self.filter_modified(issue['modified']))):
+            return True
+        for reply in issue['replies']:
+            if self.filter_modified(reply['created']):
+                if not should_filter_by_user:
+                    break
+                if (username(reply['author']) == self.user
+                        or (self.user + '@') in reply['content']):
+                    break
+        else:
+            return False
+        return True
+
+    def filter_modified(self, modified):
+        return self.modified_after < modified < self.modified_before
+
+    def auth_for_changes(self):
+        # TODO(cjhopman): Move authentication check for getting changes here.
+        pass
+
+    def auth_for_reviews(self):
+        # Reviews use all the same instances as changes so no authentication is
+        # required.
+        pass
+
+    def get_changes(self):
+        num_instances = len(gerrit_instances)
+        with contextlib.closing(ThreadPool(num_instances)) as pool:
+            gerrit_changes = pool.map_async(
+                lambda instance: self.gerrit_search(instance, owner=self.user),
+                gerrit_instances)
+            gerrit_changes = itertools.chain.from_iterable(gerrit_changes.get())
+            self.changes = list(gerrit_changes)
+
+    def print_changes(self):
+        if self.changes:
+            self.print_heading('Changes')
+            for change in self.changes:
+                self.print_change(change)
+
+    def print_access_errors(self):
+        if self.access_errors:
+            logging.error('Access Errors:')
+            for error in self.access_errors:
+                logging.error(error.rstrip())
+
+    def get_reviews(self):
+        num_instances = len(gerrit_instances)
+        with contextlib.closing(ThreadPool(num_instances)) as pool:
+            gerrit_reviews = pool.map_async(
+                lambda instance: self.gerrit_search(instance,
+                                                    reviewer=self.user),
+                gerrit_instances)
+            gerrit_reviews = itertools.chain.from_iterable(gerrit_reviews.get())
+            self.reviews = list(gerrit_reviews)
+
+    def print_reviews(self):
+        if self.reviews:
+            self.print_heading('Reviews')
+            for review in self.reviews:
+                self.print_review(review)
+
+    def get_issues(self):
+        with contextlib.closing(ThreadPool(len(monorail_projects))) as pool:
+            monorail_issues = pool.map(self.monorail_issue_search,
+                                       monorail_projects.keys())
+            monorail_issues = list(
+                itertools.chain.from_iterable(monorail_issues))
+
+        if not monorail_issues:
+            return
+
+        with contextlib.closing(ThreadPool(len(monorail_issues))) as pool:
+            filtered_issues = pool.map(self.filter_modified_monorail_issue,
+                                       monorail_issues)
+            self.issues = [issue for issue in filtered_issues if issue]
+
+    def get_referenced_issues(self):
+        if not self.issues:
+            self.get_issues()
+
+        if not self.changes:
+            self.get_changes()
+
+        referenced_issue_uids = set(
+            itertools.chain.from_iterable(change['bugs']
+                                          for change in self.changes))
+        fetched_issue_uids = set(issue['uid'] for issue in self.issues)
+        missing_issue_uids = referenced_issue_uids - fetched_issue_uids
+
+        missing_issues_by_project = collections.defaultdict(list)
+        for issue_uid in missing_issue_uids:
+            project, issue_id = issue_uid.split(':')
+            missing_issues_by_project[project].append(issue_id)
+
+        for project, issue_ids in missing_issues_by_project.items():
+            self.referenced_issues += self.monorail_get_issues(
+                project, issue_ids)
+
+    def print_issues(self):
+        if self.issues:
+            self.print_heading('Issues')
+            for issue in self.issues:
+                self.print_issue(issue)
+
+    def print_changes_by_issue(self, skip_empty_own):
+        if not self.issues or not self.changes:
+            return
+
+        self.print_heading('Changes by referenced issue(s)')
+        issues = {issue['uid']: issue for issue in self.issues}
+        ref_issues = {issue['uid']: issue for issue in self.referenced_issues}
+        changes_by_issue_uid = collections.defaultdict(list)
+        changes_by_ref_issue_uid = collections.defaultdict(list)
+        changes_without_issue = []
+        for change in self.changes:
+            added = False
+            for issue_uid in change['bugs']:
+                if issue_uid in issues:
+                    changes_by_issue_uid[issue_uid].append(change)
+                    added = True
+                if issue_uid in ref_issues:
+                    changes_by_ref_issue_uid[issue_uid].append(change)
+                    added = True
+            if not added:
+                changes_without_issue.append(change)
+
+        # Changes referencing own issues.
+        for issue_uid in issues:
+            if changes_by_issue_uid[issue_uid] or not skip_empty_own:
+                self.print_issue(issues[issue_uid])
+            if changes_by_issue_uid[issue_uid]:
+                print()
+                for change in changes_by_issue_uid[issue_uid]:
+                    print('    ', end='')  # this prints no newline
+                    self.print_change(change)
+                print()
+
+        # Changes referencing others' issues.
+        for issue_uid in ref_issues:
+            assert changes_by_ref_issue_uid[issue_uid]
+            self.print_issue(ref_issues[issue_uid])
+            for change in changes_by_ref_issue_uid[issue_uid]:
+                print('', end=' '
+                      )  # this prints one space, but no newline
+                self.print_change(change)
+
+        # Changes referencing no issues.
+        if changes_without_issue:
+            print(
+                self.options.output_format_no_url.format(title='Other changes'))
+            for change in changes_without_issue:
+                print('', end=' '
+                      )  # this prints one space, but no newline
+                self.print_change(change)
+
+    def print_activity(self):
+        self.print_changes()
+        self.print_reviews()
+        self.print_issues()
+
+    def dump_json(self, ignore_keys=None):
+        if ignore_keys is None:
+            ignore_keys = ['replies']
+
+        def format_for_json_dump(in_array):
+            output = {}
+            for item in in_array:
+                url = item.get('url') or item.get('review_url')
+                if not url:
+                    raise Exception('Dumped item %s does not specify url' %
+                                    item)
+                output[url] = dict(
+                    (k, v) for k, v in item.items() if k not in ignore_keys)
+            return output
+
+        class PythonObjectEncoder(json.JSONEncoder):
+            def default(self, o):  # pylint: disable=method-hidden
+                if isinstance(o, datetime):
+                    return o.isoformat()
+                if isinstance(o, set):
+                    return list(o)
+                return json.JSONEncoder.default(self, o)
+
+        output = {
+            'reviews': format_for_json_dump(self.reviews),
+            'changes': format_for_json_dump(self.changes),
+            'issues': format_for_json_dump(self.issues)
+        }
+        print(json.dumps(output, indent=2, cls=PythonObjectEncoder))
 
 
 def main():
-  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
-  parser.add_option(
-      '-u', '--user', metavar='<email>',
-      # Look for USER and USERNAME (Windows) environment variables.
-      default=os.environ.get('USER', os.environ.get('USERNAME')),
-      help='Filter on user, default=%default')
-  parser.add_option(
-      '-b', '--begin', metavar='<date>',
-      help='Filter issues created after the date (mm/dd/yy)')
-  parser.add_option(
-      '-e', '--end', metavar='<date>',
-      help='Filter issues created before the date (mm/dd/yy)')
-  quarter_begin, quarter_end = get_quarter_of(datetime.today() -
-                                              relativedelta(months=2))
-  parser.add_option(
-      '-Q', '--last_quarter', action='store_true',
-      help='Use last quarter\'s dates, i.e. %s to %s' % (
-        quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
-  parser.add_option(
-      '-Y', '--this_year', action='store_true',
-      help='Use this year\'s dates')
-  parser.add_option(
-      '-w', '--week_of', metavar='<date>',
-      help='Show issues for week of the date (mm/dd/yy)')
-  parser.add_option(
-      '-W', '--last_week', action='count',
-      help='Show last week\'s issues. Use more times for more weeks.')
-  parser.add_option(
-      '-a', '--auth',
-      action='store_true',
-      help='Ask to authenticate for instances with no auth cookie')
-  parser.add_option(
-      '-d', '--deltas',
-      action='store_true',
-      help='Fetch deltas for changes.')
-  parser.add_option(
-      '--no-referenced-issues',
-      action='store_true',
-      help='Do not fetch issues referenced by owned changes. Useful in '
-           'combination with --changes-by-issue when you only want to list '
-           'issues that have also been modified in the same time period.')
-  parser.add_option(
-      '--skip_servers',
-      action='store',
-      default='',
-      help='A comma-separated list of gerrit and rietveld servers to ignore')
-  parser.add_option(
-      '--skip-own-issues-without-changes',
-      action='store_true',
-      help='Skips listing own issues without changes when showing changes '
-           'grouped by referenced issue(s). See --changes-by-issue for more '
-           'details.')
-  parser.add_option(
-      '-F', '--config_file', metavar='<config_file>',
-      help='Configuration file in JSON format, used to add additional gerrit '
-           'instances (see source code for an example).')
-
-  activity_types_group = optparse.OptionGroup(parser, 'Activity Types',
-                               'By default, all activity will be looked up and '
-                               'printed. If any of these are specified, only '
-                               'those specified will be searched.')
-  activity_types_group.add_option(
-      '-c', '--changes',
-      action='store_true',
-      help='Show changes.')
-  activity_types_group.add_option(
-      '-i', '--issues',
-      action='store_true',
-      help='Show issues.')
-  activity_types_group.add_option(
-      '-r', '--reviews',
-      action='store_true',
-      help='Show reviews.')
-  activity_types_group.add_option(
-      '--changes-by-issue', action='store_true',
-      help='Show changes grouped by referenced issue(s).')
-  parser.add_option_group(activity_types_group)
-
-  output_format_group = optparse.OptionGroup(parser, 'Output Format',
-                              'By default, all activity will be printed in the '
-                              'following format: {url} {title}. This can be '
-                              'changed for either all activity types or '
-                              'individually for each activity type. The format '
-                              'is defined as documented for '
-                              'string.format(...). The variables available for '
-                              'all activity types are url, title, author, '
-                              'created and modified. Format options for '
-                              'specific activity types will override the '
-                              'generic format.')
-  output_format_group.add_option(
-      '-f', '--output-format', metavar='<format>',
-      default=u'{url} {title}',
-      help='Specifies the format to use when printing all your activity.')
-  output_format_group.add_option(
-      '--output-format-changes', metavar='<format>',
-      default=None,
-      help='Specifies the format to use when printing changes. Supports the '
-      'additional variable {reviewers}.')
-  output_format_group.add_option(
-      '--output-format-issues', metavar='<format>',
-      default=None,
-      help='Specifies the format to use when printing issues. Supports the '
-           'additional variable {owner}.')
-  output_format_group.add_option(
-      '--output-format-reviews', metavar='<format>',
-      default=None,
-      help='Specifies the format to use when printing reviews.')
-  output_format_group.add_option(
-      '--output-format-heading', metavar='<format>',
-      default=u'{heading}:',
-      help='Specifies the format to use when printing headings. '
-           'Supports the variable {heading}.')
-  output_format_group.add_option(
-      '--output-format-no-url', default='{title}',
-      help='Specifies the format to use when printing activity without url.')
-  output_format_group.add_option(
-      '-m', '--markdown', action='store_true',
-      help='Use markdown-friendly output (overrides --output-format '
-           'and --output-format-heading)')
-  output_format_group.add_option(
-      '-j', '--json', action='store_true',
-      help='Output json data (overrides other format options)')
-  parser.add_option_group(output_format_group)
-
-  parser.add_option(
-      '-v', '--verbose',
-      action='store_const',
-      dest='verbosity',
-      default=logging.WARN,
-      const=logging.INFO,
-      help='Output extra informational messages.'
-  )
-  parser.add_option(
-      '-q', '--quiet',
-      action='store_const',
-      dest='verbosity',
-      const=logging.ERROR,
-      help='Suppress non-error messages.'
-  )
-  parser.add_option(
-      '-M', '--merged-only',
-      action='store_true',
-      dest='merged_only',
-      default=False,
-      help='Shows only changes that have been merged.')
-  parser.add_option(
-      '-C', '--completed-issues',
-      action='store_true',
-      dest='completed_issues',
-      default=False,
-      help='Shows only monorail issues that have completed (Fixed|Verified) '
-           'by the user.')
-  parser.add_option(
-      '-o', '--output', metavar='<file>',
-      help='Where to output the results. By default prints to stdout.')
-
-  # Remove description formatting
-  parser.format_description = (
-      lambda _: parser.description)  # pylint: disable=no-member
-
-  options, args = parser.parse_args()
-  options.local_user = os.environ.get('USER')
-  if args:
-    parser.error('Args unsupported')
-  if not options.user:
-    parser.error('USER/USERNAME is not set, please use -u')
-  # Retains the original -u option as the email address.
-  options.email = options.user
-  options.user = username(options.email)
-
-  logging.basicConfig(level=options.verbosity)
-
-  # python-keyring provides easy access to the system keyring.
-  try:
-    import keyring  # pylint: disable=unused-import,unused-variable,F0401
-  except ImportError:
-    logging.warning('Consider installing python-keyring')
-
-  if not options.begin:
-    if options.last_quarter:
-      begin, end = quarter_begin, quarter_end
-    elif options.this_year:
-      begin, end = get_year_of(datetime.today())
-    elif options.week_of:
-      begin, end = (get_week_of(datetime.strptime(options.week_of, '%m/%d/%y')))
-    elif options.last_week:
-      begin, end = (get_week_of(datetime.today() -
-                                timedelta(days=1 + 7 * options.last_week)))
-    else:
-      begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
-  else:
-    begin = dateutil.parser.parse(options.begin)
-    if options.end:
-      end = dateutil.parser.parse(options.end)
+    parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
+    parser.add_option(
+        '-u',
+        '--user',
+        metavar='<email>',
+        # Look for USER and USERNAME (Windows) environment variables.
+        default=os.environ.get('USER', os.environ.get('USERNAME')),
+        help='Filter on user, default=%default')
+    parser.add_option('-b',
+                      '--begin',
+                      metavar='<date>',
+                      help='Filter issues created after the date (mm/dd/yy)')
+    parser.add_option('-e',
+                      '--end',
+                      metavar='<date>',
+                      help='Filter issues created before the date (mm/dd/yy)')
+    quarter_begin, quarter_end = get_quarter_of(datetime.today() -
+                                                relativedelta(months=2))
+    parser.add_option(
+        '-Q',
+        '--last_quarter',
+        action='store_true',
+        help='Use last quarter\'s dates, i.e. %s to %s' %
+        (quarter_begin.strftime('%Y-%m-%d'), quarter_end.strftime('%Y-%m-%d')))
+    parser.add_option('-Y',
+                      '--this_year',
+                      action='store_true',
+                      help='Use this year\'s dates')
+    parser.add_option('-w',
+                      '--week_of',
+                      metavar='<date>',
+                      help='Show issues for week of the date (mm/dd/yy)')
+    parser.add_option(
+        '-W',
+        '--last_week',
+        action='count',
+        help='Show last week\'s issues. Use more times for more weeks.')
+    parser.add_option(
+        '-a',
+        '--auth',
+        action='store_true',
+        help='Ask to authenticate for instances with no auth cookie')
+    parser.add_option('-d',
+                      '--deltas',
+                      action='store_true',
+                      help='Fetch deltas for changes.')
+    parser.add_option(
+        '--no-referenced-issues',
+        action='store_true',
+        help='Do not fetch issues referenced by owned changes. Useful in '
+        'combination with --changes-by-issue when you only want to list '
+        'issues that have also been modified in the same time period.')
+    parser.add_option(
+        '--skip_servers',
+        action='store',
+        default='',
+        help='A comma-separated list of gerrit and rietveld servers to ignore')
+    parser.add_option(
+        '--skip-own-issues-without-changes',
+        action='store_true',
+        help='Skips listing own issues without changes when showing changes '
+        'grouped by referenced issue(s). See --changes-by-issue for more '
+        'details.')
+    parser.add_option(
+        '-F',
+        '--config_file',
+        metavar='<config_file>',
+        help='Configuration file in JSON format, used to add additional gerrit '
+        'instances (see source code for an example).')
+
+    activity_types_group = optparse.OptionGroup(
+        parser, 'Activity Types',
+        'By default, all activity will be looked up and '
+        'printed. If any of these are specified, only '
+        'those specified will be searched.')
+    activity_types_group.add_option('-c',
+                                    '--changes',
+                                    action='store_true',
+                                    help='Show changes.')
+    activity_types_group.add_option('-i',
+                                    '--issues',
+                                    action='store_true',
+                                    help='Show issues.')
+    activity_types_group.add_option('-r',
+                                    '--reviews',
+                                    action='store_true',
+                                    help='Show reviews.')
+    activity_types_group.add_option(
+        '--changes-by-issue',
+        action='store_true',
+        help='Show changes grouped by referenced issue(s).')
+    parser.add_option_group(activity_types_group)
+
+    output_format_group = optparse.OptionGroup(
+        parser, 'Output Format',
+        'By default, all activity will be printed in the '
+        'following format: {url} {title}. This can be '
+        'changed for either all activity types or '
+        'individually for each activity type. The format '
+        'is defined as documented for '
+        'string.format(...). The variables available for '
+        'all activity types are url, title, author, '
+        'created and modified. Format options for '
+        'specific activity types will override the '
+        'generic format.')
+    output_format_group.add_option(
+        '-f',
+        '--output-format',
+        metavar='<format>',
+        default=u'{url} {title}',
+        help='Specifies the format to use when printing all your activity.')
+    output_format_group.add_option(
+        '--output-format-changes',
+        metavar='<format>',
+        default=None,
+        help='Specifies the format to use when printing changes. Supports the '
+        'additional variable {reviewers}.')
+    output_format_group.add_option(
+        '--output-format-issues',
+        metavar='<format>',
+        default=None,
+        help='Specifies the format to use when printing issues. Supports the '
+        'additional variable {owner}.')
+    output_format_group.add_option(
+        '--output-format-reviews',
+        metavar='<format>',
+        default=None,
+        help='Specifies the format to use when printing reviews.')
+    output_format_group.add_option(
+        '--output-format-heading',
+        metavar='<format>',
+        default=u'{heading}:',
+        help='Specifies the format to use when printing headings. '
+        'Supports the variable {heading}.')
+    output_format_group.add_option(
+        '--output-format-no-url',
+        default='{title}',
+        help='Specifies the format to use when printing activity without url.')
+    output_format_group.add_option(
+        '-m',
+        '--markdown',
+        action='store_true',
+        help='Use markdown-friendly output (overrides --output-format '
+        'and --output-format-heading)')
+    output_format_group.add_option(
+        '-j',
+        '--json',
+        action='store_true',
+        help='Output json data (overrides other format options)')
+    parser.add_option_group(output_format_group)
+
+    parser.add_option('-v',
+                      '--verbose',
+                      action='store_const',
+                      dest='verbosity',
+                      default=logging.WARN,
+                      const=logging.INFO,
+                      help='Output extra informational messages.')
+    parser.add_option('-q',
+                      '--quiet',
+                      action='store_const',
+                      dest='verbosity',
+                      const=logging.ERROR,
+                      help='Suppress non-error messages.')
+    parser.add_option('-M',
+                      '--merged-only',
+                      action='store_true',
+                      dest='merged_only',
+                      default=False,
+                      help='Shows only changes that have been merged.')
+    parser.add_option(
+        '-C',
+        '--completed-issues',
+        action='store_true',
+        dest='completed_issues',
+        default=False,
+        help='Shows only monorail issues that have completed (Fixed|Verified) '
+        'by the user.')
+    parser.add_option(
+        '-o',
+        '--output',
+        metavar='<file>',
+        help='Where to output the results. By default prints to stdout.')
+
+    # Remove description formatting
+    parser.format_description = (lambda _: parser.description)  # pylint: disable=no-member
+
+    options, args = parser.parse_args()
+    options.local_user = os.environ.get('USER')
+    if args:
+        parser.error('Args unsupported')
+    if not options.user:
+        parser.error('USER/USERNAME is not set, please use -u')
+    # Retains the original -u option as the email address.
+    options.email = options.user
+    options.user = username(options.email)
+
+    logging.basicConfig(level=options.verbosity)
+
+    # python-keyring provides easy access to the system keyring.
+    try:
+        import keyring  # pylint: disable=unused-import,unused-variable,F0401
+    except ImportError:
+        logging.warning('Consider installing python-keyring')
+
+    if not options.begin:
+        if options.last_quarter:
+            begin, end = quarter_begin, quarter_end
+        elif options.this_year:
+            begin, end = get_year_of(datetime.today())
+        elif options.week_of:
+            begin, end = (get_week_of(
+                datetime.strptime(options.week_of, '%m/%d/%y')))
+        elif options.last_week:
+            begin, end = (
+                get_week_of(datetime.today() -
+                            timedelta(days=1 + 7 * options.last_week)))
+        else:
+            begin, end = (get_week_of(datetime.today() - timedelta(days=1)))
     else:
-      end = datetime.today()
-  options.begin, options.end = begin, end
-  if begin >= end:
-    # The queries fail in peculiar ways when the begin date is in the future.
-    # Give a descriptive error message instead.
-    logging.error('Start date (%s) is the same or later than end date (%s)' %
-                  (begin, end))
-    return 1
-
-  if options.markdown:
-    options.output_format_heading = '### {heading}\n'
-    options.output_format = '  * [{title}]({url})'
-    options.output_format_no_url = '  * {title}'
-  logging.info('Searching for activity by %s', options.user)
-  logging.info('Using range %s to %s', options.begin, options.end)
-
-  if options.config_file:
-    with open(options.config_file) as f:
-      config = json.load(f)
-
-      for item, entries in config.items():
-        if item == 'gerrit_instances':
-          for repo, dic in entries.items():
-            # Use property name as URL
-            dic['url'] = repo
-            gerrit_instances.append(dic)
-        elif item == 'monorail_projects':
-          monorail_projects.append(entries)
+        begin = dateutil.parser.parse(options.begin)
+        if options.end:
+            end = dateutil.parser.parse(options.end)
         else:
-          logging.error('Invalid entry in config file.')
-          return 1
-
-  my_activity = MyActivity(options)
-  my_activity.show_progress('Loading data')
-
-  if not (options.changes or options.reviews or options.issues or
-          options.changes_by_issue):
-    options.changes = True
-    options.issues = True
-    options.reviews = True
-
-  # First do any required authentication so none of the user interaction has to
-  # wait for actual work.
-  if options.changes or options.changes_by_issue:
-    my_activity.auth_for_changes()
-  if options.reviews:
-    my_activity.auth_for_reviews()
-
-  logging.info('Looking up activity.....')
-
-  try:
+            end = datetime.today()
+    options.begin, options.end = begin, end
+    if begin >= end:
+        # The queries fail in peculiar ways when the begin date is in the
+        # future. Give a descriptive error message instead.
+        logging.error(
+            'Start date (%s) is the same or later than end date (%s)' %
+            (begin, end))
+        return 1
+
+    if options.markdown:
+        options.output_format_heading = '### {heading}\n'
+        options.output_format = '  * [{title}]({url})'
+        options.output_format_no_url = '  * {title}'
+    logging.info('Searching for activity by %s', options.user)
+    logging.info('Using range %s to %s', options.begin, options.end)
+
+    if options.config_file:
+        with open(options.config_file) as f:
+            config = json.load(f)
+
+            for item, entries in config.items():
+                if item == 'gerrit_instances':
+                    for repo, dic in entries.items():
+                        # Use property name as URL
+                        dic['url'] = repo
+                        gerrit_instances.append(dic)
+                elif item == 'monorail_projects':
+                    monorail_projects.append(entries)
+                else:
+                    logging.error('Invalid entry in config file.')
+                    return 1
+
+    my_activity = MyActivity(options)
+    my_activity.show_progress('Loading data')
+
+    if not (options.changes or options.reviews or options.issues
+            or options.changes_by_issue):
+        options.changes = True
+        options.issues = True
+        options.reviews = True
+
+    # First do any required authentication so none of the user interaction has
+    # to wait for actual work.
     if options.changes or options.changes_by_issue:
-      my_activity.get_changes()
+        my_activity.auth_for_changes()
     if options.reviews:
-      my_activity.get_reviews()
-    if options.issues or options.changes_by_issue:
-      my_activity.get_issues()
-    if not options.no_referenced_issues:
-      my_activity.get_referenced_issues()
-  except auth.LoginRequiredError as e:
-    logging.error('auth.LoginRequiredError: %s', e)
-
-  my_activity.show_progress('\n')
-
-  my_activity.print_access_errors()
-
-  output_file = None
-  try:
-    if options.output:
-      output_file = open(options.output, 'w')
-      logging.info('Printing output to "%s"', options.output)
-      sys.stdout = output_file
-  except (IOError, OSError) as e:
-    logging.error('Unable to write output: %s', e)
-  else:
-    if options.json:
-      my_activity.dump_json()
+        my_activity.auth_for_reviews()
+
+    logging.info('Looking up activity.....')
+
+    try:
+        if options.changes or options.changes_by_issue:
+            my_activity.get_changes()
+        if options.reviews:
+            my_activity.get_reviews()
+        if options.issues or options.changes_by_issue:
+            my_activity.get_issues()
+        if not options.no_referenced_issues:
+            my_activity.get_referenced_issues()
+    except auth.LoginRequiredError as e:
+        logging.error('auth.LoginRequiredError: %s', e)
+
+    my_activity.show_progress('\n')
+
+    my_activity.print_access_errors()
+
+    output_file = None
+    try:
+        if options.output:
+            output_file = open(options.output, 'w')
+            logging.info('Printing output to "%s"', options.output)
+            sys.stdout = output_file
+    except (IOError, OSError) as e:
+        logging.error('Unable to write output: %s', e)
     else:
-      if options.changes:
-        my_activity.print_changes()
-      if options.reviews:
-        my_activity.print_reviews()
-      if options.issues:
-        my_activity.print_issues()
-      if options.changes_by_issue:
-        my_activity.print_changes_by_issue(
-            options.skip_own_issues_without_changes)
-  finally:
-    if output_file:
-      logging.info('Done printing to file.')
-      sys.stdout = sys.__stdout__
-      output_file.close()
-
-  return 0
+        if options.json:
+            my_activity.dump_json()
+        else:
+            if options.changes:
+                my_activity.print_changes()
+            if options.reviews:
+                my_activity.print_reviews()
+            if options.issues:
+                my_activity.print_issues()
+            if options.changes_by_issue:
+                my_activity.print_changes_by_issue(
+                    options.skip_own_issues_without_changes)
+    finally:
+        if output_file:
+            logging.info('Done printing to file.')
+            sys.stdout = sys.__stdout__
+            output_file.close()
+
+    return 0
 
 
 if __name__ == '__main__':
-  # Fix encoding to support non-ascii issue titles.
-  fix_encoding.fix_encoding()
+    # Fix encoding to support non-ascii issue titles.
+    fix_encoding.fix_encoding()
 
-  try:
-    sys.exit(main())
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
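
A note on the --config-file format: the parsing loop in main() above implies a JSON layout in which each key under "gerrit_instances" is itself the instance URL and "monorail_projects" entries are appended as-is. A minimal sketch of that parsing in isolation (the instance URL and project name below are invented for illustration, not required by the loop):

import json

# Hypothetical --config-file contents matching the loop in main() above.
config_text = json.dumps({
    'gerrit_instances': {'chromium-review.googlesource.com': {}},
    'monorail_projects': ['chromium'],
})

gerrit_instances, monorail_projects = [], []
for item, entries in json.loads(config_text).items():
    if item == 'gerrit_instances':
        for repo, dic in entries.items():
            dic['url'] = repo  # the property name doubles as the URL
            gerrit_instances.append(dic)
    elif item == 'monorail_projects':
        monorail_projects.append(entries)

assert gerrit_instances[0]['url'] == 'chromium-review.googlesource.com'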

+ 60 - 59
ninja.py

@@ -15,76 +15,77 @@ import gclient_paths
 
 
 def findNinjaInPath():
-  env_path = os.getenv('PATH')
-  if not env_path:
-    return
-  exe = 'ninja'
-  if sys.platform in ['win32', 'cygwin']:
-    exe += '.exe'
-  for bin_dir in env_path.split(os.pathsep):
-    if bin_dir.rstrip(os.sep).endswith('depot_tools'):
-      # skip depot_tools to avoid calling ninja.py infitely.
-      continue
-    ninja_path = os.path.join(bin_dir, exe)
-    if os.path.isfile(ninja_path):
-      return ninja_path
+    env_path = os.getenv('PATH')
+    if not env_path:
+        return
+    exe = 'ninja'
+    if sys.platform in ['win32', 'cygwin']:
+        exe += '.exe'
+    for bin_dir in env_path.split(os.pathsep):
+        if bin_dir.rstrip(os.sep).endswith('depot_tools'):
+            # skip depot_tools to avoid calling ninja.py infinitely.
+            continue
+        ninja_path = os.path.join(bin_dir, exe)
+        if os.path.isfile(ninja_path):
+            return ninja_path
 
 
 def fallback(ninja_args):
-  # Try to find ninja in PATH.
-  ninja_path = findNinjaInPath()
-  if ninja_path:
-    return subprocess.call([ninja_path] + ninja_args)
+    # Try to find ninja in PATH.
+    ninja_path = findNinjaInPath()
+    if ninja_path:
+        return subprocess.call([ninja_path] + ninja_args)
 
-  print(
-      'depot_tools/ninja.py: Could not find Ninja in the third_party of '
-      'the current project, nor in your PATH.\n'
-      'Please take one of the following actions to install Ninja:\n'
-      '- If your project has DEPS, add a CIPD Ninja dependency to DEPS.\n'
-      '- Otherwise, add Ninja to your PATH *after* depot_tools.',
-      file=sys.stderr)
-  return 1
+    print(
+        'depot_tools/ninja.py: Could not find Ninja in the third_party of '
+        'the current project, nor in your PATH.\n'
+        'Please take one of the following actions to install Ninja:\n'
+        '- If your project has DEPS, add a CIPD Ninja dependency to DEPS.\n'
+        '- Otherwise, add Ninja to your PATH *after* depot_tools.',
+        file=sys.stderr)
+    return 1
 
 
 def main(args):
-  # On Windows the ninja.bat script passes along the arguments enclosed in
-  # double quotes. This prevents multiple levels of parsing of the special '^'
-  # characters needed when compiling a single file.  When this case is detected,
-  # we need to split the argument. This means that arguments containing actual
-  # spaces are not supported by ninja.bat, but that is not a real limitation.
-  if (sys.platform.startswith('win') and len(args) == 2):
-    args = args[:1] + args[1].split()
+    # On Windows the ninja.bat script passes along the arguments enclosed in
+    # double quotes. This prevents multiple levels of parsing of the special '^'
+    # characters needed when compiling a single file.  When this case is
+    # detected, we need to split the argument. This means that arguments
+    # containing actual spaces are not supported by ninja.bat, but that is not a
+    # real limitation.
+    if (sys.platform.startswith('win') and len(args) == 2):
+        args = args[:1] + args[1].split()
 
-  # macOS's python sets CPATH, LIBRARY_PATH, SDKROOT implicitly.
-  # https://openradar.appspot.com/radar?id=5608755232243712
-  #
-  # Removing those environment variables to avoid affecting clang's behaviors.
-  if sys.platform == 'darwin':
-    os.environ.pop("CPATH", None)
-    os.environ.pop("LIBRARY_PATH", None)
-    os.environ.pop("SDKROOT", None)
+    # macOS's python sets CPATH, LIBRARY_PATH, SDKROOT implicitly.
+    # https://openradar.appspot.com/radar?id=5608755232243712
+    #
+    # Remove those environment variables to avoid affecting clang's behavior.
+    if sys.platform == 'darwin':
+        os.environ.pop("CPATH", None)
+        os.environ.pop("LIBRARY_PATH", None)
+        os.environ.pop("SDKROOT", None)
 
-  # Get gclient root + src.
-  primary_solution_path = gclient_paths.GetPrimarySolutionPath()
-  gclient_root_path = gclient_paths.FindGclientRoot(os.getcwd())
-  gclient_src_root_path = None
-  if gclient_root_path:
-    gclient_src_root_path = os.path.join(gclient_root_path, 'src')
+    # Get gclient root + src.
+    primary_solution_path = gclient_paths.GetPrimarySolutionPath()
+    gclient_root_path = gclient_paths.FindGclientRoot(os.getcwd())
+    gclient_src_root_path = None
+    if gclient_root_path:
+        gclient_src_root_path = os.path.join(gclient_root_path, 'src')
 
-  for base_path in set(
-      [primary_solution_path, gclient_root_path, gclient_src_root_path]):
-    if not base_path:
-      continue
-    ninja_path = os.path.join(base_path, 'third_party', 'ninja',
-                              'ninja' + gclient_paths.GetExeSuffix())
-    if os.path.isfile(ninja_path):
-      return subprocess.call([ninja_path] + args[1:])
+    for base_path in set(
+        [primary_solution_path, gclient_root_path, gclient_src_root_path]):
+        if not base_path:
+            continue
+        ninja_path = os.path.join(base_path, 'third_party', 'ninja',
+                                  'ninja' + gclient_paths.GetExeSuffix())
+        if os.path.isfile(ninja_path):
+            return subprocess.call([ninja_path] + args[1:])
 
-  return fallback(args[1:])
+    return fallback(args[1:])
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv))
-  except KeyboardInterrupt:
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv))
+    except KeyboardInterrupt:
+        sys.exit(1)
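
The fallback in findNinjaInPath() above is essentially a PATH scan that skips depot_tools so this wrapper never re-invokes itself. A standalone sketch of that pattern, assuming nothing beyond the standard library (find_on_path is a hypothetical helper, not part of depot_tools):

import os
import sys

def find_on_path(exe='ninja', skip_suffix='depot_tools'):
    if sys.platform in ('win32', 'cygwin'):
        exe += '.exe'
    for bin_dir in os.getenv('PATH', '').split(os.pathsep):
        if bin_dir.rstrip(os.sep).endswith(skip_suffix):
            continue  # skip the wrapper's own directory
        candidate = os.path.join(bin_dir, exe)
        if os.path.isfile(candidate):
            return candidate
    return None

print(find_on_path() or 'ninja not found on PATH')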

+ 8 - 8
ninja_reclient.py

@@ -14,14 +14,14 @@ import reclient_helper
 
 
 def main(argv):
-  with reclient_helper.build_context(argv, 'ninja_reclient') as ret_code:
-    if ret_code:
-      return ret_code
-    try:
-      return ninja.main(argv)
-    except KeyboardInterrupt:
-      return 1
+    with reclient_helper.build_context(argv, 'ninja_reclient') as ret_code:
+        if ret_code:
+            return ret_code
+        try:
+            return ninja.main(argv)
+        except KeyboardInterrupt:
+            return 1
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+    sys.exit(main(sys.argv))
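
ninja_reclient.py above depends on reclient_helper.build_context yielding a return code: a non-zero value means setup failed and the wrapped ninja run is skipped. A hedged sketch of a context manager with that shape; the body below is an assumption for illustration and does not reflect the real reclient_helper internals:

import contextlib

@contextlib.contextmanager
def build_context(argv, tool_name):
    setup_ok = True  # stand-in for reproxy startup, auth checks, etc.
    try:
        yield 0 if setup_ok else 1
    finally:
        pass  # stand-in for reproxy shutdown and log collection

with build_context(['ninja', '-C', 'out/Default'], 'demo') as ret_code:
    if ret_code:
        print('setup failed with code', ret_code)
    else:
        print('would run ninja.main() here')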

+ 152 - 152
ninjalog_uploader.py

@@ -39,92 +39,92 @@ ALLOWLISTED_CONFIGS = ('symbol_level', 'use_goma', 'is_debug',
 
 
 def IsGoogler():
-  """Check whether this user is Googler or not."""
-  p = subprocess.run('goma_auth info',
-                     stdout=subprocess.PIPE,
-                     stderr=subprocess.PIPE,
-                     universal_newlines=True,
-                     shell=True)
-  if p.returncode != 0:
-    return False
-  lines = p.stdout.splitlines()
-  if len(lines) == 0:
-    return False
-  l = lines[0]
-  # |l| will be like 'Login as <user>@google.com' for googler using goma.
-  return l.startswith('Login as ') and l.endswith('@google.com')
+    """Check whether this user is Googler or not."""
+    p = subprocess.run('goma_auth info',
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE,
+                       universal_newlines=True,
+                       shell=True)
+    if p.returncode != 0:
+        return False
+    lines = p.stdout.splitlines()
+    if len(lines) == 0:
+        return False
+    l = lines[0]
+    # |l| will be like 'Login as <user>@google.com' for googler using goma.
+    return l.startswith('Login as ') and l.endswith('@google.com')
 
 
 def ParseGNArgs(gn_args):
-  """Parse gn_args as json and return config dictionary."""
-  configs = json.loads(gn_args)
-  build_configs = {}
-
-  for config in configs:
-    key = config["name"]
-    if key not in ALLOWLISTED_CONFIGS:
-      continue
-    if 'current' in config:
-      build_configs[key] = config['current']['value']
-    else:
-      build_configs[key] = config['default']['value']
+    """Parse gn_args as json and return config dictionary."""
+    configs = json.loads(gn_args)
+    build_configs = {}
+
+    for config in configs:
+        key = config["name"]
+        if key not in ALLOWLISTED_CONFIGS:
+            continue
+        if 'current' in config:
+            build_configs[key] = config['current']['value']
+        else:
+            build_configs[key] = config['default']['value']
 
-  return build_configs
+    return build_configs
 
 
 def GetBuildTargetFromCommandLine(cmdline):
-  """Get build targets from commandline."""
+    """Get build targets from commandline."""
 
-  # Skip argv0, argv1: ['/path/to/python3', '/path/to/depot_tools/ninja.py']
-  idx = 2
+    # Skip argv0, argv1: ['/path/to/python3', '/path/to/depot_tools/ninja.py']
+    idx = 2
 
-  # Skipping all args that involve these flags, and taking all remaining args
-  # as targets.
-  onearg_flags = ('-C', '-d', '-f', '-j', '-k', '-l', '-p', '-t', '-w')
-  zeroarg_flags = ('--version', '-n', '-v')
+    # Skipping all args that involve these flags, and taking all remaining args
+    # as targets.
+    onearg_flags = ('-C', '-d', '-f', '-j', '-k', '-l', '-p', '-t', '-w')
+    zeroarg_flags = ('--version', '-n', '-v')
 
-  targets = []
+    targets = []
 
-  while idx < len(cmdline):
-    arg = cmdline[idx]
-    if arg in onearg_flags:
-      idx += 2
-      continue
+    while idx < len(cmdline):
+        arg = cmdline[idx]
+        if arg in onearg_flags:
+            idx += 2
+            continue
 
-    if (arg[:2] in onearg_flags or arg in zeroarg_flags):
-      idx += 1
-      continue
+        if (arg[:2] in onearg_flags or arg in zeroarg_flags):
+            idx += 1
+            continue
 
-    # A target doesn't start with '-'.
-    if arg.startswith('-'):
-      idx += 1
-      continue
+        # A target doesn't start with '-'.
+        if arg.startswith('-'):
+            idx += 1
+            continue
 
-    # Avoid uploading absolute paths accidentally. e.g. b/270907050
-    if os.path.isabs(arg):
-      idx += 1
-      continue
+        # Avoid uploading absolute paths accidentally. e.g. b/270907050
+        if os.path.isabs(arg):
+            idx += 1
+            continue
 
-    targets.append(arg)
-    idx += 1
+        targets.append(arg)
+        idx += 1
 
-  return targets
+    return targets
 
 
 def GetJflag(cmdline):
-  """Parse cmdline to get flag value for -j"""
+    """Parse cmdline to get flag value for -j"""
 
-  for i in range(len(cmdline)):
-    if (cmdline[i] == '-j' and i + 1 < len(cmdline)
-        and cmdline[i + 1].isdigit()):
-      return int(cmdline[i + 1])
+    for i in range(len(cmdline)):
+        if (cmdline[i] == '-j' and i + 1 < len(cmdline)
+                and cmdline[i + 1].isdigit()):
+            return int(cmdline[i + 1])
 
-    if (cmdline[i].startswith('-j') and cmdline[i][len('-j'):].isdigit()):
-      return int(cmdline[i][len('-j'):])
+        if (cmdline[i].startswith('-j') and cmdline[i][len('-j'):].isdigit()):
+            return int(cmdline[i][len('-j'):])
 
 
 def GetMetadata(cmdline, ninjalog):
-  """Get metadata for uploaded ninjalog.
+    """Get metadata for uploaded ninjalog.
 
     Returned metadata has schema defined in
     https://cs.chromium.org?q="type+Metadata+struct+%7B"+file:%5Einfra/go/src/infra/appengine/chromium_build_stats/ninjalog/
@@ -132,120 +132,120 @@ def GetMetadata(cmdline, ninjalog):
     TODO(tikuta): Collect GOMA_* env var.
     """
 
-  build_dir = os.path.dirname(ninjalog)
+    build_dir = os.path.dirname(ninjalog)
 
-  build_configs = {}
+    build_configs = {}
 
-  try:
-    args = ['gn', 'args', build_dir, '--list', '--short', '--json']
-    if sys.platform == 'win32':
-      # gn in PATH is bat file in windows environment (except cygwin).
-      args = ['cmd', '/c'] + args
+    try:
+        args = ['gn', 'args', build_dir, '--list', '--short', '--json']
+        if sys.platform == 'win32':
+            # gn in PATH is bat file in windows environment (except cygwin).
+            args = ['cmd', '/c'] + args
 
-    gn_args = subprocess.check_output(args)
-    build_configs = ParseGNArgs(gn_args)
-  except subprocess.CalledProcessError as e:
-    logging.error("Failed to call gn %s", e)
-    build_configs = {}
+        gn_args = subprocess.check_output(args)
+        build_configs = ParseGNArgs(gn_args)
+    except subprocess.CalledProcessError as e:
+        logging.error("Failed to call gn %s", e)
+        build_configs = {}
 
-  # Stringify config.
-  for k in build_configs:
-    build_configs[k] = str(build_configs[k])
+    # Stringify config.
+    for k in build_configs:
+        build_configs[k] = str(build_configs[k])
 
-  metadata = {
-      'platform': platform.system(),
-      'cpu_core': multiprocessing.cpu_count(),
-      'build_configs': build_configs,
-      'targets': GetBuildTargetFromCommandLine(cmdline),
-  }
+    metadata = {
+        'platform': platform.system(),
+        'cpu_core': multiprocessing.cpu_count(),
+        'build_configs': build_configs,
+        'targets': GetBuildTargetFromCommandLine(cmdline),
+    }
 
-  jflag = GetJflag(cmdline)
-  if jflag is not None:
-    metadata['jobs'] = jflag
+    jflag = GetJflag(cmdline)
+    if jflag is not None:
+        metadata['jobs'] = jflag
 
-  return metadata
+    return metadata
 
 
 def GetNinjalog(cmdline):
-  """GetNinjalog returns the path to ninjalog from cmdline."""
-  # ninjalog is in current working directory by default.
-  ninjalog_dir = '.'
+    """GetNinjalog returns the path to ninjalog from cmdline."""
+    # ninjalog is in current working directory by default.
+    ninjalog_dir = '.'
 
-  i = 0
-  while i < len(cmdline):
-    cmd = cmdline[i]
-    i += 1
-    if cmd == '-C' and i < len(cmdline):
-      ninjalog_dir = cmdline[i]
-      i += 1
-      continue
+    i = 0
+    while i < len(cmdline):
+        cmd = cmdline[i]
+        i += 1
+        if cmd == '-C' and i < len(cmdline):
+            ninjalog_dir = cmdline[i]
+            i += 1
+            continue
 
-    if cmd.startswith('-C') and len(cmd) > len('-C'):
-      ninjalog_dir = cmd[len('-C'):]
+        if cmd.startswith('-C') and len(cmd) > len('-C'):
+            ninjalog_dir = cmd[len('-C'):]
 
-  return os.path.join(ninjalog_dir, '.ninja_log')
+    return os.path.join(ninjalog_dir, '.ninja_log')
 
 
 def main():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--server',
-                      default='chromium-build-stats.appspot.com',
-                      help='server to upload ninjalog file.')
-  parser.add_argument('--ninjalog', help='ninjalog file to upload.')
-  parser.add_argument('--verbose',
-                      action='store_true',
-                      help='Enable verbose logging.')
-  parser.add_argument('--cmdline',
-                      required=True,
-                      nargs=argparse.REMAINDER,
-                      help='command line args passed to ninja.')
-
-  args = parser.parse_args()
-
-  if args.verbose:
-    logging.basicConfig(level=logging.INFO)
-  else:
-    # Disable logging.
-    logging.disable(logging.CRITICAL)
-
-  if not IsGoogler():
-    return 0
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--server',
+                        default='chromium-build-stats.appspot.com',
+                        help='server to upload ninjalog file.')
+    parser.add_argument('--ninjalog', help='ninjalog file to upload.')
+    parser.add_argument('--verbose',
+                        action='store_true',
+                        help='Enable verbose logging.')
+    parser.add_argument('--cmdline',
+                        required=True,
+                        nargs=argparse.REMAINDER,
+                        help='command line args passed to ninja.')
+
+    args = parser.parse_args()
+
+    if args.verbose:
+        logging.basicConfig(level=logging.INFO)
+    else:
+        # Disable logging.
+        logging.disable(logging.CRITICAL)
 
-  ninjalog = args.ninjalog or GetNinjalog(args.cmdline)
-  if not os.path.isfile(ninjalog):
-    logging.warning("ninjalog is not found in %s", ninjalog)
-    return 1
+    if not IsGoogler():
+        return 0
 
-  # We assume that each ninja invocation interval takes at least 2 seconds.
-  # This is not to have duplicate entry in server when current build is no-op.
-  if os.stat(ninjalog).st_mtime < time.time() - 2:
-    logging.info("ninjalog is not updated recently %s", ninjalog)
-    return 0
+    ninjalog = args.ninjalog or GetNinjalog(args.cmdline)
+    if not os.path.isfile(ninjalog):
+        logging.warning("ninjalog is not found in %s", ninjalog)
+        return 1
 
-  output = io.BytesIO()
+    # We assume that each ninja invocation interval takes at least 2 seconds.
+    # This avoids uploading a duplicate entry to the server when the current
+    # build is a no-op.
+    if os.stat(ninjalog).st_mtime < time.time() - 2:
+        logging.info("ninjalog is not updated recently %s", ninjalog)
+        return 0
 
-  with open(ninjalog) as f:
-    with gzip.GzipFile(fileobj=output, mode='wb') as g:
-      g.write(f.read().encode())
-      g.write(b'# end of ninja log\n')
+    output = io.BytesIO()
 
-      metadata = GetMetadata(args.cmdline, ninjalog)
-      logging.info('send metadata: %s', json.dumps(metadata))
-      g.write(json.dumps(metadata).encode())
+    with open(ninjalog) as f:
+        with gzip.GzipFile(fileobj=output, mode='wb') as g:
+            g.write(f.read().encode())
+            g.write(b'# end of ninja log\n')
 
-  resp = request.urlopen(
-      request.Request('https://' + args.server + '/upload_ninja_log/',
-                      data=output.getvalue(),
-                      headers={'Content-Encoding': 'gzip'}))
+            metadata = GetMetadata(args.cmdline, ninjalog)
+            logging.info('send metadata: %s', json.dumps(metadata))
+            g.write(json.dumps(metadata).encode())
 
-  if resp.status != 200:
-    logging.warning("unexpected status code for response: %s", resp.status)
-    return 1
+    resp = request.urlopen(
+        request.Request('https://' + args.server + '/upload_ninja_log/',
+                        data=output.getvalue(),
+                        headers={'Content-Encoding': 'gzip'}))
 
-  logging.info('response header: %s', resp.headers)
-  logging.info('response content: %s', resp.read())
-  return 0
+    if resp.status != 200:
+        logging.warning("unexpected status code for response: %s", resp.status)
+        return 1
+
+    logging.info('response header: %s', resp.headers)
+    logging.info('response content: %s', resp.read())
+    return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
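
The upload body assembled in main() above has three parts: the gzipped .ninja_log text, a '# end of ninja log' delimiter, and one JSON metadata object. A small sketch that builds the same layout in memory and decodes it back (the log line and metadata values are made up):

import gzip
import io
import json

log_text = '12\t345\t0\tobj/foo.o\tdeadbeef\n'
metadata = {'platform': 'Linux', 'cpu_core': 8, 'targets': ['chrome']}

output = io.BytesIO()
with gzip.GzipFile(fileobj=output, mode='wb') as g:
    g.write(log_text.encode())
    g.write(b'# end of ninja log\n')
    g.write(json.dumps(metadata).encode())

# Decoding shows the same three parts the server receives.
decoded = gzip.decompress(output.getvalue()).decode()
log_part, meta_json = decoded.split('# end of ninja log\n', 1)
assert json.loads(meta_json)['targets'] == ['chrome']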

+ 71 - 71
ninjalog_uploader_wrapper.py

@@ -21,38 +21,38 @@ VERSION = 3
 
 
 def LoadConfig():
-  if os.path.isfile(CONFIG):
-    with open(CONFIG, 'r') as f:
-      try:
-        config = json.load(f)
-      except Exception:
-        # Set default value when failed to load config.
-        config = {
-            'is-googler': ninjalog_uploader.IsGoogler(),
-            'countdown': 10,
-            'version': VERSION,
-        }
-
-      if config['version'] == VERSION:
-        config['countdown'] = max(0, config['countdown'] - 1)
-        return config
-
-  return {
-      'is-googler': ninjalog_uploader.IsGoogler(),
-      'countdown': 10,
-      'version': VERSION,
-  }
+    if os.path.isfile(CONFIG):
+        with open(CONFIG, 'r') as f:
+            try:
+                config = json.load(f)
+            except Exception:
+                # Set default value when failed to load config.
+                config = {
+                    'is-googler': ninjalog_uploader.IsGoogler(),
+                    'countdown': 10,
+                    'version': VERSION,
+                }
+
+            if config['version'] == VERSION:
+                config['countdown'] = max(0, config['countdown'] - 1)
+                return config
+
+    return {
+        'is-googler': ninjalog_uploader.IsGoogler(),
+        'countdown': 10,
+        'version': VERSION,
+    }
 
 
 def SaveConfig(config):
-  with open(CONFIG, 'w') as f:
-    json.dump(config, f)
+    with open(CONFIG, 'w') as f:
+        json.dump(config, f)
 
 
 def ShowMessage(countdown):
-  whitelisted = '\n'.join(
-      ['  * %s' % config for config in ninjalog_uploader.ALLOWLISTED_CONFIGS])
-  print("""
+    whitelisted = '\n'.join(
+        ['  * %s' % config for config in ninjalog_uploader.ALLOWLISTED_CONFIGS])
+    print("""
 Your ninjalog will be uploaded to the build stats server. The uploaded log
 will be used to analyze user-side build performance.
 
@@ -85,51 +85,51 @@ https://chromium.googlesource.com/chromium/tools/depot_tools/+/main/ninjalog.REA
 
 
 def main():
-  config = LoadConfig()
-
-  if len(sys.argv) == 2 and sys.argv[1] == 'opt-in':
-    config['opt-in'] = True
-    config['countdown'] = 0
-    SaveConfig(config)
-    print('ninjalog upload is opted in.')
-    return 0
-
-  if len(sys.argv) == 2 and sys.argv[1] == 'opt-out':
-    config['opt-in'] = False
-    SaveConfig(config)
-    print('ninjalog upload is opted out.')
-    return 0
-
-  if 'opt-in' in config and not config['opt-in']:
-    # Upload is opted out.
-    return 0
-
-  if not config.get("is-googler", False):
-    # Not googler.
-    return 0
-
-  if config.get("countdown", 0) > 0:
-    # Need to show message.
-    ShowMessage(config["countdown"])
-    # Only save config if something has meaningfully changed.
-    SaveConfig(config)
-    return 0
-
-  if len(sys.argv) == 1:
-    # dry-run for debugging.
-    print("upload ninjalog dry-run")
-    return 0
-
-  # Run upload script without wait.
-  devnull = open(os.devnull, "w")
-  creationnflags = 0
-  if platform.system() == 'Windows':
-    creationnflags = subprocess.CREATE_NEW_PROCESS_GROUP
-  subprocess2.Popen([sys.executable, UPLOADER] + sys.argv[1:],
-                    stdout=devnull,
-                    stderr=devnull,
-                    creationflags=creationnflags)
+    config = LoadConfig()
+
+    if len(sys.argv) == 2 and sys.argv[1] == 'opt-in':
+        config['opt-in'] = True
+        config['countdown'] = 0
+        SaveConfig(config)
+        print('ninjalog upload is opted in.')
+        return 0
+
+    if len(sys.argv) == 2 and sys.argv[1] == 'opt-out':
+        config['opt-in'] = False
+        SaveConfig(config)
+        print('ninjalog upload is opted out.')
+        return 0
+
+    if 'opt-in' in config and not config['opt-in']:
+        # Upload is opted out.
+        return 0
+
+    if not config.get("is-googler", False):
+        # Not googler.
+        return 0
+
+    if config.get("countdown", 0) > 0:
+        # Need to show message.
+        ShowMessage(config["countdown"])
+        # Only save config if something has meaningfully changed.
+        SaveConfig(config)
+        return 0
+
+    if len(sys.argv) == 1:
+        # dry-run for debugging.
+        print("upload ninjalog dry-run")
+        return 0
+
+    # Run upload script without wait.
+    devnull = open(os.devnull, "w")
+    creationnflags = 0
+    if platform.system() == 'Windows':
+        creationnflags = subprocess.CREATE_NEW_PROCESS_GROUP
+    subprocess2.Popen([sys.executable, UPLOADER] + sys.argv[1:],
+                      stdout=devnull,
+                      stderr=devnull,
+                      creationflags=creationnflags)
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
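
The wrapper's main() above gates uploading on three checks, in order: an explicit opt-out, Googler status, and a notice countdown. A standalone sketch of just that decision, mirroring the checks above (should_upload is a hypothetical helper, not part of depot_tools):

def should_upload(config):
    if 'opt-in' in config and not config['opt-in']:
        return False  # explicitly opted out
    if not config.get('is-googler', False):
        return False  # uploads are Googler-only
    if config.get('countdown', 0) > 0:
        return False  # still showing the notice
    return True

assert not should_upload({'is-googler': True, 'countdown': 3})
assert should_upload({'is-googler': True, 'countdown': 0, 'opt-in': True})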

+ 140 - 137
owners_client.py

@@ -10,7 +10,7 @@ import git_common
 
 
 class OwnersClient(object):
-  """Interact with OWNERS files in a repository.
+    """Interact with OWNERS files in a repository.
 
   This class allows you to interact with OWNERS files in a repository via both
   the Gerrit Code-Owners plugin REST API and the owners database implemented by
@@ -23,164 +23,167 @@ class OwnersClient(object):
   All code should use this class to interact with OWNERS files instead of the
   owners database in owners.py
   """
-  # '*' means that everyone can approve.
-  EVERYONE = '*'
-
-  # Possible status of a file.
-  # - INSUFFICIENT_REVIEWERS: The path needs owners approval, but none of its
-  #   owners is currently a reviewer of the change.
-  # - PENDING: An owner of this path has been added as reviewer, but approval
-  #   has not been given yet.
-  # - APPROVED: The path has been approved by an owner.
-  APPROVED = 'APPROVED'
-  PENDING = 'PENDING'
-  INSUFFICIENT_REVIEWERS = 'INSUFFICIENT_REVIEWERS'
-
-  def ListOwners(self, path):
-    """List all owners for a file.
+    # '*' means that everyone can approve.
+    EVERYONE = '*'
+
+    # Possible status of a file.
+    # - INSUFFICIENT_REVIEWERS: The path needs owners approval, but none of its
+    #   owners is currently a reviewer of the change.
+    # - PENDING: An owner of this path has been added as reviewer, but approval
+    #   has not been given yet.
+    # - APPROVED: The path has been approved by an owner.
+    APPROVED = 'APPROVED'
+    PENDING = 'PENDING'
+    INSUFFICIENT_REVIEWERS = 'INSUFFICIENT_REVIEWERS'
+
+    def ListOwners(self, path):
+        """List all owners for a file.
 
     The returned list is sorted so that better owners appear first.
     """
-    raise Exception('Not implemented')
+        raise Exception('Not implemented')
 
-  def BatchListOwners(self, paths):
-    """List all owners for a group of files.
+    def BatchListOwners(self, paths):
+        """List all owners for a group of files.
 
     Returns a dictionary {path: [owners]}.
     """
-    with git_common.ScopedPool(kind='threads') as pool:
-      return dict(pool.imap_unordered(
-          lambda p: (p, self.ListOwners(p)), paths))
+        with git_common.ScopedPool(kind='threads') as pool:
+            return dict(
+                pool.imap_unordered(lambda p: (p, self.ListOwners(p)), paths))
 
-  def GetFilesApprovalStatus(self, paths, approvers, reviewers):
-    """Check the approval status for the given paths.
+    def GetFilesApprovalStatus(self, paths, approvers, reviewers):
+        """Check the approval status for the given paths.
 
     Utility method to check for approval status when a change has not yet been
     created, given reviewers and approvers.
 
     See GetChangeApprovalStatus for description of the returned value.
     """
-    approvers = set(approvers)
-    if approvers:
-      approvers.add(self.EVERYONE)
-    reviewers = set(reviewers)
-    if reviewers:
-      reviewers.add(self.EVERYONE)
-    status = {}
-    owners_by_path = self.BatchListOwners(paths)
-    for path, owners in owners_by_path.items():
-      owners = set(owners)
-      if owners.intersection(approvers):
-        status[path] = self.APPROVED
-      elif owners.intersection(reviewers):
-        status[path] = self.PENDING
-      else:
-        status[path] = self.INSUFFICIENT_REVIEWERS
-    return status
-
-  def ScoreOwners(self, paths, exclude=None):
-    """Get sorted list of owners for the given paths."""
-    if not paths:
-      return []
-    exclude = exclude or []
-    owners = []
-    queues = self.BatchListOwners(paths).values()
-    for i in range(max(len(q) for q in queues)):
-      for q in queues:
-        if i < len(q) and q[i] not in owners and q[i] not in exclude:
-          owners.append(q[i])
-    return owners
-
-  def SuggestOwners(self, paths, exclude=None):
-    """Suggest a set of owners for the given paths."""
-    exclude = exclude or []
-
-    paths_by_owner = {}
-    owners_by_path = self.BatchListOwners(paths)
-    for path, owners in owners_by_path.items():
-      for owner in owners:
-        paths_by_owner.setdefault(owner, set()).add(path)
-
-    selected = []
-    missing = set(paths)
-    for owner in self.ScoreOwners(paths, exclude=exclude):
-      missing_len = len(missing)
-      missing.difference_update(paths_by_owner[owner])
-      if missing_len > len(missing):
-        selected.append(owner)
-      if not missing:
-        break
-
-    return selected
+        approvers = set(approvers)
+        if approvers:
+            approvers.add(self.EVERYONE)
+        reviewers = set(reviewers)
+        if reviewers:
+            reviewers.add(self.EVERYONE)
+        status = {}
+        owners_by_path = self.BatchListOwners(paths)
+        for path, owners in owners_by_path.items():
+            owners = set(owners)
+            if owners.intersection(approvers):
+                status[path] = self.APPROVED
+            elif owners.intersection(reviewers):
+                status[path] = self.PENDING
+            else:
+                status[path] = self.INSUFFICIENT_REVIEWERS
+        return status
+
+    def ScoreOwners(self, paths, exclude=None):
+        """Get sorted list of owners for the given paths."""
+        if not paths:
+            return []
+        exclude = exclude or []
+        owners = []
+        queues = self.BatchListOwners(paths).values()
+        for i in range(max(len(q) for q in queues)):
+            for q in queues:
+                if i < len(q) and q[i] not in owners and q[i] not in exclude:
+                    owners.append(q[i])
+        return owners
+
+    def SuggestOwners(self, paths, exclude=None):
+        """Suggest a set of owners for the given paths."""
+        exclude = exclude or []
+
+        paths_by_owner = {}
+        owners_by_path = self.BatchListOwners(paths)
+        for path, owners in owners_by_path.items():
+            for owner in owners:
+                paths_by_owner.setdefault(owner, set()).add(path)
+
+        selected = []
+        missing = set(paths)
+        for owner in self.ScoreOwners(paths, exclude=exclude):
+            missing_len = len(missing)
+            missing.difference_update(paths_by_owner[owner])
+            if missing_len > len(missing):
+                selected.append(owner)
+            if not missing:
+                break
+
+        return selected
+
 
 class GerritClient(OwnersClient):
-  """Implement OwnersClient using OWNERS REST API."""
-  def __init__(self, host, project, branch):
-    super(GerritClient, self).__init__()
-
-    self._host = host
-    self._project = project
-    self._branch = branch
-    self._owners_cache = {}
-    self._best_owners_cache = {}
-
-    # Seed used by Gerrit to shuffle code owners that have the same score. Can
-    # be used to make the sort order stable across several requests, e.g. to get
-    # the same set of random code owners for different file paths that have the
-    # same code owners.
-    self._seed = random.getrandbits(30)
-
-  def _FetchOwners(self, path, cache, highest_score_only=False):
-    # Always use slashes as separators.
-    path = path.replace(os.sep, '/')
-    if path not in cache:
-      # GetOwnersForFile returns a list of account details sorted by order of
-      # best reviewer for path. If owners have the same score, the order is
-      # random, seeded by `self._seed`.
-      data = gerrit_util.GetOwnersForFile(self._host,
-                                          self._project,
-                                          self._branch,
-                                          path,
-                                          resolve_all_users=False,
-                                          highest_score_only=highest_score_only,
-                                          seed=self._seed)
-      cache[path] = [
-          d['account']['email'] for d in data['code_owners']
-          if 'account' in d and 'email' in d['account']
-      ]
-      # If owned_by_all_users is true, add everyone as an owner at the end of
-      # the owners list.
-      if data.get('owned_by_all_users', False):
-        cache[path].append(self.EVERYONE)
-    return cache[path]
-
-  def ListOwners(self, path):
-    return self._FetchOwners(path, self._owners_cache)
-
-  def ListBestOwners(self, path):
-    return self._FetchOwners(path,
-                             self._best_owners_cache,
-                             highest_score_only=True)
-
-  def BatchListBestOwners(self, paths):
-    """List only the higest-scoring owners for a group of files.
+    """Implement OwnersClient using OWNERS REST API."""
+    def __init__(self, host, project, branch):
+        super(GerritClient, self).__init__()
+
+        self._host = host
+        self._project = project
+        self._branch = branch
+        self._owners_cache = {}
+        self._best_owners_cache = {}
+
+        # Seed used by Gerrit to shuffle code owners that have the same score.
+        # Can be used to make the sort order stable across several requests,
+        # e.g. to get the same set of random code owners for different file
+        # paths that have the same code owners.
+        self._seed = random.getrandbits(30)
+
+    def _FetchOwners(self, path, cache, highest_score_only=False):
+        # Always use slashes as separators.
+        path = path.replace(os.sep, '/')
+        if path not in cache:
+            # GetOwnersForFile returns a list of account details sorted by order
+            # of best reviewer for path. If owners have the same score, the
+            # order is random, seeded by `self._seed`.
+            data = gerrit_util.GetOwnersForFile(
+                self._host,
+                self._project,
+                self._branch,
+                path,
+                resolve_all_users=False,
+                highest_score_only=highest_score_only,
+                seed=self._seed)
+            cache[path] = [
+                d['account']['email'] for d in data['code_owners']
+                if 'account' in d and 'email' in d['account']
+            ]
+            # If owned_by_all_users is true, add everyone as an owner at the end
+            # of the owners list.
+            if data.get('owned_by_all_users', False):
+                cache[path].append(self.EVERYONE)
+        return cache[path]
+
+    def ListOwners(self, path):
+        return self._FetchOwners(path, self._owners_cache)
+
+    def ListBestOwners(self, path):
+        return self._FetchOwners(path,
+                                 self._best_owners_cache,
+                                 highest_score_only=True)
+
+    def BatchListBestOwners(self, paths):
+        """List only the higest-scoring owners for a group of files.
 
     Returns a dictionary {path: [owners]}.
     """
-    with git_common.ScopedPool(kind='threads') as pool:
-      return dict(
-          pool.imap_unordered(lambda p: (p, self.ListBestOwners(p)), paths))
+        with git_common.ScopedPool(kind='threads') as pool:
+            return dict(
+                pool.imap_unordered(lambda p: (p, self.ListBestOwners(p)),
+                                    paths))
 
 
 def GetCodeOwnersClient(host, project, branch):
-  """Get a new OwnersClient.
+    """Get a new OwnersClient.
 
   Uses GerritClient and raises an exception if code-owners plugin is not
   available."""
-  if gerrit_util.IsCodeOwnersEnabledOnHost(host):
-    return GerritClient(host, project, branch)
-  raise Exception(
-      'code-owners plugin is not enabled. Ask your host admin to enable it '
-      'on %s. Read more about code-owners at '
-      'https://chromium-review.googlesource.com/'
-      'plugins/code-owners/Documentation/index.html.' % host)
+    if gerrit_util.IsCodeOwnersEnabledOnHost(host):
+        return GerritClient(host, project, branch)
+    raise Exception(
+        'code-owners plugin is not enabled. Ask your host admin to enable it '
+        'on %s. Read more about code-owners at '
+        'https://chromium-review.googlesource.com/'
+        'plugins/code-owners/Documentation/index.html.' % host)
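
GetFilesApprovalStatus above reduces to a per-path set intersection: owners that intersect the approvers give APPROVED, otherwise an intersection with the reviewers gives PENDING, otherwise INSUFFICIENT_REVIEWERS. A standalone sketch of that classification so it runs without depot_tools on sys.path (the paths and emails are invented):

EVERYONE = '*'  # mirrors OwnersClient.EVERYONE above

def classify(owners_by_path, approvers, reviewers):
    approvers = set(approvers)
    if approvers:
        approvers.add(EVERYONE)
    reviewers = set(reviewers)
    if reviewers:
        reviewers.add(EVERYONE)
    status = {}
    for path, owners in owners_by_path.items():
        owners = set(owners)
        if owners & approvers:
            status[path] = 'APPROVED'
        elif owners & reviewers:
            status[path] = 'PENDING'
        else:
            status[path] = 'INSUFFICIENT_REVIEWERS'
    return status

print(classify(
    {'ui/button.cc': ['ana@example.com'], 'net/socket.cc': ['cho@example.com']},
    approvers=['ana@example.com'],   # ana already approved
    reviewers=['cho@example.com']))  # cho is a reviewer but has not approved
# -> {'ui/button.cc': 'APPROVED', 'net/socket.cc': 'PENDING'}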

+ 329 - 322
owners_finder.py

@@ -1,7 +1,6 @@
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Interactive tool for finding reviewers/owners for a change."""
 
 from __future__ import print_function
@@ -9,336 +8,344 @@ from __future__ import print_function
 import os
 import copy
 
-
 import gclient_utils
 
 
 def first(iterable):
-  for element in iterable:
-    return element
+    for element in iterable:
+        return element
 
 
 class OwnersFinder(object):
-  COLOR_LINK = '\033[4m'
-  COLOR_BOLD = '\033[1;32m'
-  COLOR_GREY = '\033[0;37m'
-  COLOR_RESET = '\033[0m'
-
-  indentation = 0
-
-  def __init__(self, files, author, reviewers, owners_client,
-               email_postfix='@chromium.org',
-               disable_color=False,
-               ignore_author=False):
-    self.email_postfix = email_postfix
-
-    if os.name == 'nt' or disable_color:
-      self.COLOR_LINK = ''
-      self.COLOR_BOLD = ''
-      self.COLOR_GREY = ''
-      self.COLOR_RESET = ''
-
-    self.author = author
-
-    filtered_files = files
-
-    reviewers = list(reviewers)
-    if author and not ignore_author:
-      reviewers.append(author)
-
-    # Eliminate files that existing reviewers can review.
-    self.owners_client = owners_client
-    approval_status = self.owners_client.GetFilesApprovalStatus(
-      filtered_files, reviewers, [])
-    filtered_files = [
-      f for f in filtered_files
-      if approval_status[f] != self.owners_client.APPROVED]
-
-    # If some files are eliminated.
-    if len(filtered_files) != len(files):
-      files = filtered_files
-
-    self.files_to_owners = self.owners_client.BatchListOwners(files)
-
-    self.owners_to_files = {}
-    self._map_owners_to_files()
-
-    self.original_files_to_owners = copy.deepcopy(self.files_to_owners)
-
-    # This is the queue that will be shown in the interactive questions.
-    # It is initially sorted by the score in descending order. In the
-    # interactive questions a user can choose to "defer" its decision, then the
-    # owner will be put to the end of the queue and shown later.
-    self.owners_queue = []
-
-    self.unreviewed_files = set()
-    self.reviewed_by = {}
-    self.selected_owners = set()
-    self.deselected_owners = set()
-    self.reset()
-
-  def run(self):
-    self.reset()
-    while self.owners_queue and self.unreviewed_files:
-      owner = self.owners_queue[0]
-
-      if (owner in self.selected_owners) or (owner in self.deselected_owners):
-        continue
-
-      if not any((file_name in self.unreviewed_files)
-                 for file_name in self.owners_to_files[owner]):
-        self.deselect_owner(owner)
-        continue
-
-      self.print_info(owner)
-
-      while True:
-        inp = self.input_command(owner)
-        if inp in ('y', 'yes'):
-          self.select_owner(owner)
-          break
-
-        if inp in ('n', 'no'):
-          self.deselect_owner(owner)
-          break
-
-        if inp in ('', 'd', 'defer'):
-          self.owners_queue.append(self.owners_queue.pop(0))
-          break
-
-        if inp in ('f', 'files'):
-          self.list_files()
-          break
-
-        if inp in ('o', 'owners'):
-          self.list_owners(self.owners_queue)
-          break
-
-        if inp in ('p', 'pick'):
-          self.pick_owner(gclient_utils.AskForData('Pick an owner: '))
-          break
-
-        if inp.startswith('p ') or inp.startswith('pick '):
-          self.pick_owner(inp.split(' ', 2)[1].strip())
-          break
-
-        if inp in ('r', 'restart'):
-          self.reset()
-          break
-
-        if inp in ('q', 'quit'):
-          # Exit with error
-          return 1
-
-    self.print_result()
-    return 0
-
-  def _map_owners_to_files(self):
-    for file_name in self.files_to_owners:
-      for owner in self.files_to_owners[file_name]:
-        self.owners_to_files.setdefault(owner, set())
-        self.owners_to_files[owner].add(file_name)
-
-  def reset(self):
-    self.files_to_owners = copy.deepcopy(self.original_files_to_owners)
-    self.unreviewed_files = set(self.files_to_owners.keys())
-    self.reviewed_by = {}
-    self.selected_owners = set()
-    self.deselected_owners = set()
-
-    # Randomize owners' names so that if many reviewers have identical scores
-    # they will be randomly ordered to avoid bias.
-    owners = list(self.owners_client.ScoreOwners(self.files_to_owners.keys()))
-    if self.author and self.author in owners:
-      owners.remove(self.author)
-    self.owners_queue = owners
-    self.find_mandatory_owners()
-
-  def select_owner(self, owner, findMandatoryOwners=True):
-    if owner in self.selected_owners or owner in self.deselected_owners\
-        or not (owner in self.owners_queue):
-      return
-    self.writeln('Selected: ' + owner)
-    self.owners_queue.remove(owner)
-    self.selected_owners.add(owner)
-    for file_name in filter(
-        lambda file_name: file_name in self.unreviewed_files,
-        self.owners_to_files[owner]):
-      self.unreviewed_files.remove(file_name)
-      self.reviewed_by[file_name] = owner
-    if findMandatoryOwners:
-      self.find_mandatory_owners()
-
-  def deselect_owner(self, owner, findMandatoryOwners=True):
-    if owner in self.selected_owners or owner in self.deselected_owners\
-        or not (owner in self.owners_queue):
-      return
-    self.writeln('Deselected: ' + owner)
-    self.owners_queue.remove(owner)
-    self.deselected_owners.add(owner)
-    for file_name in self.owners_to_files[owner] & self.unreviewed_files:
-      self.files_to_owners[file_name].remove(owner)
-    if findMandatoryOwners:
-      self.find_mandatory_owners()
-
-  def find_mandatory_owners(self):
-    continues = True
-    for owner in self.owners_queue:
-      if owner in self.selected_owners:
-        continue
-      if owner in self.deselected_owners:
-        continue
-      if len(self.owners_to_files[owner] & self.unreviewed_files) == 0:
-        self.deselect_owner(owner, False)
-
-    while continues:
-      continues = False
-      for file_name in filter(
-          lambda file_name: len(self.files_to_owners[file_name]) == 1,
-          self.unreviewed_files):
-        owner = first(self.files_to_owners[file_name])
-        self.select_owner(owner, False)
+    COLOR_LINK = '\033[4m'
+    COLOR_BOLD = '\033[1;32m'
+    COLOR_GREY = '\033[0;37m'
+    COLOR_RESET = '\033[0m'
+
+    indentation = 0
+
+    def __init__(self,
+                 files,
+                 author,
+                 reviewers,
+                 owners_client,
+                 email_postfix='@chromium.org',
+                 disable_color=False,
+                 ignore_author=False):
+        self.email_postfix = email_postfix
+
+        if os.name == 'nt' or disable_color:
+            self.COLOR_LINK = ''
+            self.COLOR_BOLD = ''
+            self.COLOR_GREY = ''
+            self.COLOR_RESET = ''
+
+        self.author = author
+
+        filtered_files = files
+
+        reviewers = list(reviewers)
+        if author and not ignore_author:
+            reviewers.append(author)
+
+        # Eliminate files that existing reviewers can review.
+        self.owners_client = owners_client
+        approval_status = self.owners_client.GetFilesApprovalStatus(
+            filtered_files, reviewers, [])
+        filtered_files = [
+            f for f in filtered_files
+            if approval_status[f] != self.owners_client.APPROVED
+        ]
+
+        # If some files are eliminated.
+        if len(filtered_files) != len(files):
+            files = filtered_files
+
+        self.files_to_owners = self.owners_client.BatchListOwners(files)
+
+        self.owners_to_files = {}
+        self._map_owners_to_files()
+
+        self.original_files_to_owners = copy.deepcopy(self.files_to_owners)
+
+        # This is the queue that will be shown in the interactive questions.
+        # It is initially sorted by the score in descending order. In the
+        # interactive questions a user can choose to "defer" its decision, then
+        # the owner will be put to the end of the queue and shown later.
+        self.owners_queue = []
+
+        self.unreviewed_files = set()
+        self.reviewed_by = {}
+        self.selected_owners = set()
+        self.deselected_owners = set()
+        self.reset()
+
+    def run(self):
+        self.reset()
+        while self.owners_queue and self.unreviewed_files:
+            owner = self.owners_queue[0]
+
+            if (owner in self.selected_owners) or (owner
+                                                   in self.deselected_owners):
+                continue
+
+            if not any((file_name in self.unreviewed_files)
+                       for file_name in self.owners_to_files[owner]):
+                self.deselect_owner(owner)
+                continue
+
+            self.print_info(owner)
+
+            while True:
+                inp = self.input_command(owner)
+                if inp in ('y', 'yes'):
+                    self.select_owner(owner)
+                    break
+
+                if inp in ('n', 'no'):
+                    self.deselect_owner(owner)
+                    break
+
+                if inp in ('', 'd', 'defer'):
+                    self.owners_queue.append(self.owners_queue.pop(0))
+                    break
+
+                if inp in ('f', 'files'):
+                    self.list_files()
+                    break
+
+                if inp in ('o', 'owners'):
+                    self.list_owners(self.owners_queue)
+                    break
+
+                if inp in ('p', 'pick'):
+                    self.pick_owner(gclient_utils.AskForData('Pick an owner: '))
+                    break
+
+                if inp.startswith('p ') or inp.startswith('pick '):
+                    self.pick_owner(inp.split(' ', 2)[1].strip())
+                    break
+
+                if inp in ('r', 'restart'):
+                    self.reset()
+                    break
+
+                if inp in ('q', 'quit'):
+                    # Exit with error
+                    return 1
+
+        self.print_result()
+        return 0
+
+    def _map_owners_to_files(self):
+        for file_name in self.files_to_owners:
+            for owner in self.files_to_owners[file_name]:
+                self.owners_to_files.setdefault(owner, set())
+                self.owners_to_files[owner].add(file_name)
+
+    def reset(self):
+        self.files_to_owners = copy.deepcopy(self.original_files_to_owners)
+        self.unreviewed_files = set(self.files_to_owners.keys())
+        self.reviewed_by = {}
+        self.selected_owners = set()
+        self.deselected_owners = set()
+
+        # Randomize owners' names so that if many reviewers have identical
+        # scores they will be randomly ordered to avoid bias.
+        owners = list(
+            self.owners_client.ScoreOwners(self.files_to_owners.keys()))
+        if self.author and self.author in owners:
+            owners.remove(self.author)
+        self.owners_queue = owners
+        self.find_mandatory_owners()
+
+    def select_owner(self, owner, findMandatoryOwners=True):
+        if owner in self.selected_owners or owner in self.deselected_owners\
+            or not (owner in self.owners_queue):
+            return
+        self.writeln('Selected: ' + owner)
+        self.owners_queue.remove(owner)
+        self.selected_owners.add(owner)
+        for file_name in filter(
+                lambda file_name: file_name in self.unreviewed_files,
+                self.owners_to_files[owner]):
+            self.unreviewed_files.remove(file_name)
+            self.reviewed_by[file_name] = owner
+        if findMandatoryOwners:
+            self.find_mandatory_owners()
+
+    def deselect_owner(self, owner, findMandatoryOwners=True):
+        if owner in self.selected_owners or owner in self.deselected_owners\
+            or not (owner in self.owners_queue):
+            return
+        self.writeln('Deselected: ' + owner)
+        self.owners_queue.remove(owner)
+        self.deselected_owners.add(owner)
+        for file_name in self.owners_to_files[owner] & self.unreviewed_files:
+            self.files_to_owners[file_name].remove(owner)
+        if findMandatoryOwners:
+            self.find_mandatory_owners()
+
+    def find_mandatory_owners(self):
         continues = True
-        break
-
-  def print_file_info(self, file_name, except_owner=''):
-    if file_name not in self.unreviewed_files:
-      self.writeln(self.greyed(file_name +
-                               ' (by ' +
-                               self.bold_name(self.reviewed_by[file_name]) +
-                               ')'))
-    else:
-      if len(self.files_to_owners[file_name]) <= 3:
-        other_owners = []
-        for ow in self.files_to_owners[file_name]:
-          if ow != except_owner:
-            other_owners.append(self.bold_name(ow))
-        self.writeln(file_name +
-                     ' [' + (', '.join(other_owners)) + ']')
-      else:
-        self.writeln(file_name + ' [' +
-                     self.bold(str(len(self.files_to_owners[file_name]))) +
-                     ']')
-
-  def print_file_info_detailed(self, file_name):
-    self.writeln(file_name)
-    self.indent()
-    for ow in sorted(self.files_to_owners[file_name]):
-      if ow in self.deselected_owners:
-        self.writeln(self.bold_name(self.greyed(ow)))
-      elif ow in self.selected_owners:
-        self.writeln(self.bold_name(self.greyed(ow)))
-      else:
-        self.writeln(self.bold_name(ow))
-    self.unindent()
-
-  def print_owned_files_for(self, owner):
-    # Print owned files
-    self.writeln(self.bold_name(owner))
-    self.writeln(self.bold_name(owner) + ' owns ' +
-                 str(len(self.owners_to_files[owner])) + ' file(s):')
-    self.indent()
-    for file_name in sorted(self.owners_to_files[owner]):
-      self.print_file_info(file_name, owner)
-    self.unindent()
-    self.writeln()
-
-  def list_owners(self, owners_queue):
-    if (len(self.owners_to_files) - len(self.deselected_owners) -
-            len(self.selected_owners)) > 3:
-      for ow in owners_queue:
-        if ow not in self.deselected_owners and ow not in self.selected_owners:
-          self.writeln(self.bold_name(ow))
-    else:
-      for ow in owners_queue:
-        if ow not in self.deselected_owners and ow not in self.selected_owners:
-          self.writeln()
-          self.print_owned_files_for(ow)
-
-  def list_files(self):
-    self.indent()
-    if len(self.unreviewed_files) > 5:
-      for file_name in sorted(self.unreviewed_files):
-        self.print_file_info(file_name)
-    else:
-      for file_name in self.unreviewed_files:
-        self.print_file_info_detailed(file_name)
-    self.unindent()
-
-  def pick_owner(self, ow):
-    # Allowing to omit domain suffixes
-    if ow not in self.owners_to_files:
-      if ow + self.email_postfix in self.owners_to_files:
-        ow += self.email_postfix
-
-    if ow not in self.owners_to_files:
-      self.writeln('You cannot pick ' + self.bold_name(ow) + ' manually. ' +
-                   'It\'s an invalid name or not related to the change list.')
-      return False
-
-    if ow in self.selected_owners:
-      self.writeln('You cannot pick ' + self.bold_name(ow) + ' manually. ' +
-                   'It\'s already selected.')
-      return False
-
-    if ow in self.deselected_owners:
-      self.writeln('You cannot pick ' + self.bold_name(ow) + ' manually.' +
-                   'It\'s already unselected.')
-      return False
-
-    self.select_owner(ow)
-    return True
-
-  def print_result(self):
-    # Print results
-    self.writeln()
-    self.writeln()
-    if len(self.selected_owners) == 0:
-      self.writeln('This change list already has owner-reviewers for all '
-                   'files.')
-      self.writeln('Use --ignore-current if you want to ignore them.')
-    else:
-      self.writeln('** You selected these owners **')
-      self.writeln()
-      for owner in self.selected_owners:
-        self.writeln(self.bold_name(owner) + ':')
+        for owner in self.owners_queue:
+            if owner in self.selected_owners:
+                continue
+            if owner in self.deselected_owners:
+                continue
+            if len(self.owners_to_files[owner] & self.unreviewed_files) == 0:
+                self.deselect_owner(owner, False)
+
+        while continues:
+            continues = False
+            for file_name in filter(
+                    lambda file_name: len(self.files_to_owners[file_name]) == 1,
+                    self.unreviewed_files):
+                owner = first(self.files_to_owners[file_name])
+                self.select_owner(owner, False)
+                continues = True
+                break
+
+    def print_file_info(self, file_name, except_owner=''):
+        if file_name not in self.unreviewed_files:
+            self.writeln(
+                self.greyed(file_name + ' (by ' +
+                            self.bold_name(self.reviewed_by[file_name]) + ')'))
+        else:
+            if len(self.files_to_owners[file_name]) <= 3:
+                other_owners = []
+                for ow in self.files_to_owners[file_name]:
+                    if ow != except_owner:
+                        other_owners.append(self.bold_name(ow))
+                self.writeln(file_name + ' [' + (', '.join(other_owners)) + ']')
+            else:
+                self.writeln(
+                    file_name + ' [' +
+                    self.bold(str(len(self.files_to_owners[file_name]))) + ']')
+
+    def print_file_info_detailed(self, file_name):
+        self.writeln(file_name)
         self.indent()
-        for file_name in sorted(self.owners_to_files[owner]):
-          self.writeln(file_name)
+        for ow in sorted(self.files_to_owners[file_name]):
+            if ow in self.deselected_owners:
+                self.writeln(self.bold_name(self.greyed(ow)))
+            elif ow in self.selected_owners:
+                self.writeln(self.bold_name(self.greyed(ow)))
+            else:
+                self.writeln(self.bold_name(ow))
         self.unindent()
 
-  def bold(self, text):
-    return self.COLOR_BOLD + text + self.COLOR_RESET
-
-  def bold_name(self, name):
-    return (self.COLOR_BOLD +
-            name.replace(self.email_postfix, '') + self.COLOR_RESET)
-
-  def greyed(self, text):
-    return self.COLOR_GREY + text + self.COLOR_RESET
-
-  def indent(self):
-    self.indentation += 1
-
-  def unindent(self):
-    self.indentation -= 1
-
-  def print_indent(self):
-    return '  ' * self.indentation
-
-  def writeln(self, text=''):
-    print(self.print_indent() + text)
-
-  def hr(self):
-    self.writeln('=====================')
-
-  def print_info(self, owner):
-    self.hr()
-    self.writeln(
-        self.bold(str(len(self.unreviewed_files))) + ' file(s) left.')
-    self.print_owned_files_for(owner)
+    def print_owned_files_for(self, owner):
+        # Print owned files
+        self.writeln(self.bold_name(owner))
+        self.writeln(
+            self.bold_name(owner) + ' owns ' +
+            str(len(self.owners_to_files[owner])) + ' file(s):')
+        self.indent()
+        for file_name in sorted(self.owners_to_files[owner]):
+            self.print_file_info(file_name, owner)
+        self.unindent()
+        self.writeln()
+
+    def list_owners(self, owners_queue):
+        if (len(self.owners_to_files) - len(self.deselected_owners) -
+                len(self.selected_owners)) > 3:
+            for ow in owners_queue:
+                if (ow not in self.deselected_owners
+                        and ow not in self.selected_owners):
+                    self.writeln(self.bold_name(ow))
+        else:
+            for ow in owners_queue:
+                if (ow not in self.deselected_owners
+                        and ow not in self.selected_owners):
+                    self.writeln()
+                    self.print_owned_files_for(ow)
+
+    def list_files(self):
+        self.indent()
+        if len(self.unreviewed_files) > 5:
+            for file_name in sorted(self.unreviewed_files):
+                self.print_file_info(file_name)
+        else:
+            for file_name in self.unreviewed_files:
+                self.print_file_info_detailed(file_name)
+        self.unindent()
 
-  def input_command(self, owner):
-    self.writeln('Add ' + self.bold_name(owner) + ' as your reviewer? ')
-    return gclient_utils.AskForData(
-        '[yes/no/Defer/pick/files/owners/quit/restart]: ').lower()
+    def pick_owner(self, ow):
+        # Allowing to omit domain suffixes
+        if ow not in self.owners_to_files:
+            if ow + self.email_postfix in self.owners_to_files:
+                ow += self.email_postfix
+
+        if ow not in self.owners_to_files:
+            self.writeln(
+                'You cannot pick ' + self.bold_name(ow) + ' manually. ' +
+                'It\'s an invalid name or not related to the change list.')
+            return False
+
+        if ow in self.selected_owners:
+            self.writeln('You cannot pick ' + self.bold_name(ow) +
+                         ' manually. ' + 'It\'s already selected.')
+            return False
+
+        if ow in self.deselected_owners:
+            self.writeln('You cannot pick ' + self.bold_name(ow) +
+                         ' manually.' + 'It\'s already unselected.')
+            return False
+
+        self.select_owner(ow)
+        return True
+
+    def print_result(self):
+        # Print results
+        self.writeln()
+        self.writeln()
+        if len(self.selected_owners) == 0:
+            self.writeln('This change list already has owner-reviewers for all '
+                         'files.')
+            self.writeln('Use --ignore-current if you want to ignore them.')
+        else:
+            self.writeln('** You selected these owners **')
+            self.writeln()
+            for owner in self.selected_owners:
+                self.writeln(self.bold_name(owner) + ':')
+                self.indent()
+                for file_name in sorted(self.owners_to_files[owner]):
+                    self.writeln(file_name)
+                self.unindent()
+
+    def bold(self, text):
+        return self.COLOR_BOLD + text + self.COLOR_RESET
+
+    def bold_name(self, name):
+        return (self.COLOR_BOLD + name.replace(self.email_postfix, '') +
+                self.COLOR_RESET)
+
+    def greyed(self, text):
+        return self.COLOR_GREY + text + self.COLOR_RESET
+
+    def indent(self):
+        self.indentation += 1
+
+    def unindent(self):
+        self.indentation -= 1
+
+    def print_indent(self):
+        return '  ' * self.indentation
+
+    def writeln(self, text=''):
+        print(self.print_indent() + text)
+
+    def hr(self):
+        self.writeln('=====================')
+
+    def print_info(self, owner):
+        self.hr()
+        self.writeln(
+            self.bold(str(len(self.unreviewed_files))) + ' file(s) left.')
+        self.print_owned_files_for(owner)
+
+    def input_command(self, owner):
+        self.writeln('Add ' + self.bold_name(owner) + ' as your reviewer? ')
+        return gclient_utils.AskForData(
+            '[yes/no/Defer/pick/files/owners/quit/restart]: ').lower()
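As an aside, the single-owner auto-selection pass in the owners_finder.py hunk above boils down to: while some unreviewed file has exactly one candidate owner, select that owner and mark every file they own as reviewed. A standalone sketch with made-up data (file names and owners are hypothetical):

# Hypothetical data; mirrors the files_to_owners / unreviewed_files
# structures used by owners_finder.py above.
files_to_owners = {
    'a/foo.cc': {'alice'},
    'a/bar.cc': {'alice', 'bob'},
    'b/baz.cc': {'bob'},
}
unreviewed_files = set(files_to_owners)
selected_owners = set()

continues = True
while continues:
    continues = False
    for file_name in sorted(unreviewed_files):
        if len(files_to_owners[file_name]) != 1:
            continue
        owner = next(iter(files_to_owners[file_name]))
        selected_owners.add(owner)
        # Selecting an owner marks every file they own as reviewed.
        unreviewed_files -= {
            f for f, owners in files_to_owners.items() if owner in owners
        }
        continues = True
        break

print(sorted(selected_owners))   # ['alice', 'bob']
print(sorted(unreviewed_files))  # []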

+ 123 - 122
post_build_ninja_summary.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2018 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Summarize the last ninja build, invoked with ninja's -C syntax.
 
 This script is designed to be automatically run after each ninja build in
@@ -57,7 +56,6 @@ import fnmatch
 import os
 import sys
 
-
 # The number of long build times to report:
 long_count = 10
 # The number of long times by extension to report
@@ -96,8 +94,8 @@ class Target:
         # Allow for modest floating-point errors
         epsilon = 0.000002
         if (self.weighted_duration > self.Duration() + epsilon):
-          print('%s > %s?' % (self.weighted_duration, self.Duration()))
-        assert(self.weighted_duration <= self.Duration() + epsilon)
+            print('%s > %s?' % (self.weighted_duration, self.Duration()))
+        assert (self.weighted_duration <= self.Duration() + epsilon)
         return self.weighted_duration
 
     def DescribeTargets(self):
@@ -108,7 +106,7 @@ class Target:
         result = ', '.join(self.targets)
         max_length = 65
         if len(result) > max_length:
-          result = result[:max_length] + '...'
+            result = result[:max_length] + '...'
         return result
 
 
@@ -125,10 +123,10 @@ def ReadTargets(log, show_all):
     for line in log:
         parts = line.strip().split('\t')
         if len(parts) != 5:
-          # If ninja.exe is rudely halted then the .ninja_log file may be
-          # corrupt. Silently continue.
-          continue
-        start, end, _, name, cmdhash = parts # Ignore restat.
+            # If ninja.exe is rudely halted then the .ninja_log file may be
+            # corrupt. Silently continue.
+            continue
+        start, end, _, name, cmdhash = parts  # Ignore restat.
         # Convert from integral milliseconds to float seconds.
         start = int(start) / 1000.0
         end = int(end) / 1000.0
@@ -142,68 +140,68 @@ def ReadTargets(log, show_all):
             targets_dict = {}
         target = None
         if cmdhash in targets_dict:
-          target = targets_dict[cmdhash]
-          if not show_all and (target.start != start or target.end != end):
-            # If several builds in a row just run one or two build steps then
-            # the end times may not go backwards so the last build may not be
-            # detected as such. However in many cases there will be a build step
-            # repeated in the two builds and the changed start/stop points for
-            # that command, identified by the hash, can be used to detect and
-            # reset the target dictionary.
-            targets_dict = {}
-            target = None
+            target = targets_dict[cmdhash]
+            if not show_all and (target.start != start or target.end != end):
+                # If several builds in a row just run one or two build steps
+                # then the end times may not go backwards so the last build may
+                # not be detected as such. However in many cases there will be a
+                # build step repeated in the two builds and the changed
+                # start/stop points for that command, identified by the hash,
+                # can be used to detect and reset the target dictionary.
+                targets_dict = {}
+                target = None
         if not target:
-          targets_dict[cmdhash] = target = Target(start, end)
+            targets_dict[cmdhash] = target = Target(start, end)
         last_end_seen = end
         target.targets.append(name)
     return list(targets_dict.values())
 
 
 def GetExtension(target, extra_patterns):
-  """Return the file extension that best represents a target.
+    """Return the file extension that best represents a target.
 
   For targets that generate multiple outputs it is important to return a
   consistent 'canonical' extension. Ultimately the goal is to group build steps
   by type."""
-  for output in target.targets:
-    if extra_patterns:
-      for fn_pattern in extra_patterns.split(';'):
-        if fnmatch.fnmatch(output, '*' + fn_pattern + '*'):
-          return fn_pattern
-    # Not a true extension, but a good grouping.
-    if output.endswith('type_mappings'):
-      extension = 'type_mappings'
-      break
-
-    # Capture two extensions if present. For example: file.javac.jar should be
-    # distinguished from file.interface.jar.
-    root, ext1 = os.path.splitext(output)
-    _, ext2 = os.path.splitext(root)
-    extension = ext2 + ext1 # Preserve the order in the file name.
-
-    if len(extension) == 0:
-      extension = '(no extension found)'
-
-    if ext1 in ['.pdb', '.dll', '.exe']:
-      extension = 'PEFile (linking)'
-      # Make sure that .dll and .exe are grouped together and that the
-      # .dll.lib files don't cause these to be listed as libraries
-      break
-    if ext1 in ['.so', '.TOC']:
-      extension = '.so (linking)'
-      # Attempt to identify linking, avoid identifying as '.TOC'
-      break
-    # Make sure .obj files don't get categorized as mojo files
-    if ext1 in ['.obj', '.o']:
-      break
-    # Jars are the canonical output of java targets.
-    if ext1 == '.jar':
-      break
-    # Normalize all mojo related outputs to 'mojo'.
-    if output.count('.mojom') > 0:
-      extension = 'mojo'
-      break
-  return extension
+    for output in target.targets:
+        if extra_patterns:
+            for fn_pattern in extra_patterns.split(';'):
+                if fnmatch.fnmatch(output, '*' + fn_pattern + '*'):
+                    return fn_pattern
+        # Not a true extension, but a good grouping.
+        if output.endswith('type_mappings'):
+            extension = 'type_mappings'
+            break
+
+        # Capture two extensions if present. For example: file.javac.jar should
+        # be distinguished from file.interface.jar.
+        root, ext1 = os.path.splitext(output)
+        _, ext2 = os.path.splitext(root)
+        extension = ext2 + ext1  # Preserve the order in the file name.
+
+        if len(extension) == 0:
+            extension = '(no extension found)'
+
+        if ext1 in ['.pdb', '.dll', '.exe']:
+            extension = 'PEFile (linking)'
+            # Make sure that .dll and .exe are grouped together and that the
+            # .dll.lib files don't cause these to be listed as libraries
+            break
+        if ext1 in ['.so', '.TOC']:
+            extension = '.so (linking)'
+            # Attempt to identify linking, avoid identifying as '.TOC'
+            break
+        # Make sure .obj files don't get categorized as mojo files
+        if ext1 in ['.obj', '.o']:
+            break
+        # Jars are the canonical output of java targets.
+        if ext1 == '.jar':
+            break
+        # Normalize all mojo related outputs to 'mojo'.
+        if output.count('.mojom') > 0:
+            extension = 'mojo'
+            break
+    return extension
 
 
 def SummarizeEntries(entries, extra_step_types, elapsed_time_sorting):
@@ -221,13 +219,13 @@ def SummarizeEntries(entries, extra_step_types, elapsed_time_sorting):
     latest = 0
     total_cpu_time = 0
     for target in entries:
-      if earliest < 0 or target.start < earliest:
-        earliest = target.start
-      if target.end > latest:
-        latest = target.end
-      total_cpu_time += target.Duration()
-      task_start_stop_times.append((target.start, 'start', target))
-      task_start_stop_times.append((target.end, 'stop', target))
+        if earliest < 0 or target.start < earliest:
+            earliest = target.start
+        if target.end > latest:
+            latest = target.end
+        total_cpu_time += target.Duration()
+        task_start_stop_times.append((target.start, 'start', target))
+        task_start_stop_times.append((target.end, 'stop', target))
     length = latest - earliest
     weighted_total = 0.0
 
@@ -247,40 +245,41 @@ def SummarizeEntries(entries, extra_step_types, elapsed_time_sorting):
     last_weighted_time = 0.0
     # Scan all start/stop events.
     for event in task_start_stop_times:
-      time, action_name, target = event
-      # Accumulate weighted time up to now.
-      num_running = len(running_tasks)
-      if num_running > 0:
-        # Update the total weighted time up to this moment.
-        last_weighted_time += (time - last_time) / float(num_running)
-      if action_name == 'start':
-        # Record the total weighted task time when this task starts.
-        running_tasks[target] = last_weighted_time
-      if action_name == 'stop':
-        # Record the change in the total weighted task time while this task ran.
-        weighted_duration = last_weighted_time - running_tasks[target]
-        target.SetWeightedDuration(weighted_duration)
-        weighted_total += weighted_duration
-        del running_tasks[target]
-      last_time = time
-    assert(len(running_tasks) == 0)
+        time, action_name, target = event
+        # Accumulate weighted time up to now.
+        num_running = len(running_tasks)
+        if num_running > 0:
+            # Update the total weighted time up to this moment.
+            last_weighted_time += (time - last_time) / float(num_running)
+        if action_name == 'start':
+            # Record the total weighted task time when this task starts.
+            running_tasks[target] = last_weighted_time
+        if action_name == 'stop':
+            # Record the change in the total weighted task time while this task
+            # ran.
+            weighted_duration = last_weighted_time - running_tasks[target]
+            target.SetWeightedDuration(weighted_duration)
+            weighted_total += weighted_duration
+            del running_tasks[target]
+        last_time = time
+    assert (len(running_tasks) == 0)
 
     # Warn if the sum of weighted times is off by more than half a second.
     if abs(length - weighted_total) > 500:
-      print('Warning: Possible corrupt ninja log, results may be '
-            'untrustworthy. Length = %.3f, weighted total = %.3f' % (
-            length, weighted_total))
+        print('Warning: Possible corrupt ninja log, results may be '
+              'untrustworthy. Length = %.3f, weighted total = %.3f' %
+              (length, weighted_total))
 
     # Print the slowest build steps:
     print('    Longest build steps:')
     if elapsed_time_sorting:
-      entries.sort(key=lambda x: x.Duration())
+        entries.sort(key=lambda x: x.Duration())
     else:
-      entries.sort(key=lambda x: x.WeightedDuration())
+        entries.sort(key=lambda x: x.WeightedDuration())
     for target in entries[-long_count:]:
-      print('      %8.1f weighted s to build %s (%.1f s elapsed time)' % (
-            target.WeightedDuration(),
-            target.DescribeTargets(), target.Duration()))
+        print('      %8.1f weighted s to build %s (%.1f s elapsed time)' %
+              (target.WeightedDuration(), target.DescribeTargets(),
+               target.Duration()))
 
     # Sum up the time by file extension/type of the output file
     count_by_ext = {}
@@ -288,38 +287,39 @@ def SummarizeEntries(entries, extra_step_types, elapsed_time_sorting):
     weighted_time_by_ext = {}
     # Scan through all of the targets to build up per-extension statistics.
     for target in entries:
-      extension = GetExtension(target, extra_step_types)
-      time_by_ext[extension] = time_by_ext.get(extension, 0) + target.Duration()
-      weighted_time_by_ext[extension] = weighted_time_by_ext.get(extension,
-              0) + target.WeightedDuration()
-      count_by_ext[extension] = count_by_ext.get(extension, 0) + 1
+        extension = GetExtension(target, extra_step_types)
+        time_by_ext[extension] = time_by_ext.get(extension,
+                                                 0) + target.Duration()
+        weighted_time_by_ext[extension] = weighted_time_by_ext.get(
+            extension, 0) + target.WeightedDuration()
+        count_by_ext[extension] = count_by_ext.get(extension, 0) + 1
 
     print('    Time by build-step type:')
     # Copy to a list with extension name and total time swapped, to (time, ext)
     if elapsed_time_sorting:
-      weighted_time_by_ext_sorted = sorted((y, x) for (x, y) in
-                                            time_by_ext.items())
+        weighted_time_by_ext_sorted = sorted(
+            (y, x) for (x, y) in time_by_ext.items())
     else:
-      weighted_time_by_ext_sorted = sorted((y, x) for (x, y) in
-                                            weighted_time_by_ext.items())
+        weighted_time_by_ext_sorted = sorted(
+            (y, x) for (x, y) in weighted_time_by_ext.items())
     # Print the slowest build target types:
     for time, extension in weighted_time_by_ext_sorted[-long_ext_count:]:
-        print('      %8.1f s weighted time to generate %d %s files '
-               '(%1.1f s elapsed time sum)' % (time, count_by_ext[extension],
-                                        extension, time_by_ext[extension]))
+        print(
+            '      %8.1f s weighted time to generate %d %s files '
+            '(%1.1f s elapsed time sum)' %
+            (time, count_by_ext[extension], extension, time_by_ext[extension]))
 
     print('    %.1f s weighted time (%.1f s elapsed time sum, %1.1fx '
-          'parallelism)' % (length, total_cpu_time,
-          total_cpu_time * 1.0 / length))
-    print('    %d build steps completed, average of %1.2f/s' % (
-          len(entries), len(entries) / (length)))
+          'parallelism)' %
+          (length, total_cpu_time, total_cpu_time * 1.0 / length))
+    print('    %d build steps completed, average of %1.2f/s' %
+          (len(entries), len(entries) / (length)))
 
 
 def main():
     log_file = '.ninja_log'
     parser = argparse.ArgumentParser()
-    parser.add_argument('-C', dest='build_directory',
-                        help='Build directory.')
+    parser.add_argument('-C', dest='build_directory', help='Build directory.')
     parser.add_argument(
         '-s',
         '--step-types',
@@ -338,22 +338,23 @@ def main():
     if args.log_file:
         log_file = args.log_file
     if not args.step_types:
-      # Offer a convenient way to add extra step types automatically, including
-      # when this script is run by autoninja. get() returns None if the variable
-      # isn't set.
-      args.step_types = os.environ.get('chromium_step_types')
+        # Offer a convenient way to add extra step types automatically,
+        # including when this script is run by autoninja. get() returns None if
+        # the variable isn't set.
+        args.step_types = os.environ.get('chromium_step_types')
     if args.step_types:
-      # Make room for the extra build types.
-      global long_ext_count
-      long_ext_count += len(args.step_types.split(';'))
+        # Make room for the extra build types.
+        global long_ext_count
+        long_ext_count += len(args.step_types.split(';'))
 
     try:
-      with open(log_file, 'r') as log:
-        entries = ReadTargets(log, False)
-        SummarizeEntries(entries, args.step_types, args.elapsed_time_sorting)
+        with open(log_file, 'r') as log:
+            entries = ReadTargets(log, False)
+            SummarizeEntries(entries, args.step_types,
+                             args.elapsed_time_sorting)
     except IOError:
-      print('Log file %r not found, no build summary created.' % log_file)
-      return errno.ENOENT
+        print('Log file %r not found, no build summary created.' % log_file)
+        return errno.ENOENT
 
 
 if __name__ == '__main__':
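As a side note, the weighted-duration bookkeeping being reformatted above can be summarized with a small standalone sketch: every slice of wall time is divided evenly among the tasks running during that slice, so the weighted durations sum back to the wall-clock length. The task intervals below are made up.

# Hypothetical (start, end) times in seconds for three build steps.
tasks = {'compile_a': (0.0, 4.0), 'compile_b': (1.0, 3.0), 'link': (3.0, 5.0)}

events = []
for name, (start, end) in tasks.items():
    events.append((start, 'start', name))
    events.append((end, 'stop', name))
events.sort()

weighted = {}
running = {}  # task name -> accumulated weighted time when the task started
last_time = 0.0
last_weighted = 0.0
for time, action, name in events:
    if running:
        # Split the elapsed interval evenly across the running tasks.
        last_weighted += (time - last_time) / len(running)
    if action == 'start':
        running[name] = last_weighted
    else:
        weighted[name] = last_weighted - running.pop(name)
    last_time = time

# The weighted times sum to the 5.0 s of wall-clock time covered above.
for name, value in sorted(weighted.items()):
    print('%8.1f weighted s for %s' % (value, name))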

File diff content is too large to display
+ 645 - 590
presubmit_canned_checks.py


File diff content is too large to display
+ 582 - 540
presubmit_support.py


+ 1 - 1
pylint-2.6

@@ -69,4 +69,4 @@ import sys
 import pylint_main
 
 if __name__ == '__main__':
-  sys.exit(pylint_main.main(sys.argv[1:]))
+    sys.exit(pylint_main.main(sys.argv[1:]))

+ 1 - 1
pylint-2.7

@@ -69,4 +69,4 @@ import sys
 import pylint_main
 
 if __name__ == '__main__':
-  sys.exit(pylint_main.main(sys.argv[1:]))
+    sys.exit(pylint_main.main(sys.argv[1:]))

+ 31 - 31
pylint_main.py

@@ -2,7 +2,6 @@
 # Copyright 2019 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Chromium wrapper for pylint for passing args via stdin.
 
 This will be executed by vpython with the right pylint versions.
@@ -21,36 +20,37 @@ ARGS_ON_STDIN = '--args-on-stdin'
 
 
 def main(argv):
-  """Our main wrapper."""
-  # Add support for a custom mode where arguments are fed line by line on
-  # stdin. This allows us to get around command line length limitations.
-  if ARGS_ON_STDIN in argv:
-    argv = [x for x in argv if x != ARGS_ON_STDIN]
-    argv.extend(x.strip() for x in sys.stdin)
-
-  # Set default config options with the PYLINTRC environment variable. This will
-  # allow overriding with "more local" config file options, such as a local
-  # "pylintrc" file, the "--rcfile" command-line flag, or an existing PYLINTRC.
-  #
-  # Note that this is not quite the same thing as replacing pylint's built-in
-  # defaults, since, based on config file precedence, it will not be overridden
-  # by "more global" config file options, such as ~/.pylintrc,
-  # ~/.config/pylintrc, or /etc/pylintrc. This is generally the desired
-  # behavior, since we want to enforce these defaults in most cases, but allow
-  # them to be overridden for specific code or repos.
-  #
-  # If someone really doesn't ever want the depot_tools pylintrc, they can set
-  # their own PYLINTRC, or set an empty PYLINTRC to use pylint's normal config
-  # file resolution, which would include the "more global" options that are
-  # normally overridden by the depot_tools config.
-  if os.path.isfile(RC_FILE) and 'PYLINTRC' not in os.environ:
-    os.environ['PYLINTRC'] = RC_FILE
-
-  # This import has to happen after PYLINTRC is set because the module tries to
-  # resolve the config file location on load.
-  from pylint import lint  # pylint: disable=bad-option-value,import-outside-toplevel
-  lint.Run(argv)
+    """Our main wrapper."""
+    # Add support for a custom mode where arguments are fed line by line on
+    # stdin. This allows us to get around command line length limitations.
+    if ARGS_ON_STDIN in argv:
+        argv = [x for x in argv if x != ARGS_ON_STDIN]
+        argv.extend(x.strip() for x in sys.stdin)
+
+    # Set default config options with the PYLINTRC environment variable. This
+    # will allow overriding with "more local" config file options, such as a
+    # local "pylintrc" file, the "--rcfile" command-line flag, or an existing
+    # PYLINTRC.
+    #
+    # Note that this is not quite the same thing as replacing pylint's built-in
+    # defaults, since, based on config file precedence, it will not be
+    # overridden by "more global" config file options, such as ~/.pylintrc,
+    # ~/.config/pylintrc, or /etc/pylintrc. This is generally the desired
+    # behavior, since we want to enforce these defaults in most cases, but allow
+    # them to be overridden for specific code or repos.
+    #
+    # If someone really doesn't ever want the depot_tools pylintrc, they can set
+    # their own PYLINTRC, or set an empty PYLINTRC to use pylint's normal config
+    # file resolution, which would include the "more global" options that are
+    # normally overridden by the depot_tools config.
+    if os.path.isfile(RC_FILE) and 'PYLINTRC' not in os.environ:
+        os.environ['PYLINTRC'] = RC_FILE
+
+    # This import has to happen after PYLINTRC is set because the module tries
+    # to resolve the config file location on load.
+    from pylint import lint  # pylint: disable=bad-option-value,import-outside-toplevel
+    lint.Run(argv)
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+    sys.exit(main(sys.argv[1:]))
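To illustrate the --args-on-stdin mode kept above, a caller can pipe a (potentially very long) argument list through stdin instead of the command line. A rough sketch; running the pylint-2.7 launcher via vpython3 from the depot_tools directory is an assumption here, and the file list is made up:

import subprocess
import sys

# Hypothetical file list; in practice this may be thousands of paths that
# would overflow the command line on some platforms.
files_to_lint = ['gclient.py', 'git_cl.py', 'roll_dep.py']

proc = subprocess.run(['vpython3', 'pylint-2.7', '--args-on-stdin'],
                      input='\n'.join(files_to_lint),
                      text=True)
sys.exit(proc.returncode)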

+ 41 - 43
rdb_wrapper.py

@@ -15,23 +15,21 @@ STATUS_CRASH = 'CRASH'
 STATUS_ABORT = 'ABORT'
 STATUS_SKIP = 'SKIP'
 
-
 # ResultDB limits failure reasons to 1024 characters.
 _FAILURE_REASON_LENGTH_LIMIT = 1024
 
-
 # Message to use at the end of a truncated failure reason.
 _FAILURE_REASON_TRUNCATE_TEXT = '\n...\nFailure reason was truncated.'
 
 
 class ResultSink(object):
-  def __init__(self, session, url, prefix):
-    self._session = session
-    self._url = url
-    self._prefix = prefix
+    def __init__(self, session, url, prefix):
+        self._session = session
+        self._url = url
+        self._prefix = prefix
 
-  def report(self, function_name, status, elapsed_time, failure_reason=None):
-    """Reports the result and elapsed time of a presubmit function call.
+    def report(self, function_name, status, elapsed_time, failure_reason=None):
+        """Reports the result and elapsed time of a presubmit function call.
 
     Args:
       function_name (str): The name of the presubmit function
@@ -39,24 +37,24 @@ class ResultSink(object):
       elapsed_time: the time taken to invoke the presubmit function
       failure_reason (str or None): if set, the failure reason
     """
-    tr = {
-        'testId': self._prefix + function_name,
-        'status': status,
-        'expected': status == STATUS_PASS,
-        'duration': '{:.9f}s'.format(elapsed_time)
-    }
-    if failure_reason:
-      if len(failure_reason) > _FAILURE_REASON_LENGTH_LIMIT:
-        failure_reason = failure_reason[
-            :-len(_FAILURE_REASON_TRUNCATE_TEXT) - 1]
-        failure_reason += _FAILURE_REASON_TRUNCATE_TEXT
-      tr['failureReason'] = {'primaryErrorMessage': failure_reason}
-    self._session.post(self._url, json={'testResults': [tr]})
+        tr = {
+            'testId': self._prefix + function_name,
+            'status': status,
+            'expected': status == STATUS_PASS,
+            'duration': '{:.9f}s'.format(elapsed_time)
+        }
+        if failure_reason:
+            if len(failure_reason) > _FAILURE_REASON_LENGTH_LIMIT:
+                failure_reason = failure_reason[:-len(
+                    _FAILURE_REASON_TRUNCATE_TEXT) - 1]
+                failure_reason += _FAILURE_REASON_TRUNCATE_TEXT
+            tr['failureReason'] = {'primaryErrorMessage': failure_reason}
+        self._session.post(self._url, json={'testResults': [tr]})
 
 
 @contextlib.contextmanager
 def client(prefix):
-  """Returns a client for ResultSink.
+    """Returns a client for ResultSink.
 
   This is a context manager that returns a client for ResultSink,
   if LUCI_CONTEXT with a section of result_sink is present. When the context
@@ -71,24 +69,24 @@ def client(prefix):
   Returns:
     An instance of ResultSink() if the luci context is present. None, otherwise.
   """
-  luci_ctx = os.environ.get('LUCI_CONTEXT')
-  if not luci_ctx:
-    yield None
-    return
-
-  sink_ctx = None
-  with open(luci_ctx) as f:
-    sink_ctx = json.load(f).get('result_sink')
-    if not sink_ctx:
-      yield None
-      return
-
-  url = 'http://{0}/prpc/luci.resultsink.v1.Sink/ReportTestResults'.format(
-      sink_ctx['address'])
-  with requests.Session() as s:
-    s.headers = {
-        'Content-Type': 'application/json',
-        'Accept': 'application/json',
-        'Authorization': 'ResultSink {0}'.format(sink_ctx['auth_token'])
-    }
-    yield ResultSink(s, url, prefix)
+    luci_ctx = os.environ.get('LUCI_CONTEXT')
+    if not luci_ctx:
+        yield None
+        return
+
+    sink_ctx = None
+    with open(luci_ctx) as f:
+        sink_ctx = json.load(f).get('result_sink')
+        if not sink_ctx:
+            yield None
+            return
+
+    url = 'http://{0}/prpc/luci.resultsink.v1.Sink/ReportTestResults'.format(
+        sink_ctx['address'])
+    with requests.Session() as s:
+        s.headers = {
+            'Content-Type': 'application/json',
+            'Accept': 'application/json',
+            'Authorization': 'ResultSink {0}'.format(sink_ctx['auth_token'])
+        }
+        yield ResultSink(s, url, prefix)
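A minimal usage sketch for the client shown above (the test-id prefix and check name are made up); when LUCI_CONTEXT has no result_sink section, the context manager yields None and nothing is reported:

import time

import rdb_wrapper

with rdb_wrapper.client('presubmit:chromium/src/') as sink:
    start = time.time()
    status = rdb_wrapper.STATUS_PASS  # outcome of some hypothetical check
    if sink:
        sink.report('CheckNoStrayWhitespace', status, time.time() - start)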

+ 1 - 0
fetch_configs/.style.yapf → recipes/.style.yapf

@@ -1,3 +1,4 @@
 [style]
 based_on_style = pep8
+indent_width = 2
 column_limit = 80

+ 145 - 142
reclient_helper.py

@@ -19,65 +19,66 @@ import reclient_metrics
 
 
 def find_reclient_bin_dir():
-  tools_path = gclient_paths.GetBuildtoolsPath()
-  if not tools_path:
-    return None
+    tools_path = gclient_paths.GetBuildtoolsPath()
+    if not tools_path:
+        return None
 
-  reclient_bin_dir = os.path.join(tools_path, 'reclient')
-  if os.path.isdir(reclient_bin_dir):
-    return reclient_bin_dir
-  return None
+    reclient_bin_dir = os.path.join(tools_path, 'reclient')
+    if os.path.isdir(reclient_bin_dir):
+        return reclient_bin_dir
+    return None
 
 
 def find_reclient_cfg():
-  tools_path = gclient_paths.GetBuildtoolsPath()
-  if not tools_path:
-    return None
+    tools_path = gclient_paths.GetBuildtoolsPath()
+    if not tools_path:
+        return None
 
-  reclient_cfg = os.path.join(tools_path, 'reclient_cfgs', 'reproxy.cfg')
-  if os.path.isfile(reclient_cfg):
-    return reclient_cfg
-  return None
+    reclient_cfg = os.path.join(tools_path, 'reclient_cfgs', 'reproxy.cfg')
+    if os.path.isfile(reclient_cfg):
+        return reclient_cfg
+    return None
 
 
 def run(cmd_args):
-  if os.environ.get('NINJA_SUMMARIZE_BUILD') == '1':
-    print(' '.join(cmd_args))
-  return subprocess.call(cmd_args)
+    if os.environ.get('NINJA_SUMMARIZE_BUILD') == '1':
+        print(' '.join(cmd_args))
+    return subprocess.call(cmd_args)
 
 
 def start_reproxy(reclient_cfg, reclient_bin_dir):
-  return run([
-      os.path.join(reclient_bin_dir,
-                   'bootstrap' + gclient_paths.GetExeSuffix()), '--re_proxy=' +
-      os.path.join(reclient_bin_dir, 'reproxy' + gclient_paths.GetExeSuffix()),
-      '--cfg=' + reclient_cfg
-  ])
+    return run([
+        os.path.join(reclient_bin_dir,
+                     'bootstrap' + gclient_paths.GetExeSuffix()),
+        '--re_proxy=' + os.path.join(reclient_bin_dir,
+                                     'reproxy' + gclient_paths.GetExeSuffix()),
+        '--cfg=' + reclient_cfg
+    ])
 
 
 def stop_reproxy(reclient_cfg, reclient_bin_dir):
-  return run([
-      os.path.join(reclient_bin_dir,
-                   'bootstrap' + gclient_paths.GetExeSuffix()), '--shutdown',
-      '--cfg=' + reclient_cfg
-  ])
+    return run([
+        os.path.join(reclient_bin_dir,
+                     'bootstrap' + gclient_paths.GetExeSuffix()), '--shutdown',
+        '--cfg=' + reclient_cfg
+    ])
 
 
 def find_ninja_out_dir(args):
-  # Ninja uses getopt_long, which allows to intermix non-option arguments.
-  # To leave non supported parameters untouched, we do not use getopt.
-  for index, arg in enumerate(args[1:]):
-    if arg == '-C':
-      # + 1 to get the next argument and +1 because we trimmed off args[0]
-      return args[index + 2]
-    if arg.startswith('-C'):
-      # Support -Cout/Default
-      return arg[2:]
-  return '.'
+    # Ninja uses getopt_long, which allows to intermix non-option arguments.
+    # To leave non supported parameters untouched, we do not use getopt.
+    for index, arg in enumerate(args[1:]):
+        if arg == '-C':
+            # + 1 to get the next argument and +1 because we trimmed off args[0]
+            return args[index + 2]
+        if arg.startswith('-C'):
+            # Support -Cout/Default
+            return arg[2:]
+    return '.'
 
 
 def find_cache_dir(tmp_dir):
-  """Helper to find the correct cache directory for a build.
+    """Helper to find the correct cache directory for a build.
 
   tmp_dir should be a build specific temp directory within the out directory.
 
@@ -86,15 +87,15 @@ def find_cache_dir(tmp_dir):
   If this is not called from within a gclient checkout, the cache dir will be:
   tmp_dir/cache
   """
-  gclient_root = gclient_paths.FindGclientRoot(os.getcwd())
-  if gclient_root:
-    return os.path.join(gclient_root, '.reproxy_cache',
-                        hashlib.md5(tmp_dir.encode()).hexdigest())
-  return os.path.join(tmp_dir, 'cache')
+    gclient_root = gclient_paths.FindGclientRoot(os.getcwd())
+    if gclient_root:
+        return os.path.join(gclient_root, '.reproxy_cache',
+                            hashlib.md5(tmp_dir.encode()).hexdigest())
+    return os.path.join(tmp_dir, 'cache')
 
 
 def set_reproxy_metrics_flags(tool):
-  """Helper to setup metrics collection flags for reproxy.
+    """Helper to setup metrics collection flags for reproxy.
 
   The following env vars are set if not already set:
     RBE_metrics_project=chromium-reclient-metrics
@@ -103,23 +104,23 @@ def set_reproxy_metrics_flags(tool):
     RBE_metrics_labels=source=developer,tool={tool}
     RBE_metrics_prefix=go.chromium.org
   """
-  autoninja_id = os.environ.get("AUTONINJA_BUILD_ID")
-  if autoninja_id is not None:
-    os.environ.setdefault("RBE_invocation_id", autoninja_id)
-  os.environ.setdefault("RBE_metrics_project", "chromium-reclient-metrics")
-  os.environ.setdefault("RBE_metrics_table", "rbe_metrics.builds")
-  os.environ.setdefault("RBE_metrics_labels", "source=developer,tool=" + tool)
-  os.environ.setdefault("RBE_metrics_prefix", "go.chromium.org")
+    autoninja_id = os.environ.get("AUTONINJA_BUILD_ID")
+    if autoninja_id is not None:
+        os.environ.setdefault("RBE_invocation_id", autoninja_id)
+    os.environ.setdefault("RBE_metrics_project", "chromium-reclient-metrics")
+    os.environ.setdefault("RBE_metrics_table", "rbe_metrics.builds")
+    os.environ.setdefault("RBE_metrics_labels", "source=developer,tool=" + tool)
+    os.environ.setdefault("RBE_metrics_prefix", "go.chromium.org")
 
 
 def remove_mdproxy_from_path():
-  os.environ["PATH"] = os.pathsep.join(
-      d for d in os.environ.get("PATH", "").split(os.pathsep)
-      if "mdproxy" not in d)
+    os.environ["PATH"] = os.pathsep.join(
+        d for d in os.environ.get("PATH", "").split(os.pathsep)
+        if "mdproxy" not in d)
 
 
 def set_reproxy_path_flags(out_dir, make_dirs=True):
-  """Helper to setup the logs and cache directories for reclient.
+    """Helper to setup the logs and cache directories for reclient.
 
   Creates the following directory structure if make_dirs is true:
   If in a gclient checkout
@@ -146,98 +147,100 @@ def set_reproxy_path_flags(out_dir, make_dirs=True):
   Windows Only:
     RBE_server_address=pipe://md5(out_dir/.reproxy_tmp)/reproxy.pipe
   """
-  tmp_dir = os.path.abspath(os.path.join(out_dir, '.reproxy_tmp'))
-  log_dir = os.path.join(tmp_dir, 'logs')
-  racing_dir = os.path.join(tmp_dir, 'racing')
-  cache_dir = find_cache_dir(tmp_dir)
-  if make_dirs:
-    if os.path.exists(log_dir):
-      try:
-        # Clear log dir before each build to ensure correct metric aggregation.
-        shutil.rmtree(log_dir)
-      except OSError:
-        print(
-            "Couldn't clear logs because reproxy did "
-            "not shutdown after the last build",
-            file=sys.stderr)
-    os.makedirs(tmp_dir, exist_ok=True)
-    os.makedirs(log_dir, exist_ok=True)
-    os.makedirs(cache_dir, exist_ok=True)
-    os.makedirs(racing_dir, exist_ok=True)
-  os.environ.setdefault("RBE_output_dir", log_dir)
-  os.environ.setdefault("RBE_proxy_log_dir", log_dir)
-  os.environ.setdefault("RBE_log_dir", log_dir)
-  os.environ.setdefault("RBE_cache_dir", cache_dir)
-  os.environ.setdefault("RBE_racing_tmp_dir", racing_dir)
-  if sys.platform.startswith('win'):
-    pipe_dir = hashlib.md5(tmp_dir.encode()).hexdigest()
-    os.environ.setdefault("RBE_server_address",
-                          "pipe://%s/reproxy.pipe" % pipe_dir)
-  else:
-    # unix domain socket has path length limit, so use fixed size path here.
-    # ref: https://www.man7.org/linux/man-pages/man7/unix.7.html
-    os.environ.setdefault(
-        "RBE_server_address", "unix:///tmp/reproxy_%s.sock" %
-        hashlib.sha256(tmp_dir.encode()).hexdigest())
+    tmp_dir = os.path.abspath(os.path.join(out_dir, '.reproxy_tmp'))
+    log_dir = os.path.join(tmp_dir, 'logs')
+    racing_dir = os.path.join(tmp_dir, 'racing')
+    cache_dir = find_cache_dir(tmp_dir)
+    if make_dirs:
+        if os.path.exists(log_dir):
+            try:
+                # Clear log dir before each build to ensure correct metric
+                # aggregation.
+                shutil.rmtree(log_dir)
+            except OSError:
+                print(
+                    "Couldn't clear logs because reproxy did "
+                    "not shutdown after the last build",
+                    file=sys.stderr)
+        os.makedirs(tmp_dir, exist_ok=True)
+        os.makedirs(log_dir, exist_ok=True)
+        os.makedirs(cache_dir, exist_ok=True)
+        os.makedirs(racing_dir, exist_ok=True)
+    os.environ.setdefault("RBE_output_dir", log_dir)
+    os.environ.setdefault("RBE_proxy_log_dir", log_dir)
+    os.environ.setdefault("RBE_log_dir", log_dir)
+    os.environ.setdefault("RBE_cache_dir", cache_dir)
+    os.environ.setdefault("RBE_racing_tmp_dir", racing_dir)
+    if sys.platform.startswith('win'):
+        pipe_dir = hashlib.md5(tmp_dir.encode()).hexdigest()
+        os.environ.setdefault("RBE_server_address",
+                              "pipe://%s/reproxy.pipe" % pipe_dir)
+    else:
+        # unix domain socket has path length limit, so use fixed size path here.
+        # ref: https://www.man7.org/linux/man-pages/man7/unix.7.html
+        os.environ.setdefault(
+            "RBE_server_address", "unix:///tmp/reproxy_%s.sock" %
+            hashlib.sha256(tmp_dir.encode()).hexdigest())
 
 
 def set_racing_defaults():
-  os.environ.setdefault("RBE_local_resource_fraction", "0.2")
-  os.environ.setdefault("RBE_racing_bias", "0.95")
+    os.environ.setdefault("RBE_local_resource_fraction", "0.2")
+    os.environ.setdefault("RBE_racing_bias", "0.95")
 
 
 @contextlib.contextmanager
 def build_context(argv, tool):
-  # If use_remoteexec is set, but the reclient binaries or configs don't
-  # exist, display an error message and stop.  Otherwise, the build will
-  # attempt to run with rewrapper wrapping actions, but will fail with
-  # possible non-obvious problems.
-  reclient_bin_dir = find_reclient_bin_dir()
-  reclient_cfg = find_reclient_cfg()
-  if reclient_bin_dir is None or reclient_cfg is None:
-    print(('Build is configured to use reclient but necessary binaries '
-           "or config files can't be found.\n"
-           'Please check if `"download_remoteexec_cfg": True` custom var is set'
-           ' in `.gclient`, and run `gclient sync`.'),
-          file=sys.stderr)
-    yield 1
-    return
-
-  ninja_out = find_ninja_out_dir(argv)
-
-  try:
-    set_reproxy_path_flags(ninja_out)
-  except OSError:
-    print("Error creating reproxy_tmp in output dir", file=sys.stderr)
-    yield 1
-    return
-
-  if reclient_metrics.check_status(ninja_out):
-    set_reproxy_metrics_flags(tool)
-
-  if os.environ.get('RBE_instance', None):
-    print('WARNING: Using RBE_instance=%s\n' %
-          os.environ.get('RBE_instance', ''))
-
-  remote_disabled = os.environ.get('RBE_remote_disabled')
-  if remote_disabled not in ('1', 't', 'T', 'true', 'TRUE', 'True'):
-    set_racing_defaults()
-
-  # TODO(b/292523514) remove this once a fix is landed in reproxy
-  remove_mdproxy_from_path()
-
-  start = time.time()
-  reproxy_ret_code = start_reproxy(reclient_cfg, reclient_bin_dir)
-  elapsed = time.time() - start
-  print('%1.3f s to start reproxy' % elapsed)
-  if reproxy_ret_code != 0:
-    yield reproxy_ret_code
-    return
-  try:
-    yield
-  finally:
-    print("Shutting down reproxy...", file=sys.stderr)
+    # If use_remoteexec is set, but the reclient binaries or configs don't
+    # exist, display an error message and stop.  Otherwise, the build will
+    # attempt to run with rewrapper wrapping actions, but will fail with
+    # possible non-obvious problems.
+    reclient_bin_dir = find_reclient_bin_dir()
+    reclient_cfg = find_reclient_cfg()
+    if reclient_bin_dir is None or reclient_cfg is None:
+        print(
+            'Build is configured to use reclient but necessary binaries '
+            "or config files can't be found.\n"
+            'Please check if `"download_remoteexec_cfg": True` custom var is '
+            'set in `.gclient`, and run `gclient sync`.',
+            file=sys.stderr)
+        yield 1
+        return
+
+    ninja_out = find_ninja_out_dir(argv)
+
+    try:
+        set_reproxy_path_flags(ninja_out)
+    except OSError:
+        print("Error creating reproxy_tmp in output dir", file=sys.stderr)
+        yield 1
+        return
+
+    if reclient_metrics.check_status(ninja_out):
+        set_reproxy_metrics_flags(tool)
+
+    if os.environ.get('RBE_instance', None):
+        print('WARNING: Using RBE_instance=%s\n' %
+              os.environ.get('RBE_instance', ''))
+
+    remote_disabled = os.environ.get('RBE_remote_disabled')
+    if remote_disabled not in ('1', 't', 'T', 'true', 'TRUE', 'True'):
+        set_racing_defaults()
+
+    # TODO(b/292523514) remove this once a fix is landed in reproxy
+    remove_mdproxy_from_path()
+
     start = time.time()
-    stop_reproxy(reclient_cfg, reclient_bin_dir)
+    reproxy_ret_code = start_reproxy(reclient_cfg, reclient_bin_dir)
     elapsed = time.time() - start
-    print('%1.3f s to stop reproxy' % elapsed)
+    print('%1.3f s to start reproxy' % elapsed)
+    if reproxy_ret_code != 0:
+        yield reproxy_ret_code
+        return
+    try:
+        yield
+    finally:
+        print("Shutting down reproxy...", file=sys.stderr)
+        start = time.time()
+        stop_reproxy(reclient_cfg, reclient_bin_dir)
+        elapsed = time.time() - start
+        print('%1.3f s to stop reproxy' % elapsed)
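For reference, a build wrapper might drive reproxy through the build_context() helper shown above roughly as follows; this is a hedged sketch, and the plain 'ninja' invocation is a placeholder for the real build step:

import sys

import reclient_helper


def run_build(argv):
    # build_context() starts reproxy, yields a non-zero code if the reclient
    # binaries/configs are missing or reproxy fails to start, and shuts
    # reproxy down again when the block exits.
    with reclient_helper.build_context(argv, 'ninja_wrapper') as ret_code:
        if ret_code:
            return ret_code
        return reclient_helper.run(['ninja'] + argv[1:])


if __name__ == '__main__':
    sys.exit(run_build(sys.argv))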

+ 72 - 72
reclient_metrics.py

@@ -16,36 +16,36 @@ VERSION = 1
 
 
 def default_config():
-  return {
-      'is-googler': is_googler(),
-      'countdown': 10,
-      'version': VERSION,
-  }
+    return {
+        'is-googler': is_googler(),
+        'countdown': 10,
+        'version': VERSION,
+    }
 
 
 def load_config():
-  config = None
-  try:
-    with open(CONFIG) as f:
-      raw_config = json.load(f)
-      if raw_config['version'] == VERSION:
-        raw_config['countdown'] = max(0, raw_config['countdown'] - 1)
-        config = raw_config
-  except Exception:
-    pass
-  if not config:
-    config = default_config()
-  save_config(config)
-  return config
+    config = None
+    try:
+        with open(CONFIG) as f:
+            raw_config = json.load(f)
+            if raw_config['version'] == VERSION:
+                raw_config['countdown'] = max(0, raw_config['countdown'] - 1)
+                config = raw_config
+    except Exception:
+        pass
+    if not config:
+        config = default_config()
+    save_config(config)
+    return config
 
 
 def save_config(config):
-  with open(CONFIG, 'w') as f:
-    json.dump(config, f)
+    with open(CONFIG, 'w') as f:
+        json.dump(config, f)
 
 
 def show_message(config, ninja_out):
-  print("""
+    print("""
 Your reclient metrics will be uploaded to the chromium build metrics database. The uploaded metrics will be used to analyze user side build performance.
 
 We upload the contents of {ninja_out_abs}.
@@ -73,71 +73,71 @@ You can find a more detailed explanation in
 or
 https://chromium.googlesource.com/chromium/tools/depot_tools/+/main/reclient_metrics.README.md
 """.format(
-      ninja_out_abs=os.path.abspath(
-          os.path.join(ninja_out, ".reproxy_tmp", "logs", "rbe_metrics.txt")),
-      config_count=config.get("countdown", 0),
-      file_path=__file__,
-      metrics_readme_path=os.path.abspath(
-          os.path.join(THIS_DIR, "reclient_metrics.README.md")),
-  ))
+        ninja_out_abs=os.path.abspath(
+            os.path.join(ninja_out, ".reproxy_tmp", "logs", "rbe_metrics.txt")),
+        config_count=config.get("countdown", 0),
+        file_path=__file__,
+        metrics_readme_path=os.path.abspath(
+            os.path.join(THIS_DIR, "reclient_metrics.README.md")),
+    ))
 
 
 def is_googler(config=None):
-  """Check whether this user is Googler or not."""
-  if config is not None and 'is-googler' in config:
-    return config['is-googler']
-  # Use cipd auth-info to check for googler status as
-  # downloading rewrapper configs already requires cipd to be logged in
-  p = subprocess.run('cipd auth-info',
-                     stdout=subprocess.PIPE,
-                     stderr=subprocess.PIPE,
-                     text=True,
-                     shell=True)
-  if p.returncode != 0:
-    return False
-  lines = p.stdout.splitlines()
-  if len(lines) == 0:
-    return False
-  l = lines[0]
-  # |l| will be like 'Logged in as <user>@google.com.' for googlers.
-  return l.startswith('Logged in as ') and l.endswith('@google.com.')
+    """Check whether this user is Googler or not."""
+    if config is not None and 'is-googler' in config:
+        return config['is-googler']
+    # Use cipd auth-info to check for googler status as
+    # downloading rewrapper configs already requires cipd to be logged in
+    p = subprocess.run('cipd auth-info',
+                       stdout=subprocess.PIPE,
+                       stderr=subprocess.PIPE,
+                       text=True,
+                       shell=True)
+    if p.returncode != 0:
+        return False
+    lines = p.stdout.splitlines()
+    if len(lines) == 0:
+        return False
+    l = lines[0]
+    # |l| will be like 'Logged in as <user>@google.com.' for googlers.
+    return l.startswith('Logged in as ') and l.endswith('@google.com.')
 
 
 def check_status(ninja_out):
-  """Checks metrics collections status and shows notice to user if needed.
+    """Checks metrics collections status and shows notice to user if needed.
 
   Returns True if metrics should be collected."""
-  config = load_config()
-  if not is_googler(config):
-    return False
-  if 'opt-in' in config:
-    return config['opt-in']
-  if config.get("countdown", 0) > 0:
-    show_message(config, ninja_out)
-    return False
-  return True
+    config = load_config()
+    if not is_googler(config):
+        return False
+    if 'opt-in' in config:
+        return config['opt-in']
+    if config.get("countdown", 0) > 0:
+        show_message(config, ninja_out)
+        return False
+    return True
 
 
 def main(argv):
-  cfg = load_config()
+    cfg = load_config()
 
-  if not is_googler(cfg):
-    save_config(cfg)
-    return 0
+    if not is_googler(cfg):
+        save_config(cfg)
+        return 0
 
-  if len(argv) == 2 and argv[1] == 'opt-in':
-    cfg['opt-in'] = True
-    cfg['countdown'] = 0
-    save_config(cfg)
-    print('reclient metrics upload is opted in.')
-    return 0
+    if len(argv) == 2 and argv[1] == 'opt-in':
+        cfg['opt-in'] = True
+        cfg['countdown'] = 0
+        save_config(cfg)
+        print('reclient metrics upload is opted in.')
+        return 0
 
-  if len(argv) == 2 and argv[1] == 'opt-out':
-    cfg['opt-in'] = False
-    save_config(cfg)
-    print('reclient metrics upload is opted out.')
-    return 0
+    if len(argv) == 2 and argv[1] == 'opt-out':
+        cfg['opt-in'] = False
+        save_config(cfg)
+        print('reclient metrics upload is opted out.')
+        return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv))
+    sys.exit(main(sys.argv))
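The collection gate above is consumed by reclient_helper.build_context(); in isolation the check looks roughly like this sketch (the output directory is a placeholder):

import reclient_helper
import reclient_metrics

ninja_out = 'out/Default'  # hypothetical build output directory
if reclient_metrics.check_status(ninja_out):
    reclient_helper.set_reproxy_metrics_flags('autoninja')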

+ 34 - 34
reclientreport.py

@@ -21,43 +21,43 @@ import reclient_helper
 
 # TODO(b/296402157): Remove once reclientreport binary saves all logs on windows
 def temp_win_impl__b_296402157(out_dir):
-  '''Temporary implementation until b/296402157 is fixed'''
-  log_dir = os.path.abspath(os.path.join(out_dir, '.reproxy_tmp', 'logs'))
-  with tempfile.NamedTemporaryFile(prefix='reclientreport',
-                                   suffix='.tar.gz',
-                                   delete=False) as f:
-    with tarfile.open(fileobj=f, mode='w:gz') as tar:
-      tar.add(log_dir, arcname=os.path.basename(log_dir))
-    print(
-        f'Created log file at {f.name}. Please attach this to your bug report!')
+    '''Temporary implementation until b/296402157 is fixed'''
+    log_dir = os.path.abspath(os.path.join(out_dir, '.reproxy_tmp', 'logs'))
+    with tempfile.NamedTemporaryFile(prefix='reclientreport',
+                                     suffix='.tar.gz',
+                                     delete=False) as f:
+        with tarfile.open(fileobj=f, mode='w:gz') as tar:
+            tar.add(log_dir, arcname=os.path.basename(log_dir))
+        print(f'Created log file at {f.name}. Please attach this to your bug '
+              'report!')
 
 
 def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument("--ninja_out",
-                      "-C",
-                      required=True,
-                      help="ninja out directory used for the autoninja build")
-  parser.add_argument('args', nargs=argparse.REMAINDER)
-
-  args, extras = parser.parse_known_args()
-  if sys.platform.startswith('win'):
-    temp_win_impl__b_296402157(args.ninja_out)
-    return
-  if args.args and args.args[0] == '--':
-    args.args.pop(0)
-  if extras:
-    args.args = extras + args.args
-
-  reclient_helper.set_reproxy_path_flags(args.ninja_out, make_dirs=False)
-  reclient_bin_dir = reclient_helper.find_reclient_bin_dir()
-  code = subprocess.call([os.path.join(reclient_bin_dir, 'reclientreport')] +
-                         args.args)
-  if code != 0:
-    print("Failed to collect logs, make sure that %s/.reproxy_tmp exists" %
-          args.ninja_out,
-          file=sys.stderr)
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument("--ninja_out",
+                        "-C",
+                        required=True,
+                        help="ninja out directory used for the autoninja build")
+    parser.add_argument('args', nargs=argparse.REMAINDER)
+
+    args, extras = parser.parse_known_args()
+    if sys.platform.startswith('win'):
+        temp_win_impl__b_296402157(args.ninja_out)
+        return
+    if args.args and args.args[0] == '--':
+        args.args.pop(0)
+    if extras:
+        args.args = extras + args.args
+
+    reclient_helper.set_reproxy_path_flags(args.ninja_out, make_dirs=False)
+    reclient_bin_dir = reclient_helper.find_reclient_bin_dir()
+    code = subprocess.call([os.path.join(reclient_bin_dir, 'reclientreport')] +
+                           args.args)
+    if code != 0:
+        print("Failed to collect logs, make sure that %s/.reproxy_tmp exists" %
+              args.ninja_out,
+              file=sys.stderr)
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())

+ 26 - 26
repo

@@ -2,7 +2,6 @@
 # Copyright 2020 The Chromium OS Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Wrapper around repo to auto-update depot_tools during sync.
 
 gclient keeps depot_tools up-to-date automatically for Chromium developers.
@@ -16,7 +15,6 @@ from pathlib import Path
 import subprocess
 import sys
 
-
 # Some useful paths.
 DEPOT_TOOLS_DIR = Path(__file__).resolve().parent
 UPDATE_DEPOT_TOOLS = DEPOT_TOOLS_DIR / 'update_depot_tools'
@@ -24,35 +22,37 @@ REPO = DEPOT_TOOLS_DIR / 'repo_launcher'
 
 
 def _UpdateDepotTools():
-  """Help CrOS users keep their depot_tools checkouts up-to-date."""
-  if os.getenv('DEPOT_TOOLS_UPDATE') == '0':
-    return
-
-  # We don't update the copy that's part of the CrOS repo client checkout.
-  path = DEPOT_TOOLS_DIR
-  while path != path.parent:
-    if (path / '.repo').is_dir() and (path / 'chromite').is_dir():
-      return
-    path = path.parent
-
-  if UPDATE_DEPOT_TOOLS.exists():
-    subprocess.run([UPDATE_DEPOT_TOOLS], check=True)
-  else:
-    print(f'warning: {UPDATE_DEPOT_TOOLS} does not exist; export '
-          'DEPOT_TOOLS_UPDATE=0 to disable.', file=sys.stderr)
+    """Help CrOS users keep their depot_tools checkouts up-to-date."""
+    if os.getenv('DEPOT_TOOLS_UPDATE') == '0':
+        return
+
+    # We don't update the copy that's part of the CrOS repo client checkout.
+    path = DEPOT_TOOLS_DIR
+    while path != path.parent:
+        if (path / '.repo').is_dir() and (path / 'chromite').is_dir():
+            return
+        path = path.parent
+
+    if UPDATE_DEPOT_TOOLS.exists():
+        subprocess.run([UPDATE_DEPOT_TOOLS], check=True)
+    else:
+        print(
+            f'warning: {UPDATE_DEPOT_TOOLS} does not exist; export '
+            'DEPOT_TOOLS_UPDATE=0 to disable.',
+            file=sys.stderr)
 
 
 def main(argv):
-  # This is a bit hacky, but should be "good enough".  If repo itself gains
-  # support for sync hooks, we could switch to that.
-  if argv and argv[0] == 'sync':
-    _UpdateDepotTools()
+    # This is a bit hacky, but should be "good enough".  If repo itself gains
+    # support for sync hooks, we could switch to that.
+    if argv and argv[0] == 'sync':
+        _UpdateDepotTools()
 
-  # Set the default to our fork.
-  os.environ["REPO_URL"] = "https://chromium.googlesource.com/external/repo"
+    # Set the default to our fork.
+    os.environ["REPO_URL"] = "https://chromium.googlesource.com/external/repo"
 
-  os.execv(sys.executable, [sys.executable, str(REPO)] + argv)
+    os.execv(sys.executable, [sys.executable, str(REPO)] + argv)
 
 
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+    sys.exit(main(sys.argv[1:]))

+ 262 - 255
roll_dep.py

@@ -2,7 +2,6 @@
 # Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Rolls DEPS controlled dependency.
 
 Works only with git checkout and git dependencies. Currently this script will
@@ -20,9 +19,8 @@ import sys
 import tempfile
 
 NEED_SHELL = sys.platform.startswith('win')
-GCLIENT_PATH = os.path.join(
-    os.path.dirname(os.path.abspath(__file__)), 'gclient.py')
-
+GCLIENT_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
+                            'gclient.py')
 
 # Commit subject that will be considered a roll. In the format generated by the
 # git log used, so it's "<year>-<month>-<day> <author> <subject>"
@@ -33,309 +31,318 @@ _ROLL_SUBJECT = re.compile(
     r'[^ ]+ '
     # Subject
     r'('
-      # Generated by
-      # https://skia.googlesource.com/buildbot/+/HEAD/autoroll/go/repo_manager/deps_repo_manager.go
-      r'Roll [^ ]+ [a-f0-9]+\.\.[a-f0-9]+ \(\d+ commits\)'
-      r'|'
-      # Generated by
-      # https://chromium.googlesource.com/infra/infra/+/HEAD/recipes/recipe_modules/recipe_autoroller/api.py
-      r'Roll recipe dependencies \(trivial\)\.'
+    # Generated by
+    # https://skia.googlesource.com/buildbot/+/HEAD/autoroll/go/repo_manager/deps_repo_manager.go
+    r'Roll [^ ]+ [a-f0-9]+\.\.[a-f0-9]+ \(\d+ commits\)'
+    r'|'
+    # Generated by
+    # https://chromium.googlesource.com/infra/infra/+/HEAD/recipes/recipe_modules/recipe_autoroller/api.py
+    r'Roll recipe dependencies \(trivial\)\.'
     r')$')
 
 
 class Error(Exception):
-  pass
+    pass
 
 
 class AlreadyRolledError(Error):
-  pass
+    pass
 
 
 def check_output(*args, **kwargs):
-  """subprocess2.check_output() passing shell=True on Windows for git."""
-  kwargs.setdefault('shell', NEED_SHELL)
-  return subprocess2.check_output(*args, **kwargs).decode('utf-8')
+    """subprocess2.check_output() passing shell=True on Windows for git."""
+    kwargs.setdefault('shell', NEED_SHELL)
+    return subprocess2.check_output(*args, **kwargs).decode('utf-8')
 
 
 def check_call(*args, **kwargs):
-  """subprocess2.check_call() passing shell=True on Windows for git."""
-  kwargs.setdefault('shell', NEED_SHELL)
-  subprocess2.check_call(*args, **kwargs)
+    """subprocess2.check_call() passing shell=True on Windows for git."""
+    kwargs.setdefault('shell', NEED_SHELL)
+    subprocess2.check_call(*args, **kwargs)
 
 
 def return_code(*args, **kwargs):
-  """subprocess2.call() passing shell=True on Windows for git and
+    """subprocess2.call() passing shell=True on Windows for git and
   subprocess2.DEVNULL for stdout and stderr."""
-  kwargs.setdefault('shell', NEED_SHELL)
-  kwargs.setdefault('stdout', subprocess2.DEVNULL)
-  kwargs.setdefault('stderr', subprocess2.DEVNULL)
-  return subprocess2.call(*args, **kwargs)
+    kwargs.setdefault('shell', NEED_SHELL)
+    kwargs.setdefault('stdout', subprocess2.DEVNULL)
+    kwargs.setdefault('stderr', subprocess2.DEVNULL)
+    return subprocess2.call(*args, **kwargs)
 
 
 def is_pristine(root):
-  """Returns True if a git checkout is pristine."""
-  # `git rev-parse --verify` has a non-zero return code if the revision
-  # doesn't exist.
-  diff_cmd = ['git', 'diff', '--ignore-submodules', 'origin/main']
-  return (not check_output(diff_cmd, cwd=root).strip() and
-          not check_output(diff_cmd + ['--cached'], cwd=root).strip())
-
+    """Returns True if a git checkout is pristine."""
+    # `git rev-parse --verify` has a non-zero return code if the revision
+    # doesn't exist.
+    diff_cmd = ['git', 'diff', '--ignore-submodules', 'origin/main']
+    return (not check_output(diff_cmd, cwd=root).strip()
+            and not check_output(diff_cmd + ['--cached'], cwd=root).strip())
 
 
 def get_log_url(upstream_url, head, tot):
-  """Returns an URL to read logs via a Web UI if applicable."""
-  if re.match(r'https://[^/]*\.googlesource\.com/', upstream_url):
-    # gitiles
-    return '%s/+log/%s..%s' % (upstream_url, head[:12], tot[:12])
-  if upstream_url.startswith('https://github.com/'):
-    upstream_url = upstream_url.rstrip('/')
-    if upstream_url.endswith('.git'):
-      upstream_url = upstream_url[:-len('.git')]
-    return '%s/compare/%s...%s' % (upstream_url, head[:12], tot[:12])
-  return None
+    """Returns an URL to read logs via a Web UI if applicable."""
+    if re.match(r'https://[^/]*\.googlesource\.com/', upstream_url):
+        # gitiles
+        return '%s/+log/%s..%s' % (upstream_url, head[:12], tot[:12])
+    if upstream_url.startswith('https://github.com/'):
+        upstream_url = upstream_url.rstrip('/')
+        if upstream_url.endswith('.git'):
+            upstream_url = upstream_url[:-len('.git')]
+        return '%s/compare/%s...%s' % (upstream_url, head[:12], tot[:12])
+    return None
 
 
 def should_show_log(upstream_url):
-  """Returns True if a short log should be included in the tree."""
-  # Skip logs for very active projects.
-  if upstream_url.endswith('/v8/v8.git'):
-    return False
-  if 'webrtc' in upstream_url:
-    return False
-  return True
+    """Returns True if a short log should be included in the tree."""
+    # Skip logs for very active projects.
+    if upstream_url.endswith('/v8/v8.git'):
+        return False
+    if 'webrtc' in upstream_url:
+        return False
+    return True
 
 
 def gclient(args):
-  """Executes gclient with the given args and returns the stdout."""
-  return check_output([sys.executable, GCLIENT_PATH] + args).strip()
-
-
-def generate_commit_message(
-    full_dir, dependency, head, roll_to, no_log, log_limit):
-  """Creates the commit message for this specific roll."""
-  commit_range = '%s..%s' % (head, roll_to)
-  commit_range_for_header = '%s..%s' % (head[:9], roll_to[:9])
-  upstream_url = check_output(
-      ['git', 'config', 'remote.origin.url'], cwd=full_dir).strip()
-  log_url = get_log_url(upstream_url, head, roll_to)
-  cmd = ['git', 'log', commit_range, '--date=short', '--no-merges']
-  logs = check_output(
-      # Args with '=' are automatically quoted.
-      cmd + ['--format=%ad %ae %s', '--'],
-      cwd=full_dir).rstrip()
-  logs = re.sub(r'(?m)^(\d\d\d\d-\d\d-\d\d [^@]+)@[^ ]+( .*)$', r'\1\2', logs)
-  lines = logs.splitlines()
-  cleaned_lines = [l for l in lines if not _ROLL_SUBJECT.match(l)]
-  logs = '\n'.join(cleaned_lines) + '\n'
-
-  nb_commits = len(lines)
-  rolls = nb_commits - len(cleaned_lines)
-  header = 'Roll %s/ %s (%d commit%s%s)\n\n' % (
-      dependency,
-      commit_range_for_header,
-      nb_commits,
-      's' if nb_commits > 1 else '',
-      ('; %s trivial rolls' % rolls) if rolls else '')
-  log_section = ''
-  if log_url:
-    log_section = log_url + '\n\n'
-  log_section += '$ %s ' % ' '.join(cmd)
-  log_section += '--format=\'%ad %ae %s\'\n'
-  log_section = log_section.replace(commit_range, commit_range_for_header)
-  # It is important that --no-log continues to work, as it is used by
-  # internal -> external rollers. Please do not remove or break it.
-  if not no_log and should_show_log(upstream_url):
-    if len(cleaned_lines) > log_limit:
-      # Keep the first N/2 log entries and last N/2 entries.
-      lines = logs.splitlines(True)
-      lines = lines[:log_limit//2] + ['(...)\n'] + lines[-log_limit//2:]
-      logs = ''.join(lines)
-    log_section += logs
-  return header + log_section
+    """Executes gclient with the given args and returns the stdout."""
+    return check_output([sys.executable, GCLIENT_PATH] + args).strip()
+
+
+def generate_commit_message(full_dir, dependency, head, roll_to, no_log,
+                            log_limit):
+    """Creates the commit message for this specific roll."""
+    commit_range = '%s..%s' % (head, roll_to)
+    commit_range_for_header = '%s..%s' % (head[:9], roll_to[:9])
+    upstream_url = check_output(['git', 'config', 'remote.origin.url'],
+                                cwd=full_dir).strip()
+    log_url = get_log_url(upstream_url, head, roll_to)
+    cmd = ['git', 'log', commit_range, '--date=short', '--no-merges']
+    logs = check_output(
+        # Args with '=' are automatically quoted.
+        cmd + ['--format=%ad %ae %s', '--'],
+        cwd=full_dir).rstrip()
+    logs = re.sub(r'(?m)^(\d\d\d\d-\d\d-\d\d [^@]+)@[^ ]+( .*)$', r'\1\2', logs)
+    lines = logs.splitlines()
+    cleaned_lines = [l for l in lines if not _ROLL_SUBJECT.match(l)]
+    logs = '\n'.join(cleaned_lines) + '\n'
+
+    nb_commits = len(lines)
+    rolls = nb_commits - len(cleaned_lines)
+    header = 'Roll %s/ %s (%d commit%s%s)\n\n' % (
+        dependency, commit_range_for_header, nb_commits,
+        's' if nb_commits > 1 else '',
+        ('; %s trivial rolls' % rolls) if rolls else '')
+    log_section = ''
+    if log_url:
+        log_section = log_url + '\n\n'
+    log_section += '$ %s ' % ' '.join(cmd)
+    log_section += '--format=\'%ad %ae %s\'\n'
+    log_section = log_section.replace(commit_range, commit_range_for_header)
+    # It is important that --no-log continues to work, as it is used by
+    # internal -> external rollers. Please do not remove or break it.
+    if not no_log and should_show_log(upstream_url):
+        if len(cleaned_lines) > log_limit:
+            # Keep the first N/2 log entries and last N/2 entries.
+            lines = logs.splitlines(True)
+            lines = lines[:log_limit // 2] + ['(...)\n'
+                                              ] + lines[-log_limit // 2:]
+            logs = ''.join(lines)
+        log_section += logs
+    return header + log_section
 
 
 def is_submoduled():
-  """Returns true if gclient root has submodules"""
-  return os.path.isfile(os.path.join(gclient(['root']), ".gitmodules"))
+    """Returns true if gclient root has submodules"""
+    return os.path.isfile(os.path.join(gclient(['root']), ".gitmodules"))
 
 
 def get_submodule_rev(submodule):
-  """Returns revision of the given submodule path"""
-  rev_output = check_output(['git', 'submodule', 'status', submodule],
-                            cwd=gclient(['root'])).strip()
+    """Returns revision of the given submodule path"""
+    rev_output = check_output(['git', 'submodule', 'status', submodule],
+                              cwd=gclient(['root'])).strip()
 
-  # git submodule status <path> returns all submodules with its rev in the
-  # pattern: `(+|-| )(<revision>) (submodule.path)`
-  revision = rev_output.split(' ')[0]
-  return revision[1:] if revision[0] in ('+', '-') else revision
+    # git submodule status <path> returns all submodules with its rev in the
+    # pattern: `(+|-| )(<revision>) (submodule.path)`
+    revision = rev_output.split(' ')[0]
+    return revision[1:] if revision[0] in ('+', '-') else revision
 
 
 def calculate_roll(full_dir, dependency, roll_to):
-  """Calculates the roll for a dependency by processing gclient_dict, and
+    """Calculates the roll for a dependency by processing gclient_dict, and
   fetching the dependency via git.
   """
-  # if the super-project uses submodules, get rev directly using git.
-  if is_submoduled():
-    head = get_submodule_rev(dependency)
-  else:
-    head = gclient(['getdep', '-r', dependency])
-  if not head:
-    raise Error('%s is unpinned.' % dependency)
-  check_call(['git', 'fetch', 'origin', '--quiet'], cwd=full_dir)
-  if roll_to == 'origin/HEAD':
-    check_output(['git', 'remote', 'set-head', 'origin', '-a'], cwd=full_dir)
-
-  roll_to = check_output(['git', 'rev-parse', roll_to], cwd=full_dir).strip()
-  return head, roll_to
+    # if the super-project uses submodules, get rev directly using git.
+    if is_submoduled():
+        head = get_submodule_rev(dependency)
+    else:
+        head = gclient(['getdep', '-r', dependency])
+    if not head:
+        raise Error('%s is unpinned.' % dependency)
+    check_call(['git', 'fetch', 'origin', '--quiet'], cwd=full_dir)
+    if roll_to == 'origin/HEAD':
+        check_output(['git', 'remote', 'set-head', 'origin', '-a'],
+                     cwd=full_dir)
 
+    roll_to = check_output(['git', 'rev-parse', roll_to], cwd=full_dir).strip()
+    return head, roll_to
 
 
 def gen_commit_msg(logs, cmdline, reviewers, bug):
-  """Returns the final commit message."""
-  commit_msg = ''
-  if len(logs) > 1:
-    commit_msg = 'Rolling %d dependencies\n\n' % len(logs)
-  commit_msg += '\n\n'.join(logs)
-  commit_msg += '\nCreated with:\n  ' + cmdline + '\n'
-  commit_msg += 'R=%s\n' % ','.join(reviewers) if reviewers else ''
-  commit_msg += '\nBug: %s\n' % bug if bug else ''
-  return commit_msg
+    """Returns the final commit message."""
+    commit_msg = ''
+    if len(logs) > 1:
+        commit_msg = 'Rolling %d dependencies\n\n' % len(logs)
+    commit_msg += '\n\n'.join(logs)
+    commit_msg += '\nCreated with:\n  ' + cmdline + '\n'
+    commit_msg += 'R=%s\n' % ','.join(reviewers) if reviewers else ''
+    commit_msg += '\nBug: %s\n' % bug if bug else ''
+    return commit_msg
 
 
 def finalize(commit_msg, current_dir, rolls):
-  """Commits changes to the DEPS file, then uploads a CL."""
-  print('Commit message:')
-  print('\n'.join('    ' + i for i in commit_msg.splitlines()))
-
-  # Pull the dependency to the right revision. This is surprising to users
-  # otherwise. The revision update is done before committing to update
-  # submodule revision if present.
-  for dependency, (_head, roll_to, full_dir) in sorted(rolls.items()):
-    check_call(['git', 'checkout', '--quiet', roll_to], cwd=full_dir)
-
-    # This adds the submodule revision update to the commit.
-    if is_submoduled():
-      check_call([
-          'git', 'update-index', '--add', '--cacheinfo', '160000,{},{}'.format(
-              roll_to, dependency)
-      ],
-                 cwd=current_dir)
-
-  check_call(['git', 'add', 'DEPS'], cwd=current_dir)
-  # We have to set delete=False and then let the object go out of scope so
-  # that the file can be opened by name on Windows.
-  with tempfile.NamedTemporaryFile('w+', newline='', delete=False) as f:
-    commit_filename = f.name
-    f.write(commit_msg)
-  check_call(['git', 'commit', '--quiet', '--file', commit_filename],
-             cwd=current_dir)
-  os.remove(commit_filename)
+    """Commits changes to the DEPS file, then uploads a CL."""
+    print('Commit message:')
+    print('\n'.join('    ' + i for i in commit_msg.splitlines()))
+
+    # Pull the dependency to the right revision. This is surprising to users
+    # otherwise. The revision update is done before committing to update
+    # submodule revision if present.
+    for dependency, (_head, roll_to, full_dir) in sorted(rolls.items()):
+        check_call(['git', 'checkout', '--quiet', roll_to], cwd=full_dir)
+
+        # This adds the submodule revision update to the commit.
+        if is_submoduled():
+            check_call([
+                'git', 'update-index', '--add', '--cacheinfo',
+                '160000,{},{}'.format(roll_to, dependency)
+            ],
+                       cwd=current_dir)
+
+    check_call(['git', 'add', 'DEPS'], cwd=current_dir)
+    # We have to set delete=False and then let the object go out of scope so
+    # that the file can be opened by name on Windows.
+    with tempfile.NamedTemporaryFile('w+', newline='', delete=False) as f:
+        commit_filename = f.name
+        f.write(commit_msg)
+    check_call(['git', 'commit', '--quiet', '--file', commit_filename],
+               cwd=current_dir)
+    os.remove(commit_filename)
 
 
 def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  parser.add_argument(
-      '--ignore-dirty-tree', action='store_true',
-      help='Roll anyways, even if there is a diff.')
-  parser.add_argument(
-      '-r',
-      '--reviewer',
-      action='append',
-      help=
-      'To specify multiple reviewers, either use a comma separated list, e.g. '
-      '-r joe,jane,john or provide the flag multiple times, e.g. '
-      '-r joe -r jane. Defaults to @chromium.org')
-  parser.add_argument('-b', '--bug', help='Associate a bug number to the roll')
-  # It is important that --no-log continues to work, as it is used by
-  # internal -> external rollers. Please do not remove or break it.
-  parser.add_argument(
-      '--no-log', action='store_true',
-      help='Do not include the short log in the commit message')
-  parser.add_argument(
-      '--log-limit', type=int, default=100,
-      help='Trim log after N commits (default: %(default)s)')
-  parser.add_argument(
-      '--roll-to', default='origin/HEAD',
-      help='Specify the new commit to roll to (default: %(default)s)')
-  parser.add_argument(
-      '--key', action='append', default=[],
-      help='Regex(es) for dependency in DEPS file')
-  parser.add_argument('dep_path', nargs='+', help='Path(s) to dependency')
-  args = parser.parse_args()
-
-  if len(args.dep_path) > 1:
-    if args.roll_to != 'origin/HEAD':
-      parser.error(
-          'Can\'t use multiple paths to roll simultaneously and --roll-to')
-    if args.key:
-      parser.error(
-          'Can\'t use multiple paths to roll simultaneously and --key')
-  reviewers = None
-  if args.reviewer:
-    reviewers = list(itertools.chain(*[r.split(',') for r in args.reviewer]))
-    for i, r in enumerate(reviewers):
-      if not '@' in r:
-        reviewers[i] = r + '@chromium.org'
-
-  gclient_root = gclient(['root'])
-  current_dir = os.getcwd()
-  dependencies = sorted(d.replace('\\', '/').rstrip('/') for d in args.dep_path)
-  cmdline = 'roll-dep ' + ' '.join(dependencies) + ''.join(
-      ' --key ' + k for k in args.key)
-  try:
-    if not args.ignore_dirty_tree and not is_pristine(current_dir):
-      raise Error(
-          'Ensure %s is clean first (no non-merged commits).' % current_dir)
-    # First gather all the information without modifying anything, except for a
-    # git fetch.
-    rolls = {}
-    for dependency in dependencies:
-      full_dir = os.path.normpath(os.path.join(gclient_root, dependency))
-      if not os.path.isdir(full_dir):
-        print('Dependency %s not found at %s' % (dependency, full_dir))
-        full_dir = os.path.normpath(os.path.join(current_dir, dependency))
-        print('Will look for relative dependency at %s' % full_dir)
-        if not os.path.isdir(full_dir):
-          raise Error('Directory not found: %s (%s)' % (dependency, full_dir))
-
-      head, roll_to = calculate_roll(full_dir, dependency, args.roll_to)
-      if roll_to == head:
-        if len(dependencies) == 1:
-          raise AlreadyRolledError('No revision to roll!')
-        print('%s: Already at latest commit %s' % (dependency, roll_to))
-      else:
-        print(
-            '%s: Rolling from %s to %s' % (dependency, head[:10], roll_to[:10]))
-        rolls[dependency] = (head, roll_to, full_dir)
-
-    logs = []
-    setdep_args = []
-    for dependency, (head, roll_to, full_dir) in sorted(rolls.items()):
-      log = generate_commit_message(
-          full_dir, dependency, head, roll_to, args.no_log, args.log_limit)
-      logs.append(log)
-      setdep_args.extend(['-r', '{}@{}'.format(dependency, roll_to)])
-
-    # DEPS is updated even if the repository uses submodules.
-    gclient(['setdep'] + setdep_args)
-
-    commit_msg = gen_commit_msg(logs, cmdline, reviewers, args.bug)
-    finalize(commit_msg, current_dir, rolls)
-  except Error as e:
-    sys.stderr.write('error: %s\n' % e)
-    return 2 if isinstance(e, AlreadyRolledError) else 1
-  except subprocess2.CalledProcessError:
-    return 1
-
-  print('')
-  if not reviewers:
-    print('You forgot to pass -r, make sure to insert a R=foo@example.com line')
-    print('to the commit description before emailing.')
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument('--ignore-dirty-tree',
+                        action='store_true',
+                        help='Roll anyways, even if there is a diff.')
+    parser.add_argument(
+        '-r',
+        '--reviewer',
+        action='append',
+        help='To specify multiple reviewers, either use a comma separated '
+        'list, e.g. -r joe,jane,john or provide the flag multiple times, e.g. '
+        '-r joe -r jane. Defaults to @chromium.org')
+    parser.add_argument('-b',
+                        '--bug',
+                        help='Associate a bug number to the roll')
+    # It is important that --no-log continues to work, as it is used by
+    # internal -> external rollers. Please do not remove or break it.
+    parser.add_argument(
+        '--no-log',
+        action='store_true',
+        help='Do not include the short log in the commit message')
+    parser.add_argument('--log-limit',
+                        type=int,
+                        default=100,
+                        help='Trim log after N commits (default: %(default)s)')
+    parser.add_argument(
+        '--roll-to',
+        default='origin/HEAD',
+        help='Specify the new commit to roll to (default: %(default)s)')
+    parser.add_argument('--key',
+                        action='append',
+                        default=[],
+                        help='Regex(es) for dependency in DEPS file')
+    parser.add_argument('dep_path', nargs='+', help='Path(s) to dependency')
+    args = parser.parse_args()
+
+    if len(args.dep_path) > 1:
+        if args.roll_to != 'origin/HEAD':
+            parser.error(
+                'Can\'t use multiple paths to roll simultaneously and --roll-to'
+            )
+        if args.key:
+            parser.error(
+                'Can\'t use multiple paths to roll simultaneously and --key')
+    reviewers = None
+    if args.reviewer:
+        reviewers = list(itertools.chain(*[r.split(',')
+                                           for r in args.reviewer]))
+        for i, r in enumerate(reviewers):
+            if not '@' in r:
+                reviewers[i] = r + '@chromium.org'
+
+    gclient_root = gclient(['root'])
+    current_dir = os.getcwd()
+    dependencies = sorted(
+        d.replace('\\', '/').rstrip('/') for d in args.dep_path)
+    cmdline = 'roll-dep ' + ' '.join(dependencies) + ''.join(' --key ' + k
+                                                             for k in args.key)
+    try:
+        if not args.ignore_dirty_tree and not is_pristine(current_dir):
+            raise Error('Ensure %s is clean first (no non-merged commits).' %
+                        current_dir)
+        # First gather all the information without modifying anything, except
+        # for a git fetch.
+        rolls = {}
+        for dependency in dependencies:
+            full_dir = os.path.normpath(os.path.join(gclient_root, dependency))
+            if not os.path.isdir(full_dir):
+                print('Dependency %s not found at %s' % (dependency, full_dir))
+                full_dir = os.path.normpath(
+                    os.path.join(current_dir, dependency))
+                print('Will look for relative dependency at %s' % full_dir)
+                if not os.path.isdir(full_dir):
+                    raise Error('Directory not found: %s (%s)' %
+                                (dependency, full_dir))
+
+            head, roll_to = calculate_roll(full_dir, dependency, args.roll_to)
+            if roll_to == head:
+                if len(dependencies) == 1:
+                    raise AlreadyRolledError('No revision to roll!')
+                print('%s: Already at latest commit %s' % (dependency, roll_to))
+            else:
+                print('%s: Rolling from %s to %s' %
+                      (dependency, head[:10], roll_to[:10]))
+                rolls[dependency] = (head, roll_to, full_dir)
+
+        logs = []
+        setdep_args = []
+        for dependency, (head, roll_to, full_dir) in sorted(rolls.items()):
+            log = generate_commit_message(full_dir, dependency, head, roll_to,
+                                          args.no_log, args.log_limit)
+            logs.append(log)
+            setdep_args.extend(['-r', '{}@{}'.format(dependency, roll_to)])
+
+        # DEPS is updated even if the repository uses submodules.
+        gclient(['setdep'] + setdep_args)
+
+        commit_msg = gen_commit_msg(logs, cmdline, reviewers, args.bug)
+        finalize(commit_msg, current_dir, rolls)
+    except Error as e:
+        sys.stderr.write('error: %s\n' % e)
+        return 2 if isinstance(e, AlreadyRolledError) else 1
+    except subprocess2.CalledProcessError:
+        return 1
+
     print('')
-  print('Run:')
-  print('  git cl upload --send-mail')
-  return 0
+    if not reviewers:
+        print('You forgot to pass -r, make sure to insert a R=foo@example.com '
+              'line')
+        print('to the commit description before emailing.')
+        print('')
+    print('Run:')
+    print('  git cl upload --send-mail')
+    return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
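
As an illustration of the roll-subject filtering above (the _ROLL_SUBJECT pattern is used by generate_commit_message() to drop autoroller-generated entries from the short log), here is a minimal standalone sketch. The date/author prefix is assumed from the "<year>-<month>-<day> <author> <subject>" comment, the subject alternatives are taken from the pattern itself, and the log lines below are invented:

import re

# Hypothetical reconstruction for illustration only.
ROLL_SUBJECT = re.compile(
    r'^\d\d\d\d-\d\d-\d\d '  # date (assumed prefix)
    r'[^ ]+ '                # author (assumed prefix)
    r'(Roll [^ ]+ [a-f0-9]+\.\.[a-f0-9]+ \(\d+ commits\)'
    r'|Roll recipe dependencies \(trivial\)\.)$')

log_lines = [
    '2023-09-01 autoroller Roll src/v8 0a1b2c..3d4e5f (7 commits)',
    '2023-09-01 recipe-roller Roll recipe dependencies (trivial).',
    '2023-09-01 dev Fix flaky test',
]
# Mirrors the cleaned_lines filtering in generate_commit_message().
kept = [line for line in log_lines if not ROLL_SUBJECT.match(line)]
print(kept)  # ['2023-09-01 dev Fix flaky test']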

+ 38 - 38
rustfmt.py

@@ -15,56 +15,56 @@ import sys
 
 
 class NotFoundError(Exception):
-  """A file could not be found."""
-
-  def __init__(self, e):
-    Exception.__init__(
-        self, 'Problem while looking for rustfmt in Chromium source tree:\n'
-        '%s' % e)
+    """A file could not be found."""
+    def __init__(self, e):
+        Exception.__init__(
+            self, 'Problem while looking for rustfmt in Chromium source tree:\n'
+            '%s' % e)
 
 
 def FindRustfmtToolInChromiumTree():
-  """Return a path to the rustfmt executable, or die trying."""
-  chromium_src_path = gclient_paths.GetPrimarySolutionPath()
-  if not chromium_src_path:
-    raise NotFoundError(
-        'Could not find checkout in any parent of the current path.\n'
-        'Set CHROMIUM_BUILDTOOLS_PATH to use outside of a chromium checkout.')
+    """Return a path to the rustfmt executable, or die trying."""
+    chromium_src_path = gclient_paths.GetPrimarySolutionPath()
+    if not chromium_src_path:
+        raise NotFoundError(
+            'Could not find checkout in any parent of the current path.\n'
+            'Set CHROMIUM_BUILDTOOLS_PATH to use outside of a chromium '
+            'checkout.')
 
-  tool_path = os.path.join(chromium_src_path, 'third_party', 'rust-toolchain',
-                           'bin', 'rustfmt' + gclient_paths.GetExeSuffix())
-  if not os.path.exists(tool_path):
-    raise NotFoundError('File does not exist: %s' % tool_path)
-  return tool_path
+    tool_path = os.path.join(chromium_src_path, 'third_party', 'rust-toolchain',
+                             'bin', 'rustfmt' + gclient_paths.GetExeSuffix())
+    if not os.path.exists(tool_path):
+        raise NotFoundError('File does not exist: %s' % tool_path)
+    return tool_path
 
 
 def IsRustfmtSupported():
-  try:
-    FindRustfmtToolInChromiumTree()
-    return True
-  except NotFoundError:
-    return False
+    try:
+        FindRustfmtToolInChromiumTree()
+        return True
+    except NotFoundError:
+        return False
 
 
 def main(args):
-  try:
-    tool = FindRustfmtToolInChromiumTree()
-  except NotFoundError as e:
-    sys.stderr.write("%s\n" % str(e))
-    return 1
+    try:
+        tool = FindRustfmtToolInChromiumTree()
+    except NotFoundError as e:
+        sys.stderr.write("%s\n" % str(e))
+        return 1
 
-  # Add some visibility to --help showing where the tool lives, since this
-  # redirection can be a little opaque.
-  help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
-  if any(match in args for match in help_syntax):
-    print('\nDepot tools redirects you to the rustfmt at:\n    %s\n' % tool)
+    # Add some visibility to --help showing where the tool lives, since this
+    # redirection can be a little opaque.
+    help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
+    if any(match in args for match in help_syntax):
+        print('\nDepot tools redirects you to the rustfmt at:\n    %s\n' % tool)
 
-  return subprocess.call([tool] + args)
+    return subprocess.call([tool] + args)
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
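
rustfmt.py, like the other depot_tools formatter wrappers, follows one small pattern: locate the bundled tool inside the checkout, surface its real location when --help is requested, and otherwise forward all arguments. A minimal generic sketch of that pattern follows; the tool name and paths are invented for illustration:

import os
import subprocess
import sys

def find_tool(checkout_root, *relpath):
    # Build the expected path under the checkout and fail loudly if missing.
    tool = os.path.join(checkout_root, *relpath)
    if not os.path.exists(tool):
        raise FileNotFoundError('File does not exist: %s' % tool)
    return tool

def run_wrapped(checkout_root, args):
    # 'some-formatter' and its location are hypothetical.
    tool = find_tool(checkout_root, 'third_party', 'some-toolchain', 'bin',
                     'some-formatter')
    if any(a in args for a in ('-h', '--help')):
        print('\nRedirecting to the tool at:\n    %s\n' % tool)
    return subprocess.call([tool] + args)

if __name__ == '__main__':
    try:
        sys.exit(run_wrapped('/path/to/checkout/src', sys.argv[1:]))
    except FileNotFoundError as e:
        sys.stderr.write('%s\n' % e)
        sys.exit(1)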

+ 435 - 421
scm.py

@@ -1,7 +1,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """SCM-specific utility classes."""
 
 import distutils.version
@@ -15,463 +14,478 @@ import sys
 import gclient_utils
 import subprocess2
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 def ValidateEmail(email):
-  return (
-      re.match(r"^[a-zA-Z0-9._%\-+]+@[a-zA-Z0-9._%-]+.[a-zA-Z]{2,6}$", email)
-      is not None)
+    return (re.match(r"^[a-zA-Z0-9._%\-+]+@[a-zA-Z0-9._%-]+.[a-zA-Z]{2,6}$",
+                     email) is not None)
 
 
 def GetCasedPath(path):
-  """Elcheapos way to get the real path case on Windows."""
-  if sys.platform.startswith('win') and os.path.exists(path):
-    # Reconstruct the path.
-    path = os.path.abspath(path)
-    paths = path.split('\\')
-    for i in range(len(paths)):
-      if i == 0:
-        # Skip drive letter.
-        continue
-      subpath = '\\'.join(paths[:i+1])
-      prev = len('\\'.join(paths[:i]))
-      # glob.glob will return the cased path for the last item only. This is why
-      # we are calling it in a loop. Extract the data we want and put it back
-      # into the list.
-      paths[i] = glob.glob(subpath + '*')[0][prev+1:len(subpath)]
-    path = '\\'.join(paths)
-  return path
+    """Elcheapos way to get the real path case on Windows."""
+    if sys.platform.startswith('win') and os.path.exists(path):
+        # Reconstruct the path.
+        path = os.path.abspath(path)
+        paths = path.split('\\')
+        for i in range(len(paths)):
+            if i == 0:
+                # Skip drive letter.
+                continue
+            subpath = '\\'.join(paths[:i + 1])
+            prev = len('\\'.join(paths[:i]))
+            # glob.glob will return the cased path for the last item only. This
+            # is why we are calling it in a loop. Extract the data we want and
+            # put it back into the list.
+            paths[i] = glob.glob(subpath + '*')[0][prev + 1:len(subpath)]
+        path = '\\'.join(paths)
+    return path
 
 
 def GenFakeDiff(filename):
-  """Generates a fake diff from a file."""
-  file_content = gclient_utils.FileRead(filename, 'rb').splitlines(True)
-  filename = filename.replace(os.sep, '/')
-  nb_lines = len(file_content)
-  # We need to use / since patch on unix will fail otherwise.
-  data = io.StringIO()
-  data.write("Index: %s\n" % filename)
-  data.write('=' * 67 + '\n')
-  # Note: Should we use /dev/null instead?
-  data.write("--- %s\n" % filename)
-  data.write("+++ %s\n" % filename)
-  data.write("@@ -0,0 +1,%d @@\n" % nb_lines)
-  # Prepend '+' to every line.
-  for line in file_content:
-    data.write('+')
-    data.write(line)
-  result = data.getvalue()
-  data.close()
-  return result
+    """Generates a fake diff from a file."""
+    file_content = gclient_utils.FileRead(filename, 'rb').splitlines(True)
+    filename = filename.replace(os.sep, '/')
+    nb_lines = len(file_content)
+    # We need to use / since patch on unix will fail otherwise.
+    data = io.StringIO()
+    data.write("Index: %s\n" % filename)
+    data.write('=' * 67 + '\n')
+    # Note: Should we use /dev/null instead?
+    data.write("--- %s\n" % filename)
+    data.write("+++ %s\n" % filename)
+    data.write("@@ -0,0 +1,%d @@\n" % nb_lines)
+    # Prepend '+' to every line.
+    for line in file_content:
+        data.write('+')
+        data.write(line)
+    result = data.getvalue()
+    data.close()
+    return result
 
 
 def determine_scm(root):
-  """Similar to upload.py's version but much simpler.
+    """Similar to upload.py's version but much simpler.
 
   Returns 'git' or None.
   """
-  if os.path.isdir(os.path.join(root, '.git')):
-    return 'git'
+    if os.path.isdir(os.path.join(root, '.git')):
+        return 'git'
 
-  try:
-    subprocess2.check_call(
-        ['git', 'rev-parse', '--show-cdup'],
-        stdout=subprocess2.DEVNULL,
-        stderr=subprocess2.DEVNULL,
-        cwd=root)
-    return 'git'
-  except (OSError, subprocess2.CalledProcessError):
-    return None
+    try:
+        subprocess2.check_call(['git', 'rev-parse', '--show-cdup'],
+                               stdout=subprocess2.DEVNULL,
+                               stderr=subprocess2.DEVNULL,
+                               cwd=root)
+        return 'git'
+    except (OSError, subprocess2.CalledProcessError):
+        return None
 
 
 def only_int(val):
-  if val.isdigit():
-    return int(val)
+    if val.isdigit():
+        return int(val)
 
-  return 0
+    return 0
 
 
 class GIT(object):
-  current_version = None
-
-  @staticmethod
-  def ApplyEnvVars(kwargs):
-    env = kwargs.pop('env', None) or os.environ.copy()
-    # Don't prompt for passwords; just fail quickly and noisily.
-    # By default, git will use an interactive terminal prompt when a username/
-    # password is needed.  That shouldn't happen in the chromium workflow,
-    # and if it does, then gclient may hide the prompt in the midst of a flood
-    # of terminal spew.  The only indication that something has gone wrong
-    # will be when gclient hangs unresponsively.  Instead, we disable the
-    # password prompt and simply allow git to fail noisily.  The error
-    # message produced by git will be copied to gclient's output.
-    env.setdefault('GIT_ASKPASS', 'true')
-    env.setdefault('SSH_ASKPASS', 'true')
-    # 'cat' is a magical git string that disables pagers on all platforms.
-    env.setdefault('GIT_PAGER', 'cat')
-    return env
-
-  @staticmethod
-  def Capture(args, cwd=None, strip_out=True, **kwargs):
-    env = GIT.ApplyEnvVars(kwargs)
-    output = subprocess2.check_output(
-        ['git'] + args, cwd=cwd, stderr=subprocess2.PIPE, env=env, **kwargs)
-    output = output.decode('utf-8', 'replace')
-    return output.strip() if strip_out else output
-
-  @staticmethod
-  def CaptureStatus(cwd, upstream_branch, end_commit=None):
-    # type: (str, str, Optional[str]) -> Sequence[Tuple[str, str]]
-    """Returns git status.
+    current_version = None
+
+    @staticmethod
+    def ApplyEnvVars(kwargs):
+        env = kwargs.pop('env', None) or os.environ.copy()
+        # Don't prompt for passwords; just fail quickly and noisily.
+        # By default, git will use an interactive terminal prompt when a
+        # username/ password is needed.  That shouldn't happen in the chromium
+        # workflow, and if it does, then gclient may hide the prompt in the
+        # midst of a flood of terminal spew.  The only indication that something
+        # has gone wrong will be when gclient hangs unresponsively.  Instead, we
+        # disable the password prompt and simply allow git to fail noisily.  The
+        # error message produced by git will be copied to gclient's output.
+        env.setdefault('GIT_ASKPASS', 'true')
+        env.setdefault('SSH_ASKPASS', 'true')
+        # 'cat' is a magical git string that disables pagers on all platforms.
+        env.setdefault('GIT_PAGER', 'cat')
+        return env
+
+    @staticmethod
+    def Capture(args, cwd=None, strip_out=True, **kwargs):
+        env = GIT.ApplyEnvVars(kwargs)
+        output = subprocess2.check_output(['git'] + args,
+                                          cwd=cwd,
+                                          stderr=subprocess2.PIPE,
+                                          env=env,
+                                          **kwargs)
+        output = output.decode('utf-8', 'replace')
+        return output.strip() if strip_out else output
+
+    @staticmethod
+    def CaptureStatus(cwd, upstream_branch, end_commit=None):
+        # type: (str, str, Optional[str]) -> Sequence[Tuple[str, str]]
+        """Returns git status.
 
     Returns an array of (status, file) tuples."""
-    if end_commit is None:
-      end_commit = ''
-    if upstream_branch is None:
-      upstream_branch = GIT.GetUpstreamBranch(cwd)
-      if upstream_branch is None:
-        raise gclient_utils.Error('Cannot determine upstream branch')
-    command = [
-        '-c', 'core.quotePath=false', 'diff', '--name-status', '--no-renames',
-        '--ignore-submodules=all', '-r',
-        '%s...%s' % (upstream_branch, end_commit)
-    ]
-    status = GIT.Capture(command, cwd)
-    results = []
-    if status:
-      for statusline in status.splitlines():
-        # 3-way merges can cause the status to be 'MMM' instead of 'M'. This
-        # can happen when the user has 2 local branches and diffs between
-        # these 2 branches instead of diffing to upstream.
-        m = re.match(r'^(\w)+\t(.+)$', statusline)
-        if not m:
-          raise gclient_utils.Error(
-              'status currently unsupported: %s' % statusline)
-        # Only grab the first letter.
-        results.append(('%s      ' % m.group(1)[0], m.group(2)))
-    return results
-
-  @staticmethod
-  def GetConfig(cwd, key, default=None):
-    try:
-      return GIT.Capture(['config', key], cwd=cwd)
-    except subprocess2.CalledProcessError:
-      return default
-
-  @staticmethod
-  def GetBranchConfig(cwd, branch, key, default=None):
-    assert branch, 'A branch must be given'
-    key = 'branch.%s.%s' % (branch, key)
-    return GIT.GetConfig(cwd, key, default)
-
-  @staticmethod
-  def SetConfig(cwd, key, value=None):
-    if value is None:
-      args = ['config', '--unset', key]
-    else:
-      args = ['config', key, value]
-    GIT.Capture(args, cwd=cwd)
-
-  @staticmethod
-  def SetBranchConfig(cwd, branch, key, value=None):
-    assert branch, 'A branch must be given'
-    key = 'branch.%s.%s' % (branch, key)
-    GIT.SetConfig(cwd, key, value)
-
-  @staticmethod
-  def IsWorkTreeDirty(cwd):
-    return GIT.Capture(['status', '-s'], cwd=cwd) != ''
-
-  @staticmethod
-  def GetEmail(cwd):
-    """Retrieves the user email address if known."""
-    return GIT.GetConfig(cwd, 'user.email', '')
-
-  @staticmethod
-  def ShortBranchName(branch):
-    """Converts a name like 'refs/heads/foo' to just 'foo'."""
-    return branch.replace('refs/heads/', '')
-
-  @staticmethod
-  def GetBranchRef(cwd):
-    """Returns the full branch reference, e.g. 'refs/heads/main'."""
-    try:
-      return GIT.Capture(['symbolic-ref', 'HEAD'], cwd=cwd)
-    except subprocess2.CalledProcessError:
-      return None
-
-  @staticmethod
-  def GetRemoteHeadRef(cwd, url, remote):
-    """Returns the full default remote branch reference, e.g.
+        if end_commit is None:
+            end_commit = ''
+        if upstream_branch is None:
+            upstream_branch = GIT.GetUpstreamBranch(cwd)
+            if upstream_branch is None:
+                raise gclient_utils.Error('Cannot determine upstream branch')
+        command = [
+            '-c', 'core.quotePath=false', 'diff', '--name-status',
+            '--no-renames', '--ignore-submodules=all', '-r',
+            '%s...%s' % (upstream_branch, end_commit)
+        ]
+        status = GIT.Capture(command, cwd)
+        results = []
+        if status:
+            for statusline in status.splitlines():
+                # 3-way merges can cause the status to be 'MMM' instead of 'M'.
+                # This can happen when the user has 2 local branches and diffs
+                # between these 2 branches instead of diffing to upstream.
+                m = re.match(r'^(\w)+\t(.+)$', statusline)
+                if not m:
+                    raise gclient_utils.Error(
+                        'status currently unsupported: %s' % statusline)
+                # Only grab the first letter.
+                results.append(('%s      ' % m.group(1)[0], m.group(2)))
+        return results
+
+    @staticmethod
+    def GetConfig(cwd, key, default=None):
+        try:
+            return GIT.Capture(['config', key], cwd=cwd)
+        except subprocess2.CalledProcessError:
+            return default
+
+    @staticmethod
+    def GetBranchConfig(cwd, branch, key, default=None):
+        assert branch, 'A branch must be given'
+        key = 'branch.%s.%s' % (branch, key)
+        return GIT.GetConfig(cwd, key, default)
+
+    @staticmethod
+    def SetConfig(cwd, key, value=None):
+        if value is None:
+            args = ['config', '--unset', key]
+        else:
+            args = ['config', key, value]
+        GIT.Capture(args, cwd=cwd)
+
+    @staticmethod
+    def SetBranchConfig(cwd, branch, key, value=None):
+        assert branch, 'A branch must be given'
+        key = 'branch.%s.%s' % (branch, key)
+        GIT.SetConfig(cwd, key, value)
+
+    @staticmethod
+    def IsWorkTreeDirty(cwd):
+        return GIT.Capture(['status', '-s'], cwd=cwd) != ''
+
+    @staticmethod
+    def GetEmail(cwd):
+        """Retrieves the user email address if known."""
+        return GIT.GetConfig(cwd, 'user.email', '')
+
+    @staticmethod
+    def ShortBranchName(branch):
+        """Converts a name like 'refs/heads/foo' to just 'foo'."""
+        return branch.replace('refs/heads/', '')
+
+    @staticmethod
+    def GetBranchRef(cwd):
+        """Returns the full branch reference, e.g. 'refs/heads/main'."""
+        try:
+            return GIT.Capture(['symbolic-ref', 'HEAD'], cwd=cwd)
+        except subprocess2.CalledProcessError:
+            return None
+
+    @staticmethod
+    def GetRemoteHeadRef(cwd, url, remote):
+        """Returns the full default remote branch reference, e.g.
     'refs/remotes/origin/main'."""
-    if os.path.exists(cwd):
-      try:
-        # Try using local git copy first
-        ref = 'refs/remotes/%s/HEAD' % remote
-        ref = GIT.Capture(['symbolic-ref', ref], cwd=cwd)
-        if not ref.endswith('master'):
-          return ref
-        # Check if there are changes in the default branch for this particular
-        # repository.
-        GIT.Capture(['remote', 'set-head', '-a', remote], cwd=cwd)
-        return GIT.Capture(['symbolic-ref', ref], cwd=cwd)
-      except subprocess2.CalledProcessError:
-        pass
-
-    try:
-      # Fetch information from git server
-      resp = GIT.Capture(['ls-remote', '--symref', url, 'HEAD'])
-      regex = r'^ref: (.*)\tHEAD$'
-      for line in resp.split('\n'):
-        m = re.match(regex, line)
-        if m:
-          return ''.join(GIT.RefToRemoteRef(m.group(1), remote))
-    except subprocess2.CalledProcessError:
-      pass
-    # Return default branch
-    return 'refs/remotes/%s/main' % remote
-
-  @staticmethod
-  def GetBranch(cwd):
-    """Returns the short branch name, e.g. 'main'."""
-    branchref = GIT.GetBranchRef(cwd)
-    if branchref:
-      return GIT.ShortBranchName(branchref)
-    return None
-
-  @staticmethod
-  def GetRemoteBranches(cwd):
-    return GIT.Capture(['branch', '-r'], cwd=cwd).split()
-
-  @staticmethod
-  def FetchUpstreamTuple(cwd, branch=None):
-    """Returns a tuple containing remote and remote ref,
+        if os.path.exists(cwd):
+            try:
+                # Try using local git copy first
+                ref = 'refs/remotes/%s/HEAD' % remote
+                ref = GIT.Capture(['symbolic-ref', ref], cwd=cwd)
+                if not ref.endswith('master'):
+                    return ref
+                # Check if there are changes in the default branch for this
+                # particular repository.
+                GIT.Capture(['remote', 'set-head', '-a', remote], cwd=cwd)
+                return GIT.Capture(['symbolic-ref', ref], cwd=cwd)
+            except subprocess2.CalledProcessError:
+                pass
+
+        try:
+            # Fetch information from git server
+            resp = GIT.Capture(['ls-remote', '--symref', url, 'HEAD'])
+            regex = r'^ref: (.*)\tHEAD$'
+            for line in resp.split('\n'):
+                m = re.match(regex, line)
+                if m:
+                    return ''.join(GIT.RefToRemoteRef(m.group(1), remote))
+        except subprocess2.CalledProcessError:
+            pass
+        # Return default branch
+        return 'refs/remotes/%s/main' % remote
+
+    @staticmethod
+    def GetBranch(cwd):
+        """Returns the short branch name, e.g. 'main'."""
+        branchref = GIT.GetBranchRef(cwd)
+        if branchref:
+            return GIT.ShortBranchName(branchref)
+        return None
+
+    @staticmethod
+    def GetRemoteBranches(cwd):
+        return GIT.Capture(['branch', '-r'], cwd=cwd).split()
+
+    @staticmethod
+    def FetchUpstreamTuple(cwd, branch=None):
+        """Returns a tuple containing remote and remote ref,
        e.g. 'origin', 'refs/heads/main'
     """
-    try:
-      branch = branch or GIT.GetBranch(cwd)
-    except subprocess2.CalledProcessError:
-      pass
-    if branch:
-      upstream_branch = GIT.GetBranchConfig(cwd, branch, 'merge')
-      if upstream_branch:
-        remote = GIT.GetBranchConfig(cwd, branch, 'remote', '.')
-        return remote, upstream_branch
-
-    upstream_branch = GIT.GetConfig(cwd, 'rietveld.upstream-branch')
-    if upstream_branch:
-      remote = GIT.GetConfig(cwd, 'rietveld.upstream-remote', '.')
-      return remote, upstream_branch
-
-    # Else, try to guess the origin remote.
-    remote_branches = GIT.GetRemoteBranches(cwd)
-    if 'origin/main' in remote_branches:
-      # Fall back on origin/main if it exists.
-      return 'origin', 'refs/heads/main'
-
-    if 'origin/master' in remote_branches:
-      # Fall back on origin/master if it exists.
-      return 'origin', 'refs/heads/master'
-
-    return None, None
-
-  @staticmethod
-  def RefToRemoteRef(ref, remote):
-    """Convert a checkout ref to the equivalent remote ref.
+        try:
+            branch = branch or GIT.GetBranch(cwd)
+        except subprocess2.CalledProcessError:
+            pass
+        if branch:
+            upstream_branch = GIT.GetBranchConfig(cwd, branch, 'merge')
+            if upstream_branch:
+                remote = GIT.GetBranchConfig(cwd, branch, 'remote', '.')
+                return remote, upstream_branch
+
+        upstream_branch = GIT.GetConfig(cwd, 'rietveld.upstream-branch')
+        if upstream_branch:
+            remote = GIT.GetConfig(cwd, 'rietveld.upstream-remote', '.')
+            return remote, upstream_branch
+
+        # Else, try to guess the origin remote.
+        remote_branches = GIT.GetRemoteBranches(cwd)
+        if 'origin/main' in remote_branches:
+            # Fall back on origin/main if it exists.
+            return 'origin', 'refs/heads/main'
+
+        if 'origin/master' in remote_branches:
+            # Fall back on origin/master if it exists.
+            return 'origin', 'refs/heads/master'
+
+        return None, None
+
+    @staticmethod
+    def RefToRemoteRef(ref, remote):
+        """Convert a checkout ref to the equivalent remote ref.
 
     Returns:
       A tuple of the remote ref's (common prefix, unique suffix), or None if it
       doesn't appear to refer to a remote ref (e.g. it's a commit hash).
     """
-    # TODO(mmoss): This is just a brute-force mapping based off the expected git
-    # config. It's a bit better than the even more brute-force replace('heads',
-    # ...), but could still be smarter (like maybe actually using values gleaned
-    # from the git config).
-    m = re.match('^(refs/(remotes/)?)?branch-heads/', ref or '')
-    if m:
-      return ('refs/remotes/branch-heads/', ref.replace(m.group(0), ''))
-
-    m = re.match('^((refs/)?remotes/)?%s/|(refs/)?heads/' % remote, ref or '')
-    if m:
-      return ('refs/remotes/%s/' % remote, ref.replace(m.group(0), ''))
-
-    return None
-
-  @staticmethod
-  def RemoteRefToRef(ref, remote):
-    assert remote, 'A remote must be given'
-    if not ref or not ref.startswith('refs/'):
-      return None
-    if not ref.startswith('refs/remotes/'):
-      return ref
-    if ref.startswith('refs/remotes/branch-heads/'):
-      return 'refs' + ref[len('refs/remotes'):]
-    if ref.startswith('refs/remotes/%s/' % remote):
-      return 'refs/heads' + ref[len('refs/remotes/%s' % remote):]
-    return None
-
-  @staticmethod
-  def GetUpstreamBranch(cwd):
-    """Gets the current branch's upstream branch."""
-    remote, upstream_branch = GIT.FetchUpstreamTuple(cwd)
-    if remote != '.' and upstream_branch:
-      remote_ref = GIT.RefToRemoteRef(upstream_branch, remote)
-      if remote_ref:
-        upstream_branch = ''.join(remote_ref)
-    return upstream_branch
-
-  @staticmethod
-  def IsAncestor(maybe_ancestor, ref, cwd=None):
-    # type: (string, string, Optional[string]) -> bool
-    """Verifies if |maybe_ancestor| is an ancestor of |ref|."""
-    try:
-      GIT.Capture(['merge-base', '--is-ancestor', maybe_ancestor, ref], cwd=cwd)
-      return True
-    except subprocess2.CalledProcessError:
-      return False
-
-  @staticmethod
-  def GetOldContents(cwd, filename, branch=None):
-    if not branch:
-      branch = GIT.GetUpstreamBranch(cwd)
-    if platform.system() == 'Windows':
-      # git show <sha>:<path> wants a posix path.
-      filename = filename.replace('\\', '/')
-    command = ['show', '%s:%s' % (branch, filename)]
-    try:
-      return GIT.Capture(command, cwd=cwd, strip_out=False)
-    except subprocess2.CalledProcessError:
-      return ''
+        # TODO(mmoss): This is just a brute-force mapping based off the expected
+        # git config. It's a bit better than the even more brute-force
+        # replace('heads', ...), but could still be smarter (like maybe actually
+        # using values gleaned from the git config).
+        m = re.match('^(refs/(remotes/)?)?branch-heads/', ref or '')
+        if m:
+            return ('refs/remotes/branch-heads/', ref.replace(m.group(0), ''))
 
-  @staticmethod
-  def GenerateDiff(cwd, branch=None, branch_head='HEAD', full_move=False,
-                   files=None):
-    """Diffs against the upstream branch or optionally another branch.
+        m = re.match('^((refs/)?remotes/)?%s/|(refs/)?heads/' % remote, ref
+                     or '')
+        if m:
+            return ('refs/remotes/%s/' % remote, ref.replace(m.group(0), ''))
+
+        return None
+
+    @staticmethod
+    def RemoteRefToRef(ref, remote):
+        assert remote, 'A remote must be given'
+        if not ref or not ref.startswith('refs/'):
+            return None
+        if not ref.startswith('refs/remotes/'):
+            return ref
+        if ref.startswith('refs/remotes/branch-heads/'):
+            return 'refs' + ref[len('refs/remotes'):]
+        if ref.startswith('refs/remotes/%s/' % remote):
+            return 'refs/heads' + ref[len('refs/remotes/%s' % remote):]
+        return None
+
+    @staticmethod
+    def GetUpstreamBranch(cwd):
+        """Gets the current branch's upstream branch."""
+        remote, upstream_branch = GIT.FetchUpstreamTuple(cwd)
+        if remote != '.' and upstream_branch:
+            remote_ref = GIT.RefToRemoteRef(upstream_branch, remote)
+            if remote_ref:
+                upstream_branch = ''.join(remote_ref)
+        return upstream_branch
+
+    @staticmethod
+    def IsAncestor(maybe_ancestor, ref, cwd=None):
+        # type: (string, string, Optional[string]) -> bool
+        """Verifies if |maybe_ancestor| is an ancestor of |ref|."""
+        try:
+            GIT.Capture(['merge-base', '--is-ancestor', maybe_ancestor, ref],
+                        cwd=cwd)
+            return True
+        except subprocess2.CalledProcessError:
+            return False
+
+    @staticmethod
+    def GetOldContents(cwd, filename, branch=None):
+        if not branch:
+            branch = GIT.GetUpstreamBranch(cwd)
+        if platform.system() == 'Windows':
+            # git show <sha>:<path> wants a posix path.
+            filename = filename.replace('\\', '/')
+        command = ['show', '%s:%s' % (branch, filename)]
+        try:
+            return GIT.Capture(command, cwd=cwd, strip_out=False)
+        except subprocess2.CalledProcessError:
+            return ''
+
+    @staticmethod
+    def GenerateDiff(cwd,
+                     branch=None,
+                     branch_head='HEAD',
+                     full_move=False,
+                     files=None):
+        """Diffs against the upstream branch or optionally another branch.
 
     full_move means that move or copy operations should completely recreate the
     files, usually in the prospect to apply the patch for a try job."""
-    if not branch:
-      branch = GIT.GetUpstreamBranch(cwd)
-    command = ['-c', 'core.quotePath=false', 'diff',
-               '-p', '--no-color', '--no-prefix', '--no-ext-diff',
-               branch + "..." + branch_head]
-    if full_move:
-      command.append('--no-renames')
-    else:
-      command.append('-C')
-    # TODO(maruel): --binary support.
-    if files:
-      command.append('--')
-      command.extend(files)
-    diff = GIT.Capture(command, cwd=cwd, strip_out=False).splitlines(True)
-    for i in range(len(diff)):
-      # In the case of added files, replace /dev/null with the path to the
-      # file being added.
-      if diff[i].startswith('--- /dev/null'):
-        diff[i] = '--- %s' % diff[i+1][4:]
-    return ''.join(diff)
-
-  @staticmethod
-  def GetDifferentFiles(cwd, branch=None, branch_head='HEAD'):
-    """Returns the list of modified files between two branches."""
-    if not branch:
-      branch = GIT.GetUpstreamBranch(cwd)
-    command = ['-c', 'core.quotePath=false', 'diff',
-               '--name-only', branch + "..." + branch_head]
-    return GIT.Capture(command, cwd=cwd).splitlines(False)
-
-  @staticmethod
-  def GetAllFiles(cwd):
-    """Returns the list of all files under revision control."""
-    command = ['-c', 'core.quotePath=false', 'ls-files', '-s', '--', '.']
-    files = GIT.Capture(command, cwd=cwd).splitlines(False)
-    # return only files
-    return [f.split(maxsplit=3)[-1] for f in files if f.startswith('100')]
-
-  @staticmethod
-  def GetSubmoduleCommits(cwd, submodules):
-    # type: (string, List[string]) => Mapping[string][string]
-    """Returns a mapping of staged or committed new commits for submodules."""
-    if not submodules:
-      return {}
-    result = subprocess2.check_output(['git', 'ls-files', '-s', '--'] +
-                                      submodules,
-                                      cwd=cwd).decode('utf-8')
-    commit_hashes = {}
-    for r in result.splitlines():
-      # ['<mode>', '<commit_hash>', '<stage_number>', '<path>'].
-      record = r.strip().split(maxsplit=3)  # path can contain spaces.
-      assert record[0] == '160000', 'file is not a gitlink: %s' % record
-      commit_hashes[record[3]] = record[1]
-    return commit_hashes
-
-  @staticmethod
-  def GetPatchName(cwd):
-    """Constructs a name for this patch."""
-    short_sha = GIT.Capture(['rev-parse', '--short=4', 'HEAD'], cwd=cwd)
-    return "%s#%s" % (GIT.GetBranch(cwd), short_sha)
-
-  @staticmethod
-  def GetCheckoutRoot(cwd):
-    """Returns the top level directory of a git checkout as an absolute path.
+        if not branch:
+            branch = GIT.GetUpstreamBranch(cwd)
+        command = [
+            '-c', 'core.quotePath=false', 'diff', '-p', '--no-color',
+            '--no-prefix', '--no-ext-diff', branch + "..." + branch_head
+        ]
+        if full_move:
+            command.append('--no-renames')
+        else:
+            command.append('-C')
+        # TODO(maruel): --binary support.
+        if files:
+            command.append('--')
+            command.extend(files)
+        diff = GIT.Capture(command, cwd=cwd, strip_out=False).splitlines(True)
+        for i in range(len(diff)):
+            # In the case of added files, replace /dev/null with the path to the
+            # file being added.
+            if diff[i].startswith('--- /dev/null'):
+                diff[i] = '--- %s' % diff[i + 1][4:]
+        return ''.join(diff)
+
+    @staticmethod
+    def GetDifferentFiles(cwd, branch=None, branch_head='HEAD'):
+        """Returns the list of modified files between two branches."""
+        if not branch:
+            branch = GIT.GetUpstreamBranch(cwd)
+        command = [
+            '-c', 'core.quotePath=false', 'diff', '--name-only',
+            branch + "..." + branch_head
+        ]
+        return GIT.Capture(command, cwd=cwd).splitlines(False)
+
+    @staticmethod
+    def GetAllFiles(cwd):
+        """Returns the list of all files under revision control."""
+        command = ['-c', 'core.quotePath=false', 'ls-files', '-s', '--', '.']
+        files = GIT.Capture(command, cwd=cwd).splitlines(False)
+        # return only files
+        return [f.split(maxsplit=3)[-1] for f in files if f.startswith('100')]
+
+    @staticmethod
+    def GetSubmoduleCommits(cwd, submodules):
+        # type: (string, List[string]) => Mapping[string][string]
+        """Returns a mapping of staged or committed new commits for submodules."""
+        if not submodules:
+            return {}
+        result = subprocess2.check_output(['git', 'ls-files', '-s', '--'] +
+                                          submodules,
+                                          cwd=cwd).decode('utf-8')
+        commit_hashes = {}
+        for r in result.splitlines():
+            # ['<mode>', '<commit_hash>', '<stage_number>', '<path>'].
+            record = r.strip().split(maxsplit=3)  # path can contain spaces.
+            assert record[0] == '160000', 'file is not a gitlink: %s' % record
+            commit_hashes[record[3]] = record[1]
+        return commit_hashes
+
+    @staticmethod
+    def GetPatchName(cwd):
+        """Constructs a name for this patch."""
+        short_sha = GIT.Capture(['rev-parse', '--short=4', 'HEAD'], cwd=cwd)
+        return "%s#%s" % (GIT.GetBranch(cwd), short_sha)
+
+    @staticmethod
+    def GetCheckoutRoot(cwd):
+        """Returns the top level directory of a git checkout as an absolute path.
     """
-    root = GIT.Capture(['rev-parse', '--show-cdup'], cwd=cwd)
-    return os.path.abspath(os.path.join(cwd, root))
-
-  @staticmethod
-  def GetGitDir(cwd):
-    return os.path.abspath(GIT.Capture(['rev-parse', '--git-dir'], cwd=cwd))
-
-  @staticmethod
-  def IsInsideWorkTree(cwd):
-    try:
-      return GIT.Capture(['rev-parse', '--is-inside-work-tree'], cwd=cwd)
-    except (OSError, subprocess2.CalledProcessError):
-      return False
-
-  @staticmethod
-  def IsDirectoryVersioned(cwd, relative_dir):
-    """Checks whether the given |relative_dir| is part of cwd's repo."""
-    return bool(GIT.Capture(['ls-tree', 'HEAD', relative_dir], cwd=cwd))
-
-  @staticmethod
-  def CleanupDir(cwd, relative_dir):
-    """Cleans up untracked file inside |relative_dir|."""
-    return bool(GIT.Capture(['clean', '-df', relative_dir], cwd=cwd))
-
-  @staticmethod
-  def ResolveCommit(cwd, rev):
-    # We do this instead of rev-parse --verify rev^{commit}, since on Windows
-    # git can be either an executable or batch script, each of which requires
-    # escaping the caret (^) a different way.
-    if gclient_utils.IsFullGitSha(rev):
-      # git-rev parse --verify FULL_GIT_SHA always succeeds, even if we don't
-      # have FULL_GIT_SHA locally. Removing the last character forces git to
-      # check if FULL_GIT_SHA refers to an object in the local database.
-      rev = rev[:-1]
-    try:
-      return GIT.Capture(['rev-parse', '--quiet', '--verify', rev], cwd=cwd)
-    except subprocess2.CalledProcessError:
-      return None
-
-  @staticmethod
-  def IsValidRevision(cwd, rev, sha_only=False):
-    """Verifies the revision is a proper git revision.
+        root = GIT.Capture(['rev-parse', '--show-cdup'], cwd=cwd)
+        return os.path.abspath(os.path.join(cwd, root))
+
+    @staticmethod
+    def GetGitDir(cwd):
+        return os.path.abspath(GIT.Capture(['rev-parse', '--git-dir'], cwd=cwd))
+
+    @staticmethod
+    def IsInsideWorkTree(cwd):
+        try:
+            return GIT.Capture(['rev-parse', '--is-inside-work-tree'], cwd=cwd)
+        except (OSError, subprocess2.CalledProcessError):
+            return False
+
+    @staticmethod
+    def IsDirectoryVersioned(cwd, relative_dir):
+        """Checks whether the given |relative_dir| is part of cwd's repo."""
+        return bool(GIT.Capture(['ls-tree', 'HEAD', relative_dir], cwd=cwd))
+
+    @staticmethod
+    def CleanupDir(cwd, relative_dir):
+        """Cleans up untracked file inside |relative_dir|."""
+        return bool(GIT.Capture(['clean', '-df', relative_dir], cwd=cwd))
+
+    @staticmethod
+    def ResolveCommit(cwd, rev):
+        # We do this instead of rev-parse --verify rev^{commit}, since on
+        # Windows git can be either an executable or batch script, each of which
+        # requires escaping the caret (^) a different way.
+        if gclient_utils.IsFullGitSha(rev):
+            # git rev-parse --verify FULL_GIT_SHA always succeeds, even if we
+            # don't have FULL_GIT_SHA locally. Removing the last character
+            # forces git to check if FULL_GIT_SHA refers to an object in the
+            # local database.
+            rev = rev[:-1]
+        try:
+            return GIT.Capture(['rev-parse', '--quiet', '--verify', rev],
+                               cwd=cwd)
+        except subprocess2.CalledProcessError:
+            return None
+
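To make the comment above concrete, here is a hedged sketch of the same resolution logic using plain subprocess (the repository path and revision are assumptions): a full 40-character SHA always passes `rev-parse --verify`, so the last hex digit is dropped to force an actual object-database lookup.

    import subprocess

    def resolve_commit(cwd, rev):
        # Sketch of GIT.ResolveCommit: shortening a full SHA by one character
        # makes git look the object up instead of trusting the literal value.
        if len(rev) == 40 and all(c in '0123456789abcdefABCDEF' for c in rev):
            rev = rev[:-1]
        try:
            out = subprocess.check_output(
                ['git', 'rev-parse', '--quiet', '--verify', rev], cwd=cwd)
            return out.decode().strip()
        except subprocess.CalledProcessError:
            return None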
+    @staticmethod
+    def IsValidRevision(cwd, rev, sha_only=False):
+        """Verifies the revision is a proper git revision.
 
     sha_only: Fail unless rev is a sha hash.
     """
-    sha = GIT.ResolveCommit(cwd, rev)
-    if sha is None:
-      return False
-    if sha_only:
-      return sha == rev.lower()
-    return True
-
-  @classmethod
-  def AssertVersion(cls, min_version):
-    """Asserts git's version is at least min_version."""
-    if cls.current_version is None:
-      current_version = cls.Capture(['--version'], '.')
-      matched = re.search(r'git version (.+)', current_version)
-      cls.current_version = distutils.version.LooseVersion(matched.group(1))
-    min_version = distutils.version.LooseVersion(min_version)
-    return (min_version <= cls.current_version, cls.current_version)
+        sha = GIT.ResolveCommit(cwd, rev)
+        if sha is None:
+            return False
+        if sha_only:
+            return sha == rev.lower()
+        return True
+
+    @classmethod
+    def AssertVersion(cls, min_version):
+        """Asserts git's version is at least min_version."""
+        if cls.current_version is None:
+            current_version = cls.Capture(['--version'], '.')
+            matched = re.search(r'git version (.+)', current_version)
+            cls.current_version = distutils.version.LooseVersion(
+                matched.group(1))
+        min_version = distutils.version.LooseVersion(min_version)
+        return (min_version <= cls.current_version, cls.current_version)
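A quick illustration of the version comparison performed by AssertVersion (the `git --version` output below is hypothetical). Note that distutils.version is deprecated on Python 3.10+ and removed in 3.12; this mirrors the existing code rather than recommending it.

    import re
    import distutils.version  # deprecated in Python 3.10+, removed in 3.12

    output = 'git version 2.42.0'  # hypothetical `git --version` output
    current = distutils.version.LooseVersion(
        re.search(r'git version (.+)', output).group(1))

    print(distutils.version.LooseVersion('1.8') <= current)     # True
    print(distutils.version.LooseVersion('2.50.0') <= current)  # False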

+ 102 - 97
setup_color.py

@@ -16,115 +16,120 @@ OUT_TYPE = 'unknown'
 
 
 def enable_native_ansi():
-  """Enables native ANSI sequences in console. Windows 10 only.
+    """Enables native ANSI sequences in console. Windows 10 only.
 
   Returns whether successful.
   """
-  kernel32 = ctypes.windll.kernel32
-  ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x04
+    kernel32 = ctypes.windll.kernel32
+    ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x04
 
-  out_handle = kernel32.GetStdHandle(subprocess.STD_OUTPUT_HANDLE)
+    out_handle = kernel32.GetStdHandle(subprocess.STD_OUTPUT_HANDLE)
 
-  # GetConsoleMode fails if the terminal isn't native.
-  mode = ctypes.wintypes.DWORD()
-  if kernel32.GetConsoleMode(out_handle, ctypes.byref(mode)) == 0:
-    return False
+    # GetConsoleMode fails if the terminal isn't native.
+    mode = ctypes.wintypes.DWORD()
+    if kernel32.GetConsoleMode(out_handle, ctypes.byref(mode)) == 0:
+        return False
 
-  if not (mode.value & ENABLE_VIRTUAL_TERMINAL_PROCESSING):
-    if kernel32.SetConsoleMode(
-        out_handle, mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING) == 0:
-      print(
-          'kernel32.SetConsoleMode to enable ANSI sequences failed',
-          file=sys.stderr)
-      return False
+    if not (mode.value & ENABLE_VIRTUAL_TERMINAL_PROCESSING):
+        if kernel32.SetConsoleMode(
+                out_handle,
+                mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING) == 0:
+            print('kernel32.SetConsoleMode to enable ANSI sequences failed',
+                  file=sys.stderr)
+            return False
 
-  return True
+    return True
 
 
 def init():
-  # should_wrap instructs colorama to wrap stdout/stderr with an ANSI colorcode
-  # interpreter that converts them to SetConsoleTextAttribute calls. This only
-  # should be True in cases where we're connected to cmd.exe's console. Setting
-  # this to True on non-windows systems has no effect.
-  should_wrap = False
-  global IS_TTY, OUT_TYPE
-  IS_TTY = sys.stdout.isatty()
-  is_windows = sys.platform.startswith('win')
-  if IS_TTY:
-    # Yay! We detected a console in the normal way. It doesn't really matter
-    # if it's windows or not, we win.
-    OUT_TYPE = 'console'
-    should_wrap = True
-  elif is_windows:
-    # assume this is some sort of file
-    OUT_TYPE = 'file (win)'
-
-    import msvcrt
-    h = msvcrt.get_osfhandle(sys.stdout.fileno())
-    # h is the win32 HANDLE for stdout.
-    ftype = ctypes.windll.kernel32.GetFileType(h)
-    if ftype == 2: # FILE_TYPE_CHAR
-      # This is a normal cmd console, but we'll only get here if we're running
-      # inside a `git command` which is actually git->bash->command. Not sure
-      # why isatty doesn't detect this case.
-      OUT_TYPE = 'console (cmd via msys)'
-      IS_TTY = True
-      should_wrap = True
-    elif ftype == 3: # FILE_TYPE_PIPE
-      OUT_TYPE = 'pipe (win)'
-      # This is some kind of pipe on windows. This could either be a real pipe
-      # or this could be msys using a pipe to emulate a pty. We use the same
-      # algorithm that msys-git uses to determine if it's connected to a pty or
-      # not.
-
-      # This function and the structures are defined in the MSDN documentation
-      # using the same names.
-      def NT_SUCCESS(status):
-        # The first two bits of status are the severity. The success
-        # severities are 0 and 1, and the !success severities are 2 and 3.
-        # Therefore since ctypes interprets the default restype of the call
-        # to be an 'C int' (which is guaranteed to be signed 32 bits), All
-        # success codes are positive, and all !success codes are negative.
-        return status >= 0
-
-      class UNICODE_STRING(ctypes.Structure):
-        _fields_ = [('Length', ctypes.c_ushort),
-                    ('MaximumLength', ctypes.c_ushort),
-                    ('Buffer', ctypes.c_wchar_p)]
-
-      class OBJECT_NAME_INFORMATION(ctypes.Structure):
-        _fields_ = [('Name', UNICODE_STRING),
-                    ('NameBuffer', ctypes.c_wchar_p)]
-
-      buf = ctypes.create_string_buffer(1024)
-      # Ask NT what the name of the object our stdout HANDLE is. It would be
-      # possible to use GetFileInformationByHandleEx, but it's only available
-      # on Vista+. If you're reading this in 2017 or later, feel free to
-      # refactor this out.
-      #
-      # The '1' here is ObjectNameInformation
-      if NT_SUCCESS(ctypes.windll.ntdll.NtQueryObject(h, 1, buf, len(buf)-2,
-                    None)):
-        out = OBJECT_NAME_INFORMATION.from_buffer(buf)
-        name = out.Name.Buffer.split('\\')[-1]
-        IS_TTY = name.startswith('msys-') and '-pty' in name
-        if IS_TTY:
-          OUT_TYPE = 'bash (msys)'
+    # should_wrap instructs colorama to wrap stdout/stderr with an ANSI
+    # color-code interpreter that converts them to SetConsoleTextAttribute
+    # calls. This should only be True in cases where we're connected to
+    # cmd.exe's console. Setting this to True on non-Windows systems has no
+    # effect.
+    should_wrap = False
+    global IS_TTY, OUT_TYPE
+    IS_TTY = sys.stdout.isatty()
+    is_windows = sys.platform.startswith('win')
+    if IS_TTY:
+        # Yay! We detected a console in the normal way. It doesn't really matter
+        # if it's windows or not, we win.
+        OUT_TYPE = 'console'
+        should_wrap = True
+    elif is_windows:
+        # assume this is some sort of file
+        OUT_TYPE = 'file (win)'
+
+        import msvcrt
+        h = msvcrt.get_osfhandle(sys.stdout.fileno())
+        # h is the win32 HANDLE for stdout.
+        ftype = ctypes.windll.kernel32.GetFileType(h)
+        if ftype == 2:  # FILE_TYPE_CHAR
+            # This is a normal cmd console, but we'll only get here if we're
+            # running inside a `git command` which is actually
+            # git->bash->command. Not sure why isatty doesn't detect this case.
+            OUT_TYPE = 'console (cmd via msys)'
+            IS_TTY = True
+            should_wrap = True
+        elif ftype == 3:  # FILE_TYPE_PIPE
+            OUT_TYPE = 'pipe (win)'
+
+            # This is some kind of pipe on windows. This could either be a real
+            # pipe or this could be msys using a pipe to emulate a pty. We use
+            # the same algorithm that msys-git uses to determine if it's
+            # connected to a pty or not.
+
+            # This function and the structures are defined in the MSDN
+            # documentation using the same names.
+            def NT_SUCCESS(status):
+                # The first two bits of status are the severity. The success
+                # severities are 0 and 1, and the !success severities are 2 and
+                # 3. Therefore, since ctypes interprets the default restype of
+                # the call to be a 'C int' (which is guaranteed to be signed 32
+                # bits), all success codes are positive, and all !success codes
+                # are negative.
+                return status >= 0
+
+            class UNICODE_STRING(ctypes.Structure):
+                _fields_ = [('Length', ctypes.c_ushort),
+                            ('MaximumLength', ctypes.c_ushort),
+                            ('Buffer', ctypes.c_wchar_p)]
+
+            class OBJECT_NAME_INFORMATION(ctypes.Structure):
+                _fields_ = [('Name', UNICODE_STRING),
+                            ('NameBuffer', ctypes.c_wchar_p)]
+
+            buf = ctypes.create_string_buffer(1024)
+            # Ask NT what the name of the object our stdout HANDLE is. It would
+            # be possible to use GetFileInformationByHandleEx, but it's only
+            # available on Vista+. If you're reading this in 2017 or later, feel
+            # free to refactor this out.
+            #
+            # The '1' here is ObjectNameInformation
+            if NT_SUCCESS(
+                    ctypes.windll.ntdll.NtQueryObject(h, 1, buf,
+                                                      len(buf) - 2, None)):
+                out = OBJECT_NAME_INFORMATION.from_buffer(buf)
+                name = out.Name.Buffer.split('\\')[-1]
+                IS_TTY = name.startswith('msys-') and '-pty' in name
+                if IS_TTY:
+                    OUT_TYPE = 'bash (msys)'
+        else:
+            # A normal file, or an unknown file type.
+            pass
     else:
-      # A normal file, or an unknown file type.
-      pass
-  else:
-    # This is non-windows, so we trust isatty.
-    OUT_TYPE = 'pipe or file'
+        # This is non-windows, so we trust isatty.
+        OUT_TYPE = 'pipe or file'
 
-  if IS_TTY and is_windows:
-    # Wrapping may cause errors on some Windows versions (crbug.com/1114548).
-    if platform.release() != '10' or enable_native_ansi():
-      should_wrap = False
+    if IS_TTY and is_windows:
+        # Wrapping may cause errors on some Windows versions
+        # (crbug.com/1114548).
+        if platform.release() != '10' or enable_native_ansi():
+            should_wrap = False
+
+    colorama.init(wrap=should_wrap)
 
-  colorama.init(wrap=should_wrap)
 
 if __name__ == '__main__':
-  init()
-  print('IS_TTY:', IS_TTY)
-  print('OUT_TYPE:', OUT_TYPE)
+    init()
+    print('IS_TTY:', IS_TTY)
+    print('OUT_TYPE:', OUT_TYPE)
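To make the pipe-name heuristic in init() above concrete, a small sketch with made-up NT object names (the exact msys name format is an assumption inferred from the check itself):

    # Hypothetical NT object names for stdout; real ones come from NtQueryObject.
    candidates = [
        '\\Device\\NamedPipe\\msys-1888ae32e00d56aa-pty0-to-master',
        '\\Device\\NamedPipe\\some-build-tool-pipe',
    ]

    for full_name in candidates:
        name = full_name.split('\\')[-1]
        is_tty = name.startswith('msys-') and '-pty' in name
        print(name, '->', 'bash (msys)' if is_tty else 'pipe (win)')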

+ 60 - 56
siso.py

@@ -15,68 +15,72 @@ import gclient_paths
 
 
 def main(args):
-  # On Windows the siso.bat script passes along the arguments enclosed in
-  # double quotes. This prevents multiple levels of parsing of the special '^'
-  # characters needed when compiling a single file.  When this case is detected,
-  # we need to split the argument. This means that arguments containing actual
-  # spaces are not supported by siso.bat, but that is not a real limitation.
-  if sys.platform.startswith('win') and len(args) == 2:
-    args = args[:1] + args[1].split()
+    # On Windows the siso.bat script passes along the arguments enclosed in
+    # double quotes. This prevents multiple levels of parsing of the special '^'
+    # characters needed when compiling a single file.  When this case is
+    # detected, we need to split the argument. This means that arguments
+    # containing actual spaces are not supported by siso.bat, but that is not a
+    # real limitation.
+    if sys.platform.startswith('win') and len(args) == 2:
+        args = args[:1] + args[1].split()
 
-  # macOS's python sets CPATH, LIBRARY_PATH, SDKROOT implicitly.
-  # https://openradar.appspot.com/radar?id=5608755232243712
-  #
-  # Removing those environment variables to avoid affecting clang's behaviors.
-  if sys.platform == 'darwin':
-    os.environ.pop("CPATH", None)
-    os.environ.pop("LIBRARY_PATH", None)
-    os.environ.pop("SDKROOT", None)
+    # macOS's python sets CPATH, LIBRARY_PATH, SDKROOT implicitly.
+    # https://openradar.appspot.com/radar?id=5608755232243712
+    #
+    # Removing those environment variables to avoid affecting clang's behaviors.
+    if sys.platform == 'darwin':
+        os.environ.pop("CPATH", None)
+        os.environ.pop("LIBRARY_PATH", None)
+        os.environ.pop("SDKROOT", None)
 
-  environ = os.environ.copy()
+    environ = os.environ.copy()
 
-  # Get gclient root + src.
-  primary_solution_path = gclient_paths.GetPrimarySolutionPath()
-  gclient_root_path = gclient_paths.FindGclientRoot(os.getcwd())
-  gclient_src_root_path = None
-  if gclient_root_path:
-    gclient_src_root_path = os.path.join(gclient_root_path, 'src')
+    # Get gclient root + src.
+    primary_solution_path = gclient_paths.GetPrimarySolutionPath()
+    gclient_root_path = gclient_paths.FindGclientRoot(os.getcwd())
+    gclient_src_root_path = None
+    if gclient_root_path:
+        gclient_src_root_path = os.path.join(gclient_root_path, 'src')
 
-  siso_override_path = os.environ.get('SISO_PATH')
-  if siso_override_path:
-    print('depot_tools/siso.py: Using Siso binary from SISO_PATH: %s.' %
-          siso_override_path)
-    if not os.path.isfile(siso_override_path):
-      print('depot_tools/siso.py: Could not find Siso at provided SISO_PATH.',
-            file=sys.stderr)
-      return 1
+    siso_override_path = os.environ.get('SISO_PATH')
+    if siso_override_path:
+        print('depot_tools/siso.py: Using Siso binary from SISO_PATH: %s.' %
+              siso_override_path)
+        if not os.path.isfile(siso_override_path):
+            print(
+                'depot_tools/siso.py: Could not find Siso at provided '
+                'SISO_PATH.',
+                file=sys.stderr)
+            return 1
 
-  for base_path in set(
-      [primary_solution_path, gclient_root_path, gclient_src_root_path]):
-    if not base_path:
-      continue
-    env = environ.copy()
-    sisoenv_path = os.path.join(base_path, 'build', 'config', 'siso',
-                                '.sisoenv')
-    if not os.path.exists(sisoenv_path):
-      continue
-    with open(sisoenv_path) as f:
-      for line in f.readlines():
-        k, v = line.rstrip().split('=', 1)
-        env[k] = v
-    siso_path = siso_override_path or os.path.join(
-        base_path, 'third_party', 'siso', 'siso' + gclient_paths.GetExeSuffix())
-    if os.path.isfile(siso_path):
-      return subprocess.call([siso_path] + args[1:], env=env)
+    for base_path in set(
+        [primary_solution_path, gclient_root_path, gclient_src_root_path]):
+        if not base_path:
+            continue
+        env = environ.copy()
+        sisoenv_path = os.path.join(base_path, 'build', 'config', 'siso',
+                                    '.sisoenv')
+        if not os.path.exists(sisoenv_path):
+            continue
+        with open(sisoenv_path) as f:
+            for line in f.readlines():
+                k, v = line.rstrip().split('=', 1)
+                env[k] = v
+        siso_path = siso_override_path or os.path.join(
+            base_path, 'third_party', 'siso',
+            'siso' + gclient_paths.GetExeSuffix())
+        if os.path.isfile(siso_path):
+            return subprocess.call([siso_path] + args[1:], env=env)
 
-  print(
-      'depot_tools/siso.py: Could not find .sisoenv under build/config/siso of '
-      'the current project. Did you run gclient sync?',
-      file=sys.stderr)
-  return 1
+    print(
+        'depot_tools/siso.py: Could not find .sisoenv under build/config/siso '
+        'of the current project. Did you run gclient sync?',
+        file=sys.stderr)
+    return 1
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv))
-  except KeyboardInterrupt:
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv))
+    except KeyboardInterrupt:
+        sys.exit(1)
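A hedged sketch of the .sisoenv parsing loop in main() above; the variable names in the sample are made up, while the real file lives at build/config/siso/.sisoenv under the solution root.

    import os

    # Hypothetical .sisoenv contents.
    sisoenv_text = 'SISO_PROJECT=example-project\nSISO_REAPI_INSTANCE=default\n'

    env = os.environ.copy()
    for line in sisoenv_text.splitlines():
        k, v = line.rstrip().split('=', 1)
        env[k] = v

    print(env['SISO_PROJECT'])  # example-project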

+ 223 - 221
split_cl.py

@@ -2,7 +2,6 @@
 # Copyright 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Splits a branch into smaller branches and uploads CLs."""
 
 from __future__ import print_function
@@ -19,7 +18,6 @@ import scm
 
 import git_common as git
 
-
 # If a call to `git cl split` will generate more than this number of CLs, the
 # command will prompt the user to make sure they know what they're doing. Large
 # numbers of CLs generated by `git cl split` have caused infrastructure issues
@@ -36,62 +34,62 @@ FilesAndOwnersDirectory = collections.namedtuple("FilesAndOwnersDirectory",
 
 
 def EnsureInGitRepository():
-  """Throws an exception if the current directory is not a git repository."""
-  git.run('rev-parse')
+    """Throws an exception if the current directory is not a git repository."""
+    git.run('rev-parse')
 
 
 def CreateBranchForDirectories(prefix, directories, upstream):
-  """Creates a branch named |prefix| + "_" + |directories[0]| + "_split".
+    """Creates a branch named |prefix| + "_" + |directories[0]| + "_split".
 
   Return false if the branch already exists. |upstream| is used as upstream for
   the created branch.
   """
-  existing_branches = set(git.branches(use_limit = False))
-  branch_name = prefix + '_' + directories[0] + '_split'
-  if branch_name in existing_branches:
-    return False
-  git.run('checkout', '-t', upstream, '-b', branch_name)
-  return True
+    existing_branches = set(git.branches(use_limit=False))
+    branch_name = prefix + '_' + directories[0] + '_split'
+    if branch_name in existing_branches:
+        return False
+    git.run('checkout', '-t', upstream, '-b', branch_name)
+    return True
 
 
 def FormatDirectoriesForPrinting(directories, prefix=None):
-  """Formats directory list for printing
+    """Formats directory list for printing
 
   Uses dedicated format for single-item list."""
 
-  prefixed = directories
-  if prefix:
-    prefixed = [(prefix + d) for d in directories]
+    prefixed = directories
+    if prefix:
+        prefixed = [(prefix + d) for d in directories]
 
-  return str(prefixed) if len(prefixed) > 1 else str(prefixed[0])
+    return str(prefixed) if len(prefixed) > 1 else str(prefixed[0])
 
 
 def FormatDescriptionOrComment(txt, directories):
-  """Replaces $directory with |directories| in |txt|."""
-  to_insert = FormatDirectoriesForPrinting(directories, prefix='/')
-  return txt.replace('$directory', to_insert)
+    """Replaces $directory with |directories| in |txt|."""
+    to_insert = FormatDirectoriesForPrinting(directories, prefix='/')
+    return txt.replace('$directory', to_insert)
 
 
 def AddUploadedByGitClSplitToDescription(description):
-  """Adds a 'This CL was uploaded by git cl split.' line to |description|.
+    """Adds a 'This CL was uploaded by git cl split.' line to |description|.
 
   The line is added before footers, or at the end of |description| if it has no
   footers.
   """
-  split_footers = git_footers.split_footers(description)
-  lines = split_footers[0]
-  if lines[-1] and not lines[-1].isspace():
-    lines = lines + ['']
-  lines = lines + ['This CL was uploaded by git cl split.']
-  if split_footers[1]:
-    lines += [''] + split_footers[1]
-  return '\n'.join(lines)
+    split_footers = git_footers.split_footers(description)
+    lines = split_footers[0]
+    if lines[-1] and not lines[-1].isspace():
+        lines = lines + ['']
+    lines = lines + ['This CL was uploaded by git cl split.']
+    if split_footers[1]:
+        lines += [''] + split_footers[1]
+    return '\n'.join(lines)
 
 
 def UploadCl(refactor_branch, refactor_branch_upstream, directories, files,
              description, comment, reviewers, changelist, cmd_upload,
              cq_dry_run, enable_auto_submit, topic, repository_root):
-  """Uploads a CL with all changes to |files| in |refactor_branch|.
+    """Uploads a CL with all changes to |files| in |refactor_branch|.
 
   Args:
     refactor_branch: Name of the branch that contains the changes to upload.
@@ -108,89 +106,92 @@ def UploadCl(refactor_branch, refactor_branch_upstream, directories, files,
     enable_auto_submit: If CL uploads should also enable auto submit.
     topic: Topic to associate with uploaded CLs.
   """
-  # Create a branch.
-  if not CreateBranchForDirectories(refactor_branch, directories,
-                                    refactor_branch_upstream):
-    print('Skipping ' + FormatDirectoriesForPrinting(directories) +
-          ' for which a branch already exists.')
-    return
-
-  # Checkout all changes to files in |files|.
-  deleted_files = []
-  modified_files = []
-  for action, f in files:
-    abspath = os.path.abspath(os.path.join(repository_root, f))
-    if action == 'D':
-      deleted_files.append(abspath)
-    else:
-      modified_files.append(abspath)
-
-  if deleted_files:
-    git.run(*['rm'] + deleted_files)
-  if modified_files:
-    git.run(*['checkout', refactor_branch, '--'] + modified_files)
-
-  # Commit changes. The temporary file is created with delete=False so that it
-  # can be deleted manually after git has read it rather than automatically
-  # when it is closed.
-  with gclient_utils.temporary_file() as tmp_file:
-    gclient_utils.FileWrite(
-        tmp_file, FormatDescriptionOrComment(description, directories))
-    git.run('commit', '-F', tmp_file)
-
-  # Upload a CL.
-  upload_args = ['-f']
-  if reviewers:
-    upload_args.extend(['-r', ','.join(sorted(reviewers))])
-  if cq_dry_run:
-    upload_args.append('--cq-dry-run')
-  if not comment:
-    upload_args.append('--send-mail')
-  if enable_auto_submit:
-    upload_args.append('--enable-auto-submit')
-  if topic:
-    upload_args.append('--topic={}'.format(topic))
-  print('Uploading CL for ' + FormatDirectoriesForPrinting(directories) + '...')
-
-  ret = cmd_upload(upload_args)
-  if ret != 0:
-    print('Uploading failed.')
-    print('Note: git cl split has built-in resume capabilities.')
-    print('Delete ' + git.current_branch() +
-          ' then run git cl split again to resume uploading.')
-
-  if comment:
-    changelist().AddComment(FormatDescriptionOrComment(comment, directories),
-                            publish=True)
+    # Create a branch.
+    if not CreateBranchForDirectories(refactor_branch, directories,
+                                      refactor_branch_upstream):
+        print('Skipping ' + FormatDirectoriesForPrinting(directories) +
+              ' for which a branch already exists.')
+        return
+
+    # Checkout all changes to files in |files|.
+    deleted_files = []
+    modified_files = []
+    for action, f in files:
+        abspath = os.path.abspath(os.path.join(repository_root, f))
+        if action == 'D':
+            deleted_files.append(abspath)
+        else:
+            modified_files.append(abspath)
+
+    if deleted_files:
+        git.run(*['rm'] + deleted_files)
+    if modified_files:
+        git.run(*['checkout', refactor_branch, '--'] + modified_files)
+
+    # Commit changes. The temporary file is created with delete=False so that it
+    # can be deleted manually after git has read it rather than automatically
+    # when it is closed.
+    with gclient_utils.temporary_file() as tmp_file:
+        gclient_utils.FileWrite(
+            tmp_file, FormatDescriptionOrComment(description, directories))
+        git.run('commit', '-F', tmp_file)
+
+    # Upload a CL.
+    upload_args = ['-f']
+    if reviewers:
+        upload_args.extend(['-r', ','.join(sorted(reviewers))])
+    if cq_dry_run:
+        upload_args.append('--cq-dry-run')
+    if not comment:
+        upload_args.append('--send-mail')
+    if enable_auto_submit:
+        upload_args.append('--enable-auto-submit')
+    if topic:
+        upload_args.append('--topic={}'.format(topic))
+    print('Uploading CL for ' + FormatDirectoriesForPrinting(directories) +
+          '...')
+
+    ret = cmd_upload(upload_args)
+    if ret != 0:
+        print('Uploading failed.')
+        print('Note: git cl split has built-in resume capabilities.')
+        print('Delete ' + git.current_branch() +
+              ' then run git cl split again to resume uploading.')
+
+    if comment:
+        changelist().AddComment(FormatDescriptionOrComment(
+            comment, directories),
+                                publish=True)
 
 
 def GetFilesSplitByOwners(files, max_depth):
-  """Returns a map of files split by OWNERS file.
+    """Returns a map of files split by OWNERS file.
 
   Returns:
     A map where keys are paths to directories containing an OWNERS file and
     values are lists of files sharing an OWNERS file.
   """
-  files_split_by_owners = {}
-  for action, path in files:
-    # normpath() is important to normalize separators here, in prepration for
-    # str.split() before. It would be nicer to use something like pathlib here
-    # but alas...
-    dir_with_owners = os.path.normpath(os.path.dirname(path))
-    if max_depth >= 1:
-      dir_with_owners = os.path.join(
-          *dir_with_owners.split(os.path.sep)[:max_depth])
-    # Find the closest parent directory with an OWNERS file.
-    while (dir_with_owners not in files_split_by_owners
-           and not os.path.isfile(os.path.join(dir_with_owners, 'OWNERS'))):
-      dir_with_owners = os.path.dirname(dir_with_owners)
-    files_split_by_owners.setdefault(dir_with_owners, []).append((action, path))
-  return files_split_by_owners
+    files_split_by_owners = {}
+    for action, path in files:
+        # normpath() is important to normalize separators here, in preparation
+        # for the str.split() below. It would be nicer to use something like
+        # pathlib here but alas...
+        dir_with_owners = os.path.normpath(os.path.dirname(path))
+        if max_depth >= 1:
+            dir_with_owners = os.path.join(
+                *dir_with_owners.split(os.path.sep)[:max_depth])
+        # Find the closest parent directory with an OWNERS file.
+        while (dir_with_owners not in files_split_by_owners
+               and not os.path.isfile(os.path.join(dir_with_owners, 'OWNERS'))):
+            dir_with_owners = os.path.dirname(dir_with_owners)
+        files_split_by_owners.setdefault(dir_with_owners, []).append(
+            (action, path))
+    return files_split_by_owners
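A self-contained sketch of the "closest parent directory with an OWNERS file" walk in GetFilesSplitByOwners, using a made-up set of OWNERS directories instead of touching the real filesystem:

    import os

    # Directories that (hypothetically) contain an OWNERS file; '' is the root.
    dirs_with_owners = {'chrome/browser', 'components', ''}

    def owners_dir_for(path):
        """Walks up from the file's directory to the nearest OWNERS directory."""
        d = os.path.normpath(os.path.dirname(path)).replace(os.path.sep, '/')
        if d == '.':
            d = ''
        while d not in dirs_with_owners:
            d = os.path.dirname(d)
        return d

    print(owners_dir_for('chrome/browser/ui/views/widget.cc'))  # chrome/browser
    print(owners_dir_for('components/sync/model/entry.cc'))     # components
    print(owners_dir_for('BUILD.gn'))                           # '' (root)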
 
 
 def PrintClInfo(cl_index, num_cls, directories, file_paths, description,
                 reviewers, enable_auto_submit, topic):
-  """Prints info about a CL.
+    """Prints info about a CL.
 
   Args:
     cl_index: The index of this CL in the list of CLs to upload.
@@ -203,23 +204,23 @@ def PrintClInfo(cl_index, num_cls, directories, file_paths, description,
     enable_auto_submit: If the CL should also have auto submit enabled.
     topic: Topic to set for this CL.
   """
-  description_lines = FormatDescriptionOrComment(description,
-                                                 directories).splitlines()
-  indented_description = '\n'.join(['    ' + l for l in description_lines])
+    description_lines = FormatDescriptionOrComment(description,
+                                                   directories).splitlines()
+    indented_description = '\n'.join(['    ' + l for l in description_lines])
 
-  print('CL {}/{}'.format(cl_index, num_cls))
-  print('Paths: {}'.format(FormatDirectoriesForPrinting(directories)))
-  print('Reviewers: {}'.format(', '.join(reviewers)))
-  print('Auto-Submit: {}'.format(enable_auto_submit))
-  print('Topic: {}'.format(topic))
-  print('\n' + indented_description + '\n')
-  print('\n'.join(file_paths))
-  print()
+    print('CL {}/{}'.format(cl_index, num_cls))
+    print('Paths: {}'.format(FormatDirectoriesForPrinting(directories)))
+    print('Reviewers: {}'.format(', '.join(reviewers)))
+    print('Auto-Submit: {}'.format(enable_auto_submit))
+    print('Topic: {}'.format(topic))
+    print('\n' + indented_description + '\n')
+    print('\n'.join(file_paths))
+    print()
 
 
 def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
             cq_dry_run, enable_auto_submit, max_depth, topic, repository_root):
-  """"Splits a branch into smaller branches and uploads CLs.
+    """"Splits a branch into smaller branches and uploads CLs.
 
   Args:
     description_file: File containing the description of uploaded CLs.
@@ -236,89 +237,90 @@ def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
   Returns:
     0 in case of success. 1 in case of error.
   """
-  description = AddUploadedByGitClSplitToDescription(
-      gclient_utils.FileRead(description_file))
-  comment = gclient_utils.FileRead(comment_file) if comment_file else None
-
-  try:
-    EnsureInGitRepository()
-
-    cl = changelist()
-    upstream = cl.GetCommonAncestorWithUpstream()
-    files = [
-        (action.strip(), f)
-        for action, f in scm.GIT.CaptureStatus(repository_root, upstream)
-    ]
-
-    if not files:
-      print('Cannot split an empty CL.')
-      return 1
-
-    author = git.run('config', 'user.email').strip() or None
-    refactor_branch = git.current_branch()
-    assert refactor_branch, "Can't run from detached branch."
-    refactor_branch_upstream = git.upstream(refactor_branch)
-    assert refactor_branch_upstream, \
-        "Branch %s must have an upstream." % refactor_branch
-
-    if not CheckDescriptionBugLink(description):
-      return 0
-
-    files_split_by_reviewers = SelectReviewersForFiles(cl, author, files,
-                                                       max_depth)
-
-    num_cls = len(files_split_by_reviewers)
-    print('Will split current branch (' + refactor_branch + ') into ' +
-          str(num_cls) + ' CLs.\n')
-    if cq_dry_run and num_cls > CL_SPLIT_FORCE_LIMIT:
-      print(
-        'This will generate "%r" CLs. This many CLs can potentially generate'
-        ' too much load on the build infrastructure. Please email'
-        ' infra-dev@chromium.org to ensure that this won\'t  break anything.'
-        ' The infra team reserves the right to cancel your jobs if they are'
-        ' overloading the CQ.' % num_cls)
-      answer = gclient_utils.AskForData('Proceed? (y/n):')
-      if answer.lower() != 'y':
-        return 0
-
-    cls_per_reviewer = collections.defaultdict(int)
-    for cl_index, (reviewers, cl_info) in \
-        enumerate(files_split_by_reviewers.items(), 1):
-      # Convert reviewers from tuple to set.
-      reviewer_set = set(reviewers)
-      if dry_run:
-        file_paths = [f for _, f in cl_info.files]
-        PrintClInfo(cl_index, num_cls, cl_info.owners_directories, file_paths,
-                    description, reviewer_set, enable_auto_submit, topic)
-      else:
-        UploadCl(refactor_branch, refactor_branch_upstream,
-                 cl_info.owners_directories, cl_info.files, description,
-                 comment, reviewer_set, changelist, cmd_upload, cq_dry_run,
-                 enable_auto_submit, topic, repository_root)
-
-      for reviewer in reviewers:
-        cls_per_reviewer[reviewer] += 1
-
-    # List the top reviewers that will be sent the most CLs as a result of the
-    # split.
-    reviewer_rankings = sorted(cls_per_reviewer.items(),
-                               key=lambda item: item[1],
-                               reverse=True)
-    print('The top reviewers are:')
-    for reviewer, count in reviewer_rankings[:CL_SPLIT_TOP_REVIEWERS]:
-      print(f'    {reviewer}: {count} CLs')
-
-    # Go back to the original branch.
-    git.run('checkout', refactor_branch)
-
-  except subprocess2.CalledProcessError as cpe:
-    sys.stderr.write(cpe.stderr)
-    return 1
-  return 0
+    description = AddUploadedByGitClSplitToDescription(
+        gclient_utils.FileRead(description_file))
+    comment = gclient_utils.FileRead(comment_file) if comment_file else None
+
+    try:
+        EnsureInGitRepository()
+
+        cl = changelist()
+        upstream = cl.GetCommonAncestorWithUpstream()
+        files = [
+            (action.strip(), f)
+            for action, f in scm.GIT.CaptureStatus(repository_root, upstream)
+        ]
+
+        if not files:
+            print('Cannot split an empty CL.')
+            return 1
+
+        author = git.run('config', 'user.email').strip() or None
+        refactor_branch = git.current_branch()
+        assert refactor_branch, "Can't run from detached branch."
+        refactor_branch_upstream = git.upstream(refactor_branch)
+        assert refactor_branch_upstream, \
+            "Branch %s must have an upstream." % refactor_branch
+
+        if not CheckDescriptionBugLink(description):
+            return 0
+
+        files_split_by_reviewers = SelectReviewersForFiles(
+            cl, author, files, max_depth)
+
+        num_cls = len(files_split_by_reviewers)
+        print('Will split current branch (' + refactor_branch + ') into ' +
+              str(num_cls) + ' CLs.\n')
+        if cq_dry_run and num_cls > CL_SPLIT_FORCE_LIMIT:
+            print(
+                'This will generate "%r" CLs. This many CLs can potentially'
+                ' generate too much load on the build infrastructure. Please'
+                ' email infra-dev@chromium.org to ensure that this won\'t break'
+                ' anything. The infra team reserves the right to cancel your'
+                ' jobs if they are overloading the CQ.' % num_cls)
+            answer = gclient_utils.AskForData('Proceed? (y/n):')
+            if answer.lower() != 'y':
+                return 0
+
+        cls_per_reviewer = collections.defaultdict(int)
+        for cl_index, (reviewers, cl_info) in \
+            enumerate(files_split_by_reviewers.items(), 1):
+            # Convert reviewers from tuple to set.
+            reviewer_set = set(reviewers)
+            if dry_run:
+                file_paths = [f for _, f in cl_info.files]
+                PrintClInfo(cl_index, num_cls, cl_info.owners_directories,
+                            file_paths, description, reviewer_set,
+                            enable_auto_submit, topic)
+            else:
+                UploadCl(refactor_branch, refactor_branch_upstream,
+                         cl_info.owners_directories, cl_info.files, description,
+                         comment, reviewer_set, changelist, cmd_upload,
+                         cq_dry_run, enable_auto_submit, topic, repository_root)
+
+            for reviewer in reviewers:
+                cls_per_reviewer[reviewer] += 1
+
+        # List the top reviewers that will be sent the most CLs as a result of
+        # the split.
+        reviewer_rankings = sorted(cls_per_reviewer.items(),
+                                   key=lambda item: item[1],
+                                   reverse=True)
+        print('The top reviewers are:')
+        for reviewer, count in reviewer_rankings[:CL_SPLIT_TOP_REVIEWERS]:
+            print(f'    {reviewer}: {count} CLs')
+
+        # Go back to the original branch.
+        git.run('checkout', refactor_branch)
+
+    except subprocess2.CalledProcessError as cpe:
+        sys.stderr.write(cpe.stderr)
+        return 1
+    return 0
 
 
 def CheckDescriptionBugLink(description):
-  """Verifies that the description contains a bug link.
+    """Verifies that the description contains a bug link.
 
   Examples:
       Bug: 123
@@ -326,17 +328,17 @@ def CheckDescriptionBugLink(description):
 
   Prompts user if the description does not contain a bug link.
   """
-  bug_pattern = re.compile(r"^Bug:\s*(?:[a-zA-Z]+:)?[0-9]+", re.MULTILINE)
-  matches = re.findall(bug_pattern, description)
-  answer = 'y'
-  if not matches:
-    answer = gclient_utils.AskForData(
-        'Description does not include a bug link. Proceed? (y/n):')
-  return answer.lower() == 'y'
+    bug_pattern = re.compile(r"^Bug:\s*(?:[a-zA-Z]+:)?[0-9]+", re.MULTILINE)
+    matches = re.findall(bug_pattern, description)
+    answer = 'y'
+    if not matches:
+        answer = gclient_utils.AskForData(
+            'Description does not include a bug link. Proceed? (y/n):')
+    return answer.lower() == 'y'
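The bug_pattern regex above accepts both plain issue numbers and tracker-prefixed ones; a quick check against hypothetical descriptions:

    import re

    bug_pattern = re.compile(r"^Bug:\s*(?:[a-zA-Z]+:)?[0-9]+", re.MULTILINE)

    for description in (
            'Refactor widgets\n\nBug: 123456',     # plain issue number
            'Refactor widgets\n\nBug: skia:4567',  # tracker-prefixed issue
            'Refactor widgets\n\nBUG=123456'):     # old-style footer
        print(bool(bug_pattern.search(description)))
    # True, True, False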
 
 
 def SelectReviewersForFiles(cl, author, files, max_depth):
-  """Selects reviewers for passed-in files
+    """Selects reviewers for passed-in files
 
   Args:
     cl: Changelist class instance
@@ -345,24 +347,24 @@ def SelectReviewersForFiles(cl, author, files, max_depth):
     max_depth: The maximum directory depth to search for OWNERS files. A value
                less than 1 means no limit.
   """
-  info_split_by_owners = GetFilesSplitByOwners(files, max_depth)
-
-  info_split_by_reviewers = {}
-
-  for (directory, split_files) in info_split_by_owners.items():
-    # Use '/' as a path separator in the branch name and the CL description
-    # and comment.
-    directory = directory.replace(os.path.sep, '/')
-    file_paths = [f for _, f in split_files]
-    # Convert reviewers list to tuple in order to use reviewers as key to
-    # dictionary.
-    reviewers = tuple(
-        cl.owners_client.SuggestOwners(
-            file_paths, exclude=[author, cl.owners_client.EVERYONE]))
-
-    if not reviewers in info_split_by_reviewers:
-      info_split_by_reviewers[reviewers] = FilesAndOwnersDirectory([], [])
-    info_split_by_reviewers[reviewers].files.extend(split_files)
-    info_split_by_reviewers[reviewers].owners_directories.append(directory)
-
-  return info_split_by_reviewers
+    info_split_by_owners = GetFilesSplitByOwners(files, max_depth)
+
+    info_split_by_reviewers = {}
+
+    for (directory, split_files) in info_split_by_owners.items():
+        # Use '/' as a path separator in the branch name and the CL description
+        # and comment.
+        directory = directory.replace(os.path.sep, '/')
+        file_paths = [f for _, f in split_files]
+        # Convert the reviewers list to a tuple so it can be used as a
+        # dictionary key.
+        reviewers = tuple(
+            cl.owners_client.SuggestOwners(
+                file_paths, exclude=[author, cl.owners_client.EVERYONE]))
+
+        if not reviewers in info_split_by_reviewers:
+            info_split_by_reviewers[reviewers] = FilesAndOwnersDirectory([], [])
+        info_split_by_reviewers[reviewers].files.extend(split_files)
+        info_split_by_reviewers[reviewers].owners_directories.append(directory)
+
+    return info_split_by_reviewers

+ 167 - 166
subcommand.py

@@ -1,7 +1,6 @@
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Manages subcommands in a script.
 
 Each subcommand should look like this:
@@ -46,51 +45,55 @@ import textwrap
 
 
 def usage(more):
-  """Adds a 'usage_more' property to a CMD function."""
-  def hook(fn):
-    fn.usage_more = more
-    return fn
-  return hook
+    """Adds a 'usage_more' property to a CMD function."""
+    def hook(fn):
+        fn.usage_more = more
+        return fn
+
+    return hook
 
 
 def epilog(text):
-  """Adds an 'epilog' property to a CMD function.
+    """Adds an 'epilog' property to a CMD function.
 
   It will be shown in the epilog. Usually useful for examples.
   """
-  def hook(fn):
-    fn.epilog = text
-    return fn
-  return hook
+    def hook(fn):
+        fn.epilog = text
+        return fn
+
+    return hook
 
 
 def CMDhelp(parser, args):
-  """Prints list of commands or help for a specific command."""
-  # This is the default help implementation. It can be disabled or overridden if
-  # wanted.
-  if not any(i in ('-h', '--help') for i in args):
-    args = args + ['--help']
-  parser.parse_args(args)
-  # Never gets there.
-  assert False
+    """Prints list of commands or help for a specific command."""
+    # This is the default help implementation. It can be disabled or overridden
+    # if wanted.
+    if not any(i in ('-h', '--help') for i in args):
+        args = args + ['--help']
+    parser.parse_args(args)
+    # Never gets there.
+    assert False
 
 
 def _get_color_module():
-  """Returns the colorama module if available.
+    """Returns the colorama module if available.
 
   If so, assumes colors are supported and return the module handle.
   """
-  return sys.modules.get('colorama') or sys.modules.get('third_party.colorama')
+    return sys.modules.get('colorama') or sys.modules.get(
+        'third_party.colorama')
 
 
 def _function_to_name(name):
-  """Returns the name of a CMD function."""
-  return name[3:].replace('_', '-')
+    """Returns the name of a CMD function."""
+    return name[3:].replace('_', '-')
 
 
 class CommandDispatcher(object):
-  def __init__(self, module):
-    """module is the name of the main python module where to look for commands.
+    def __init__(self, module):
+        """module is the name of the main python module where to look for
+        commands.
 
     The python builtin variable __name__ MUST be used for |module|. If the
     script is executed in the form 'python script.py', __name__ == '__main__'
@@ -98,10 +101,10 @@ class CommandDispatcher(object):
     tested, __main__ will be the unit test's module so it has to refer to
     itself with 'script'. __name__ always matches the right value.
     """
-    self.module = sys.modules[module]
+        self.module = sys.modules[module]
 
-  def enumerate_commands(self):
-    """Returns a dict of command and their handling function.
+    def enumerate_commands(self):
+        """Returns a dict of command and their handling function.
 
     The commands must be in the '__main__' module. To import a command from a
     submodule, use:
@@ -115,149 +118,147 @@ class CommandDispatcher(object):
     e.g.:
       CMDhelp = None
     """
-    cmds = dict(
-        (_function_to_name(name), getattr(self.module, name))
-        for name in dir(self.module) if name.startswith('CMD'))
-    cmds.setdefault('help', CMDhelp)
-    return cmds
+        cmds = dict((_function_to_name(name), getattr(self.module, name))
+                    for name in dir(self.module) if name.startswith('CMD'))
+        cmds.setdefault('help', CMDhelp)
+        return cmds
 
-  def find_nearest_command(self, name_asked):
-    """Retrieves the function to handle a command as supplied by the user.
+    def find_nearest_command(self, name_asked):
+        """Retrieves the function to handle a command as supplied by the user.
 
     It automatically tries to guess the _intended command_ by handling typos
     and/or incomplete names.
     """
-    commands = self.enumerate_commands()
-    name_to_dash = name_asked.replace('_', '-')
-    if name_to_dash in commands:
-      return commands[name_to_dash]
-
-    # An exact match was not found. Try to be smart and look if there's
-    # something similar.
-    commands_with_prefix = [c for c in commands if c.startswith(name_asked)]
-    if len(commands_with_prefix) == 1:
-      return commands[commands_with_prefix[0]]
-
-    # A #closeenough approximation of levenshtein distance.
-    def close_enough(a, b):
-      return difflib.SequenceMatcher(a=a, b=b).ratio()
-
-    hamming_commands = sorted(
-        ((close_enough(c, name_asked), c) for c in commands),
-        reverse=True)
-    if (hamming_commands[0][0] - hamming_commands[1][0]) < 0.3:
-      # Too ambiguous.
-      return None
-
-    if hamming_commands[0][0] < 0.8:
-      # Not similar enough. Don't be a fool and run a random command.
-      return None
-
-    return commands[hamming_commands[0][1]]
-
-  def _gen_commands_list(self):
-    """Generates the short list of supported commands."""
-    commands = self.enumerate_commands()
-    docs = sorted(
-        (cmd_name, self._create_command_summary(cmd_name, handler))
-        for cmd_name, handler in commands.items())
-    # Skip commands without a docstring.
-    docs = [i for i in docs if i[1]]
-    # Then calculate maximum length for alignment:
-    length = max(len(c) for c in commands)
-
-    # Look if color is supported.
-    colors = _get_color_module()
-    green = reset = ''
-    if colors:
-      green = colors.Fore.GREEN
-      reset = colors.Fore.RESET
-    return (
-        'Commands are:\n' +
-        ''.join(
-            '  %s%-*s%s %s\n' % (green, length, cmd_name, reset, doc)
-            for cmd_name, doc in docs))
-
-  def _add_command_usage(self, parser, command):
-    """Modifies an OptionParser object with the function's documentation."""
-    cmd_name = _function_to_name(command.__name__)
-    if cmd_name == 'help':
-      cmd_name = '<command>'
-      # Use the module's docstring as the description for the 'help' command if
-      # available.
-      parser.description = (self.module.__doc__ or '').rstrip()
-      if parser.description:
-        parser.description += '\n\n'
-      parser.description += self._gen_commands_list()
-      # Do not touch epilog.
-    else:
-      # Use the command's docstring if available. For commands, unlike module
-      # docstring, realign.
-      lines = (command.__doc__ or '').rstrip().splitlines()
-      if lines[:1]:
-        rest = textwrap.dedent('\n'.join(lines[1:]))
-        parser.description = '\n'.join((lines[0], rest))
-      else:
-        parser.description = lines[0] if lines else ''
-      if parser.description:
-        parser.description += '\n'
-      parser.epilog = getattr(command, 'epilog', None)
-      if parser.epilog:
-        parser.epilog = '\n' + parser.epilog.strip() + '\n'
-
-    more = getattr(command, 'usage_more', '')
-    extra = '' if not more else ' ' + more
-    parser.set_usage('usage: %%prog %s [options]%s' % (cmd_name, extra))
-
-  @staticmethod
-  def _create_command_summary(cmd_name, command):
-    """Creates a oneliner summary from the command's docstring."""
-    if cmd_name != _function_to_name(command.__name__):
-      # Skip aliases. For example using at module level:
-      # CMDfoo = CMDbar
-      return ''
-    doc = command.__doc__ or ''
-    line = doc.split('\n', 1)[0].rstrip('.')
-    if not line:
-      return line
-    return (line[0].lower() + line[1:]).strip()
-
-  def execute(self, parser, args):
-    """Dispatches execution to the right command.
+        commands = self.enumerate_commands()
+        name_to_dash = name_asked.replace('_', '-')
+        if name_to_dash in commands:
+            return commands[name_to_dash]
+
+        # An exact match was not found. Try to be smart and look if there's
+        # something similar.
+        commands_with_prefix = [c for c in commands if c.startswith(name_asked)]
+        if len(commands_with_prefix) == 1:
+            return commands[commands_with_prefix[0]]
+
+        # A #closeenough approximation of Levenshtein distance.
+        def close_enough(a, b):
+            return difflib.SequenceMatcher(a=a, b=b).ratio()
+
+        hamming_commands = sorted(
+            ((close_enough(c, name_asked), c) for c in commands), reverse=True)
+        if (hamming_commands[0][0] - hamming_commands[1][0]) < 0.3:
+            # Too ambiguous.
+            return None
+
+        if hamming_commands[0][0] < 0.8:
+            # Not similar enough. Don't be a fool and run a random command.
+            return None
+
+        return commands[hamming_commands[0][1]]
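To illustrate find_nearest_command's matching rules (exact match, then a unique prefix, then a similarity score with the 0.3 ambiguity gap and 0.8 floor), here is a standalone sketch with a hypothetical command set:

    import difflib

    commands = ['help', 'status', 'upload', 'uploadall']

    def find_nearest(name):
        if name in commands:
            return name
        prefixed = [c for c in commands if c.startswith(name)]
        if len(prefixed) == 1:
            return prefixed[0]
        scored = sorted(((difflib.SequenceMatcher(a=c, b=name).ratio(), c)
                         for c in commands), reverse=True)
        if scored[0][0] - scored[1][0] < 0.3 or scored[0][0] < 0.8:
            return None  # too ambiguous or not similar enough
        return scored[0][1]

    print(find_nearest('stat'))    # status (unique prefix)
    print(find_nearest('statsu'))  # status (close-enough typo)
    print(find_nearest('up'))      # None (upload vs uploadall is ambiguous)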
+
+    def _gen_commands_list(self):
+        """Generates the short list of supported commands."""
+        commands = self.enumerate_commands()
+        docs = sorted(
+            (cmd_name, self._create_command_summary(cmd_name, handler))
+            for cmd_name, handler in commands.items())
+        # Skip commands without a docstring.
+        docs = [i for i in docs if i[1]]
+        # Then calculate maximum length for alignment:
+        length = max(len(c) for c in commands)
+
+        # Look if color is supported.
+        colors = _get_color_module()
+        green = reset = ''
+        if colors:
+            green = colors.Fore.GREEN
+            reset = colors.Fore.RESET
+        return ('Commands are:\n' +
+                ''.join('  %s%-*s%s %s\n' %
+                        (green, length, cmd_name, reset, doc)
+                        for cmd_name, doc in docs))
+
+    def _add_command_usage(self, parser, command):
+        """Modifies an OptionParser object with the function's documentation."""
+        cmd_name = _function_to_name(command.__name__)
+        if cmd_name == 'help':
+            cmd_name = '<command>'
+            # Use the module's docstring as the description for the 'help'
+            # command if available.
+            parser.description = (self.module.__doc__ or '').rstrip()
+            if parser.description:
+                parser.description += '\n\n'
+            parser.description += self._gen_commands_list()
+            # Do not touch epilog.
+        else:
+            # Use the command's docstring if available. For commands, unlike
+            # module docstring, realign.
+            lines = (command.__doc__ or '').rstrip().splitlines()
+            if lines[:1]:
+                rest = textwrap.dedent('\n'.join(lines[1:]))
+                parser.description = '\n'.join((lines[0], rest))
+            else:
+                parser.description = lines[0] if lines else ''
+            if parser.description:
+                parser.description += '\n'
+            parser.epilog = getattr(command, 'epilog', None)
+            if parser.epilog:
+                parser.epilog = '\n' + parser.epilog.strip() + '\n'
+
+        more = getattr(command, 'usage_more', '')
+        extra = '' if not more else ' ' + more
+        parser.set_usage('usage: %%prog %s [options]%s' % (cmd_name, extra))
+
+    @staticmethod
+    def _create_command_summary(cmd_name, command):
+        """Creates a oneliner summary from the command's docstring."""
+        if cmd_name != _function_to_name(command.__name__):
+            # Skip aliases. For example using at module level:
+            # CMDfoo = CMDbar
+            return ''
+        doc = command.__doc__ or ''
+        line = doc.split('\n', 1)[0].rstrip('.')
+        if not line:
+            return line
+        return (line[0].lower() + line[1:]).strip()
+
+    def execute(self, parser, args):
+        """Dispatches execution to the right command.
 
     Fallbacks to 'help' if not disabled.
     """
-    # Unconditionally disable format_description() and format_epilog().
-    # Technically, a formatter should be used but it's not worth (yet) the
-    # trouble.
-    parser.format_description = lambda _: parser.description or ''
-    parser.format_epilog = lambda _: parser.epilog or ''
-
-    if args:
-      if args[0] in ('-h', '--help') and len(args) > 1:
-        # Reverse the argument order so 'tool --help cmd' is rewritten to
-        # 'tool cmd --help'.
-        args = [args[1], args[0]] + args[2:]
-      command = self.find_nearest_command(args[0])
-      if command:
-        if command.__name__ == 'CMDhelp' and len(args) > 1:
-          # Reverse the argument order so 'tool help cmd' is rewritten to
-          # 'tool cmd --help'. Do it here since we want 'tool help cmd' to work
-          # too.
-          args = [args[1], '--help'] + args[2:]
-          command = self.find_nearest_command(args[0]) or command
-
-        # "fix" the usage and the description now that we know the subcommand.
-        self._add_command_usage(parser, command)
-        return command(parser, args[1:])
-
-    cmdhelp = self.enumerate_commands().get('help')
-    if cmdhelp:
-      # Not a known command. Default to help.
-      self._add_command_usage(parser, cmdhelp)
-      # Don't pass list of arguments as those may not be supported by cmdhelp.
-      # See: https://crbug.com/1352093
-      return cmdhelp(parser, [])
-
-    # Nothing can be done.
-    return 2
+        # Unconditionally disable format_description() and format_epilog().
+        # Technically, a formatter should be used but it's not worth (yet) the
+        # trouble.
+        parser.format_description = lambda _: parser.description or ''
+        parser.format_epilog = lambda _: parser.epilog or ''
+
+        if args:
+            if args[0] in ('-h', '--help') and len(args) > 1:
+                # Reverse the argument order so 'tool --help cmd' is rewritten
+                # to 'tool cmd --help'.
+                args = [args[1], args[0]] + args[2:]
+            command = self.find_nearest_command(args[0])
+            if command:
+                if command.__name__ == 'CMDhelp' and len(args) > 1:
+                    # Reverse the argument order so 'tool help cmd' is rewritten
+                    # to 'tool cmd --help'. Do it here since we want 'tool help
+                    # cmd' to work too.
+                    args = [args[1], '--help'] + args[2:]
+                    command = self.find_nearest_command(args[0]) or command
+
+                # "fix" the usage and the description now that we know the
+                # subcommand.
+                self._add_command_usage(parser, command)
+                return command(parser, args[1:])
+
+        cmdhelp = self.enumerate_commands().get('help')
+        if cmdhelp:
+            # Not a known command. Default to help.
+            self._add_command_usage(parser, cmdhelp)
+            # Don't pass list of arguments as those may not be supported by
+            # cmdhelp. See: https://crbug.com/1352093
+            return cmdhelp(parser, [])
+
+        # Nothing can be done.
+        return 2
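Tying the pieces together, a minimal hypothetical script wired up the way the module docstring at the top of subcommand.py describes: CMD-prefixed functions become subcommands, and execute() dispatches to them (the script and command names below are made up).

    #!/usr/bin/env python3
    """tool.py -- hypothetical example of using subcommand.CommandDispatcher."""
    import optparse
    import sys

    import subcommand


    @subcommand.usage('[name]')
    def CMDgreet(parser, args):
        """Greets someone by name."""
        _options, args = parser.parse_args(args)
        print('Hello, %s!' % (args[0] if args else 'world'))
        return 0


    def main(argv):
        # __name__ must be passed so the dispatcher finds CMD* in this module.
        dispatcher = subcommand.CommandDispatcher(__name__)
        return dispatcher.execute(optparse.OptionParser(), argv)


    if __name__ == '__main__':
        sys.exit(main(sys.argv[1:]))

With this, `./tool.py greet Alice` runs CMDgreet, `./tool.py help` lists the commands, and a near-miss like `./tool.py gret` still resolves through the fuzzy lookup shown above.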

+ 136 - 136
subprocess2.py

@@ -15,7 +15,6 @@ import subprocess
 import sys
 import threading
 
-
 # Constants forwarded from subprocess.
 PIPE = subprocess.PIPE
 STDOUT = subprocess.STDOUT
@@ -23,72 +22,74 @@ DEVNULL = subprocess.DEVNULL
 
 
 class CalledProcessError(subprocess.CalledProcessError):
-  """Augment the standard exception with more data."""
-  def __init__(self, returncode, cmd, cwd, stdout, stderr):
-    super(CalledProcessError, self).__init__(returncode, cmd, output=stdout)
-    self.stdout = self.output  # for backward compatibility.
-    self.stderr = stderr
-    self.cwd = cwd
-
-  def __str__(self):
-    out = 'Command %r returned non-zero exit status %s' % (
-        ' '.join(self.cmd), self.returncode)
-    if self.cwd:
-      out += ' in ' + self.cwd
-    if self.stdout:
-      out += '\n' + self.stdout.decode('utf-8', 'ignore')
-    if self.stderr:
-      out += '\n' + self.stderr.decode('utf-8', 'ignore')
-    return out
+    """Augment the standard exception with more data."""
+    def __init__(self, returncode, cmd, cwd, stdout, stderr):
+        super(CalledProcessError, self).__init__(returncode, cmd, output=stdout)
+        self.stdout = self.output  # for backward compatibility.
+        self.stderr = stderr
+        self.cwd = cwd
+
+    def __str__(self):
+        out = 'Command %r returned non-zero exit status %s' % (' '.join(
+            self.cmd), self.returncode)
+        if self.cwd:
+            out += ' in ' + self.cwd
+        if self.stdout:
+            out += '\n' + self.stdout.decode('utf-8', 'ignore')
+        if self.stderr:
+            out += '\n' + self.stderr.decode('utf-8', 'ignore')
+        return out
 
 
 class CygwinRebaseError(CalledProcessError):
-  """Occurs when cygwin's fork() emulation fails due to rebased dll."""
+    """Occurs when cygwin's fork() emulation fails due to rebased dll."""
 
 
 ## Utility functions
 
 
 def kill_pid(pid):
-  """Kills a process by its process id."""
-  try:
-    # Unable to import 'module'
-    # pylint: disable=no-member,F0401
-    import signal
-    return os.kill(pid, signal.SIGTERM)
-  except ImportError:
-    pass
+    """Kills a process by its process id."""
+    try:
+        # Unable to import 'module'
+        # pylint: disable=no-member,F0401
+        import signal
+        return os.kill(pid, signal.SIGTERM)
+    except ImportError:
+        pass
 
 
 def get_english_env(env):
-  """Forces LANG and/or LANGUAGE to be English.
+    """Forces LANG and/or LANGUAGE to be English.
 
   Forces encoding to utf-8 for subprocesses.
 
   Returns None if it is unnecessary.
   """
-  if sys.platform == 'win32':
-    return None
-  env = env or os.environ
+    if sys.platform == 'win32':
+        return None
+    env = env or os.environ
+
+    # Test if it is necessary at all.
+    is_english = lambda name: env.get(name, 'en').startswith('en')
 
-  # Test if it is necessary at all.
-  is_english = lambda name: env.get(name, 'en').startswith('en')
+    if is_english('LANG') and is_english('LANGUAGE'):
+        return None
 
-  if is_english('LANG') and is_english('LANGUAGE'):
-    return None
+    # Requires modifications.
+    env = env.copy()
 
-  # Requires modifications.
-  env = env.copy()
-  def fix_lang(name):
-    if not is_english(name):
-      env[name] = 'en_US.UTF-8'
-  fix_lang('LANG')
-  fix_lang('LANGUAGE')
-  return env
+    def fix_lang(name):
+        if not is_english(name):
+            env[name] = 'en_US.UTF-8'
+
+    fix_lang('LANG')
+    fix_lang('LANGUAGE')
+    return env
 
 
 class Popen(subprocess.Popen):
-  """Wraps subprocess.Popen() with various workarounds.
+    """Wraps subprocess.Popen() with various workarounds.
 
   - Forces English output since it's easier to parse the stdout if it is always
     in English.
@@ -100,69 +101,68 @@ class Popen(subprocess.Popen):
   Note: Popen() can throw OSError when cwd or args[0] doesn't exist. Translate
   exceptions generated by cygwin when it fails trying to emulate fork().
   """
-  # subprocess.Popen.__init__() is not threadsafe; there is a race between
-  # creating the exec-error pipe for the child and setting it to CLOEXEC during
-  # which another thread can fork and cause the pipe to be inherited by its
-  # descendents, which will cause the current Popen to hang until all those
-  # descendents exit. Protect this with a lock so that only one fork/exec can
-  # happen at a time.
-  popen_lock = threading.Lock()
-
-  def __init__(self, args, **kwargs):
-    env = get_english_env(kwargs.get('env'))
-    if env:
-      kwargs['env'] = env
-    if kwargs.get('env') is not None:
-      # Subprocess expects environment variables to be strings in Python 3.
-      def ensure_str(value):
-        if isinstance(value, bytes):
-          return value.decode()
-        return value
-
-      kwargs['env'] = {
-          ensure_str(k): ensure_str(v)
-          for k, v in kwargs['env'].items()
-      }
-    if kwargs.get('shell') is None:
-      # *Sigh*:  Windows needs shell=True, or else it won't search %PATH% for
-      # the executable, but shell=True makes subprocess on Linux fail when it's
-      # called with a list because it only tries to execute the first item in
-      # the list.
-      kwargs['shell'] = bool(sys.platform=='win32')
-
-    if isinstance(args, (str, bytes)):
-      tmp_str = args
-    elif isinstance(args, (list, tuple)):
-      tmp_str = ' '.join(args)
-    else:
-      raise CalledProcessError(None, args, kwargs.get('cwd'), None, None)
-    if kwargs.get('cwd', None):
-      tmp_str += ';  cwd=%s' % kwargs['cwd']
-    logging.debug(tmp_str)
-
-    try:
-      with self.popen_lock:
-        super(Popen, self).__init__(args, **kwargs)
-    except OSError as e:
-      if e.errno == errno.EAGAIN and sys.platform == 'cygwin':
-        # Convert fork() emulation failure into a CygwinRebaseError().
-        raise CygwinRebaseError(
-            e.errno,
-            args,
-            kwargs.get('cwd'),
-            None,
-            'Visit '
-            'http://code.google.com/p/chromium/wiki/CygwinDllRemappingFailure '
-            'to learn how to fix this error; you need to rebase your cygwin '
-            'dlls')
-      # Popen() can throw OSError when cwd or args[0] doesn't exist.
-      raise OSError('Execution failed with error: %s.\n'
-                    'Check that %s or %s exist and have execution permission.'
-                    % (str(e), kwargs.get('cwd'), args[0]))
+    # subprocess.Popen.__init__() is not threadsafe; there is a race between
+    # creating the exec-error pipe for the child and setting it to CLOEXEC
+    # during which another thread can fork and cause the pipe to be inherited by
+    # its descendants, which will cause the current Popen to hang until all
+    # those descendants exit. Protect this with a lock so that only one
+    # fork/exec can happen at a time.
+    popen_lock = threading.Lock()
+
+    def __init__(self, args, **kwargs):
+        env = get_english_env(kwargs.get('env'))
+        if env:
+            kwargs['env'] = env
+        if kwargs.get('env') is not None:
+            # Subprocess expects environment variables to be strings in Python
+            # 3.
+            def ensure_str(value):
+                if isinstance(value, bytes):
+                    return value.decode()
+                return value
+
+            kwargs['env'] = {
+                ensure_str(k): ensure_str(v)
+                for k, v in kwargs['env'].items()
+            }
+        if kwargs.get('shell') is None:
+            # *Sigh*:  Windows needs shell=True, or else it won't search %PATH%
+            # for the executable, but shell=True makes subprocess on Linux fail
+            # when it's called with a list because it only tries to execute the
+            # first item in the list.
+            kwargs['shell'] = bool(sys.platform == 'win32')
+
+        if isinstance(args, (str, bytes)):
+            tmp_str = args
+        elif isinstance(args, (list, tuple)):
+            tmp_str = ' '.join(args)
+        else:
+            raise CalledProcessError(None, args, kwargs.get('cwd'), None, None)
+        if kwargs.get('cwd', None):
+            tmp_str += ';  cwd=%s' % kwargs['cwd']
+        logging.debug(tmp_str)
+
+        try:
+            with self.popen_lock:
+                super(Popen, self).__init__(args, **kwargs)
+        except OSError as e:
+            if e.errno == errno.EAGAIN and sys.platform == 'cygwin':
+                # Convert fork() emulation failure into a CygwinRebaseError().
+                raise CygwinRebaseError(
+                    e.errno, args, kwargs.get('cwd'), None, 'Visit '
+                    'http://code.google.com/p/chromium/wiki/'
+                    'CygwinDllRemappingFailure '
+                    'to learn how to fix this error; you need to rebase your '
+                    'cygwin dlls')
+            # Popen() can throw OSError when cwd or args[0] doesn't exist.
+            raise OSError(
+                'Execution failed with error: %s.\n'
+                'Check that %s or %s exist and have execution permission.' %
+                (str(e), kwargs.get('cwd'), args[0]))
 
 
 def communicate(args, **kwargs):
-  """Wraps subprocess.Popen().communicate().
+    """Wraps subprocess.Popen().communicate().
 
   Returns ((stdout, stderr), returncode).
 
@@ -170,19 +170,19 @@ def communicate(args, **kwargs):
     output, print a warning to stderr.
   - Automatically passes stdin content as input so do not specify stdin=PIPE.
   """
-  stdin = None
-  # When stdin is passed as an argument, use it as the actual input data and
-  # set the Popen() parameter accordingly.
-  if 'stdin' in kwargs and isinstance(kwargs['stdin'], (str, bytes)):
-    stdin = kwargs['stdin']
-    kwargs['stdin'] = PIPE
+    stdin = None
+    # When stdin is passed as an argument, use it as the actual input data and
+    # set the Popen() parameter accordingly.
+    if 'stdin' in kwargs and isinstance(kwargs['stdin'], (str, bytes)):
+        stdin = kwargs['stdin']
+        kwargs['stdin'] = PIPE
 
-  proc = Popen(args, **kwargs)
-  return proc.communicate(stdin), proc.returncode
+    proc = Popen(args, **kwargs)
+    return proc.communicate(stdin), proc.returncode
 
 
 def call(args, **kwargs):
-  """Emulates subprocess.call().
+    """Emulates subprocess.call().
 
   Automatically convert stdout=PIPE or stderr=PIPE to DEVNULL.
   In no case they can be returned since no code path raises
@@ -190,47 +190,47 @@ def call(args, **kwargs):
 
   Returns exit code.
   """
-  if kwargs.get('stdout') == PIPE:
-    kwargs['stdout'] = DEVNULL
-  if kwargs.get('stderr') == PIPE:
-    kwargs['stderr'] = DEVNULL
-  return communicate(args, **kwargs)[1]
+    if kwargs.get('stdout') == PIPE:
+        kwargs['stdout'] = DEVNULL
+    if kwargs.get('stderr') == PIPE:
+        kwargs['stderr'] = DEVNULL
+    return communicate(args, **kwargs)[1]
 
 
 def check_call_out(args, **kwargs):
-  """Improved version of subprocess.check_call().
+    """Improved version of subprocess.check_call().
 
   Returns (stdout, stderr), unlike subprocess.check_call().
   """
-  out, returncode = communicate(args, **kwargs)
-  if returncode:
-    raise CalledProcessError(
-        returncode, args, kwargs.get('cwd'), out[0], out[1])
-  return out
+    out, returncode = communicate(args, **kwargs)
+    if returncode:
+        raise CalledProcessError(returncode, args, kwargs.get('cwd'), out[0],
+                                 out[1])
+    return out
 
 
 def check_call(args, **kwargs):
-  """Emulate subprocess.check_call()."""
-  check_call_out(args, **kwargs)
-  return 0
+    """Emulate subprocess.check_call()."""
+    check_call_out(args, **kwargs)
+    return 0
 
 
 def capture(args, **kwargs):
-  """Captures stdout of a process call and returns it.
+    """Captures stdout of a process call and returns it.
 
   Returns stdout.
 
   - Discards returncode.
   - Blocks stdin by default if not specified since no output will be visible.
   """
-  kwargs.setdefault('stdin', DEVNULL)
+    kwargs.setdefault('stdin', DEVNULL)
 
-  # Like check_output, deny the caller from using stdout arg.
-  return communicate(args, stdout=PIPE, **kwargs)[0][0]
+    # Like check_output, deny the caller from using stdout arg.
+    return communicate(args, stdout=PIPE, **kwargs)[0][0]
 
 
 def check_output(args, **kwargs):
-  """Emulates subprocess.check_output().
+    """Emulates subprocess.check_output().
 
   Captures stdout of a process call and returns stdout only.
 
@@ -238,7 +238,7 @@ def check_output(args, **kwargs):
   - Blocks stdin by default if not specified since no output will be visible.
   - As per doc, "The stdout argument is not allowed as it is used internally."
   """
-  kwargs.setdefault('stdin', DEVNULL)
-  if 'stdout' in kwargs:
-    raise ValueError('stdout argument not allowed, it would be overridden.')
-  return check_call_out(args, stdout=PIPE, **kwargs)[0]
+    kwargs.setdefault('stdin', DEVNULL)
+    if 'stdout' in kwargs:
+        raise ValueError('stdout argument not allowed, it would be overridden.')
+    return check_call_out(args, stdout=PIPE, **kwargs)[0]
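
The module above keeps subprocess's surface but layers on an English-forced environment, a lock around fork/exec, and helpers for the common call patterns. A minimal usage sketch, assuming depot_tools is importable; the git commands are placeholders and any executable behaves the same way:

import subprocess2

# capture() returns the child's stdout as bytes and discards the return code;
# stdin defaults to DEVNULL so the child can never block waiting for input.
head = subprocess2.capture(['git', 'rev-parse', 'HEAD']).decode().strip()
print('HEAD is', head)

# check_output() raises subprocess2.CalledProcessError on a non-zero exit;
# the exception carries cwd, stdout and stderr for a readable message.
try:
    subprocess2.check_output(['git', 'rev-parse', '--verify', 'no-such-ref'])
except subprocess2.CalledProcessError as e:
    print('git failed with exit code %d' % e.returncode)

# communicate() accepts the input data directly through the stdin keyword
# instead of stdin=PIPE plus a separate write, and returns
# ((stdout, stderr), returncode).
(out, _), code = subprocess2.communicate(['sort'], stdin=b'b\na\n',
                                         stdout=subprocess2.PIPE)
print(code, out)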

+ 42 - 42
swift_format.py

@@ -15,60 +15,60 @@ import sys
 
 
 class NotFoundError(Exception):
-  """A file could not be found."""
-
-  def __init__(self, e):
-    Exception.__init__(
-        self,
-        'Problem while looking for swift-format in Chromium source tree:\n'
-        '%s' % e)
+    """A file could not be found."""
+    def __init__(self, e):
+        Exception.__init__(
+            self,
+            'Problem while looking for swift-format in Chromium source tree:\n'
+            '%s' % e)
 
 
 def FindSwiftFormatToolInChromiumTree():
-  """Return a path to the rustfmt executable, or die trying."""
-  chromium_src_path = gclient_paths.GetPrimarySolutionPath()
-  if not chromium_src_path:
-    raise NotFoundError(
-        'Could not find checkout in any parent of the current path.\n'
-        'Set CHROMIUM_BUILDTOOLS_PATH to use outside of a chromium checkout.')
+    """Return a path to the rustfmt executable, or die trying."""
+    chromium_src_path = gclient_paths.GetPrimarySolutionPath()
+    if not chromium_src_path:
+        raise NotFoundError(
+            'Could not find checkout in any parent of the current path.\n'
+            'Set CHROMIUM_BUILDTOOLS_PATH to use outside of a chromium '
+            'checkout.')
 
-  tool_path = os.path.join(chromium_src_path, 'third_party', 'swift-format',
-                           'swift-format')
-  if not os.path.exists(tool_path):
-    raise NotFoundError('File does not exist: %s' % tool_path)
-  return tool_path
+    tool_path = os.path.join(chromium_src_path, 'third_party', 'swift-format',
+                             'swift-format')
+    if not os.path.exists(tool_path):
+        raise NotFoundError('File does not exist: %s' % tool_path)
+    return tool_path
 
 
 def IsSwiftFormatSupported():
-  if sys.platform != 'darwin':
-    return False
-  try:
-    FindSwiftFormatToolInChromiumTree()
-    return True
-  except NotFoundError:
-    return False
+    if sys.platform != 'darwin':
+        return False
+    try:
+        FindSwiftFormatToolInChromiumTree()
+        return True
+    except NotFoundError:
+        return False
 
 
 def main(args):
-  try:
-    tool = FindSwiftFormatToolInChromiumTree()
-  except NotFoundError as e:
-    sys.stderr.write("%s\n" % str(e))
-    return 1
+    try:
+        tool = FindSwiftFormatToolInChromiumTree()
+    except NotFoundError as e:
+        sys.stderr.write("%s\n" % str(e))
+        return 1
 
-  # Add some visibility to --help showing where the tool lives, since this
-  # redirection can be a little opaque.
-  help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
-  if any(match in args for match in help_syntax):
-    print('\nDepot tools redirects you to the swift-format at:\n    %s\n' %
-          tool)
+    # Add some visibility to --help showing where the tool lives, since this
+    # redirection can be a little opaque.
+    help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
+    if any(match in args for match in help_syntax):
+        print('\nDepot tools redirects you to the swift-format at:\n    %s\n' %
+              tool)
 
-  return subprocess.call([tool] + args)
+    return subprocess.call([tool] + args)
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main(sys.argv[1:]))
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main(sys.argv[1:]))
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
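
swift_format.py is only a locator and forwarder: it resolves third_party/swift-format/swift-format under the primary solution (macOS only) and passes every argument straight through, so main()'s return value is the tool's exit code. A small sketch of calling it programmatically under those assumptions; the argument list is whatever the real swift-format accepts:

import sys

import swift_format

if not swift_format.IsSwiftFormatSupported():
    # Either not on macOS or not inside a checkout that bundles the binary.
    sys.exit('swift-format is unavailable on this host')

# Forward our own arguments exactly as the depot_tools shim does; '--help'
# additionally prints the resolved tool path thanks to the block above.
sys.exit(swift_format.main(sys.argv[1:]))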

+ 0 - 3
testing_support/.style.yapf

@@ -1,3 +0,0 @@
-[style]
-based_on_style = pep8
-column_limit = 80

+ 2 - 3
testing_support/coverage_utils.py

@@ -63,9 +63,8 @@ def covered_main(includes,
             sys.path.insert(0, os.path.join(ROOT_PATH, 'third_party'))
             import coverage
         else:
-            print(
-                "ERROR: python-coverage (%s) is required to be installed on "
-                "your PYTHONPATH to run this test." % require_native)
+            print("ERROR: python-coverage (%s) is required to be installed on "
+                  "your PYTHONPATH to run this test." % require_native)
             sys.exit(1)
 
     COVERAGE = coverage.coverage(include=includes)

+ 0 - 3
tests/.style.yapf

@@ -1,3 +0,0 @@
-[style]
-based_on_style = pep8
-column_limit = 80

+ 20 - 19
update_depot_tools_toggle.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Small utility script to enable/disable `depot_tools` automatic updating."""
 
 import argparse
@@ -10,29 +9,31 @@ import datetime
 import os
 import sys
 
-
 DEPOT_TOOLS_ROOT = os.path.abspath(os.path.dirname(__file__))
 SENTINEL_PATH = os.path.join(DEPOT_TOOLS_ROOT, '.disable_auto_update')
 
 
 def main():
-  parser = argparse.ArgumentParser()
-  group = parser.add_mutually_exclusive_group(required=True)
-  group.add_argument('--enable', action='store_true',
-      help='Enable auto-updating.')
-  group.add_argument('--disable', action='store_true',
-      help='Disable auto-updating.')
-  args = parser.parse_args()
-
-  if args.enable:
-    if os.path.exists(SENTINEL_PATH):
-      os.unlink(SENTINEL_PATH)
-  if args.disable:
-    if not os.path.exists(SENTINEL_PATH):
-      with open(SENTINEL_PATH, 'w') as fd:
-        fd.write('Disabled by %s at %s\n' % (__file__, datetime.datetime.now()))
-  return 0
+    parser = argparse.ArgumentParser()
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument('--enable',
+                       action='store_true',
+                       help='Enable auto-updating.')
+    group.add_argument('--disable',
+                       action='store_true',
+                       help='Disable auto-updating.')
+    args = parser.parse_args()
+
+    if args.enable:
+        if os.path.exists(SENTINEL_PATH):
+            os.unlink(SENTINEL_PATH)
+    if args.disable:
+        if not os.path.exists(SENTINEL_PATH):
+            with open(SENTINEL_PATH, 'w') as fd:
+                fd.write('Disabled by %s at %s\n' %
+                         (__file__, datetime.datetime.now()))
+    return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
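
The toggle is nothing more than a sentinel file: --disable writes .disable_auto_update next to the script with a timestamp, --enable deletes it, and the updater checks for its presence. A sketch of querying the same sentinel from another script; is_auto_update_enabled() is a hypothetical helper, not part of depot_tools:

import os

DEPOT_TOOLS_ROOT = os.path.abspath(os.path.dirname(__file__))
SENTINEL_PATH = os.path.join(DEPOT_TOOLS_ROOT, '.disable_auto_update')


def is_auto_update_enabled():
    # Auto-updating is on unless the sentinel file exists.
    return not os.path.exists(SENTINEL_PATH)


if __name__ == '__main__':
    state = 'enabled' if is_auto_update_enabled() else 'disabled'
    print('depot_tools auto-update is %s' % state)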

+ 16 - 15
upload_metrics.py

@@ -11,23 +11,24 @@ import urllib.request
 import auth
 import metrics_utils
 
+
 def main():
-  metrics = input()
-  try:
-    headers = {}
-    if 'bot_metrics' in metrics:
-      token = auth.Authenticator().get_access_token().token
-      headers = {'Authorization': 'Bearer ' + token}
-    urllib.request.urlopen(urllib.request.Request(
-        url=metrics_utils.APP_URL + '/upload',
-        data=metrics.encode('utf-8'),
-        headers=headers))
-  except (urllib.error.HTTPError, urllib.error.URLError,
-          http.client.RemoteDisconnected):
-    pass
+    metrics = input()
+    try:
+        headers = {}
+        if 'bot_metrics' in metrics:
+            token = auth.Authenticator().get_access_token().token
+            headers = {'Authorization': 'Bearer ' + token}
+        urllib.request.urlopen(
+            urllib.request.Request(url=metrics_utils.APP_URL + '/upload',
+                                   data=metrics.encode('utf-8'),
+                                   headers=headers))
+    except (urllib.error.HTTPError, urllib.error.URLError,
+            http.client.RemoteDisconnected):
+        pass
 
-  return 0
+    return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
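
upload_metrics.py reads a single line of metrics JSON from stdin, adds a Bearer token only when the payload mentions bot_metrics, POSTs it to metrics_utils.APP_URL + '/upload', and deliberately swallows network errors. A sketch of driving it from another process; the payload keys are made-up placeholders (real payloads come from metrics_utils):

import json
import subprocess
import sys

payload = json.dumps({'metrics_version': 0, 'command': 'example'})

# The uploader reads exactly one line and never raises on network failures,
# so fire-and-forget is safe here.
subprocess.run([sys.executable, 'upload_metrics.py'],
               input=payload + '\n',
               text=True,
               check=False)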

+ 270 - 249
upload_to_google_storage.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Uploads files to Google Storage content addressed."""
 
 from __future__ import print_function
@@ -44,270 +43,292 @@ find . -name .svn -prune -o -size +1000k -type f -print0 | %prog -0 -b bkt -
 
 
 def get_md5(filename):
-  md5_calculator = hashlib.md5()
-  with open(filename, 'rb') as f:
-    while True:
-      chunk = f.read(1024*1024)
-      if not chunk:
-        break
-      md5_calculator.update(chunk)
-  return md5_calculator.hexdigest()
+    md5_calculator = hashlib.md5()
+    with open(filename, 'rb') as f:
+        while True:
+            chunk = f.read(1024 * 1024)
+            if not chunk:
+                break
+            md5_calculator.update(chunk)
+    return md5_calculator.hexdigest()
 
 
 def get_md5_cached(filename):
-  """Don't calculate the MD5 if we can find a .md5 file."""
-  # See if we can find an existing MD5 sum stored in a file.
-  if os.path.exists('%s.md5' % filename):
-    with open('%s.md5' % filename, 'rb') as f:
-      md5_match = re.search('([a-z0-9]{32})', f.read().decode())
-      if md5_match:
-        return md5_match.group(1)
-  else:
-    md5_hash = get_md5(filename)
-    with open('%s.md5' % filename, 'wb') as f:
-      f.write(md5_hash.encode())
-    return md5_hash
-
-
-def _upload_worker(
-    thread_num, upload_queue, base_url, gsutil, md5_lock, force,
-    use_md5, stdout_queue, ret_codes, gzip):
-  while True:
-    filename, sha1_sum = upload_queue.get()
-    if not filename:
-      break
-    file_url = '%s/%s' % (base_url, sha1_sum)
-    if gsutil.check_call('ls', file_url)[0] == 0 and not force:
-      # File exists, check MD5 hash.
-      _, out, _ = gsutil.check_call_with_retries('ls', '-L', file_url)
-      etag_match = re.search(r'ETag:\s+\S+', out)
-      if etag_match:
-        stdout_queue.put(
-            '%d> File with url %s already exists' % (thread_num, file_url))
-        remote_md5 = etag_match.group(0).split()[1]
-        # Calculate the MD5 checksum to match it to Google Storage's ETag.
-        with md5_lock:
-          if use_md5:
-            local_md5 = get_md5_cached(filename)
-          else:
-            local_md5 = get_md5(filename)
-        if local_md5 == remote_md5:
-          stdout_queue.put(
-              '%d> File %s already exists and MD5 matches, upload skipped' %
-              (thread_num, filename))
-          continue
-    stdout_queue.put('%d> Uploading %s...' % (
-        thread_num, filename))
-    gsutil_args = ['-h', 'Cache-Control:public, max-age=31536000', 'cp']
-    if gzip:
-      gsutil_args.extend(['-z', gzip])
-    gsutil_args.extend([filename, file_url])
-    code, _, err = gsutil.check_call_with_retries(*gsutil_args)
-    if code != 0:
-      ret_codes.put(
-          (code,
-           'Encountered error on uploading %s to %s\n%s' %
-           (filename, file_url, err)))
-      continue
-
-    # Mark executable files with the header "x-goog-meta-executable: 1" which
-    # the download script will check for to preserve the executable bit.
-    if not sys.platform.startswith('win'):
-      if os.stat(filename).st_mode & stat.S_IEXEC:
-        code, _, err = gsutil.check_call_with_retries(
-            'setmeta', '-h', 'x-goog-meta-executable:1', file_url)
+    """Don't calculate the MD5 if we can find a .md5 file."""
+    # See if we can find an existing MD5 sum stored in a file.
+    if os.path.exists('%s.md5' % filename):
+        with open('%s.md5' % filename, 'rb') as f:
+            md5_match = re.search('([a-z0-9]{32})', f.read().decode())
+            if md5_match:
+                return md5_match.group(1)
+    else:
+        md5_hash = get_md5(filename)
+        with open('%s.md5' % filename, 'wb') as f:
+            f.write(md5_hash.encode())
+        return md5_hash
+
+
+def _upload_worker(thread_num, upload_queue, base_url, gsutil, md5_lock, force,
+                   use_md5, stdout_queue, ret_codes, gzip):
+    while True:
+        filename, sha1_sum = upload_queue.get()
+        if not filename:
+            break
+        file_url = '%s/%s' % (base_url, sha1_sum)
+        if gsutil.check_call('ls', file_url)[0] == 0 and not force:
+            # File exists, check MD5 hash.
+            _, out, _ = gsutil.check_call_with_retries('ls', '-L', file_url)
+            etag_match = re.search(r'ETag:\s+\S+', out)
+            if etag_match:
+                stdout_queue.put('%d> File with url %s already exists' %
+                                 (thread_num, file_url))
+                remote_md5 = etag_match.group(0).split()[1]
+                # Calculate the MD5 checksum to match it to Google Storage's
+                # ETag.
+                with md5_lock:
+                    if use_md5:
+                        local_md5 = get_md5_cached(filename)
+                    else:
+                        local_md5 = get_md5(filename)
+                if local_md5 == remote_md5:
+                    stdout_queue.put(
+                        '%d> File %s already exists and MD5 matches, upload '
+                        'skipped' % (thread_num, filename))
+                    continue
+        stdout_queue.put('%d> Uploading %s...' % (thread_num, filename))
+        gsutil_args = ['-h', 'Cache-Control:public, max-age=31536000', 'cp']
+        if gzip:
+            gsutil_args.extend(['-z', gzip])
+        gsutil_args.extend([filename, file_url])
+        code, _, err = gsutil.check_call_with_retries(*gsutil_args)
         if code != 0:
-          ret_codes.put(
-              (code,
-               'Encountered error on setting metadata on %s\n%s' %
-               (file_url, err)))
+            ret_codes.put((code, 'Encountered error on uploading %s to %s\n%s' %
+                           (filename, file_url, err)))
+            continue
+
+        # Mark executable files with the header "x-goog-meta-executable: 1"
+        # which the download script will check for to preserve the executable
+        # bit.
+        if not sys.platform.startswith('win'):
+            if os.stat(filename).st_mode & stat.S_IEXEC:
+                code, _, err = gsutil.check_call_with_retries(
+                    'setmeta', '-h', 'x-goog-meta-executable:1', file_url)
+                if code != 0:
+                    ret_codes.put(
+                        (code,
+                         'Encountered error on setting metadata on %s\n%s' %
+                         (file_url, err)))
 
 
 def get_targets(args, parser, use_null_terminator):
-  if not args:
-    parser.error('Missing target.')
-
-  if len(args) == 1 and args[0] == '-':
-    # Take stdin as a newline or null separated list of files.
-    if use_null_terminator:
-      return sys.stdin.read().split('\0')
-
-    return sys.stdin.read().splitlines()
-
-  return args
-
-
-def upload_to_google_storage(
-    input_filenames, base_url, gsutil, force,
-    use_md5, num_threads, skip_hashing, gzip):
-  # We only want one MD5 calculation happening at a time to avoid HD thrashing.
-  md5_lock = threading.Lock()
-
-  # Start up all the worker threads plus the printer thread.
-  all_threads = []
-  ret_codes = queue.Queue()
-  ret_codes.put((0, None))
-  upload_queue = queue.Queue()
-  upload_timer = time.time()
-  stdout_queue = queue.Queue()
-  printer_thread = PrinterThread(stdout_queue)
-  printer_thread.daemon = True
-  printer_thread.start()
-  for thread_num in range(num_threads):
-    t = threading.Thread(
-        target=_upload_worker,
-        args=[thread_num, upload_queue, base_url, gsutil, md5_lock,
-              force, use_md5, stdout_queue, ret_codes, gzip])
-    t.daemon = True
-    t.start()
-    all_threads.append(t)
-
-  # We want to hash everything in a single thread since its faster.
-  # The bottleneck is in disk IO, not CPU.
-  hashing_start = time.time()
-  has_missing_files = False
-  for filename in input_filenames:
-    if not os.path.exists(filename):
-      stdout_queue.put('Main> Error: %s not found, skipping.' % filename)
-      has_missing_files = True
-      continue
-    if os.path.exists('%s.sha1' % filename) and skip_hashing:
-      stdout_queue.put(
-          'Main> Found hash for %s, sha1 calculation skipped.' % filename)
-      with open(filename + '.sha1', 'rb') as f:
-        sha1_file = f.read(1024)
-      if not re.match('^([a-z0-9]{40})$', sha1_file.decode()):
-        print('Invalid sha1 hash file %s.sha1' % filename, file=sys.stderr)
-        return 1
-      upload_queue.put((filename, sha1_file.decode()))
-      continue
-    stdout_queue.put('Main> Calculating hash for %s...' % filename)
-    sha1_sum = get_sha1(filename)
-    with open(filename + '.sha1', 'wb') as f:
-      f.write(sha1_sum.encode())
-    stdout_queue.put('Main> Done calculating hash for %s.' % filename)
-    upload_queue.put((filename, sha1_sum))
-  hashing_duration = time.time() - hashing_start
-
-  # Wait for everything to finish.
-  for _ in all_threads:
-    upload_queue.put((None, None))  # To mark the end of the work queue.
-  for t in all_threads:
-    t.join()
-  stdout_queue.put(None)
-  printer_thread.join()
-
-  # Print timing information.
-  print('Hashing %s files took %1f seconds' % (
-      len(input_filenames), hashing_duration))
-  print('Uploading took %1f seconds' % (time.time() - upload_timer))
-
-  # See if we ran into any errors.
-  max_ret_code = 0
-  for ret_code, message in ret_codes.queue:
-    max_ret_code = max(ret_code, max_ret_code)
-    if message:
-      print(message, file=sys.stderr)
-  if has_missing_files:
-    print('One or more input files missing', file=sys.stderr)
-    max_ret_code = max(1, max_ret_code)
-
-  if not max_ret_code:
-    print('Success!')
-
-  return max_ret_code
+    if not args:
+        parser.error('Missing target.')
+
+    if len(args) == 1 and args[0] == '-':
+        # Take stdin as a newline or null separated list of files.
+        if use_null_terminator:
+            return sys.stdin.read().split('\0')
+
+        return sys.stdin.read().splitlines()
+
+    return args
+
+
+def upload_to_google_storage(input_filenames, base_url, gsutil, force, use_md5,
+                             num_threads, skip_hashing, gzip):
+    # We only want one MD5 calculation happening at a time to avoid HD
+    # thrashing.
+    md5_lock = threading.Lock()
+
+    # Start up all the worker threads plus the printer thread.
+    all_threads = []
+    ret_codes = queue.Queue()
+    ret_codes.put((0, None))
+    upload_queue = queue.Queue()
+    upload_timer = time.time()
+    stdout_queue = queue.Queue()
+    printer_thread = PrinterThread(stdout_queue)
+    printer_thread.daemon = True
+    printer_thread.start()
+    for thread_num in range(num_threads):
+        t = threading.Thread(target=_upload_worker,
+                             args=[
+                                 thread_num, upload_queue, base_url, gsutil,
+                                 md5_lock, force, use_md5, stdout_queue,
+                                 ret_codes, gzip
+                             ])
+        t.daemon = True
+        t.start()
+        all_threads.append(t)
+
+    # We want to hash everything in a single thread since it's faster.
+    # The bottleneck is in disk IO, not CPU.
+    hashing_start = time.time()
+    has_missing_files = False
+    for filename in input_filenames:
+        if not os.path.exists(filename):
+            stdout_queue.put('Main> Error: %s not found, skipping.' % filename)
+            has_missing_files = True
+            continue
+        if os.path.exists('%s.sha1' % filename) and skip_hashing:
+            stdout_queue.put(
+                'Main> Found hash for %s, sha1 calculation skipped.' % filename)
+            with open(filename + '.sha1', 'rb') as f:
+                sha1_file = f.read(1024)
+            if not re.match('^([a-z0-9]{40})$', sha1_file.decode()):
+                print('Invalid sha1 hash file %s.sha1' % filename,
+                      file=sys.stderr)
+                return 1
+            upload_queue.put((filename, sha1_file.decode()))
+            continue
+        stdout_queue.put('Main> Calculating hash for %s...' % filename)
+        sha1_sum = get_sha1(filename)
+        with open(filename + '.sha1', 'wb') as f:
+            f.write(sha1_sum.encode())
+        stdout_queue.put('Main> Done calculating hash for %s.' % filename)
+        upload_queue.put((filename, sha1_sum))
+    hashing_duration = time.time() - hashing_start
+
+    # Wait for everything to finish.
+    for _ in all_threads:
+        upload_queue.put((None, None))  # To mark the end of the work queue.
+    for t in all_threads:
+        t.join()
+    stdout_queue.put(None)
+    printer_thread.join()
+
+    # Print timing information.
+    print('Hashing %s files took %1f seconds' %
+          (len(input_filenames), hashing_duration))
+    print('Uploading took %1f seconds' % (time.time() - upload_timer))
+
+    # See if we ran into any errors.
+    max_ret_code = 0
+    for ret_code, message in ret_codes.queue:
+        max_ret_code = max(ret_code, max_ret_code)
+        if message:
+            print(message, file=sys.stderr)
+    if has_missing_files:
+        print('One or more input files missing', file=sys.stderr)
+        max_ret_code = max(1, max_ret_code)
+
+    if not max_ret_code:
+        print('Success!')
+
+    return max_ret_code
 
 
 def create_archives(dirs):
-  archive_names = []
-  for name in dirs:
-    tarname = '%s.tar.gz' % name
-    with tarfile.open(tarname, 'w:gz') as tar:
-      tar.add(name)
-    archive_names.append(tarname)
-  return archive_names
+    archive_names = []
+    for name in dirs:
+        tarname = '%s.tar.gz' % name
+        with tarfile.open(tarname, 'w:gz') as tar:
+            tar.add(name)
+        archive_names.append(tarname)
+    return archive_names
 
 
 def validate_archive_dirs(dirs):
-  for d in dirs:
-    # We don't allow .. in paths in our archives.
-    if d == '..':
-      return False
-    # We only allow dirs.
-    if not os.path.isdir(d):
-      return False
-    # We don't allow sym links in our archives.
-    if os.path.islink(d):
-      return False
-    # We required that the subdirectories we are archiving are all just below
-    # cwd.
-    if d not in next(os.walk('.'))[1]:
-      return False
-
-  return True
+    for d in dirs:
+        # We don't allow .. in paths in our archives.
+        if d == '..':
+            return False
+        # We only allow dirs.
+        if not os.path.isdir(d):
+            return False
+        # We don't allow sym links in our archives.
+        if os.path.islink(d):
+            return False
+        # We require that the subdirectories we are archiving are all just
+        # below cwd.
+        if d not in next(os.walk('.'))[1]:
+            return False
+
+    return True
 
 
 def main():
-  parser = optparse.OptionParser(USAGE_STRING)
-  parser.add_option('-b', '--bucket',
-                    help='Google Storage bucket to upload to.')
-  parser.add_option('-e', '--boto', help='Specify a custom boto file.')
-  parser.add_option('-a', '--archive', action='store_true',
-                    help='Archive directory as a tar.gz file')
-  parser.add_option('-f', '--force', action='store_true',
-                    help='Force upload even if remote file exists.')
-  parser.add_option('-g', '--gsutil_path', default=GSUTIL_DEFAULT_PATH,
-                    help='Path to the gsutil script.')
-  parser.add_option('-m', '--use_md5', action='store_true',
-                    help='Generate MD5 files when scanning, and don\'t check '
-                    'the MD5 checksum if a .md5 file is found.')
-  parser.add_option('-t', '--num_threads', default=1, type='int',
-                    help='Number of uploader threads to run.')
-  parser.add_option('-s', '--skip_hashing', action='store_true',
-                    help='Skip hashing if .sha1 file exists.')
-  parser.add_option('-0', '--use_null_terminator', action='store_true',
-                    help='Use \\0 instead of \\n when parsing '
-                    'the file list from stdin.  This is useful if the input '
-                    'is coming from "find ... -print0".')
-  parser.add_option('-z', '--gzip', metavar='ext',
-                    help='Gzip files which end in ext. '
-                         'ext is a comma-separated list')
-  (options, args) = parser.parse_args()
-
-  # Enumerate our inputs.
-  input_filenames = get_targets(args, parser, options.use_null_terminator)
-
-  if options.archive:
-    if not validate_archive_dirs(input_filenames):
-      parser.error('Only directories just below cwd are valid entries when '
-                   'using the --archive argument. Entries can not contain .. '
-                   ' and entries can not be symlinks. Entries was %s' %
-                    input_filenames)
-      return 1
-    input_filenames = create_archives(input_filenames)
-
-  # Make sure we can find a working instance of gsutil.
-  if os.path.exists(GSUTIL_DEFAULT_PATH):
-    gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=options.boto)
-  else:
-    gsutil = None
-    for path in os.environ["PATH"].split(os.pathsep):
-      if os.path.exists(path) and 'gsutil' in os.listdir(path):
-        gsutil = Gsutil(os.path.join(path, 'gsutil'), boto_path=options.boto)
-    if not gsutil:
-      parser.error('gsutil not found in %s, bad depot_tools checkout?' %
-                   GSUTIL_DEFAULT_PATH)
-
-  base_url = 'gs://%s' % options.bucket
-
-  return upload_to_google_storage(
-      input_filenames, base_url, gsutil, options.force, options.use_md5,
-      options.num_threads, options.skip_hashing, options.gzip)
+    parser = optparse.OptionParser(USAGE_STRING)
+    parser.add_option('-b',
+                      '--bucket',
+                      help='Google Storage bucket to upload to.')
+    parser.add_option('-e', '--boto', help='Specify a custom boto file.')
+    parser.add_option('-a',
+                      '--archive',
+                      action='store_true',
+                      help='Archive directory as a tar.gz file')
+    parser.add_option('-f',
+                      '--force',
+                      action='store_true',
+                      help='Force upload even if remote file exists.')
+    parser.add_option('-g',
+                      '--gsutil_path',
+                      default=GSUTIL_DEFAULT_PATH,
+                      help='Path to the gsutil script.')
+    parser.add_option('-m',
+                      '--use_md5',
+                      action='store_true',
+                      help='Generate MD5 files when scanning, and don\'t check '
+                      'the MD5 checksum if a .md5 file is found.')
+    parser.add_option('-t',
+                      '--num_threads',
+                      default=1,
+                      type='int',
+                      help='Number of uploader threads to run.')
+    parser.add_option('-s',
+                      '--skip_hashing',
+                      action='store_true',
+                      help='Skip hashing if .sha1 file exists.')
+    parser.add_option('-0',
+                      '--use_null_terminator',
+                      action='store_true',
+                      help='Use \\0 instead of \\n when parsing '
+                      'the file list from stdin.  This is useful if the input '
+                      'is coming from "find ... -print0".')
+    parser.add_option('-z',
+                      '--gzip',
+                      metavar='ext',
+                      help='Gzip files which end in ext. '
+                      'ext is a comma-separated list')
+    (options, args) = parser.parse_args()
+
+    # Enumerate our inputs.
+    input_filenames = get_targets(args, parser, options.use_null_terminator)
+
+    if options.archive:
+        if not validate_archive_dirs(input_filenames):
+            parser.error(
+                'Only directories just below cwd are valid entries when '
+                'using the --archive argument. Entries can not contain .. '
+                ' and entries can not be symlinks. Entries was %s' %
+                input_filenames)
+            return 1
+        input_filenames = create_archives(input_filenames)
+
+    # Make sure we can find a working instance of gsutil.
+    if os.path.exists(GSUTIL_DEFAULT_PATH):
+        gsutil = Gsutil(GSUTIL_DEFAULT_PATH, boto_path=options.boto)
+    else:
+        gsutil = None
+        for path in os.environ["PATH"].split(os.pathsep):
+            if os.path.exists(path) and 'gsutil' in os.listdir(path):
+                gsutil = Gsutil(os.path.join(path, 'gsutil'),
+                                boto_path=options.boto)
+        if not gsutil:
+            parser.error('gsutil not found in %s, bad depot_tools checkout?' %
+                         GSUTIL_DEFAULT_PATH)
+
+    base_url = 'gs://%s' % options.bucket
+
+    return upload_to_google_storage(input_filenames, base_url, gsutil,
+                                    options.force, options.use_md5,
+                                    options.num_threads, options.skip_hashing,
+                                    options.gzip)
 
 
 if __name__ == '__main__':
-  try:
-    sys.exit(main())
-  except KeyboardInterrupt:
-    sys.stderr.write('interrupted\n')
-    sys.exit(1)
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt:
+        sys.stderr.write('interrupted\n')
+        sys.exit(1)
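
The uploader is content-addressed: each file's SHA-1 becomes the object name under gs://<bucket>/, a .sha1 stamp is written next to the local file so download_from_google_storage can find it again, and an optional .md5 cache is compared against the remote ETag to skip re-uploads. A stdlib-only sketch of the same chunked hashing and stamp layout; the filename and bucket are placeholders:

import hashlib


def hash_file(path, algo):
    """Hashes |path| in 1 MiB chunks, mirroring get_md5()/get_sha1() above."""
    h = hashlib.new(algo)
    with open(path, 'rb') as f:
        while True:
            chunk = f.read(1024 * 1024)
            if not chunk:
                break
            h.update(chunk)
    return h.hexdigest()


filename = 'large_file.bin'
sha1 = hash_file(filename, 'sha1')
# The .sha1 stamp is what later resolves back into gs://<bucket>/<sha1>.
with open(filename + '.sha1', 'w') as f:
    f.write(sha1)
print('would upload to gs://example-bucket/%s' % sha1)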

+ 15 - 15
utils.py

@@ -7,19 +7,19 @@ import subprocess
 
 
 def depot_tools_version():
-  depot_tools_root = os.path.dirname(os.path.abspath(__file__))
-  try:
-    commit_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD'],
-                                          cwd=depot_tools_root).decode(
-                                              'utf-8', 'ignore')
-    return 'git-%s' % commit_hash
-  except Exception:
-    pass
+    depot_tools_root = os.path.dirname(os.path.abspath(__file__))
+    try:
+        commit_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD'],
+                                              cwd=depot_tools_root).decode(
+                                                  'utf-8', 'ignore')
+        return 'git-%s' % commit_hash
+    except Exception:
+        pass
 
-  # git check failed, let's check last modification of frequently checked file
-  try:
-    mtime = os.path.getmtime(
-        os.path.join(depot_tools_root, 'infra', 'config', 'recipes.cfg'))
-    return 'recipes.cfg-%d' % (mtime)
-  except Exception:
-    return 'unknown'
+    # git check failed, let's check last modification of frequently checked file
+    try:
+        mtime = os.path.getmtime(
+            os.path.join(depot_tools_root, 'infra', 'config', 'recipes.cfg'))
+        return 'recipes.cfg-%d' % (mtime)
+    except Exception:
+        return 'unknown'
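
depot_tools_version() never raises: it falls back from the current git HEAD hash to the mtime of infra/config/recipes.cfg, and finally to the literal 'unknown'. A one-line usage sketch, assuming depot_tools is importable:

import utils

# Typical results look like 'git-<sha>', 'recipes.cfg-<mtime>' or 'unknown'.
print('depot_tools version: %s' % utils.depot_tools_version())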

+ 93 - 92
watchlists.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2011 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Watchlists
 
 Watchlists is a mechanism that allow a developer (a "watcher") to watch over
@@ -27,7 +26,7 @@ import sys
 
 
 class Watchlists(object):
-  """Manage Watchlists.
+    """Manage Watchlists.
 
   This class provides mechanism to load watchlists for a repo and identify
   watchers.
@@ -37,77 +36,79 @@ class Watchlists(object):
                                        "/path/to/file2",])
   """
 
-  _RULES = "WATCHLISTS"
-  _RULES_FILENAME = _RULES
-  _repo_root = None
-  _defns = {}       # Definitions
-  _path_regexps = {}  # Name -> Regular expression mapping
-  _watchlists = {}  # name to email mapping
-
-  def __init__(self, repo_root):
-    self._repo_root = repo_root
-    self._LoadWatchlistRules()
-
-  def _GetRulesFilePath(self):
-    """Returns path to WATCHLISTS file."""
-    return os.path.join(self._repo_root, self._RULES_FILENAME)
-
-  def _HasWatchlistsFile(self):
-    """Determine if watchlists are available for this repo."""
-    return os.path.exists(self._GetRulesFilePath())
-
-  def _ContentsOfWatchlistsFile(self):
-    """Read the WATCHLISTS file and return its contents."""
-    try:
-      watchlists_file = open(self._GetRulesFilePath())
-      contents = watchlists_file.read()
-      watchlists_file.close()
-      return contents
-    except IOError as e:
-      logging.error("Cannot read %s: %s" % (self._GetRulesFilePath(), e))
-      return ''
-
-  def _LoadWatchlistRules(self):
-    """Load watchlists from WATCHLISTS file. Does nothing if not present."""
-    if not self._HasWatchlistsFile():
-      return
-
-    contents = self._ContentsOfWatchlistsFile()
-    watchlists_data = None
-    try:
-      watchlists_data = eval(contents, {'__builtins__': None}, None)
-    except SyntaxError as e:
-      logging.error("Cannot parse %s. %s" % (self._GetRulesFilePath(), e))
-      return
-
-    defns = watchlists_data.get("WATCHLIST_DEFINITIONS")
-    if not defns:
-      logging.error("WATCHLIST_DEFINITIONS not defined in %s" %
-                    self._GetRulesFilePath())
-      return
-    watchlists = watchlists_data.get("WATCHLISTS")
-    if not watchlists:
-      logging.error("WATCHLISTS not defined in %s" % self._GetRulesFilePath())
-      return
-    self._defns = defns
-    self._watchlists = watchlists
-
-    # Compile the regular expressions ahead of time to avoid creating them
-    # on-the-fly multiple times per file.
-    self._path_regexps = {}
-    for name, rule in defns.items():
-      filepath = rule.get('filepath')
-      if not filepath:
-        continue
-      self._path_regexps[name] = re.compile(filepath)
-
-    # Verify that all watchlist names are defined
-    for name in watchlists:
-      if name not in defns:
-        logging.error("%s not defined in %s" % (name, self._GetRulesFilePath()))
-
-  def GetWatchersForPaths(self, paths):
-    """Fetch the list of watchers for |paths|
+    _RULES = "WATCHLISTS"
+    _RULES_FILENAME = _RULES
+    _repo_root = None
+    _defns = {}  # Definitions
+    _path_regexps = {}  # Name -> Regular expression mapping
+    _watchlists = {}  # name to email mapping
+
+    def __init__(self, repo_root):
+        self._repo_root = repo_root
+        self._LoadWatchlistRules()
+
+    def _GetRulesFilePath(self):
+        """Returns path to WATCHLISTS file."""
+        return os.path.join(self._repo_root, self._RULES_FILENAME)
+
+    def _HasWatchlistsFile(self):
+        """Determine if watchlists are available for this repo."""
+        return os.path.exists(self._GetRulesFilePath())
+
+    def _ContentsOfWatchlistsFile(self):
+        """Read the WATCHLISTS file and return its contents."""
+        try:
+            watchlists_file = open(self._GetRulesFilePath())
+            contents = watchlists_file.read()
+            watchlists_file.close()
+            return contents
+        except IOError as e:
+            logging.error("Cannot read %s: %s" % (self._GetRulesFilePath(), e))
+            return ''
+
+    def _LoadWatchlistRules(self):
+        """Load watchlists from WATCHLISTS file. Does nothing if not present."""
+        if not self._HasWatchlistsFile():
+            return
+
+        contents = self._ContentsOfWatchlistsFile()
+        watchlists_data = None
+        try:
+            watchlists_data = eval(contents, {'__builtins__': None}, None)
+        except SyntaxError as e:
+            logging.error("Cannot parse %s. %s" % (self._GetRulesFilePath(), e))
+            return
+
+        defns = watchlists_data.get("WATCHLIST_DEFINITIONS")
+        if not defns:
+            logging.error("WATCHLIST_DEFINITIONS not defined in %s" %
+                          self._GetRulesFilePath())
+            return
+        watchlists = watchlists_data.get("WATCHLISTS")
+        if not watchlists:
+            logging.error("WATCHLISTS not defined in %s" %
+                          self._GetRulesFilePath())
+            return
+        self._defns = defns
+        self._watchlists = watchlists
+
+        # Compile the regular expressions ahead of time to avoid creating them
+        # on-the-fly multiple times per file.
+        self._path_regexps = {}
+        for name, rule in defns.items():
+            filepath = rule.get('filepath')
+            if not filepath:
+                continue
+            self._path_regexps[name] = re.compile(filepath)
+
+        # Verify that all watchlist names are defined
+        for name in watchlists:
+            if name not in defns:
+                logging.error("%s not defined in %s" %
+                              (name, self._GetRulesFilePath()))
+
+    def GetWatchersForPaths(self, paths):
+        """Fetch the list of watchers for |paths|
 
     Args:
       paths: [path1, path2, ...]
@@ -115,28 +116,28 @@ class Watchlists(object):
     Returns:
       [u1@chromium.org, u2@gmail.com, ...]
     """
-    watchers = set()  # A set, to avoid duplicates
-    for path in paths:
-      path = path.replace(os.sep, '/')
-      for name, rule in self._path_regexps.items():
-        if name not in self._watchlists:
-          continue
-        if rule.search(path):
-          for watchlist in self._watchlists[name]:
-            watchers.add(watchlist)
-    return sorted(watchers)
+        watchers = set()  # A set, to avoid duplicates
+        for path in paths:
+            path = path.replace(os.sep, '/')
+            for name, rule in self._path_regexps.items():
+                if name not in self._watchlists:
+                    continue
+                if rule.search(path):
+                    for watchlist in self._watchlists[name]:
+                        watchers.add(watchlist)
+        return sorted(watchers)
 
 
 def main(argv):
-  # Confirm that watchlists can be parsed and spew out the watchers
-  if len(argv) < 2:
-    print("Usage (from the base of repo):")
-    print("  %s [file-1] [file-2] ...." % argv[0])
-    return 1
-  wl = Watchlists(os.getcwd())
-  watchers = wl.GetWatchersForPaths(argv[1:])
-  print(watchers)
+    # Confirm that watchlists can be parsed and spew out the watchers
+    if len(argv) < 2:
+        print("Usage (from the base of repo):")
+        print("  %s [file-1] [file-2] ...." % argv[0])
+        return 1
+    wl = Watchlists(os.getcwd())
+    watchers = wl.GetWatchersForPaths(argv[1:])
+    print(watchers)
 
 
 if __name__ == '__main__':
-  main(sys.argv)
+    main(sys.argv)
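
A WATCHLISTS file is a Python literal evaluated with no builtins: WATCHLIST_DEFINITIONS maps a name to a 'filepath' regexp, and WATCHLISTS maps the same name to the emails to notify. A minimal sketch of such a file plus a query; the watchlist name, regexp and address are invented for illustration:

# Contents of a hypothetical WATCHLISTS file at the repo root:
{
  'WATCHLIST_DEFINITIONS': {
    'gpu': {
      'filepath': 'gpu/|ui/gl/',
    },
  },
  'WATCHLISTS': {
    'gpu': ['gpu-watchers@example.com'],
  },
}

# Querying it; GetWatchersForPaths() returns the sorted, de-duplicated union
# of every watchlist whose regexp matches one of the paths:
from watchlists import Watchlists

wl = Watchlists('/path/to/repo/root')
print(wl.GetWatchersForPaths(['gpu/command_buffer/service/texture.cc']))
# -> ['gpu-watchers@example.com']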

+ 25 - 24
weekly

@@ -2,7 +2,6 @@
 # Copyright (c) 2010 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Display log of checkins of one particular developer since a particular
 date. Only works on git dependencies at the moment."""
 
@@ -17,38 +16,40 @@ import sys
 
 
 def show_log(path, authors, since='1 week ago'):
-  """Display log in a single git repo."""
+    """Display log in a single git repo."""
 
-  author_option = ' '.join(['--author=' + author for author in authors])
-  command = ' '.join(['git log', author_option, '--since="%s"' % since,
-                      'origin/HEAD', '| git shortlog'])
-  status = subprocess.Popen(['sh', '-c', command],
-                            cwd=path,
-                            stdout=subprocess.PIPE).communicate()[0].rstrip()
+    author_option = ' '.join(['--author=' + author for author in authors])
+    command = ' '.join([
+        'git log', author_option,
+        '--since="%s"' % since, 'origin/HEAD', '| git shortlog'
+    ])
+    status = subprocess.Popen(['sh', '-c', command],
+                              cwd=path,
+                              stdout=subprocess.PIPE).communicate()[0].rstrip()
 
-  if len(status.splitlines()) > 0:
-    print('---------- %s ----------' % path)
-    print(status)
+    if len(status.splitlines()) > 0:
+        print('---------- %s ----------' % path)
+        print(status)
 
 
 def main():
-  """Take no arguments."""
+    """Take no arguments."""
 
-  option_parser = optparse.OptionParser()
-  option_parser.add_option("-a", "--author", action="append", default=[])
-  option_parser.add_option("-s", "--since", default="1 week ago")
-  options, args = option_parser.parse_args()
+    option_parser = optparse.OptionParser()
+    option_parser.add_option("-a", "--author", action="append", default=[])
+    option_parser.add_option("-s", "--since", default="1 week ago")
+    options, args = option_parser.parse_args()
 
-  root, entries = gclient_utils.GetGClientRootAndEntries()
+    root, entries = gclient_utils.GetGClientRootAndEntries()
 
-  # which entries map to a git repos?
-  paths = [k for k, v in entries.items() if not re.search('svn', v)]
-  paths.sort()
+    # which entries map to git repos?
+    paths = [k for k, v in entries.items() if not re.search('svn', v)]
+    paths.sort()
 
-  for path in paths:
-    dir = os.path.normpath(os.path.join(root, path))
-    show_log(dir, options.author, options.since)
+    for path in paths:
+        dir = os.path.normpath(os.path.join(root, path))
+        show_log(dir, options.author, options.since)
 
 
 if __name__ == '__main__':
-  main()
+    main()
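
weekly walks every git entry recorded by gclient and shells out to "git log --author=... --since=... origin/HEAD | git shortlog" inside each one. The core of show_log() as a standalone sketch, without the gclient plumbing; the repo paths and author are placeholders, and 'sh -c' is kept so the shortlog pipe works the same way:

import subprocess


def weekly_shortlog(repo, author, since='1 week ago'):
    command = ('git log --author=%s --since="%s" origin/HEAD | git shortlog' %
               (author, since))
    out = subprocess.run(['sh', '-c', command],
                         cwd=repo,
                         capture_output=True,
                         text=True).stdout.rstrip()
    if out:
        print('---------- %s ----------' % repo)
        print(out)


for repo in ['src', 'src/third_party/skia']:
    weekly_shortlog(repo, 'somedev@example.com')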

+ 7 - 7
win32imports.py

@@ -14,13 +14,13 @@ LOCKFILE_FAIL_IMMEDIATELY = 0x00000001
 
 
 class Overlapped(ctypes.Structure):
-  """Overlapped is required and used in LockFileEx and UnlockFileEx."""
-  _fields_ = [('Internal', ctypes.wintypes.LPVOID),
-              ('InternalHigh', ctypes.wintypes.LPVOID),
-              ('Offset', ctypes.wintypes.DWORD),
-              ('OffsetHigh', ctypes.wintypes.DWORD),
-              ('Pointer', ctypes.wintypes.LPVOID),
-              ('hEvent', ctypes.wintypes.HANDLE)]
+    """Overlapped is required and used in LockFileEx and UnlockFileEx."""
+    _fields_ = [('Internal', ctypes.wintypes.LPVOID),
+                ('InternalHigh', ctypes.wintypes.LPVOID),
+                ('Offset', ctypes.wintypes.DWORD),
+                ('OffsetHigh', ctypes.wintypes.DWORD),
+                ('Pointer', ctypes.wintypes.LPVOID),
+                ('hEvent', ctypes.wintypes.HANDLE)]
 
 
 # https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-createfilew
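
Overlapped mirrors the Win32 OVERLAPPED structure that LockFileEx and UnlockFileEx take as their last argument, alongside the LOCKFILE_* flag constants. A Windows-only sketch of a non-blocking exclusive lock built on these bindings; the lock file name is a placeholder, error handling is reduced to a boolean, and LOCKFILE_EXCLUSIVE_LOCK is redefined here from the SDK value rather than imported:

import ctypes
import ctypes.wintypes
import msvcrt

from win32imports import LOCKFILE_FAIL_IMMEDIATELY, Overlapped

LOCKFILE_EXCLUSIVE_LOCK = 0x00000002  # value from the Windows SDK headers


def try_lock(path):
    """Returns the open file on success, or None if the lock is held."""
    f = open(path, 'a')
    handle = ctypes.wintypes.HANDLE(msvcrt.get_osfhandle(f.fileno()))
    ok = ctypes.windll.kernel32.LockFileEx(
        handle,
        LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY,
        0,     # dwReserved, must be zero
        1, 0,  # lock a single byte: low/high halves of the length
        ctypes.byref(Overlapped()))
    if not ok:
        f.close()
        return None
    return f


lock = try_lock('build.lock')
print('lock acquired' if lock else 'already locked by someone else')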

+ 498 - 470
win_toolchain/get_toolchain_if_necessary.py

@@ -2,7 +2,6 @@
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Downloads and unpacks a toolchain for building on Windows. The contents are
 matched by sha1 which will be updated when the toolchain is updated.
 
@@ -42,568 +41,597 @@ ENV_TOOLCHAIN_ROOT = 'DEPOT_TOOLS_WIN_TOOLCHAIN_ROOT'
 
 # winreg isn't natively available under CygWin
 if sys.platform == "win32":
-  try:
-    import winreg
-  except ImportError:
-    import _winreg as winreg
+    try:
+        import winreg
+    except ImportError:
+        import _winreg as winreg
 elif sys.platform == "cygwin":
-  try:
-    import cygwinreg as winreg
-  except ImportError:
-    print('')
-    print('CygWin does not natively support winreg but a replacement exists.')
-    print('https://pypi.python.org/pypi/cygwinreg/')
-    print('')
-    print('Try: easy_install cygwinreg')
-    print('')
-    raise
+    try:
+        import cygwinreg as winreg
+    except ImportError:
+        print('')
+        print(
+            'CygWin does not natively support winreg but a replacement exists.')
+        print('https://pypi.python.org/pypi/cygwinreg/')
+        print('')
+        print('Try: easy_install cygwinreg')
+        print('')
+        raise
 
 BASEDIR = os.path.dirname(os.path.abspath(__file__))
 DEPOT_TOOLS_PATH = os.path.join(BASEDIR, '..')
 sys.path.append(DEPOT_TOOLS_PATH)
 try:
-  import download_from_google_storage
+    import download_from_google_storage
 except ImportError:
-  # Allow use of utility functions in this script from package_from_installed
-  # on bare VM that doesn't have a full depot_tools.
-  pass
+    # Allow use of utility functions in this script from package_from_installed
+    # on bare VM that doesn't have a full depot_tools.
+    pass
 
 
 def GetFileList(root):
-  """Gets a normalized list of files under |root|."""
-  assert not os.path.isabs(root)
-  assert os.path.normpath(root) == root
-  file_list = []
-  # Ignore WER ReportQueue entries that vctip/cl leave in the bin dir if/when
-  # they crash. Also ignores the content of the
-  # Windows Kits/10/debuggers/x(86|64)/(sym|src)/ directories as this is just
-  # the temporarily location that Windbg might use to store the symbol files
-  # and downloaded sources.
-  #
-  # Note: These files are only created on a Windows host, so the
-  # ignored_directories list isn't relevant on non-Windows hosts.
-
-  # The Windows SDK is either in `win_sdk` or in `Windows Kits\10`. This
-  # script must work with both layouts, so check which one it is.
-  # This can be different in each |root|.
-  if os.path.isdir(os.path.join(root, 'Windows Kits', '10')):
-    win_sdk = 'Windows Kits\\10'
-  else:
-    win_sdk = 'win_sdk'
-
-  ignored_directories = ['wer\\reportqueue',
-                         win_sdk + '\\debuggers\\x86\\sym\\',
-                         win_sdk + '\\debuggers\\x64\\sym\\',
-                         win_sdk + '\\debuggers\\x86\\src\\',
-                         win_sdk + '\\debuggers\\x64\\src\\']
-  ignored_directories = [d.lower() for d in ignored_directories]
-
-  for base, _, files in os.walk(root):
-    paths = [os.path.join(base, f) for f in files]
-    for p in paths:
-      if any(ignored_dir in p.lower() for ignored_dir in ignored_directories):
-        continue
-      file_list.append(p)
-  return sorted(file_list, key=lambda s: s.replace('/', '\\').lower())
+    """Gets a normalized list of files under |root|."""
+    assert not os.path.isabs(root)
+    assert os.path.normpath(root) == root
+    file_list = []
+    # Ignore WER ReportQueue entries that vctip/cl leave in the bin dir if/when
+    # they crash. Also ignores the content of the
+    # Windows Kits/10/debuggers/x(86|64)/(sym|src)/ directories as this is just
+    # the temporary location that Windbg might use to store the symbol files
+    # and downloaded sources.
+    #
+    # Note: These files are only created on a Windows host, so the
+    # ignored_directories list isn't relevant on non-Windows hosts.
+
+    # The Windows SDK is either in `win_sdk` or in `Windows Kits\10`. This
+    # script must work with both layouts, so check which one it is.
+    # This can be different in each |root|.
+    if os.path.isdir(os.path.join(root, 'Windows Kits', '10')):
+        win_sdk = 'Windows Kits\\10'
+    else:
+        win_sdk = 'win_sdk'
+
+    ignored_directories = [
+        'wer\\reportqueue', win_sdk + '\\debuggers\\x86\\sym\\',
+        win_sdk + '\\debuggers\\x64\\sym\\',
+        win_sdk + '\\debuggers\\x86\\src\\', win_sdk + '\\debuggers\\x64\\src\\'
+    ]
+    ignored_directories = [d.lower() for d in ignored_directories]
+
+    for base, _, files in os.walk(root):
+        paths = [os.path.join(base, f) for f in files]
+        for p in paths:
+            if any(ignored_dir in p.lower()
+                   for ignored_dir in ignored_directories):
+                continue
+            file_list.append(p)
+    return sorted(file_list, key=lambda s: s.replace('/', '\\').lower())
 
 
 def MakeTimestampsFileName(root, sha1):
-  return os.path.join(root, os.pardir, '%s.timestamps' % sha1)
+    return os.path.join(root, os.pardir, '%s.timestamps' % sha1)
 
 
 def CalculateHash(root, expected_hash):
-  """Calculates the sha1 of the paths to all files in the given |root| and the
+    """Calculates the sha1 of the paths to all files in the given |root| and the
   contents of those files, and returns as a hex string.
 
   |expected_hash| is the expected hash value for this toolchain if it has
   already been installed.
   """
-  if expected_hash:
-    full_root_path = os.path.join(root, expected_hash)
-  else:
-    full_root_path = root
-  file_list = GetFileList(full_root_path)
-  # Check whether we previously saved timestamps in $root/../{sha1}.timestamps.
-  # If we didn't, or they don't match, then do the full calculation, otherwise
-  # return the saved value.
-  timestamps_file = MakeTimestampsFileName(root, expected_hash)
-  timestamps_data = {'files': [], 'sha1': ''}
-  if os.path.exists(timestamps_file):
-    with open(timestamps_file, 'rb') as f:
-      try:
-        timestamps_data = json.load(f)
-      except ValueError:
-        # json couldn't be loaded, empty data will force a re-hash.
-        pass
-
-  matches = len(file_list) == len(timestamps_data['files'])
-  # Don't check the timestamp of the version file as we touch this file to
-  # indicate which versions of the toolchain are still being used.
-  vc_dir = os.path.join(full_root_path, 'VC').lower()
-  if matches:
-    for disk, cached in zip(file_list, timestamps_data['files']):
-      if disk != cached[0] or (
-          disk != vc_dir and os.path.getmtime(disk) != cached[1]):
-        matches = False
-        break
-  elif os.path.exists(timestamps_file):
-    # Print some information about the extra/missing files. Don't do this if we
-    # don't have a timestamp file, as all the files will be considered as
-    # missing.
-    timestamps_data_files = []
-    for f in timestamps_data['files']:
-      timestamps_data_files.append(f[0])
-    missing_files = [f for f in timestamps_data_files if f not in file_list]
-    if len(missing_files):
-      print('%d files missing from the %s version of the toolchain:' %
-             (len(missing_files), expected_hash))
-      for f in missing_files[:10]:
-        print('\t%s' % f)
-      if len(missing_files) > 10:
-        print('\t...')
-    extra_files = [f for f in file_list if f not in timestamps_data_files]
-    if len(extra_files):
-      print('%d extra files in the %s version of the toolchain:' %
-             (len(extra_files), expected_hash))
-      for f in extra_files[:10]:
-        print('\t%s' % f)
-      if len(extra_files) > 10:
-        print('\t...')
-  if matches:
-    return timestamps_data['sha1']
-
-  # Make long hangs when updating the toolchain less mysterious.
-  print('Calculating hash of toolchain in %s. Please wait...' % full_root_path)
-  sys.stdout.flush()
-  digest = hashlib.sha1()
-  for path in file_list:
-    path_without_hash = str(path).replace('/', '\\')
     if expected_hash:
-      path_without_hash = path_without_hash.replace(
-          os.path.join(root, expected_hash).replace('/', '\\'), root)
-    digest.update(bytes(path_without_hash.lower(), 'utf-8'))
-    with open(path, 'rb') as f:
-      digest.update(f.read())
-
-  # Save the timestamp file if the calculated hash is the expected one.
-  # The expected hash may be shorter, to reduce path lengths, in which case just
-  # compare that many characters.
-  if expected_hash and digest.hexdigest().startswith(expected_hash):
-    SaveTimestampsAndHash(root, digest.hexdigest())
-    # Return the (potentially truncated) expected_hash.
-    return expected_hash
-  return digest.hexdigest()
+        full_root_path = os.path.join(root, expected_hash)
+    else:
+        full_root_path = root
+    file_list = GetFileList(full_root_path)
+    # Check whether we previously saved timestamps in
+    # $root/../{sha1}.timestamps. If we didn't, or they don't match, then do the
+    # full calculation, otherwise return the saved value.
+    timestamps_file = MakeTimestampsFileName(root, expected_hash)
+    timestamps_data = {'files': [], 'sha1': ''}
+    if os.path.exists(timestamps_file):
+        with open(timestamps_file, 'rb') as f:
+            try:
+                timestamps_data = json.load(f)
+            except ValueError:
+                # json couldn't be loaded, empty data will force a re-hash.
+                pass
+
+    matches = len(file_list) == len(timestamps_data['files'])
+    # Don't check the timestamp of the version file as we touch this file to
+    # indicate which versions of the toolchain are still being used.
+    vc_dir = os.path.join(full_root_path, 'VC').lower()
+    if matches:
+        for disk, cached in zip(file_list, timestamps_data['files']):
+            if disk != cached[0] or (disk != vc_dir
+                                     and os.path.getmtime(disk) != cached[1]):
+                matches = False
+                break
+    elif os.path.exists(timestamps_file):
+        # Print some information about the extra/missing files. Don't do this if
+        # we don't have a timestamp file, as all the files will be considered as
+        # missing.
+        timestamps_data_files = []
+        for f in timestamps_data['files']:
+            timestamps_data_files.append(f[0])
+        missing_files = [f for f in timestamps_data_files if f not in file_list]
+        if len(missing_files):
+            print('%d files missing from the %s version of the toolchain:' %
+                  (len(missing_files), expected_hash))
+            for f in missing_files[:10]:
+                print('\t%s' % f)
+            if len(missing_files) > 10:
+                print('\t...')
+        extra_files = [f for f in file_list if f not in timestamps_data_files]
+        if len(extra_files):
+            print('%d extra files in the %s version of the toolchain:' %
+                  (len(extra_files), expected_hash))
+            for f in extra_files[:10]:
+                print('\t%s' % f)
+            if len(extra_files) > 10:
+                print('\t...')
+    if matches:
+        return timestamps_data['sha1']
+
+    # Make long hangs when updating the toolchain less mysterious.
+    print('Calculating hash of toolchain in %s. Please wait...' %
+          full_root_path)
+    sys.stdout.flush()
+    digest = hashlib.sha1()
+    for path in file_list:
+        path_without_hash = str(path).replace('/', '\\')
+        if expected_hash:
+            path_without_hash = path_without_hash.replace(
+                os.path.join(root, expected_hash).replace('/', '\\'), root)
+        digest.update(bytes(path_without_hash.lower(), 'utf-8'))
+        with open(path, 'rb') as f:
+            digest.update(f.read())
+
+    # Save the timestamp file if the calculated hash is the expected one.
+    # The expected hash may be shorter, to reduce path lengths, in which case
+    # just compare that many characters.
+    if expected_hash and digest.hexdigest().startswith(expected_hash):
+        SaveTimestampsAndHash(root, digest.hexdigest())
+        # Return the (potentially truncated) expected_hash.
+        return expected_hash
+    return digest.hexdigest()
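
Distilled, the hashing above is a single sha1 over every file's lower-cased, backslash-normalized path followed by its raw contents, taken in sorted order. A stripped-down sketch of that core loop follows; it omits the expected-hash prefix rewriting and the timestamps cache handled by the real function, and the function name is invented.

import hashlib
import os


def hash_toolchain_tree(root):
    """Sketch: sha1 of (normalized path + file bytes) for every file in root."""
    file_list = []
    for base, _, files in os.walk(root):
        file_list.extend(os.path.join(base, f) for f in files)
    digest = hashlib.sha1()
    # Sort the same way GetFileList() does so the digest is deterministic.
    for path in sorted(file_list, key=lambda s: s.replace('/', '\\').lower()):
        digest.update(path.replace('/', '\\').lower().encode('utf-8'))
        with open(path, 'rb') as f:
            digest.update(f.read())
    return digest.hexdigest()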
 
 
 def CalculateToolchainHashes(root, remove_corrupt_toolchains):
-  """Calculate the hash of the different toolchains installed in the |root|
+    """Calculate the hash of the different toolchains installed in the |root|
   directory."""
-  hashes = []
-  dir_list = [
-      d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))]
-  for d in dir_list:
-    toolchain_hash = CalculateHash(root, d)
-    if toolchain_hash != d:
-      print('The hash of a version of the toolchain has an unexpected value ('
-             '%s instead of %s)%s.' % (toolchain_hash, d,
-             ', removing it' if remove_corrupt_toolchains else ''))
-      if remove_corrupt_toolchains:
-        RemoveToolchain(root, d, True)
-    else:
-      hashes.append(toolchain_hash)
-  return hashes
+    hashes = []
+    dir_list = [
+        d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))
+    ]
+    for d in dir_list:
+        toolchain_hash = CalculateHash(root, d)
+        if toolchain_hash != d:
+            print(
+                'The hash of a version of the toolchain has an unexpected value ('
+                '%s instead of %s)%s.' %
+                (toolchain_hash, d,
+                 ', removing it' if remove_corrupt_toolchains else ''))
+            if remove_corrupt_toolchains:
+                RemoveToolchain(root, d, True)
+        else:
+            hashes.append(toolchain_hash)
+    return hashes
 
 
 def SaveTimestampsAndHash(root, sha1):
-  """Saves timestamps and the final hash to be able to early-out more quickly
+    """Saves timestamps and the final hash to be able to early-out more quickly
   next time."""
-  file_list = GetFileList(os.path.join(root, sha1))
-  timestamps_data = {
-    'files': [[f, os.path.getmtime(f)] for f in file_list],
-    'sha1': sha1,
-  }
-  with open(MakeTimestampsFileName(root, sha1), 'wb') as f:
-    f.write(json.dumps(timestamps_data).encode('utf-8'))
+    file_list = GetFileList(os.path.join(root, sha1))
+    timestamps_data = {
+        'files': [[f, os.path.getmtime(f)] for f in file_list],
+        'sha1': sha1,
+    }
+    with open(MakeTimestampsFileName(root, sha1), 'wb') as f:
+        f.write(json.dumps(timestamps_data).encode('utf-8'))
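
The <sha1>.timestamps file written next to the vs_files directory is the JSON serialization of the dictionary built above; a hypothetical example of that dictionary (paths, mtimes, and the hash are invented for illustration):

# Hypothetical shape of the serialized timestamps data (values invented):
{
    'files': [
        ['vs_files\\<sha1>\\VC\\Tools\\MSVC\\include\\algorithm', 1694000000.0],
        ['vs_files\\<sha1>\\Windows Kits\\10\\Include\\um\\windows.h', 1694000001.0],
    ],
    'sha1': '<sha1>',
}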
 
 
 def HaveSrcInternalAccess():
-  """Checks whether access to src-internal is available."""
-  with open(os.devnull, 'w') as nul:
-    # This is required to avoid modal dialog boxes after Git 2.14.1 and Git
-    # Credential Manager for Windows 1.12. See https://crbug.com/755694 and
-    # https://github.com/Microsoft/Git-Credential-Manager-for-Windows/issues/482.
-    child_env = dict(os.environ, GCM_INTERACTIVE='NEVER')
-    return subprocess.call(
-       ['git', '-c', 'core.askpass=true', 'remote', 'show',
-        'https://chrome-internal.googlesource.com/chrome/src-internal/'],
-       shell=True, stdin=nul, stdout=nul, stderr=nul, env=child_env) == 0
+    """Checks whether access to src-internal is available."""
+    with open(os.devnull, 'w') as nul:
+        # This is required to avoid modal dialog boxes after Git 2.14.1 and Git
+        # Credential Manager for Windows 1.12. See https://crbug.com/755694 and
+        # https://github.com/Microsoft/Git-Credential-Manager-for-Windows/issues/482.
+        child_env = dict(os.environ, GCM_INTERACTIVE='NEVER')
+        return subprocess.call([
+            'git', '-c', 'core.askpass=true', 'remote', 'show',
+            'https://chrome-internal.googlesource.com/chrome/src-internal/'
+        ],
+                               shell=True,
+                               stdin=nul,
+                               stdout=nul,
+                               stderr=nul,
+                               env=child_env) == 0
 
 
 def LooksLikeGoogler():
-  """Checks for a USERDOMAIN environment variable of 'GOOGLE', which
+    """Checks for a USERDOMAIN environment variable of 'GOOGLE', which
   probably implies the current user is a Googler."""
-  return os.environ.get('USERDOMAIN', '').upper() == 'GOOGLE'
+    return os.environ.get('USERDOMAIN', '').upper() == 'GOOGLE'
 
 
 def CanAccessToolchainBucket():
-  """Checks whether the user has access to gs://chrome-wintoolchain/."""
-  gsutil = download_from_google_storage.Gsutil(
-      download_from_google_storage.GSUTIL_DEFAULT_PATH, boto_path=None)
-  code, stdout, stderr = gsutil.check_call('ls', 'gs://chrome-wintoolchain/')
-  if code != 0:
-    # Make sure any error messages are made visible to the user.
-    print(stderr, file=sys.stderr, end='')
-    print(stdout, end='')
-  return code == 0
+    """Checks whether the user has access to gs://chrome-wintoolchain/."""
+    gsutil = download_from_google_storage.Gsutil(
+        download_from_google_storage.GSUTIL_DEFAULT_PATH, boto_path=None)
+    code, stdout, stderr = gsutil.check_call('ls', 'gs://chrome-wintoolchain/')
+    if code != 0:
+        # Make sure any error messages are made visible to the user.
+        print(stderr, file=sys.stderr, end='')
+        print(stdout, end='')
+    return code == 0
 
 
 def ToolchainBaseURL():
-  base_url = os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN_BASE_URL', '')
-  if base_url.startswith('file://'):
-    base_url = base_url[len('file://'):]
-  return base_url
+    base_url = os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN_BASE_URL', '')
+    if base_url.startswith('file://'):
+        base_url = base_url[len('file://'):]
+    return base_url
 
 
 def UsesToolchainFromFile():
-  return os.path.isdir(ToolchainBaseURL())
+    return os.path.isdir(ToolchainBaseURL())
 
 
 def UsesToolchainFromHttp():
-  url = ToolchainBaseURL()
-  return url.startswith('http://') or url.startswith('https://')
+    url = ToolchainBaseURL()
+    return url.startswith('http://') or url.startswith('https://')
 
 
 def RequestGsAuthentication():
-  """Requests that the user authenticate to be able to access gs:// as a
+    """Requests that the user authenticate to be able to access gs:// as a
   Googler. This allows much faster downloads, and pulling (old) toolchains
   that match src/ revisions.
   """
-  print('Access to gs://chrome-wintoolchain/ not configured.')
-  print('-----------------------------------------------------------------')
-  print()
-  print('You appear to be a Googler.')
-  print()
-  print('I\'m sorry for the hassle, but you need to do a one-time manual')
-  print('authentication. Please run:')
-  print()
-  print('    download_from_google_storage --config')
-  print()
-  print('and follow the instructions.')
-  print()
-  print('NOTE 1: Use your google.com credentials, not chromium.org.')
-  print('NOTE 2: Enter 0 when asked for a "project-id".')
-  print()
-  print('-----------------------------------------------------------------')
-  print()
-  sys.stdout.flush()
-  sys.exit(1)
+    print('Access to gs://chrome-wintoolchain/ not configured.')
+    print('-----------------------------------------------------------------')
+    print()
+    print('You appear to be a Googler.')
+    print()
+    print('I\'m sorry for the hassle, but you need to do a one-time manual')
+    print('authentication. Please run:')
+    print()
+    print('    download_from_google_storage --config')
+    print()
+    print('and follow the instructions.')
+    print()
+    print('NOTE 1: Use your google.com credentials, not chromium.org.')
+    print('NOTE 2: Enter 0 when asked for a "project-id".')
+    print()
+    print('-----------------------------------------------------------------')
+    print()
+    sys.stdout.flush()
+    sys.exit(1)
 
 
 def DelayBeforeRemoving(target_dir):
-  """A grace period before deleting the out of date toolchain directory."""
-  if (os.path.isdir(target_dir) and
-      not bool(int(os.environ.get('CHROME_HEADLESS', '0')))):
-    for i in range(9, 0, -1):
-      sys.stdout.write(
-              '\rRemoving old toolchain in %ds... (Ctrl-C to cancel)' % i)
-      sys.stdout.flush()
-      time.sleep(1)
-    print()
+    """A grace period before deleting the out of date toolchain directory."""
+    if (os.path.isdir(target_dir)
+            and not bool(int(os.environ.get('CHROME_HEADLESS', '0')))):
+        for i in range(9, 0, -1):
+            sys.stdout.write(
+                '\rRemoving old toolchain in %ds... (Ctrl-C to cancel)' % i)
+            sys.stdout.flush()
+            time.sleep(1)
+        print()
 
 
 def DownloadUsingHttp(filename):
-  """Downloads the given file from a url defined in
+    """Downloads the given file from a url defined in
      DEPOT_TOOLS_WIN_TOOLCHAIN_BASE_URL environment variable."""
-  temp_dir = tempfile.mkdtemp()
-  assert os.path.basename(filename) == filename
-  target_path = os.path.join(temp_dir, filename)
-  base_url = ToolchainBaseURL()
-  src_url = urljoin(base_url, filename)
-  try:
-    with closing(urlopen(src_url)) as fsrc, \
-         open(target_path, 'wb') as fdst:
-      shutil.copyfileobj(fsrc, fdst)
-  except URLError as e:
-    RmDir(temp_dir)
-    sys.exit('Failed to retrieve file: %s' % e)
-  return temp_dir, target_path
+    temp_dir = tempfile.mkdtemp()
+    assert os.path.basename(filename) == filename
+    target_path = os.path.join(temp_dir, filename)
+    base_url = ToolchainBaseURL()
+    src_url = urljoin(base_url, filename)
+    try:
+        with closing(urlopen(src_url)) as fsrc, \
+             open(target_path, 'wb') as fdst:
+            shutil.copyfileobj(fsrc, fdst)
+    except URLError as e:
+        RmDir(temp_dir)
+        sys.exit('Failed to retrieve file: %s' % e)
+    return temp_dir, target_path
 
 
 def DownloadUsingGsutil(filename):
-  """Downloads the given file from Google Storage chrome-wintoolchain bucket."""
-  temp_dir = tempfile.mkdtemp()
-  assert os.path.basename(filename) == filename
-  target_path = os.path.join(temp_dir, filename)
-  gsutil = download_from_google_storage.Gsutil(
-      download_from_google_storage.GSUTIL_DEFAULT_PATH, boto_path=None)
-  code = gsutil.call('cp', 'gs://chrome-wintoolchain/' + filename, target_path)
-  if code != 0:
-    sys.exit('gsutil failed')
-  return temp_dir, target_path
+    """Downloads the given file from Google Storage chrome-wintoolchain bucket."""
+    temp_dir = tempfile.mkdtemp()
+    assert os.path.basename(filename) == filename
+    target_path = os.path.join(temp_dir, filename)
+    gsutil = download_from_google_storage.Gsutil(
+        download_from_google_storage.GSUTIL_DEFAULT_PATH, boto_path=None)
+    code = gsutil.call('cp', 'gs://chrome-wintoolchain/' + filename,
+                       target_path)
+    if code != 0:
+        sys.exit('gsutil failed')
+    return temp_dir, target_path
 
 
 def RmDir(path):
-  """Deletes path and all the files it contains."""
-  if sys.platform != 'win32':
-    shutil.rmtree(path, ignore_errors=True)
-  else:
-    # shutil.rmtree() doesn't delete read-only files on Windows.
-    subprocess.check_call('rmdir /s/q "%s"' % path, shell=True)
+    """Deletes path and all the files it contains."""
+    if sys.platform != 'win32':
+        shutil.rmtree(path, ignore_errors=True)
+    else:
+        # shutil.rmtree() doesn't delete read-only files on Windows.
+        subprocess.check_call('rmdir /s/q "%s"' % path, shell=True)
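
As the comment notes, shutil.rmtree() does not remove read-only files on Windows, which is why the script shells out to rmdir /s /q there. A pure-Python alternative sketch (not what this script does) uses an onerror handler that clears the read-only bit and retries:

import os
import shutil
import stat


def _handle_remove_readonly(func, path, _exc_info):
    # Clear the read-only attribute, then retry the failed operation.
    os.chmod(path, stat.S_IWRITE)
    func(path)


def rm_dir_py(path):
    """Sketch: delete a tree even if it contains read-only files."""
    shutil.rmtree(path, onerror=_handle_remove_readonly)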
 
 
 def DoTreeMirror(target_dir, tree_sha1):
-  """In order to save temporary space on bots that do not have enough space to
+    """In order to save temporary space on bots that do not have enough space to
   download ISOs, unpack them, and copy to the target location, the whole tree
   is uploaded as a zip to internal storage, and then mirrored here."""
-  if UsesToolchainFromFile():
-    temp_dir = None
-    local_zip = os.path.join(ToolchainBaseURL(), tree_sha1 + '.zip')
-    if not os.path.isfile(local_zip):
-      sys.exit('%s is not a valid file.' % local_zip)
-  elif UsesToolchainFromHttp():
-    temp_dir, local_zip = DownloadUsingHttp(tree_sha1 + '.zip')
-  else:
-    temp_dir, local_zip = DownloadUsingGsutil(tree_sha1 + '.zip')
-  sys.stdout.write('Extracting %s...\n' % local_zip)
-  sys.stdout.flush()
-  with zipfile.ZipFile(local_zip, 'r', zipfile.ZIP_DEFLATED, True) as zf:
-    zf.extractall(target_dir)
-  if temp_dir:
-    RmDir(temp_dir)
+    if UsesToolchainFromFile():
+        temp_dir = None
+        local_zip = os.path.join(ToolchainBaseURL(), tree_sha1 + '.zip')
+        if not os.path.isfile(local_zip):
+            sys.exit('%s is not a valid file.' % local_zip)
+    elif UsesToolchainFromHttp():
+        temp_dir, local_zip = DownloadUsingHttp(tree_sha1 + '.zip')
+    else:
+        temp_dir, local_zip = DownloadUsingGsutil(tree_sha1 + '.zip')
+    sys.stdout.write('Extracting %s...\n' % local_zip)
+    sys.stdout.flush()
+    with zipfile.ZipFile(local_zip, 'r', zipfile.ZIP_DEFLATED, True) as zf:
+        zf.extractall(target_dir)
+    if temp_dir:
+        RmDir(temp_dir)
 
 
 def RemoveToolchain(root, sha1, delay_before_removing):
-  """Remove the |sha1| version of the toolchain from |root|."""
-  toolchain_target_dir = os.path.join(root, sha1)
-  if delay_before_removing:
-    DelayBeforeRemoving(toolchain_target_dir)
-  if sys.platform == 'win32':
-    # These stay resident and will make the rmdir below fail.
-    kill_list = [
-      'mspdbsrv.exe',
-      'vctip.exe', # Compiler and tools experience improvement data uploader.
-    ]
-    for process_name in kill_list:
-      with open(os.devnull, 'wb') as nul:
-        subprocess.call(['taskkill', '/f', '/im', process_name],
-                        stdin=nul, stdout=nul, stderr=nul)
-  if os.path.isdir(toolchain_target_dir):
-    RmDir(toolchain_target_dir)
-
-  timestamp_file = MakeTimestampsFileName(root, sha1)
-  if os.path.exists(timestamp_file):
-    os.remove(timestamp_file)
+    """Remove the |sha1| version of the toolchain from |root|."""
+    toolchain_target_dir = os.path.join(root, sha1)
+    if delay_before_removing:
+        DelayBeforeRemoving(toolchain_target_dir)
+    if sys.platform == 'win32':
+        # These stay resident and will make the rmdir below fail.
+        kill_list = [
+            'mspdbsrv.exe',
+            'vctip.exe',  # Compiler and tools experience improvement data uploader.
+        ]
+        for process_name in kill_list:
+            with open(os.devnull, 'wb') as nul:
+                subprocess.call(['taskkill', '/f', '/im', process_name],
+                                stdin=nul,
+                                stdout=nul,
+                                stderr=nul)
+    if os.path.isdir(toolchain_target_dir):
+        RmDir(toolchain_target_dir)
+
+    timestamp_file = MakeTimestampsFileName(root, sha1)
+    if os.path.exists(timestamp_file):
+        os.remove(timestamp_file)
 
 
 def RemoveUnusedToolchains(root):
-  """Remove the versions of the toolchain that haven't been used recently."""
-  valid_toolchains = []
-  dirs_to_remove = []
-
-  for d in os.listdir(root):
-    full_path = os.path.join(root, d)
-    if os.path.isdir(full_path):
-      if not os.path.exists(MakeTimestampsFileName(root, d)):
-        dirs_to_remove.append(d)
-      else:
-        vc_dir = os.path.join(full_path, 'VC')
-        valid_toolchains.append((os.path.getmtime(vc_dir), d))
-    elif os.path.isfile(full_path):
-      os.remove(full_path)
-
-  for d in dirs_to_remove:
-    print('Removing %s as it doesn\'t correspond to any known toolchain.' %
-           os.path.join(root, d))
-    # Use the RemoveToolchain function to remove these directories as they might
-    # contain an older version of the toolchain.
-    RemoveToolchain(root, d, False)
-
-  # Remove the versions of the toolchains that haven't been used in the past 30
-  # days.
-  toolchain_expiration_time = 60 * 60 * 24 * 30
-  for toolchain in valid_toolchains:
-    toolchain_age_in_sec = time.time() - toolchain[0]
-    if toolchain_age_in_sec > toolchain_expiration_time:
-      print('Removing version %s of the Win toolchain as it hasn\'t been used'
-             ' in the past %d days.' % (toolchain[1],
-                                        toolchain_age_in_sec / 60 / 60 / 24))
-      RemoveToolchain(root, toolchain[1], True)
+    """Remove the versions of the toolchain that haven't been used recently."""
+    valid_toolchains = []
+    dirs_to_remove = []
+
+    for d in os.listdir(root):
+        full_path = os.path.join(root, d)
+        if os.path.isdir(full_path):
+            if not os.path.exists(MakeTimestampsFileName(root, d)):
+                dirs_to_remove.append(d)
+            else:
+                vc_dir = os.path.join(full_path, 'VC')
+                valid_toolchains.append((os.path.getmtime(vc_dir), d))
+        elif os.path.isfile(full_path):
+            os.remove(full_path)
+
+    for d in dirs_to_remove:
+        print('Removing %s as it doesn\'t correspond to any known toolchain.' %
+              os.path.join(root, d))
+        # Use the RemoveToolchain function to remove these directories as they
+        # might contain an older version of the toolchain.
+        RemoveToolchain(root, d, False)
+
+    # Remove the versions of the toolchains that haven't been used in the past
+    # 30 days.
+    toolchain_expiration_time = 60 * 60 * 24 * 30
+    for toolchain in valid_toolchains:
+        toolchain_age_in_sec = time.time() - toolchain[0]
+        if toolchain_age_in_sec > toolchain_expiration_time:
+            print(
+                'Removing version %s of the Win toolchain as it hasn\'t been used'
+                ' in the past %d days.' %
+                (toolchain[1], toolchain_age_in_sec / 60 / 60 / 24))
+            RemoveToolchain(root, toolchain[1], True)
 
 
 def EnableCrashDumpCollection():
-  """Tell Windows Error Reporting to record crash dumps so that we can diagnose
+    """Tell Windows Error Reporting to record crash dumps so that we can diagnose
   linker crashes and other toolchain failures. Documented at:
   https://msdn.microsoft.com/en-us/library/windows/desktop/bb787181.aspx
   """
-  if sys.platform == 'win32' and os.environ.get('CHROME_HEADLESS') == '1':
-    key_name = r'SOFTWARE\Microsoft\Windows\Windows Error Reporting'
-    try:
-      key = winreg.CreateKeyEx(winreg.HKEY_LOCAL_MACHINE, key_name, 0,
-                               winreg.KEY_WOW64_64KEY | winreg.KEY_ALL_ACCESS)
-      # Merely creating LocalDumps is sufficient to enable the defaults.
-      winreg.CreateKey(key, "LocalDumps")
-      # Disable the WER UI, as documented here:
-      # https://msdn.microsoft.com/en-us/library/windows/desktop/bb513638.aspx
-      winreg.SetValueEx(key, "DontShowUI", 0, winreg.REG_DWORD, 1)
-    # Trap OSError instead of WindowsError so pylint will succeed on Linux.
-    # Catching errors is important because some build machines are not elevated
-    # and writing to HKLM requires elevation.
-    except OSError:
-      pass
+    if sys.platform == 'win32' and os.environ.get('CHROME_HEADLESS') == '1':
+        key_name = r'SOFTWARE\Microsoft\Windows\Windows Error Reporting'
+        try:
+            key = winreg.CreateKeyEx(
+                winreg.HKEY_LOCAL_MACHINE, key_name, 0,
+                winreg.KEY_WOW64_64KEY | winreg.KEY_ALL_ACCESS)
+            # Merely creating LocalDumps is sufficient to enable the defaults.
+            winreg.CreateKey(key, "LocalDumps")
+            # Disable the WER UI, as documented here:
+            # https://msdn.microsoft.com/en-us/library/windows/desktop/bb513638.aspx
+            winreg.SetValueEx(key, "DontShowUI", 0, winreg.REG_DWORD, 1)
+        # Trap OSError instead of WindowsError so pylint will succeed on Linux.
+        # Catching errors is important because some build machines are not
+        # elevated and writing to HKLM requires elevation.
+        except OSError:
+            pass
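
A small sketch, not part of the diff, of reading those values back to confirm the LocalDumps key and DontShowUI setting took effect; it assumes the same key path and that the process can read HKLM:

import winreg

_WER_KEY = r'SOFTWARE\Microsoft\Windows\Windows Error Reporting'


def wer_ui_disabled():
    """Sketch: True if the DontShowUI value written above is set to 1."""
    try:
        key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, _WER_KEY, 0,
                             winreg.KEY_READ | winreg.KEY_WOW64_64KEY)
        value, _ = winreg.QueryValueEx(key, 'DontShowUI')
        return value == 1
    except OSError:
        return False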
 
 
 def main():
-  parser = argparse.ArgumentParser(
-               description=__doc__,
-               formatter_class=argparse.RawDescriptionHelpFormatter,
-               )
-  parser.add_argument('--output-json', metavar='FILE',
-                      help='write information about toolchain to FILE')
-  parser.add_argument('--force', action='store_true',
-                      help='force script to run on non-Windows hosts')
-  parser.add_argument('--no-download', action='store_true',
-                      help='configure if present but don\'t download')
-  parser.add_argument('--toolchain-dir',
-                      default=os.getenv(ENV_TOOLCHAIN_ROOT, BASEDIR),
-                      help='directory to install toolchain into')
-  parser.add_argument('desired_hash', metavar='desired-hash',
-                      help='toolchain hash to download')
-  args = parser.parse_args()
-
-  if not (sys.platform.startswith(('cygwin', 'win32')) or args.force):
-    return 0
-
-  if sys.platform == 'cygwin':
-    # This script requires Windows Python, so invoke with depot_tools' Python.
-    def winpath(path):
-      return subprocess.check_output(['cygpath', '-w', path]).strip()
-    python = os.path.join(DEPOT_TOOLS_PATH, 'python3.bat')
-    cmd = [python, winpath(__file__)]
-    if args.output_json:
-      cmd.extend(['--output-json', winpath(args.output_json)])
-    cmd.append(args.desired_hash)
-    sys.exit(subprocess.call(cmd))
-  assert sys.platform != 'cygwin'
-
-  # Create our toolchain destination and "chdir" to it.
-  toolchain_dir = os.path.abspath(args.toolchain_dir)
-  if not os.path.isdir(toolchain_dir):
-    os.makedirs(toolchain_dir)
-  os.chdir(toolchain_dir)
-
-  # Move to depot_tools\win_toolchain where we'll store our files, and where
-  # the downloader script is.
-  target_dir = 'vs_files'
-  if not os.path.isdir(target_dir):
-    os.mkdir(target_dir)
-  toolchain_target_dir = os.path.join(target_dir, args.desired_hash)
-
-  abs_toolchain_target_dir = os.path.abspath(toolchain_target_dir)
-
-  got_new_toolchain = False
-
-  # If the current hash doesn't match what we want in the file, nuke and pave.
-  # Typically this script is only run when the .sha1 file is updated, but
-  # directly calling "gclient runhooks" will also run it, so we cache
-  # based on timestamps to make that case fast.
-  current_hashes = CalculateToolchainHashes(target_dir, True)
-  if args.desired_hash not in current_hashes:
-    if args.no_download:
-      raise SystemExit('Toolchain is out of date. Run "gclient runhooks" to '
-                       'update the toolchain, or set '
-                       'DEPOT_TOOLS_WIN_TOOLCHAIN=0 to use the locally '
-                       'installed toolchain.')
-    should_use_file = False
-    should_use_http = False
-    should_use_gs = False
-    if UsesToolchainFromFile():
-      should_use_file = True
-    elif UsesToolchainFromHttp():
-      should_use_http = True
-    elif (HaveSrcInternalAccess() or
-        LooksLikeGoogler() or
-        CanAccessToolchainBucket()):
-      should_use_gs = True
-      if not CanAccessToolchainBucket():
-        RequestGsAuthentication()
-    if not should_use_file and not should_use_gs and not should_use_http:
-      if sys.platform not in ('win32', 'cygwin'):
-        doc = 'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/' \
-              'win_cross.md'
-        print('\n\n\nPlease follow the instructions at %s\n\n' % doc)
-      else:
-        doc = 'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/' \
-              'windows_build_instructions.md'
-        print('\n\n\nNo downloadable toolchain found. In order to use your '
-              'locally installed version of Visual Studio to build Chrome '
-              'please set DEPOT_TOOLS_WIN_TOOLCHAIN=0.\n'
-              'For details search for DEPOT_TOOLS_WIN_TOOLCHAIN in the '
-              'instructions at %s\n\n' % doc)
-      return 1
-    print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...')
-    print('  current_hashes: %s' % ', '.join(current_hashes))
-    print('  desired_hash: %s' % args.desired_hash)
-    sys.stdout.flush()
-
-    DoTreeMirror(toolchain_target_dir, args.desired_hash)
-
-    got_new_toolchain = True
-
-  # The Windows SDK is either in `win_sdk` or in `Windows Kits\10`. This
-  # script must work with both layouts, so check which one it is.
-  win_sdk_in_windows_kits = os.path.isdir(
-          os.path.join(abs_toolchain_target_dir, 'Windows Kits', '10'))
-  if win_sdk_in_windows_kits:
-    win_sdk = os.path.join(abs_toolchain_target_dir, 'Windows Kits', '10')
-  else:
-    win_sdk = os.path.join(abs_toolchain_target_dir, 'win_sdk')
-
-  version_file = os.path.join(toolchain_target_dir, 'VS_VERSION')
-  vc_dir = os.path.join(toolchain_target_dir, 'VC')
-  with open(version_file, 'rb') as f:
-    vs_version = f.read().decode('utf-8').strip()
-    # Touch the VC directory so we can use its timestamp to know when this
-    # version of the toolchain has been used for the last time.
-  os.utime(vc_dir, None)
-
-  data = {
-      'path': abs_toolchain_target_dir,
-      'version': vs_version,
-      'win_sdk': win_sdk,
-      'wdk': os.path.join(abs_toolchain_target_dir, 'wdk'),
-      'runtime_dirs': [
-        os.path.join(abs_toolchain_target_dir, 'sys64'),
-        os.path.join(abs_toolchain_target_dir, 'sys32'),
-        os.path.join(abs_toolchain_target_dir, 'sysarm64'),
-      ],
-  }
-  data_json = json.dumps(data, indent=2)
-  data_path = os.path.join(target_dir, '..', 'data.json')
-  if not os.path.exists(data_path) or open(data_path).read() != data_json:
-    with open(data_path, 'w') as f:
-      f.write(data_json)
-
-  if got_new_toolchain:
-    current_hashes = CalculateToolchainHashes(target_dir, False)
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+    parser.add_argument('--output-json',
+                        metavar='FILE',
+                        help='write information about toolchain to FILE')
+    parser.add_argument('--force',
+                        action='store_true',
+                        help='force script to run on non-Windows hosts')
+    parser.add_argument('--no-download',
+                        action='store_true',
+                        help='configure if present but don\'t download')
+    parser.add_argument('--toolchain-dir',
+                        default=os.getenv(ENV_TOOLCHAIN_ROOT, BASEDIR),
+                        help='directory to install toolchain into')
+    parser.add_argument('desired_hash',
+                        metavar='desired-hash',
+                        help='toolchain hash to download')
+    args = parser.parse_args()
+
+    if not (sys.platform.startswith(('cygwin', 'win32')) or args.force):
+        return 0
+
+    if sys.platform == 'cygwin':
+        # This script requires Windows Python, so invoke with depot_tools'
+        # Python.
+        def winpath(path):
+            return subprocess.check_output(['cygpath', '-w', path]).strip()
+
+        python = os.path.join(DEPOT_TOOLS_PATH, 'python3.bat')
+        cmd = [python, winpath(__file__)]
+        if args.output_json:
+            cmd.extend(['--output-json', winpath(args.output_json)])
+        cmd.append(args.desired_hash)
+        sys.exit(subprocess.call(cmd))
+    assert sys.platform != 'cygwin'
+
+    # Create our toolchain destination and "chdir" to it.
+    toolchain_dir = os.path.abspath(args.toolchain_dir)
+    if not os.path.isdir(toolchain_dir):
+        os.makedirs(toolchain_dir)
+    os.chdir(toolchain_dir)
+
+    # Move to depot_tools\win_toolchain where we'll store our files, and where
+    # the downloader script is.
+    target_dir = 'vs_files'
+    if not os.path.isdir(target_dir):
+        os.mkdir(target_dir)
+    toolchain_target_dir = os.path.join(target_dir, args.desired_hash)
+
+    abs_toolchain_target_dir = os.path.abspath(toolchain_target_dir)
+
+    got_new_toolchain = False
+
+    # If the current hash doesn't match what we want in the file, nuke and pave.
+    # Typically this script is only run when the .sha1 file is updated, but
+    # directly calling "gclient runhooks" will also run it, so we cache
+    # based on timestamps to make that case fast.
+    current_hashes = CalculateToolchainHashes(target_dir, True)
     if args.desired_hash not in current_hashes:
-      print(
-          'Got wrong hash after pulling a new toolchain. '
-          'Wanted \'%s\', got one of \'%s\'.' % (
-              args.desired_hash, ', '.join(current_hashes)), file=sys.stderr)
-      return 1
-    SaveTimestampsAndHash(target_dir, args.desired_hash)
+        if args.no_download:
+            raise SystemExit(
+                'Toolchain is out of date. Run "gclient runhooks" to '
+                'update the toolchain, or set '
+                'DEPOT_TOOLS_WIN_TOOLCHAIN=0 to use the locally '
+                'installed toolchain.')
+        should_use_file = False
+        should_use_http = False
+        should_use_gs = False
+        if UsesToolchainFromFile():
+            should_use_file = True
+        elif UsesToolchainFromHttp():
+            should_use_http = True
+        elif (HaveSrcInternalAccess() or LooksLikeGoogler()
+              or CanAccessToolchainBucket()):
+            should_use_gs = True
+            if not CanAccessToolchainBucket():
+                RequestGsAuthentication()
+        if not should_use_file and not should_use_gs and not should_use_http:
+            if sys.platform not in ('win32', 'cygwin'):
+                doc = 'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/' \
+                      'win_cross.md'
+                print('\n\n\nPlease follow the instructions at %s\n\n' % doc)
+            else:
+                doc = 'https://chromium.googlesource.com/chromium/src/+/HEAD/docs/' \
+                      'windows_build_instructions.md'
+                print(
+                    '\n\n\nNo downloadable toolchain found. In order to use your '
+                    'locally installed version of Visual Studio to build Chrome '
+                    'please set DEPOT_TOOLS_WIN_TOOLCHAIN=0.\n'
+                    'For details search for DEPOT_TOOLS_WIN_TOOLCHAIN in the '
+                    'instructions at %s\n\n' % doc)
+            return 1
+        print(
+            'Windows toolchain out of date or doesn\'t exist, updating (Pro)...'
+        )
+        print('  current_hashes: %s' % ', '.join(current_hashes))
+        print('  desired_hash: %s' % args.desired_hash)
+        sys.stdout.flush()
+
+        DoTreeMirror(toolchain_target_dir, args.desired_hash)
+
+        got_new_toolchain = True
+
+    # The Windows SDK is either in `win_sdk` or in `Windows Kits\10`. This
+    # script must work with both layouts, so check which one it is.
+    win_sdk_in_windows_kits = os.path.isdir(
+        os.path.join(abs_toolchain_target_dir, 'Windows Kits', '10'))
+    if win_sdk_in_windows_kits:
+        win_sdk = os.path.join(abs_toolchain_target_dir, 'Windows Kits', '10')
+    else:
+        win_sdk = os.path.join(abs_toolchain_target_dir, 'win_sdk')
+
+    version_file = os.path.join(toolchain_target_dir, 'VS_VERSION')
+    vc_dir = os.path.join(toolchain_target_dir, 'VC')
+    with open(version_file, 'rb') as f:
+        vs_version = f.read().decode('utf-8').strip()
+        # Touch the VC directory so we can use its timestamp to know when this
+        # version of the toolchain has been used for the last time.
+    os.utime(vc_dir, None)
+
+    data = {
+        'path':
+        abs_toolchain_target_dir,
+        'version':
+        vs_version,
+        'win_sdk':
+        win_sdk,
+        'wdk':
+        os.path.join(abs_toolchain_target_dir, 'wdk'),
+        'runtime_dirs': [
+            os.path.join(abs_toolchain_target_dir, 'sys64'),
+            os.path.join(abs_toolchain_target_dir, 'sys32'),
+            os.path.join(abs_toolchain_target_dir, 'sysarm64'),
+        ],
+    }
+    data_json = json.dumps(data, indent=2)
+    data_path = os.path.join(target_dir, '..', 'data.json')
+    if not os.path.exists(data_path) or open(data_path).read() != data_json:
+        with open(data_path, 'w') as f:
+            f.write(data_json)
+
+    if got_new_toolchain:
+        current_hashes = CalculateToolchainHashes(target_dir, False)
+        if args.desired_hash not in current_hashes:
+            print('Got wrong hash after pulling a new toolchain. '
+                  'Wanted \'%s\', got one of \'%s\'.' %
+                  (args.desired_hash, ', '.join(current_hashes)),
+                  file=sys.stderr)
+            return 1
+        SaveTimestampsAndHash(target_dir, args.desired_hash)
 
-  if args.output_json:
-    if (not os.path.exists(args.output_json) or
-        not filecmp.cmp(data_path, args.output_json)):
-      shutil.copyfile(data_path, args.output_json)
+    if args.output_json:
+        if (not os.path.exists(args.output_json)
+                or not filecmp.cmp(data_path, args.output_json)):
+            shutil.copyfile(data_path, args.output_json)
 
-  EnableCrashDumpCollection()
+    EnableCrashDumpCollection()
 
-  RemoveUnusedToolchains(target_dir)
+    RemoveUnusedToolchains(target_dir)
 
-  return 0
+    return 0
 
 
 if __name__ == '__main__':
-  sys.exit(main())
+    sys.exit(main())
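
For reference, a hypothetical consumer of the data.json written above; the toolchain-root lookup is an assumption for illustration and not code from this repository:

import json
import os

# data.json is written one level above vs_files/, i.e. in the toolchain root
# (DEPOT_TOOLS_WIN_TOOLCHAIN_ROOT when set). The fallback of '.' is an
# assumption for this sketch.
toolchain_root = os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN_ROOT', '.')
with open(os.path.join(toolchain_root, 'data.json')) as f:
    data = json.load(f)

print('VS version :', data['version'])
print('Windows SDK:', data['win_sdk'])
for runtime_dir in data['runtime_dirs']:
    print('runtime dir:', runtime_dir)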

Some files were not shown because too many files changed in this diff