Browse source

Fix multiline comment formatting

Many incorrectly formatted comments exist from the switch to
4 space indent: https://crrev.com/c/4836379
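
For illustration, this is the kind of reflow the change applies throughout
(a hypothetical snippet, not an actual hunk from this commit):

    # Before: the docstring body kept the old 2-space offset while the
    # surrounding code moved to 4-space indent.
    def do_thing():
        """Does the thing.

      Continuation lines sit at the stale 2-space offset.
      """

    # After: the body lines up with the docstring's opening quotes.
    def do_thing():
        """Does the thing.

        Continuation lines match the surrounding 4-space indent.
        """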

Bug: 1514505
Change-Id: I6366f9da812919bd35b999f18fa8a49b7a66c09b
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/5153633
Commit-Queue: Gavin Mak <gavinmak@google.com>
Reviewed-by: Josip Sokcevic <sokcevic@chromium.org>
Gavin Mak 1 year ago
Parent commit: edba22d4eb
62 files changed with 2320 additions and 2302 deletions
  1. PRESUBMIT.py (+1 -1)
  2. auth.py (+27 -27)
  3. bootstrap/bootstrap.py (+46 -46)
  4. cpplint.py (+347 -348)
  5. detect_host_arch.py (+1 -1)
  6. download_from_google_storage.py (+17 -17)
  7. fetch.py (+16 -16)
  8. fix_encoding.py (+29 -29)
  9. gclient.py (+74 -74)
  10. gclient_eval.py (+28 -28)
  11. gclient_scm.py (+1 -1)
  12. gclient_utils.py (+129 -126)
  13. gerrit_client.py (+11 -10)
  14. gerrit_util.py (+103 -102)
  15. git_cache.py (+25 -24)
  16. git_cl.py (+216 -217)
  17. git_common.py (+142 -139)
  18. git_dates.py (+8 -8)
  19. git_footers.py (+32 -30)
  20. git_hyper_blame.py (+19 -18)
  21. git_map_branches.py (+8 -7)
  22. git_number.py (+28 -28)
  23. git_rebase_update.py (+4 -4)
  24. git_retry.py (+5 -5)
  25. lockfile.py (+7 -7)
  26. metrics.py (+9 -8)
  27. metrics_utils.py (+14 -14)
  28. my_activity.py (+15 -13)
  29. owners_client.py (+21 -21)
  30. presubmit_canned_checks.py (+189 -190)
  31. presubmit_support.py (+230 -230)
  32. rdb_wrapper.py (+19 -18)
  33. reclient_helper.py (+38 -38)
  34. reclient_metrics.py (+1 -1)
  35. roll_dep.py (+3 -3)
  36. scm.py (+15 -15)
  37. setup_color.py (+2 -2)
  38. split_cl.py (+64 -64)
  39. subcommand.py (+25 -24)
  40. subprocess2.py (+35 -35)
  41. testing_support/coverage_utils.py (+10 -10)
  42. testing_support/fake_repos.py (+6 -6)
  43. testing_support/filesystem_mock.py (+4 -4)
  44. testing_support/git_test_utils.py (+99 -97)
  45. testing_support/presubmit_canned_checks_test_mocks.py (+24 -24)
  46. testing_support/test_case_utils.py (+1 -1)
  47. testing_support/trial_dir.py (+4 -4)
  48. tests/bot_update_coverage_test.py (+15 -15)
  49. tests/cipd_bootstrap_test.py (+9 -9)
  50. tests/gclient_scm_test.py (+7 -8)
  51. tests/gclient_smoketest_base.py (+6 -6)
  52. tests/gclient_test.py (+43 -43)
  53. tests/gclient_transitions_smoketest.py (+5 -5)
  54. tests/git_cl_test.py (+9 -9)
  55. tests/git_common_test.py (+2 -2)
  56. tests/git_hyper_blame_test.py (+13 -13)
  57. tests/presubmit_canned_checks_test.py (+2 -2)
  58. tests/presubmit_unittest.py (+21 -19)
  59. tests/subprocess2_test.py (+4 -4)
  60. watchlists.py (+12 -12)
  61. win_toolchain/get_toolchain_if_necessary.py (+15 -15)
  62. win_toolchain/package_from_installed.py (+5 -5)

+ 1 - 1
PRESUBMIT.py

@@ -100,7 +100,7 @@ def CheckJsonFiles(input_api, output_api):
 
 
 def CheckUnitTestsOnCommit(input_api, output_api):
-    """ Do not run integration tests on upload since they are way too slow."""
+    """Do not run integration tests on upload since they are way too slow."""
 
     input_api.SetTimeout(TEST_TIMEOUT_S)
 

+ 27 - 27
auth.py

@@ -69,11 +69,11 @@ def has_luci_context_local_auth():
 class Authenticator(object):
     """Object that knows how to refresh access tokens or id tokens when needed.
 
-  Args:
-    scopes: space separated oauth scopes. It's used to generate access tokens.
+    Args:
+        scopes: space separated oauth scopes. It's used to generate access tokens.
             Defaults to OAUTH_SCOPE_EMAIL.
-    audience: An audience in ID tokens to claim which clients should accept it.
-  """
+        audience: An audience in ID tokens to claim which clients should accept it.
+    """
     def __init__(self, scopes=OAUTH_SCOPE_EMAIL, audience=None):
         self._access_token = None
         self._scopes = scopes
@@ -83,20 +83,20 @@ class Authenticator(object):
     def has_cached_credentials(self):
         """Returns True if credentials can be obtained.
 
-    If returns False, get_access_token() or get_id_token() later will probably
-    ask for interactive login by raising LoginRequiredError.
+        If this returns False, get_access_token() or get_id_token() later will
+        probably ask for interactive login by raising LoginRequiredError.
 
-    If returns True, get_access_token() or get_id_token() won't ask for
-    interactive login.
-    """
+        If this returns True, get_access_token() or get_id_token() won't ask
+        for interactive login.
+        """
         return bool(self._get_luci_auth_token())
 
     def get_access_token(self):
         """Returns AccessToken, refreshing it if necessary.
 
-    Raises:
-      LoginRequiredError if user interaction is required.
-    """
+        Raises:
+            LoginRequiredError if user interaction is required.
+        """
         if self._access_token and not self._access_token.needs_refresh():
             return self._access_token
 
@@ -113,12 +113,12 @@ class Authenticator(object):
     def get_id_token(self):
         """Returns id token, refreshing it if necessary.
 
-    Returns:
-       A Token object.
+        Returns:
+            A Token object.
 
-    Raises:
-      LoginRequiredError if user interaction is required.
-    """
+        Raises:
+            LoginRequiredError if user interaction is required.
+        """
         if self._id_token and not self._id_token.needs_refresh():
             return self._id_token
 
@@ -133,15 +133,15 @@ class Authenticator(object):
     def authorize(self, http, use_id_token=False):
         """Monkey patches authentication logic of httplib2.Http instance.
 
-    The modified http.request method will add authentication headers to each
-    request.
+        The modified http.request method will add authentication headers to each
+        request.
 
-    Args:
-       http: An instance of httplib2.Http.
+        Args:
+            http: An instance of httplib2.Http.
 
-    Returns:
-       A modified instance of http that was passed in.
-    """
+        Returns:
+            A modified instance of http that was passed in.
+        """
         # Adapted from oauth2client.OAuth2Credentials.authorize.
         request_orig = http.request
 
@@ -167,9 +167,9 @@ class Authenticator(object):
     def _run_luci_auth_login(self):
         """Run luci-auth login.
 
-    Returns:
-      AccessToken with credentials.
-    """
+        Returns:
+            AccessToken with credentials.
+        """
         logging.debug('Running luci-auth login')
         subprocess2.check_call(['luci-auth', 'login', '-scopes', self._scopes])
         return self._get_luci_auth_token()
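
For context, a minimal usage sketch of the Authenticator API whose docstrings
are reflowed above (a hedged sketch: it assumes depot_tools is importable and
that `luci-auth login` has already been run; the variable names are
illustrative):

    import httplib2

    import auth

    authenticator = auth.Authenticator()  # scopes default to OAUTH_SCOPE_EMAIL
    if authenticator.has_cached_credentials():
        token = authenticator.get_access_token()  # refreshed if necessary
        # authorize() monkey-patches http.request to add auth headers.
        http = authenticator.authorize(httplib2.Http())
    else:
        # get_access_token() would raise LoginRequiredError here.
        print('Run `luci-auth login` first.')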

+ 46 - 46
bootstrap/bootstrap.py

@@ -54,15 +54,15 @@ class Template(
     def maybe_install(self, name, dst_path):
         """Installs template |name| to |dst_path| if it has changed.
 
-    This loads the template |name| from THIS_DIR, resolves template parameters,
-    and installs it to |dst_path|. See `maybe_update` for more information.
+        This loads the template |name| from THIS_DIR, resolves template parameters,
+        and installs it to |dst_path|. See `maybe_update` for more information.
 
-    Args:
-      name (str): The name of the template to install.
-      dst_path (str): The destination filesystem path.
+        Args:
+            name (str): The name of the template to install.
+            dst_path (str): The destination filesystem path.
 
-    Returns (bool): True if |dst_path| was updated, False otherwise.
-    """
+        Returns (bool): True if |dst_path| was updated, False otherwise.
+        """
         template_path = os.path.join(THIS_DIR, name)
         with open(template_path, 'r', encoding='utf8') as fd:
             t = string.Template(fd.read())
@@ -72,17 +72,17 @@ class Template(
 def maybe_update(content, dst_path):
     """Writes |content| to |dst_path| if |dst_path| does not already match.
 
-  This function will ensure that there is a file at |dst_path| containing
-  |content|. If |dst_path| already exists and contains |content|, no operation
-  will be performed, preserving filesystem modification times and avoiding
-  potential write contention.
+    This function will ensure that there is a file at |dst_path| containing
+    |content|. If |dst_path| already exists and contains |content|, no operation
+    will be performed, preserving filesystem modification times and avoiding
+    potential write contention.
 
-  Args:
-    content (str): The file content.
-    dst_path (str): The destination filesystem path.
+    Args:
+        content (str): The file content.
+        dst_path (str): The destination filesystem path.
 
-  Returns (bool): True if |dst_path| was updated, False otherwise.
-  """
+    Returns (bool): True if |dst_path| was updated, False otherwise.
+    """
     # If the path already exists and matches the new content, refrain from
     # writing a new one.
     if os.path.exists(dst_path):
@@ -100,14 +100,14 @@ def maybe_update(content, dst_path):
 def maybe_copy(src_path, dst_path):
     """Writes the content of |src_path| to |dst_path| if needed.
 
-  See `maybe_update` for more information.
+    See `maybe_update` for more information.
 
-  Args:
-    src_path (str): The content source filesystem path.
-    dst_path (str): The destination filesystem path.
+    Args:
+        src_path (str): The content source filesystem path.
+        dst_path (str): The destination filesystem path.
 
-  Returns (bool): True if |dst_path| was updated, False otherwise.
-  """
+    Returns (bool): True if |dst_path| was updated, False otherwise.
+    """
     with open(src_path, 'r', encoding='utf-8') as fd:
         content = fd.read()
     return maybe_update(content, dst_path)
@@ -116,21 +116,21 @@ def maybe_copy(src_path, dst_path):
 def call_if_outdated(stamp_path, stamp_version, fn):
     """Invokes |fn| if the stamp at |stamp_path| doesn't match |stamp_version|.
 
-  This can be used to keep a filesystem record of whether an operation has been
-  performed. The record is stored at |stamp_path|. To invalidate a record,
-  change the value of |stamp_version|.
+    This can be used to keep a filesystem record of whether an operation has been
+    performed. The record is stored at |stamp_path|. To invalidate a record,
+    change the value of |stamp_version|.
 
-  After |fn| completes successfully, |stamp_path| will be updated to match
-  |stamp_version|, preventing the same update from happening in the future.
+    After |fn| completes successfully, |stamp_path| will be updated to match
+    |stamp_version|, preventing the same update from happening in the future.
 
-  Args:
-    stamp_path (str): The filesystem path of the stamp file.
-    stamp_version (str): The desired stamp version.
-    fn (callable): A callable to invoke if the current stamp version doesn't
-        match |stamp_version|.
+    Args:
+        stamp_path (str): The filesystem path of the stamp file.
+        stamp_version (str): The desired stamp version.
+        fn (callable): A callable to invoke if the current stamp version doesn't
+            match |stamp_version|.
 
-  Returns (bool): True if an update occurred.
-  """
+    Returns (bool): True if an update occurred.
+    """
 
     stamp_version = stamp_version.strip()
     if os.path.isfile(stamp_path):
@@ -149,13 +149,13 @@ def call_if_outdated(stamp_path, stamp_version, fn):
 def _in_use(path):
     """Checks if a Windows file is in use.
 
-  When Windows is using an executable, it prevents other writers from
-  modifying or deleting that executable. We can safely test for an in-use
-  file by opening it in write mode and checking whether or not there was
-  an error.
+    When Windows is using an executable, it prevents other writers from
+    modifying or deleting that executable. We can safely test for an in-use
+    file by opening it in write mode and checking whether or not there was
+    an error.
 
-  Returns (bool): True if the file was in use, False if not.
-  """
+    Returns (bool): True if the file was in use, False if not.
+    """
     try:
         with open(path, 'r+'):
             return False
@@ -165,7 +165,7 @@ def _in_use(path):
 
 def _toolchain_in_use(toolchain_path):
     """Returns (bool): True if a toolchain rooted at |path| is in use.
-  """
+    """
     # Look for Python files that may be in use.
     for python_dir in (
             os.path.join(toolchain_path, 'python', 'bin'),  # CIPD
@@ -225,11 +225,11 @@ def _safe_rmtree(path):
 def clean_up_old_installations(skip_dir):
     """Removes Python installations other than |skip_dir|.
 
-  This includes an "in-use" check against the "python.exe" in a given directory
-  to avoid removing Python executables that are currently ruinning. We need
-  this because our Python bootstrap may be run after (and by) other software
-  that is using the bootstrapped Python!
-  """
+    This includes an "in-use" check against the "python.exe" in a given directory
+    to avoid removing Python executables that are currently running. We need
+    this because our Python bootstrap may be run after (and by) other software
+    that is using the bootstrapped Python!
+    """
     root_contents = os.listdir(ROOT_DIR)
     for f in ('win_tools-*_bin', 'python27*_bin', 'git-*_bin',
               'bootstrap-*_bin'):
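
A hedged sketch of the stamp-file idiom that call_if_outdated (reflowed above)
provides; install_python, the stamp path, and the 'v2' version string are
hypothetical, while ROOT_DIR is the constant bootstrap.py already uses:

    import os

    def install_python():
        # Expensive one-time setup; runs only when the stamp is missing or
        # records a different version.
        ...

    stamp = os.path.join(ROOT_DIR, '.python_install.stamp')  # hypothetical
    updated = call_if_outdated(stamp, 'v2', install_python)
    # On success the stamp now reads 'v2', so the next run is a no-op.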

File diff suppressed because it is too large
+ 347 - 348
cpplint.py


+ 1 - 1
detect_host_arch.py

@@ -59,7 +59,7 @@ def HostArch():
 
 def DoMain(_):
     """Hook to be called from gyp without starting a separate python
-  interpreter."""
+    interpreter."""
     return HostArch()
 
 

+ 17 - 17
download_from_google_storage.py

@@ -48,7 +48,7 @@ class InvalidPlatformError(Exception):
 
 def GetNormalizedPlatform():
     """Returns the result of sys.platform accounting for cygwin.
-  Under cygwin, this will always return "win32" like the native Python."""
+    Under cygwin, this will always return "win32" like the native Python."""
     if sys.platform == 'cygwin':
         return 'win32'
     return sys.platform
@@ -57,11 +57,11 @@ def GetNormalizedPlatform():
 # Common utilities
 class Gsutil(object):
     """Call gsutil with some predefined settings.  This is a convenience object,
-  and is also immutable.
+    and is also immutable.
 
-  HACK: This object is used directly by the external script
-    `<depot_tools>/win_toolchain/get_toolchain_if_necessary.py`
-  """
+    HACK: This object is used directly by the external script
+        `<depot_tools>/win_toolchain/get_toolchain_if_necessary.py`
+    """
 
     MAX_TRIES = 5
     RETRY_BASE_DELAY = 5.0
@@ -402,18 +402,18 @@ class PrinterThread(threading.Thread):
 def _data_exists(input_sha1_sum, output_filename, extract):
     """Returns True if the data exists locally and matches the sha1.
 
-  This conservatively returns False for error cases.
-
-  Args:
-    input_sha1_sum: Expected sha1 stored on disk.
-    output_filename: The file to potentially download later. Its sha1 will be
-        compared to input_sha1_sum.
-    extract: Whether or not a downloaded file should be extracted. If the file
-        is not extracted, this just compares the sha1 of the file. If the file
-        is to be extracted, this only compares the sha1 of the target archive if
-        the target directory already exists. The content of the target directory
-        is not checked.
-  """
+    This conservatively returns False for error cases.
+
+    Args:
+        input_sha1_sum: Expected sha1 stored on disk.
+        output_filename: The file to potentially download later. Its sha1 will
+            be compared to input_sha1_sum.
+        extract: Whether or not a downloaded file should be extracted. If the
+            file is not extracted, this just compares the sha1 of the file. If
+            the file is to be extracted, this only compares the sha1 of the
+            target archive if the target directory already exists. The content
+            of the target directory is not checked.
+    """
     extract_dir = None
     if extract:
         if not output_filename.endswith('.tar.gz'):
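
The _data_exists() check above boils down to comparing an expected sha1 with
the file on disk. A self-contained sketch of that comparison; get_sha1 here is
a stand-in, not necessarily how this script computes hashes:

    import hashlib
    import os

    def get_sha1(path):
        digest = hashlib.sha1()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b''):
                digest.update(chunk)
        return digest.hexdigest()

    def data_exists(input_sha1_sum, output_filename):
        # Conservative: a missing file or any mismatch means "not present".
        return (os.path.exists(output_filename)
                and get_sha1(output_filename) == input_sha1_sum)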

+ 16 - 16
fetch.py

@@ -6,7 +6,7 @@
 Tool to perform checkouts in one easy command line!
 
 Usage:
-  fetch <config> [--property=value [--property2=value2 ...]]
+    fetch <config> [--property=value [--property2=value2 ...]]
 
 This script is a wrapper around various version control and repository
 checkout commands. It requires a |config| name, fetches data from that
@@ -37,13 +37,13 @@ SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
 class Checkout(object):
     """Base class for implementing different types of checkouts.
 
-  Attributes:
-    |base|: the absolute path of the directory in which this script is run.
-    |spec|: the spec for this checkout as returned by the config. Different
-        subclasses will expect different keys in this dictionary.
-    |root|: the directory into which the checkout will be performed, as returned
-        by the config. This is a relative path from |base|.
-  """
+    Attributes:
+        |base|: the absolute path of the directory in which this script is run.
+        |spec|: the spec for this checkout as returned by the config. Different
+            subclasses will expect different keys in this dictionary.
+        |root|: the directory into which the checkout will be performed, as
+            returned by the config. This is a relative path from |base|.
+    """
     def __init__(self, options, spec, root):
         self.base = os.getcwd()
         self.options = options
@@ -51,7 +51,7 @@ class Checkout(object):
         self.root = root
 
     def exists(self):
-        """Check does this checkout already exist on desired location"""
+        """Check does this checkout already exist on desired location."""
 
     def init(self):
         pass
@@ -239,7 +239,7 @@ def handle_args(argv):
 
 def run_config_fetch(config, props, aliased=False):
     """Invoke a config's fetch method with the passed-through args
-  and return its json output as a python object."""
+    and return its json output as a python object."""
     config_path = os.path.abspath(
         os.path.join(SCRIPT_PATH, 'fetch_configs', config))
     if not os.path.exists(config_path + '.py'):
@@ -264,12 +264,12 @@ def run_config_fetch(config, props, aliased=False):
 def run(options, spec, root):
     """Perform a checkout with the given type and configuration.
 
-    Args:
-      options: Options instance.
-      spec: Checkout configuration returned by the the config's fetch_spec
-          method (checkout type, repository url, etc.).
-      root: The directory into which the repo expects to be checkout out.
-  """
+    Args:
+        options: Options instance.
+        spec: Checkout configuration returned by the config's fetch_spec
+            method (checkout type, repository url, etc.).
+        root: The directory into which the repo expects to be checked out.
+    """
     assert 'type' in spec
     checkout_type = spec['type']
     checkout_spec = spec['%s_spec' % checkout_type]

+ 29 - 29
fix_encoding.py

@@ -13,10 +13,10 @@ import sys
 
 def complain(message):
     """If any exception occurs in this file, we'll probably try to print it
-  on stderr, which makes for frustrating debugging if stderr is directed
-  to our wrapper. So be paranoid about catching errors and reporting them
-  to sys.__stderr__, so that the user has a higher chance to see them.
-  """
+    on stderr, which makes for frustrating debugging if stderr is directed
+    to our wrapper. So be paranoid about catching errors and reporting them
+    to sys.__stderr__, so that the user has a higher chance to see them.
+    """
     print(isinstance(message, str) and message or repr(message),
           file=sys.__stderr__)
 
@@ -24,11 +24,11 @@ def complain(message):
 def fix_default_encoding():
     """Forces utf8 solidly on all platforms.
 
-  By default python execution environment is lazy and defaults to ascii
-  encoding.
+    By default python execution environment is lazy and defaults to ascii
+    encoding.
 
-  http://uucode.com/blog/2007/03/23/shut-up-you-dummy-7-bit-python/
-  """
+    http://uucode.com/blog/2007/03/23/shut-up-you-dummy-7-bit-python/
+    """
     if sys.getdefaultencoding() == 'utf-8':
         return False
 
@@ -79,10 +79,10 @@ def fix_win_codec():
 
 class WinUnicodeOutputBase(object):
     """Base class to adapt sys.stdout or sys.stderr to behave correctly on
-  Windows.
+    Windows.
 
-  Setting encoding to utf-8 is recommended.
-  """
+    Setting encoding to utf-8 is recommended.
+    """
     def __init__(self, fileno, name, encoding):
         # Corresponding file handle.
         self._fileno = fileno
@@ -122,8 +122,8 @@ class WinUnicodeOutputBase(object):
 class WinUnicodeConsoleOutput(WinUnicodeOutputBase):
     """Output adapter to a Windows Console.
 
-  Understands how to use the win32 console API.
-  """
+    Understands how to use the win32 console API.
+    """
     def __init__(self, console_handle, fileno, stream_name, encoding):
         super(WinUnicodeConsoleOutput,
               self).__init__(fileno, '<Unicode console %s>' % stream_name,
@@ -183,9 +183,9 @@ class WinUnicodeConsoleOutput(WinUnicodeOutputBase):
 class WinUnicodeOutput(WinUnicodeOutputBase):
     """Output adaptor to a file output on Windows.
 
-  If the standard FileWrite function is used, it will be encoded in the current
-  code page. WriteConsoleW() permits writing any character.
-  """
+    If the standard FileWrite function is used, it will be encoded in the
+    current code page. WriteConsoleW() permits writing any character.
+    """
     def __init__(self, stream, fileno, encoding):
         super(WinUnicodeOutput,
               self).__init__(fileno, '<Unicode redirected %s>' % stream.name,
@@ -247,11 +247,11 @@ def win_handle_is_a_console(handle):
 def win_get_unicode_stream(stream, excepted_fileno, output_handle, encoding):
     """Returns a unicode-compatible stream.
 
-  This function will return a direct-Console writing object only if:
-  - the file number is the expected console file number
-  - the handle the expected file handle
-  - the 'real' handle is in fact a handle to a console.
-  """
+    This function will return a direct-Console writing object only if:
+    - the file number is the expected console file number
+    - the handle is the expected file handle
+    - the 'real' handle is in fact a handle to a console.
+    """
     old_fileno = getattr(stream, 'fileno', lambda: None)()
     if old_fileno == excepted_fileno:
         # These types are available on linux but not Mac.
@@ -276,12 +276,12 @@ def win_get_unicode_stream(stream, excepted_fileno, output_handle, encoding):
 def fix_win_console(encoding):
     """Makes Unicode console output work independently of the current code page.
 
-  This also fixes <http://bugs.python.org/issue1602>.
-  Credit to Michael Kaplan
-  <http://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx> and
-  TZOmegaTZIOY
-  <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
-  """
+    This also fixes <http://bugs.python.org/issue1602>.
+    Credit to Michael Kaplan
+    <http://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx> and
+    TZOmegaTZIOY
+    <http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
+    """
     if (isinstance(sys.stdout, WinUnicodeOutputBase)
             or isinstance(sys.stderr, WinUnicodeOutputBase)):
         return False
@@ -308,8 +308,8 @@ def fix_win_console(encoding):
 def fix_encoding():
     """Fixes various encoding problems on all platforms.
 
-  Should be called at the very beginning of the process.
-  """
+    Should be called at the very beginning of the process.
+    """
     ret = True
     if sys.platform == 'win32':
         ret &= fix_win_codec()
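
As the fix_encoding() docstring above notes, it must run at the very start of
the process. A minimal entry-point sketch (mirroring the call gclient.py makes
in its own main(), visible in the diff below):

    import sys

    import fix_encoding

    def main():
        fix_encoding.fix_encoding()  # patch std streams before any output
        print('unicode output is safe from here on')
        return 0

    if __name__ == '__main__':
        sys.exit(main())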

+ 74 - 74
gclient.py

@@ -1682,7 +1682,7 @@ solutions = %(solution_list)s
 
     def _CheckConfig(self):
         """Verify that the config matches the state of the existing checked-out
-    solutions."""
+        solutions."""
         for dep in self.dependencies:
             if dep.managed and dep.url:
                 scm = dep.CreateSCM()
@@ -1806,7 +1806,7 @@ it or fix the checkout.
     def LoadCurrentConfig(options):
         # type: (optparse.Values) -> GClient
         """Searches for and loads a .gclient file relative to the current working
-    dir."""
+        dir."""
         if options.spec:
             client = GClient('.', options)
             client.SetConfig(options.spec)
@@ -1862,8 +1862,8 @@ it or fix the checkout.
     def _SaveEntries(self):
         """Creates a .gclient_entries file to record the list of unique checkouts.
 
-    The .gclient_entries file lives in the same directory as .gclient.
-    """
+        The .gclient_entries file lives in the same directory as .gclient.
+        """
         # Sometimes pprint.pformat will use {', sometimes it'll use { ' ... It
         # makes testing a bit too fun.
         result = 'entries = {\n'
@@ -1878,10 +1878,10 @@ it or fix the checkout.
     def _ReadEntries(self):
         """Read the .gclient_entries file for the given client.
 
-    Returns:
-      A sequence of solution names, which will be empty if there is the
-      entries file hasn't been created yet.
-    """
+        Returns:
+            A sequence of solution names, which will be empty if the
+            entries file hasn't been created yet.
+        """
         scope = {}
         filename = os.path.join(self.root_dir, self._options.entries_filename)
         if not os.path.exists(filename):
@@ -2047,12 +2047,12 @@ it or fix the checkout.
     def _RemoveUnversionedGitDirs(self):
         """Remove directories that are no longer part of the checkout.
 
-    Notify the user if there is an orphaned entry in their working copy.
-    Only delete the directory if there are no changes in it, and
-    delete_unversioned_trees is set to true.
+        Notify the user if there is an orphaned entry in their working copy.
+        Only delete the directory if there are no changes in it, and
+        delete_unversioned_trees is set to true.
 
-    Returns CIPD packages that are no longer versioned.
-    """
+        Returns CIPD packages that are no longer versioned.
+        """
 
         entry_names_and_sync = [(i.name, i._should_sync)
                                 for i in self.root.subtree(False) if i.url]
@@ -2227,10 +2227,10 @@ it or fix the checkout.
                   progress=True):
         """Runs a command on each dependency in a client and its dependencies.
 
-    Args:
-      command: The command to use (e.g., 'status' or 'diff')
-      args: list of str - extra arguments to add to the command line.
-    """
+        Args:
+            command: The command to use (e.g., 'status' or 'diff')
+            args: list of str - extra arguments to add to the command line.
+        """
         if not self.dependencies:
             raise gclient_utils.Error('No solution specified')
 
@@ -2613,16 +2613,16 @@ class CipdDependency(Dependency):
 def CMDrecurse(parser, args):
     """Operates [command args ...] on all the dependencies.
 
-  Change directory to each dependency's directory, and call [command
-  args ...] there.  Sets GCLIENT_DEP_PATH environment variable as the
-  dep's relative location to root directory of the checkout.
+    Change directory to each dependency's directory, and call [command
+    args ...] there.  Sets GCLIENT_DEP_PATH environment variable as the
+    dep's relative location to root directory of the checkout.
 
-  Examples:
-  * `gclient recurse --no-progress -j1 sh -c 'echo "$GCLIENT_DEP_PATH"'`
-  print the relative path of each dependency.
-  * `gclient recurse --no-progress -j1 sh -c "pwd"`
-  print the absolute path of each dependency.
-  """
+    Examples:
+    * `gclient recurse --no-progress -j1 sh -c 'echo "$GCLIENT_DEP_PATH"'`
+    prints the relative path of each dependency.
+    * `gclient recurse --no-progress -j1 sh -c "pwd"`
+    prints the absolute path of each dependency.
+    """
     # Stop parsing at the first non-arg so that these go through to the command
     parser.disable_interspersed_args()
     parser.add_option('-s',
@@ -2676,8 +2676,8 @@ def CMDrecurse(parser, args):
 def CMDfetch(parser, args):
     """Fetches upstream commits for all modules.
 
-  Completely git-specific. Simply runs 'git fetch [args ...]' for each module.
-  """
+    Completely git-specific. Simply runs 'git fetch [args ...]' for each module.
+    """
     (options, args) = parser.parse_args(args)
     return CMDrecurse(
         OptionParser(),
@@ -2689,11 +2689,11 @@ class Flattener(object):
     def __init__(self, client, pin_all_deps=False):
         """Constructor.
 
-    Arguments:
-      client (GClient): client to flatten
-      pin_all_deps (bool): whether to pin all deps, even if they're not pinned
-          in DEPS
-    """
+        Arguments:
+            client (GClient): client to flatten
+            pin_all_deps (bool): whether to pin all deps, even if they're not pinned
+                in DEPS
+        """
         self._client = client
 
         self._deps_string = None
@@ -2725,9 +2725,9 @@ class Flattener(object):
     def _pin_dep(self, dep):
         """Pins a dependency to specific full revision sha.
 
-    Arguments:
-      dep (Dependency): dependency to process
-    """
+        Arguments:
+            dep (Dependency): dependency to process
+        """
         if dep.url is None:
             return
 
@@ -2742,10 +2742,10 @@ class Flattener(object):
     def _flatten(self, pin_all_deps=False):
         """Runs the flattener. Saves resulting DEPS string.
 
-    Arguments:
-      pin_all_deps (bool): whether to pin all deps, even if they're not pinned
-          in DEPS
-    """
+        Arguments:
+            pin_all_deps (bool): whether to pin all deps, even if they're not pinned
+                in DEPS
+        """
         for solution in self._client.dependencies:
             self._add_dep(solution)
             self._flatten_dep(solution)
@@ -2791,9 +2791,9 @@ class Flattener(object):
     def _add_dep(self, dep):
         """Helper to add a dependency to flattened DEPS.
 
-    Arguments:
-      dep (Dependency): dependency to add
-    """
+        Arguments:
+            dep (Dependency): dependency to add
+        """
         assert dep.name not in self._deps or self._deps.get(
             dep.name) == dep, (dep.name, self._deps.get(dep.name))
         if dep.url:
@@ -2802,9 +2802,9 @@ class Flattener(object):
     def _flatten_dep(self, dep):
         """Visits a dependency in order to flatten it (see CMDflatten).
 
-    Arguments:
-      dep (Dependency): dependency to process
-    """
+        Arguments:
+            dep (Dependency): dependency to process
+        """
         logging.debug('_flatten_dep(%s)', dep.name)
 
         assert dep.deps_parsed, (
@@ -2854,10 +2854,10 @@ class Flattener(object):
 def CMDgitmodules(parser, args):
     """Adds or updates Git Submodules based on the contents of the DEPS file.
 
-  This command should be run in the root directory of the repo.
-  It will create or update the .gitmodules file and include
-  `gclient-condition` values. Commits in gitlinks will also be updated.
-  """
+    This command should be run in the root directory of the repo.
+    It will create or update the .gitmodules file and include
+    `gclient-condition` values. Commits in gitlinks will also be updated.
+    """
     parser.add_option('--output-gitmodules',
                       help='name of the .gitmodules file to write to',
                       default='.gitmodules')
@@ -3028,7 +3028,7 @@ def _DepsToLines(deps):
 
 def _DepsToDotGraphLines(deps):
     # type: (Mapping[str, Dependency]) -> Sequence[str]
-    """Converts  |deps| dict to list of lines for dot graphs"""
+    """Converts  |deps| dict to list of lines for dot graphs."""
     if not deps:
         return []
     graph_lines = ["digraph {\n\trankdir=\"LR\";"]
@@ -3137,8 +3137,8 @@ def _VarsToLines(variables):
 def CMDgrep(parser, args):
     """Greps through git repos managed by gclient.
 
-  Runs 'git grep [args...]' for each module.
-  """
+    Runs 'git grep [args...]' for each module.
+    """
     # We can't use optparse because it will try to parse arguments sent
     # to git grep and throw an error. :-(
     if not args or re.match('(-h|--help)$', args[0]):
@@ -3180,12 +3180,12 @@ def CMDroot(parser, args):
 def CMDconfig(parser, args):
     """Creates a .gclient file in the current directory.
 
-  This specifies the configuration for further commands. After update/sync,
-  top-level DEPS files in each module are read to determine dependent
-  modules to operate on as well. If optional [url] parameter is
-  provided, then configuration is read from a specified Subversion server
-  URL.
-  """
+    This specifies the configuration for further commands. After update/sync,
+    top-level DEPS files in each module are read to determine dependent
+    modules to operate on as well. If optional [url] parameter is
+    provided, then configuration is read from a specified Subversion server
+    URL.
+    """
     # We do a little dance with the --gclientfile option.  'gclient config' is
     # the only command where it's acceptable to have both '--gclientfile' and
     # '--spec' arguments.  So, we temporarily stash any --gclientfile parameter
@@ -3279,11 +3279,11 @@ def CMDconfig(parser, args):
 def CMDpack(parser, args):
     """Generates a patch which can be applied at the root of the tree.
 
-  Internally, runs 'git diff' on each checked out module and
-  dependencies, and performs minimal postprocessing of the output. The
-  resulting patch is printed to stdout and can be applied to a freshly
-  checked out tree via 'patch -p0 < patchfile'.
-  """
+    Internally, runs 'git diff' on each checked out module and
+    dependencies, and performs minimal postprocessing of the output. The
+    resulting patch is printed to stdout and can be applied to a freshly
+    checked out tree via 'patch -p0 < patchfile'.
+    """
     parser.add_option('--deps',
                       dest='deps_os',
                       metavar='OS_LIST',
@@ -3582,8 +3582,8 @@ def CMDdiff(parser, args):
 def CMDrevert(parser, args):
     """Reverts all modifications in every dependencies.
 
-  That's the nuclear option to get back to a 'clean' state. It removes anything
-  that shows up in git status."""
+    That's the nuclear option to get back to a 'clean' state. It removes anything
+    that shows up in git status."""
     parser.add_option('--deps',
                       dest='deps_os',
                       metavar='OS_LIST',
@@ -3670,11 +3670,11 @@ def CMDinstallhooks(parser, args):
 def CMDrevinfo(parser, args):
     """Outputs revision info mapping for the client and its dependencies.
 
-  This allows the capture of an overall 'revision' for the source tree that
-  can be used to reproduce the same tree in the future. It is only useful for
-  'unpinned dependencies', i.e. DEPS/deps references without a git hash.
-  A git branch name isn't 'pinned' since the actual commit can change.
-  """
+    This allows the capture of an overall 'revision' for the source tree that
+    can be used to reproduce the same tree in the future. It is only useful for
+    'unpinned dependencies', i.e. DEPS/deps references without a git hash.
+    A git branch name isn't 'pinned' since the actual commit can change.
+    """
     parser.add_option('--deps',
                       dest='deps_os',
                       metavar='OS_LIST',
@@ -3719,8 +3719,8 @@ def CMDrevinfo(parser, args):
 def CMDgetdep(parser, args):
     """Gets revision information and variable values from a DEPS file.
 
-  If key doesn't exist or is incorrectly declared, this script exits with exit
-  code 2."""
+    If key doesn't exist or is incorrectly declared, this script exits with exit
+    code 2."""
     parser.add_option('--var',
                       action='append',
                       dest='vars',
@@ -4124,7 +4124,7 @@ def can_run_gclient_and_helpers():
 
 def main(argv):
     """Doesn't parse the arguments here, just find the right subcommand to
-  execute."""
+    execute."""
     if not can_run_gclient_and_helpers():
         return 2
     fix_encoding.fix_encoding()

+ 28 - 28
gclient_eval.py

@@ -444,11 +444,11 @@ def Exec(content, filename='<unknown>', vars_override=None, builtin_vars=None):
 def _StandardizeDeps(deps_dict, vars_dict):
     """"Standardizes the deps_dict.
 
-  For each dependency:
-  - Expands the variable in the dependency name.
-  - Ensures the dependency is a dictionary.
-  - Set's the 'dep_type' to be 'git' by default.
-  """
+    For each dependency:
+    - Expands the variable in the dependency name.
+    - Ensures the dependency is a dictionary.
+    - Sets the 'dep_type' to 'git' by default.
+    """
     new_deps_dict = {}
     for dep_name, dep_info in deps_dict.items():
         dep_name = dep_name.format(**vars_dict)
@@ -462,10 +462,10 @@ def _StandardizeDeps(deps_dict, vars_dict):
 def _MergeDepsOs(deps_dict, os_deps_dict, os_name):
     """Merges the deps in os_deps_dict into conditional dependencies in deps_dict.
 
-  The dependencies in os_deps_dict are transformed into conditional dependencies
-  using |'checkout_' + os_name|.
-  If the dependency is already present, the URL and revision must coincide.
-  """
+    The dependencies in os_deps_dict are transformed into conditional dependencies
+    using |'checkout_' + os_name|.
+    If the dependency is already present, the URL and revision must coincide.
+    """
     for dep_name, dep_info in os_deps_dict.items():
         # Make this condition very visible, so it's not a silent failure.
         # It's unclear how to support None override in deps_os.
@@ -493,8 +493,8 @@ def _MergeDepsOs(deps_dict, os_deps_dict, os_name):
 def UpdateCondition(info_dict, op, new_condition):
     """Updates info_dict's condition with |new_condition|.
 
-  An absent value is treated as implicitly True.
-  """
+    An absent value is treated as implicitly True.
+    """
     curr_condition = info_dict.get('condition')
     # Easy case: Both are present.
     if curr_condition and new_condition:
@@ -511,23 +511,23 @@ def UpdateCondition(info_dict, op, new_condition):
 def Parse(content, filename, vars_override=None, builtin_vars=None):
     """Parses DEPS strings.
 
-  Executes the Python-like string stored in content, resulting in a Python
-  dictionary specified by the schema above. Supports syntax validation and
-  variable expansion.
-
-  Args:
-    content: str. DEPS file stored as a string.
-    filename: str. The name of the DEPS file, or a string describing the source
-      of the content, e.g. '<string>', '<unknown>'.
-    vars_override: dict, optional. A dictionary with overrides for the variables
-      defined by the DEPS file.
-    builtin_vars: dict, optional. A dictionary with variables that are provided
-      by default.
-
-  Returns:
-    A Python dict with the parsed contents of the DEPS file, as specified by the
-    schema above.
-  """
+    Executes the Python-like string stored in content, resulting in a Python
+    dictionary specified by the schema above. Supports syntax validation and
+    variable expansion.
+
+    Args:
+        content: str. DEPS file stored as a string.
+        filename: str. The name of the DEPS file, or a string describing the source
+            of the content, e.g. '<string>', '<unknown>'.
+        vars_override: dict, optional. A dictionary with overrides for the variables
+            defined by the DEPS file.
+        builtin_vars: dict, optional. A dictionary with variables that are provided
+            by default.
+
+    Returns:
+        A Python dict with the parsed contents of the DEPS file, as specified by the
+        schema above.
+    """
     result = Exec(content, filename, vars_override, builtin_vars)
 
     vars_dict = result.get('vars', {})
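
A hedged sketch of calling the Parse() entry point documented above; the DEPS
snippet is illustrative and assumes gclient_eval accepts the usual Var()
concatenation syntax used in real DEPS files:

    import gclient_eval

    content = '''
    vars = {'chromium_git': 'https://chromium.googlesource.com'}
    deps = {
        'src/third_party/example':
            Var('chromium_git') + '/example.git' + '@' + 'deadbeef',
    }
    '''
    result = gclient_eval.Parse(content, '<string>')
    # Per _StandardizeDeps above, each dep is normalized to a dict and
    # 'dep_type' defaults to 'git'.
    dep = result['deps']['src/third_party/example']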

+ 1 - 1
gclient_scm.py

@@ -1586,7 +1586,7 @@ class GitWrapper(SCMWrapper):
     def _AutoFetchRef(self, options, revision, depth=None):
         """Attempts to fetch |revision| if not available in local repo.
 
-    Returns possibly updated revision."""
+        Returns possibly updated revision."""
         if not scm.GIT.IsValidRevision(self.checkout_path, revision):
             self._Fetch(options, refspec=revision, depth=depth)
             revision = self._Capture(['rev-parse', 'FETCH_HEAD'])

+ 129 - 126
gclient_utils.py

@@ -85,14 +85,15 @@ def FuzzyMatchRepo(repo, candidates):
     # type: (str, Union[Collection[str], Mapping[str, Any]]) -> Optional[str]
     """Attempts to find a representation of repo in the candidates.
 
-  Args:
-    repo: a string representation of a repo in the form of a url or the
-      name and path of the solution it represents.
-    candidates: The candidates to look through which may contain `repo` in
-      in any of the forms mentioned above.
-  Returns:
-    The matching string, if any, which may be in a different form from `repo`.
-  """
+    Args:
+        repo: a string representation of a repo in the form of a url or the
+            name and path of the solution it represents.
+        candidates: The candidates to look through which may contain `repo`
+            in any of the forms mentioned above.
+    Returns:
+        The matching string, if any, which may be in a different form from
+        `repo`.
+    """
     if repo in candidates:
         return repo
     if repo.endswith('.git') and repo[:-len('.git')] in candidates:
@@ -103,7 +104,7 @@ def FuzzyMatchRepo(repo, candidates):
 
 
 def SplitUrlRevision(url):
-    """Splits url and returns a two-tuple: url, rev"""
+    """Splits url and returns a two-tuple: url, rev."""
     if url.startswith('ssh:'):
         # Make sure ssh://user-name@example.com/~/test.git@stable works
         regex = r'(ssh://(?:[-.\w]+@)?[-\w:\.]+/[-~\w\./]+)(?:@(.+))?'
@@ -129,12 +130,12 @@ def ExtractRefName(remote, full_refs_str):
 
 
 def IsGitSha(revision):
-    """Returns true if the given string is a valid hex-encoded sha"""
+    """Returns true if the given string is a valid hex-encoded sha."""
     return re.match('^[a-fA-F0-9]{6,40}$', revision) is not None
 
 
 def IsFullGitSha(revision):
-    """Returns true if the given string is a valid hex-encoded full sha"""
+    """Returns true if the given string is a valid hex-encoded full sha."""
     return re.match('^[a-fA-F0-9]{40}$', revision) is not None
 
 
@@ -145,12 +146,12 @@ def IsDateRevision(revision):
 
 def MakeDateRevision(date):
     """Returns a revision representing the latest revision before the given
-  date."""
+    date."""
     return "{" + date + "}"
 
 
 def SyntaxErrorToError(filename, e):
-    """Raises a gclient_utils.Error exception with the human readable message"""
+    """Raises a gclient_utils.Error exception with a human readable message."""
     try:
         # Try to construct a human readable error message
         if filename:
@@ -223,23 +224,23 @@ def temporary_directory(**kwargs):
 def temporary_file():
     """Creates a temporary file.
 
-  On Windows, a file must be closed before it can be opened again. This function
-  allows to write something like:
+    On Windows, a file must be closed before it can be opened again. This
+    function allows you to write something like:
 
-    with gclient_utils.temporary_file() as tmp:
-      gclient_utils.FileWrite(tmp, foo)
-      useful_stuff(tmp)
+        with gclient_utils.temporary_file() as tmp:
+            gclient_utils.FileWrite(tmp, foo)
+            useful_stuff(tmp)
 
-  Instead of something like:
+    Instead of something like:
 
-    with tempfile.NamedTemporaryFile(delete=False) as tmp:
-      tmp.write(foo)
-      tmp.close()
-      try:
-        useful_stuff(tmp)
-      finally:
-        os.remove(tmp.name)
-  """
+        with tempfile.NamedTemporaryFile(delete=False) as tmp:
+            tmp.write(foo)
+            tmp.close()
+            try:
+                useful_stuff(tmp)
+            finally:
+                os.remove(tmp.name)
+    """
     handle, name = tempfile.mkstemp()
     os.close(handle)
     try:
@@ -251,11 +252,11 @@ def temporary_file():
 def safe_rename(old, new):
     """Renames a file reliably.
 
-  Sometimes os.rename does not work because a dying git process keeps a handle
-  on it for a few seconds. An exception is then thrown, which make the program
-  give up what it was doing and remove what was deleted.
-  The only solution is to catch the exception and try again until it works.
-  """
+    Sometimes os.rename does not work because a dying git process keeps a handle
+    on it for a few seconds. An exception is then thrown, which makes the program
+    give up what it was doing and remove what was deleted.
+    The only solution is to catch the exception and try again until it works.
+    """
     # roughly 10s
     retries = 100
     for i in range(retries):
@@ -282,28 +283,28 @@ def rm_file_or_tree(path):
 def rmtree(path):
     """shutil.rmtree() on steroids.
 
-  Recursively removes a directory, even if it's marked read-only.
-
-  shutil.rmtree() doesn't work on Windows if any of the files or directories
-  are read-only. We need to be able to force the files to be writable (i.e.,
-  deletable) as we traverse the tree.
-
-  Even with all this, Windows still sometimes fails to delete a file, citing
-  a permission error (maybe something to do with antivirus scans or disk
-  indexing).  The best suggestion any of the user forums had was to wait a
-  bit and try again, so we do that too.  It's hand-waving, but sometimes it
-  works. :/
-
-  On POSIX systems, things are a little bit simpler.  The modes of the files
-  to be deleted doesn't matter, only the modes of the directories containing
-  them are significant.  As the directory tree is traversed, each directory
-  has its mode set appropriately before descending into it.  This should
-  result in the entire tree being removed, with the possible exception of
-  *path itself, because nothing attempts to change the mode of its parent.
-  Doing so would be hazardous, as it's not a directory slated for removal.
-  In the ordinary case, this is not a problem: for our purposes, the user
-  will never lack write permission on *path's parent.
-  """
+    Recursively removes a directory, even if it's marked read-only.
+
+    shutil.rmtree() doesn't work on Windows if any of the files or directories
+    are read-only. We need to be able to force the files to be writable (i.e.,
+    deletable) as we traverse the tree.
+
+    Even with all this, Windows still sometimes fails to delete a file, citing
+    a permission error (maybe something to do with antivirus scans or disk
+    indexing).  The best suggestion any of the user forums had was to wait a
+    bit and try again, so we do that too.  It's hand-waving, but sometimes it
+    works. :/
+
+    On POSIX systems, things are a little bit simpler.  The modes of the files
+    to be deleted don't matter, only the modes of the directories containing
+    them are significant.  As the directory tree is traversed, each directory
+    has its mode set appropriately before descending into it.  This should
+    result in the entire tree being removed, with the possible exception of
+    *path itself, because nothing attempts to change the mode of its parent.
+    Doing so would be hazardous, as it's not a directory slated for removal.
+    In the ordinary case, this is not a problem: for our purposes, the user
+    will never lack write permission on *path's parent.
+    """
     if not os.path.exists(path):
         return
 
@@ -349,9 +350,9 @@ def rmtree(path):
 def safe_makedirs(tree):
     """Creates the directory in a safe manner.
 
-  Because multiple threads can create these directories concurrently, trap the
-  exception and pass on.
-  """
+    Because multiple threads can create these directories concurrently, trap the
+    exception and pass on.
+    """
     count = 0
     while not os.path.exists(tree):
         count += 1
@@ -373,8 +374,8 @@ def CommandToStr(args):
 
 class Wrapper(object):
     """Wraps an object, acting as a transparent proxy for all properties by
-  default.
-  """
+    default.
+    """
     def __init__(self, wrapped):
         self._wrapped = wrapped
 
@@ -567,20 +568,21 @@ def CheckCallAndFilter(args,
                        **kwargs):
     """Runs a command and calls back a filter function if needed.
 
-  Accepts all subprocess2.Popen() parameters plus:
-    print_stdout: If True, the command's stdout is forwarded to stdout.
-    filter_fn: A function taking a single string argument called with each line
-               of the subprocess2's output. Each line has the trailing newline
-               character trimmed.
-    show_header: Whether to display a header before the command output.
-    always_show_header: Show header even when the command produced no output.
-    retry: If the process exits non-zero, sleep for a brief interval and try
-           again, up to RETRY_MAX times.
+    Accepts all subprocess2.Popen() parameters plus:
+        print_stdout: If True, the command's stdout is forwarded to stdout.
+        filter_fn: A function taking a single string argument called with each
+            line of the subprocess2's output. Each line has the trailing
+            newline character trimmed.
+        show_header: Whether to display a header before the command output.
+        always_show_header: Show header even when the command produced no
+            output.
+        retry: If the process exits non-zero, sleep for a brief interval and
+            try again, up to RETRY_MAX times.
 
-  stderr is always redirected to stdout.
+    stderr is always redirected to stdout.
 
-  Returns the output of the command as a binary string.
-  """
+    Returns the output of the command as a binary string.
+    """
     def show_header_if_necessary(needs_header, attempt):
         """Show the header at most once."""
         if not needs_header[0]:
@@ -716,21 +718,22 @@ def CheckCallAndFilter(args,
 class GitFilter(object):
     """A filter_fn implementation for quieting down git output messages.
 
-  Allows a custom function to skip certain lines (predicate), and will throttle
-  the output of percentage completed lines to only output every X seconds.
-  """
+    Allows a custom function to skip certain lines (predicate), and will
+    throttle the output of percentage completed lines to only output every X
+    seconds.
+    """
     PERCENT_RE = re.compile('(.*) ([0-9]{1,3})% .*')
 
     def __init__(self, time_throttle=0, predicate=None, out_fh=None):
         """
-    Args:
-      time_throttle (int): GitFilter will throttle 'noisy' output (such as the
-        XX% complete messages) to only be printed at least |time_throttle|
-        seconds apart.
-      predicate (f(line)): An optional function which is invoked for every line.
-        The line will be skipped if predicate(line) returns False.
-      out_fh: File handle to write output to.
-    """
+        Args:
+            time_throttle (int): GitFilter will throttle 'noisy' output (such as
+                the XX% complete messages) to only be printed at least
+                |time_throttle| seconds apart.
+            predicate (f(line)): An optional function invoked for every line.
+                The line will be skipped if predicate(line) returns False.
+            out_fh: File handle to write output to.
+        """
         self.first_line = True
         self.last_time = 0
         self.time_throttle = time_throttle
@@ -762,8 +765,8 @@ class GitFilter(object):
 def FindFileUpwards(filename, path=None):
     """Search upwards from the a directory (default: current) to find a file.
 
-  Returns nearest upper-level directory with the passed in file.
-  """
+    Returns nearest upper-level directory with the passed in file.
+    """
     if not path:
         path = os.getcwd()
     path = os.path.realpath(path)
@@ -844,7 +847,7 @@ class WorkItem(object):
 
     def run(self, work_queue):
         """work_queue is passed as keyword argument so it should be
-    the last parameters of the function when you override it."""
+        the last parameters of the function when you override it."""
 
     @property
     def name(self):
@@ -853,16 +856,16 @@ class WorkItem(object):
 
 class ExecutionQueue(object):
     """Runs a set of WorkItem that have interdependencies and were WorkItem are
-  added as they are processed.
+    added as they are processed.
 
-  This class manages that all the required dependencies are run
-  before running each one.
+    This class manages that all the required dependencies are run
+    before running each one.
 
-  Methods of this class are thread safe.
-  """
+    Methods of this class are thread safe.
+    """
     def __init__(self, jobs, progress, ignore_requirements, verbose=False):
         """jobs specifies the number of concurrent tasks to allow. progress is a
-    Progress instance."""
+        Progress instance."""
         # Set when a thread is done or a new item is enqueued.
         self.ready_cond = threading.Condition()
         # Maximum number of concurrent tasks.
@@ -887,8 +890,8 @@ class ExecutionQueue(object):
 
     def enqueue(self, d):
         """Enqueue one Dependency to be executed later once its requirements are
-    satisfied.
-    """
+        satisfied.
+        """
         assert isinstance(d, WorkItem)
         self.ready_cond.acquire()
         try:
@@ -1126,16 +1129,16 @@ class ExecutionQueue(object):
 def GetEditor(git_editor=None):
     """Returns the most plausible editor to use.
 
-  In order of preference:
-  - GIT_EDITOR environment variable
-  - core.editor git configuration variable (if supplied by git-cl)
-  - VISUAL environment variable
-  - EDITOR environment variable
-  - vi (non-Windows) or notepad (Windows)
+    In order of preference:
+    - GIT_EDITOR environment variable
+    - core.editor git configuration variable (if supplied by git-cl)
+    - VISUAL environment variable
+    - EDITOR environment variable
+    - vi (non-Windows) or notepad (Windows)
 
-  In the case of git-cl, this matches git's behaviour, except that it does not
-  include dumb terminal detection.
-  """
+    In the case of git-cl, this matches git's behaviour, except that it does not
+    include dumb terminal detection.
+    """
     editor = os.environ.get('GIT_EDITOR') or git_editor
     if not editor:
         editor = os.environ.get('VISUAL')
@@ -1201,10 +1204,10 @@ def RunEditor(content, git, git_editor=None):
 def UpgradeToHttps(url):
     """Upgrades random urls to https://.
 
-  Do not touch unknown urls like ssh:// or git://.
-  Do not touch http:// urls with a port number,
-  Fixes invalid GAE url.
-  """
+    Do not touch unknown urls like ssh:// or git://.
+    Do not touch http:// urls with a port number.
+    Fixes invalid GAE url.
+    """
     if not url:
         return url
     if not re.match(r'[a-z\-]+\://.*', url):
@@ -1240,10 +1243,10 @@ def ParseCodereviewSettingsContent(content):
 def NumLocalCpus():
     """Returns the number of processors.
 
-  multiprocessing.cpu_count() is permitted to raise NotImplementedError, and
-  is known to do this on some Windows systems and OSX 10.6. If we can't get the
-  CPU count, we will fall back to '1'.
-  """
+    multiprocessing.cpu_count() is permitted to raise NotImplementedError, and
+    is known to do this on some Windows systems and OSX 10.6. If we can't get
+    the CPU count, we will fall back to '1'.
+    """
     # Surround the entire thing in try/except; no failure here should stop
     # gclient from working.
     try:
@@ -1272,10 +1275,10 @@ def NumLocalCpus():
 def DefaultDeltaBaseCacheLimit():
     """Return a reasonable default for the git config core.deltaBaseCacheLimit.
 
-  The primary constraint is the address space of virtual memory.  The cache
-  size limit is per-thread, and 32-bit systems can hit OOM errors if this
-  parameter is set too high.
-  """
+    The primary constraint is the address space of virtual memory.  The cache
+    size limit is per-thread, and 32-bit systems can hit OOM errors if this
+    parameter is set too high.
+    """
     if platform.architecture()[0].startswith('64'):
         return '2g'
 
@@ -1285,8 +1288,8 @@ def DefaultDeltaBaseCacheLimit():
 def DefaultIndexPackConfig(url=''):
     """Return reasonable default values for configuring git-index-pack.
 
-  Experiments suggest that higher values for pack.threads don't improve
-  performance."""
+    Experiments suggest that higher values for pack.threads don't improve
+    performance."""
     cache_limit = DefaultDeltaBaseCacheLimit()
     result = ['-c', 'core.deltaBaseCacheLimit=%s' % cache_limit]
     if url in THREADED_INDEX_PACK_BLOCKLIST:
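The returned flags are meant to be spliced into a git invocation. A hedged usage sketch with a hypothetical URL; the pack.threads handling for blocklisted hosts is cut off above and assumed here:

    import subprocess

    config = DefaultIndexPackConfig('https://example.googlesource.com/repo')
    # e.g. ['-c', 'core.deltaBaseCacheLimit=2g'] on a 64-bit host, plus a
    # pack.threads setting when the URL is in THREADED_INDEX_PACK_BLOCKLIST.
    subprocess.check_call(['git'] + config + ['fetch', 'origin'])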
@@ -1316,15 +1319,15 @@ def FindExecutable(executable):
 def freeze(obj):
     """Takes a generic object ``obj``, and returns an immutable version of it.
 
-  Supported types:
-    * dict / OrderedDict -> FrozenDict
-    * list -> tuple
-    * set -> frozenset
-    * any object with a working __hash__ implementation (assumes that hashable
-      means immutable)
+    Supported types:
+        * dict / OrderedDict -> FrozenDict
+        * list -> tuple
+        * set -> frozenset
+        * any object with a working __hash__ implementation (assumes that
+            hashable means immutable)
 
-  Will raise TypeError if you pass an object which is not hashable.
-  """
+    Will raise TypeError if you pass an object which is not hashable.
+    """
     if isinstance(obj, collections.abc.Mapping):
         return FrozenDict((freeze(k), freeze(v)) for k, v in obj.items())
 
@@ -1341,8 +1344,8 @@ def freeze(obj):
 class FrozenDict(collections.abc.Mapping):
     """An immutable OrderedDict.
 
-  Modified From: http://stackoverflow.com/a/2704866
-  """
+    Modified From: http://stackoverflow.com/a/2704866
+    """
     def __init__(self, *args, **kwargs):
         self._d = collections.OrderedDict(*args, **kwargs)
 

+ 11 - 10
gerrit_client.py

@@ -5,7 +5,7 @@
 """Simple client for the Gerrit REST API.
 
 Example usage:
-  ./gerrit_client.py [command] [args]
+    ./gerrit_client.py [command] [args]
 """
 
 import json
@@ -415,17 +415,18 @@ def CMDabandon(parser, args):
 def CMDmass_abandon(parser, args):
     """Mass abandon changes
 
-  Abandons CLs that match search criteria provided by user. Before any change is
-  actually abandoned, user is presented with a list of CLs that will be affected
-  if user confirms. User can skip confirmation by passing --force parameter.
+    Abandons CLs that match the search criteria provided by the user. Before
+    any change is actually abandoned, the user is presented with a list of the
+    CLs that will be affected if they confirm. Confirmation can be skipped by
+    passing the --force parameter.
 
-  The script can abandon up to 100 CLs per invocation.
+    The script can abandon up to 100 CLs per invocation.
 
-  Examples:
-  gerrit_client.py mass-abandon --host https://HOST -p 'project=repo2'
-  gerrit_client.py mass-abandon --host https://HOST -p 'message=testing'
-  gerrit_client.py mass-abandon --host https://HOST -p 'is=wip' -p 'age=1y'
-  """
+    Examples:
+    gerrit_client.py mass-abandon --host https://HOST -p 'project=repo2'
+    gerrit_client.py mass-abandon --host https://HOST -p 'message=testing'
+    gerrit_client.py mass-abandon --host https://HOST -p 'is=wip' -p 'age=1y'
+    """
     parser.add_option('-p',
                       '--param',
                       dest='params',

+ 103 - 102
gerrit_util.py

@@ -83,8 +83,8 @@ class GerritError(Exception):
 def _QueryString(params, first_param=None):
     """Encodes query parameters in the key:val[+key:val...] format specified here:
 
-  https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
-  """
+    https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+    """
     q = [urllib.parse.quote(first_param)] if first_param else []
     q.extend(['%s:%s' % (key, val.replace(" ", "+")) for key, val in params])
     return '+'.join(q)
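A worked example of the encoding implemented in the two lines above, with hypothetical parameters:

    # first_param is URL-quoted; each (key, val) pair becomes "key:val" with
    # spaces replaced by '+', and all pieces are joined with '+'.
    _QueryString([('is', 'open'), ('owner', 'user@example.com')], 'repo~123')
    # -> 'repo~123+is:open+owner:user@example.com'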
@@ -99,9 +99,9 @@ class Authenticator(object):
     def get():
         """Returns: (Authenticator) The identified Authenticator to use.
 
-    Probes the local system and its environment and identifies the
-    Authenticator instance to use.
-    """
+        Probes the local system and its environment and identifies the
+        Authenticator instance to use.
+        """
         # LUCI Context takes priority since it's normally present only on bots,
         # which then must use it.
         if LuciContextAuthenticator.is_luci():
@@ -116,8 +116,8 @@ class Authenticator(object):
 class CookiesAuthenticator(Authenticator):
     """Authenticator implementation that uses ".netrc" or ".gitcookies" for token.
 
-  Expected case for developer workstations.
-  """
+    Expected case for developer workstations.
+    """
 
     _EMPTY = object()
 
@@ -286,7 +286,7 @@ NetrcAuthenticator = CookiesAuthenticator
 
 class GceAuthenticator(Authenticator):
     """Authenticator implementation that uses GCE metadata service for token.
-  """
+    """
 
     _INFO_URL = 'http://metadata.google.internal'
     _ACQUIRE_URL = ('%s/computeMetadata/v1/instance/'
@@ -361,7 +361,7 @@ class GceAuthenticator(Authenticator):
 
 class LuciContextAuthenticator(Authenticator):
     """Authenticator implementation that uses LUCI_CONTEXT ambient local auth.
-  """
+    """
     @staticmethod
     def is_luci():
         return auth.has_luci_context_local_auth()
@@ -429,12 +429,13 @@ def CreateHttpConn(host,
 def ReadHttpResponse(conn, accept_statuses=frozenset([200])):
     """Reads an HTTP response from a connection into a string buffer.
 
-  Args:
-    conn: An Http object created by CreateHttpConn above.
-    accept_statuses: Treat any of these statuses as success. Default: [200]
-                     Common additions include 204, 400, and 404.
-  Returns: A string buffer containing the connection's reply.
-  """
+    Args:
+        conn: An Http object created by CreateHttpConn above.
+        accept_statuses: Treat any of these statuses as success. Default: [200]
+            Common additions include 204, 400, and 404.
+    Returns:
+        A string buffer containing the connection's reply.
+    """
     sleep_time = SLEEP_TIME
     for idx in range(TRY_LIMIT):
         before_response = time.time()
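A hedged usage sketch pairing it with CreateHttpConn, using a hypothetical change number:

    conn = CreateHttpConn('chromium-review.googlesource.com',
                          'changes/12345/detail')
    # Accepting 404 alongside 200 keeps a missing change from raising
    # GerritError; the caller then inspects the body itself.
    body = ReadHttpResponse(conn, accept_statuses=[200, 404])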
@@ -534,21 +535,21 @@ def QueryChanges(host,
                  o_params=None,
                  start=None):
     """
-  Queries a gerrit-on-borg server for changes matching query terms.
-
-  Args:
-    params: A list of key:value pairs for search parameters, as documented
-        here (e.g. ('is', 'owner') for a parameter 'is:owner'):
-        https://gerrit-review.googlesource.com/Documentation/user-search.html#search-operators
-    first_param: A change identifier
-    limit: Maximum number of results to return.
-    start: how many changes to skip (starting with the most recent)
-    o_params: A list of additional output specifiers, as documented here:
-        https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
-
-  Returns:
-    A list of json-decoded query results.
-  """
+    Queries a gerrit-on-borg server for changes matching query terms.
+
+    Args:
+        params: A list of key:value pairs for search parameters, as documented
+            here (e.g. ('is', 'owner') for a parameter 'is:owner'):
+            https://gerrit-review.googlesource.com/Documentation/user-search.html#search-operators
+        first_param: A change identifier
+        limit: Maximum number of results to return.
+        start: how many changes to skip (starting with the most recent)
+        o_params: A list of additional output specifiers, as documented here:
+            https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
+
+    Returns:
+        A list of json-decoded query results.
+    """
     # Note that no attempt is made to escape special characters; YMMV.
     if not params and not first_param:
         raise RuntimeError('QueryChanges requires search parameters')
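A usage sketch under the documented parameter format, with a hypothetical owner:

    # Equivalent to the Gerrit search "status:open owner:user@example.com",
    # capped at 10 results.
    changes = QueryChanges('chromium-review.googlesource.com',
                           [('status', 'open'), ('owner', 'user@example.com')],
                           limit=10)
    for change in changes:
        print(change['_number'], change['subject'])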
@@ -569,24 +570,24 @@ def GenerateAllChanges(host,
                        o_params=None,
                        start=None):
     """Queries a gerrit-on-borg server for all the changes matching the query
-  terms.
+    terms.
 
-  WARNING: this is unreliable if a change matching the query is modified while
-  this function is being called.
+    WARNING: this is unreliable if a change matching the query is modified while
+    this function is being called.
 
-  A single query to gerrit-on-borg is limited on the number of results by the
-  limit parameter on the request (see QueryChanges) and the server maximum
-  limit.
+    A single query to gerrit-on-borg is limited on the number of results by the
+    limit parameter on the request (see QueryChanges) and the server maximum
+    limit.
 
-  Args:
-    params, first_param: Refer to QueryChanges().
-    limit: Maximum number of requested changes per query.
-    o_params: Refer to QueryChanges().
-    start: Refer to QueryChanges().
+    Args:
+        params, first_param: Refer to QueryChanges().
+        limit: Maximum number of requested changes per query.
+        o_params: Refer to QueryChanges().
+        start: Refer to QueryChanges().
 
-  Returns:
-    A generator object to the list of returned changes.
-  """
+    Returns:
+        A generator object to the list of returned changes.
+    """
     already_returned = set()
 
     def at_most_once(cls):
@@ -666,7 +667,7 @@ def GetGerritFetchUrl(host):
 
 def GetCodeReviewTbrScore(host, project):
     """Given a Gerrit host name and project, return the Code-Review score for TBR.
-  """
+    """
     conn = CreateHttpConn(host,
                           '/projects/%s' % urllib.parse.quote(project, ''))
     project = ReadHttpJsonResponse(conn)
@@ -836,8 +837,8 @@ def DeletePendingChangeEdit(host, change):
 
 def CherryPick(host, change, destination, revision='current'):
     """Create a cherry-pick commit from the given change, onto the given
-  destination.
-  """
+    destination.
+    """
     path = 'changes/%s/revisions/%s/cherrypick' % (change, revision)
     body = {'destination': destination}
     conn = CreateHttpConn(host, path, reqtype='POST', body=body)
@@ -847,9 +848,9 @@ def CherryPick(host, change, destination, revision='current'):
 def GetFileContents(host, change, path):
     """Get the contents of a file with the given path in the given revision.
 
-  Returns:
-    A bytes object with the file's contents.
-  """
+    Returns:
+        A bytes object with the file's contents.
+    """
     path = 'changes/%s/revisions/current/files/%s/content' % (
         change, urllib.parse.quote(path, ''))
     conn = CreateHttpConn(host, path, reqtype='GET')
@@ -874,11 +875,11 @@ def SetCommitMessage(host, change, description, notify='ALL'):
 def GetCommitIncludedIn(host, project, commit):
     """Retrieves the branches and tags for a given commit.
 
-  https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-included-in
+    https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-included-in
 
-  Returns:
-    A JSON object with keys of 'branches' and 'tags'.
-  """
+    Returns:
+        A JSON object with keys of 'branches' and 'tags'.
+    """
     path = 'projects/%s/commits/%s/in' % (urllib.parse.quote(project,
                                                              ''), commit)
     conn = CreateHttpConn(host, path, reqtype='GET')
@@ -1073,16 +1074,16 @@ def ResetReviewLabels(host,
 
 def CreateChange(host, project, branch='main', subject='', params=()):
     """
-  Creates a new change.
+    Creates a new change.
 
-  Args:
-    params: A list of additional ChangeInput specifiers, as documented here:
-        (e.g. ('is_private', 'true') to mark the change private.
-        https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#change-input
+    Args:
+        params: A list of additional ChangeInput specifiers, e.g.
+            ('is_private', 'true') to mark the change private; documented at:
+            https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#change-input
 
-  Returns:
-    ChangeInfo for the new change.
-  """
+    Returns:
+        ChangeInfo for the new change.
+    """
     path = 'changes/'
     body = {'project': project, 'branch': branch, 'subject': subject}
     body.update(dict(params))
@@ -1097,11 +1098,11 @@ def CreateChange(host, project, branch='main', subject='', params=()):
 def CreateGerritBranch(host, project, branch, commit):
     """Creates a new branch from given project and commit
 
-  https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#create-branch
+    https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#create-branch
 
-  Returns:
-    A JSON object with 'ref' key.
-  """
+    Returns:
+        A JSON object with 'ref' key.
+    """
     path = 'projects/%s/branches/%s' % (project, branch)
     body = {'revision': commit}
     conn = CreateHttpConn(host, path, reqtype='PUT', body=body)
@@ -1114,11 +1115,11 @@ def CreateGerritBranch(host, project, branch, commit):
 def CreateGerritTag(host, project, tag, commit):
     """Creates a new tag at the given commit.
 
-  https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#create-tag
+    https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#create-tag
 
-  Returns:
-    A JSON object with 'ref' key.
-  """
+    Returns:
+        A JSON object with 'ref' key.
+    """
     path = 'projects/%s/tags/%s' % (project, tag)
     body = {'revision': commit}
     conn = CreateHttpConn(host, path, reqtype='PUT', body=body)
@@ -1131,11 +1132,11 @@ def CreateGerritTag(host, project, tag, commit):
 def GetHead(host, project):
     """Retrieves current HEAD of Gerrit project
 
-  https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-head
+    https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-head
 
-  Returns:
-    A JSON object with 'ref' key.
-  """
+    Returns:
+        A JSON object with 'ref' key.
+    """
     path = 'projects/%s/HEAD' % (project)
     conn = CreateHttpConn(host, path, reqtype='GET')
     response = ReadHttpJsonResponse(conn, accept_statuses=[200])
@@ -1147,11 +1148,11 @@ def GetHead(host, project):
 def UpdateHead(host, project, branch):
     """Updates Gerrit HEAD to point to branch
 
-  https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#set-head
+    https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#set-head
 
-  Returns:
-    A JSON object with 'ref' key.
-  """
+    Returns:
+        A JSON object with 'ref' key.
+    """
     path = 'projects/%s/HEAD' % (project)
     body = {'ref': branch}
     conn = CreateHttpConn(host, path, reqtype='PUT', body=body)
@@ -1164,12 +1165,12 @@ def UpdateHead(host, project, branch):
 def GetGerritBranch(host, project, branch):
     """Gets a branch info from given project and branch name.
 
-  See:
-  https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-branch
+    See:
+    https://gerrit-review.googlesource.com/Documentation/rest-api-projects.html#get-branch
 
-  Returns:
-    A JSON object with 'revision' key if the branch exists, otherwise None.
-  """
+    Returns:
+        A JSON object with 'revision' key if the branch exists, otherwise None.
+    """
     path = 'projects/%s/branches/%s' % (project, branch)
     conn = CreateHttpConn(host, path, reqtype='GET')
     return ReadHttpJsonResponse(conn, accept_statuses=[200, 404])
@@ -1184,14 +1185,14 @@ def GetProjectHead(host, project):
 def GetAccountDetails(host, account_id='self'):
     """Returns details of the account.
 
-  If account_id is not given, uses magic value 'self' which corresponds to
-  whichever account user is authenticating as.
+    If account_id is not given, uses magic value 'self' which corresponds to
+    whichever account user is authenticating as.
 
-  Documentation:
-  https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#get-account
+    Documentation:
+    https://gerrit-review.googlesource.com/Documentation/rest-api-accounts.html#get-account
 
-  Returns None if account is not found (i.e., Gerrit returned 404).
-  """
+    Returns None if account is not found (i.e., Gerrit returned 404).
+    """
     conn = CreateHttpConn(host, '/accounts/%s' % account_id)
     return ReadHttpJsonResponse(conn, accept_statuses=[200, 404])
 
@@ -1199,9 +1200,9 @@ def GetAccountDetails(host, account_id='self'):
 def ValidAccounts(host, accounts, max_threads=10):
     """Returns a mapping from valid account to its details.
 
-  Invalid accounts, either not existing or without unique match,
-  are not present as returned dictionary keys.
-  """
+    Invalid accounts, either nonexistent or lacking a unique match,
+    are not present as keys in the returned dictionary.
+    """
     assert not isinstance(accounts, str), type(accounts)
     accounts = list(set(accounts))
     if not accounts:
@@ -1225,14 +1226,14 @@ def ValidAccounts(host, accounts, max_threads=10):
 def PercentEncodeForGitRef(original):
     """Applies percent-encoding for strings sent to Gerrit via git ref metadata.
 
-  The encoding used is based on but stricter than URL encoding (Section 2.1 of
-  RFC 3986). The only non-escaped characters are alphanumerics, and 'SPACE'
-  (U+0020) can be represented as 'LOW LINE' (U+005F) or 'PLUS SIGN' (U+002B).
+    The encoding used is based on but stricter than URL encoding (Section 2.1 of
+    RFC 3986). The only non-escaped characters are alphanumerics, and 'SPACE'
+    (U+0020) can be represented as 'LOW LINE' (U+005F) or 'PLUS SIGN' (U+002B).
 
-  For more information, see the Gerrit docs here:
+    For more information, see the Gerrit docs here:
 
-  https://gerrit-review.googlesource.com/Documentation/user-upload.html#message
-  """
+    https://gerrit-review.googlesource.com/Documentation/user-upload.html#message
+    """
     safe = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789 '
     encoded = ''.join(c if c in safe else '%%%02X' % ord(c) for c in original)
 
@@ -1255,10 +1256,10 @@ def tempdir():
 
 def ChangeIdentifier(project, change_number):
     """Returns change identifier "project~number" suitable for |change| arg of
-  this module API.
+    this module API.
 
-  Such format is allows for more efficient Gerrit routing of HTTP requests,
-  comparing to specifying just change_number.
-  """
+    Such a format allows for more efficient Gerrit routing of HTTP requests,
+    compared to specifying just change_number.
+    """
     assert int(change_number)
     return '%s~%s' % (urllib.parse.quote(project, ''), change_number)
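A worked example of the identifier format, with a hypothetical change number:

    # The project is percent-encoded with no characters treated as safe,
    # then joined to the change number with '~'.
    ChangeIdentifier('chromium/src', 4242424)
    # -> 'chromium%2Fsrc~4242424'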

+ 25 - 24
git_cache.py

@@ -54,23 +54,23 @@ def exponential_backoff_retry(fn,
                               printerr=None):
     """Executes |fn| up to |count| times, backing off exponentially.
 
-  Args:
-    fn (callable): The function to execute. If this raises a handled
-        exception, the function will retry with exponential backoff.
-    excs (tuple): A tuple of Exception types to handle. If one of these is
-        raised by |fn|, a retry will be attempted. If |fn| raises an Exception
-        that is not in this list, it will immediately pass through. If |excs|
-        is empty, the Exception base class will be used.
-    name (str): Optional operation name to print in the retry string.
-    count (int): The number of times to try before allowing the exception to
-        pass through.
-    sleep_time (float): The initial number of seconds to sleep in between
-        retries. This will be doubled each retry.
-    printerr (callable): Function that will be called with the error string upon
-        failures. If None, |logging.warning| will be used.
-
-  Returns: The return value of the successful fn.
-  """
+    Args:
+        fn (callable): The function to execute. If this raises a handled
+            exception, the function will retry with exponential backoff.
+        excs (tuple): A tuple of Exception types to handle. If one of these is
+            raised by |fn|, a retry will be attempted. If |fn| raises an
+            Exception that is not in this list, it will immediately pass
+            through. If |excs| is empty, the Exception base class will be used.
+        name (str): Optional operation name to print in the retry string.
+        count (int): The number of times to try before allowing the exception
+            to pass through.
+        sleep_time (float): The initial number of seconds to sleep in between
+            retries. This will be doubled each retry.
+        printerr (callable): Function that will be called with the error string
+            upon failures. If None, |logging.warning| will be used.
+
+    Returns: The return value of the successful fn.
+    """
     printerr = printerr or logging.warning
     for i in range(count):
         try:
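A hedged usage sketch with a hypothetical flaky operation; with sleep_time=1.0 and count=5, the delays between attempts would be 1s, 2s, 4s and 8s:

    import shutil

    exponential_backoff_retry(
        lambda: shutil.rmtree('/tmp/cache-scratch'),
        excs=(OSError,),
        name='rmtree /tmp/cache-scratch',
        count=5,
        sleep_time=1.0)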
@@ -101,9 +101,9 @@ class Mirror(object):
     def parse_fetch_spec(spec):
         """Parses and canonicalizes a fetch spec.
 
-    Returns (fetchspec, value_regex), where value_regex can be used
-    with 'git config --replace-all'.
-    """
+        Returns (fetchspec, value_regex), where value_regex can be used
+        with 'git config --replace-all'.
+        """
         parts = spec.split(':', 1)
         src = parts[0].lstrip('+').rstrip('/')
         if not src.startswith('refs/'):
@@ -290,8 +290,9 @@ class Mirror(object):
     def bootstrap_repo(self, directory):
         """Bootstrap the repo from Google Storage if possible.
 
-    More apt-ly named bootstrap_repo_from_cloud_if_possible_else_do_nothing().
-    """
+        More aptly named
+        bootstrap_repo_from_cloud_if_possible_else_do_nothing().
+        """
         if not self.bootstrap_bucket:
             return False
 
@@ -369,8 +370,8 @@ class Mirror(object):
     def _preserve_fetchspec(self):
         """Read and preserve remote.origin.fetch from an existing mirror.
 
-    This modifies self.fetch_specs.
-    """
+        This modifies self.fetch_specs.
+        """
         if not self.exists():
             return
         try:

+ 216 - 217
git_cl.py

@@ -372,11 +372,11 @@ def _get_counterpart_host(host):
 def _trigger_tryjobs(changelist, jobs, options, patchset):
     """Sends a request to Buildbucket to trigger tryjobs for a changelist.
 
-  Args:
-    changelist: Changelist that the tryjobs are associated with.
-    jobs: A list of (project, bucket, builder).
-    options: Command-line options.
-  """
+    Args:
+        changelist: Changelist that the tryjobs are associated with.
+        jobs: A list of (project, bucket, builder).
+        options: Command-line options.
+    """
     print('Scheduling jobs on:')
     for project, bucket, builder in jobs:
         print('  %s/%s: %s' % (project, bucket, builder))
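A sketch of the jobs argument's shape, with hypothetical builders; changelist and options come from the surrounding command machinery:

    # Each entry is a (project, bucket, builder) triple.
    jobs = [
        ('chromium', 'try', 'linux-rel'),
        ('chromium', 'try', 'win-rel'),
    ]
    _trigger_tryjobs(changelist, jobs, options, patchset=2)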
@@ -460,8 +460,8 @@ def _make_tryjob_schedule_requests(changelist, jobs, options, patchset):
 def _fetch_tryjobs(changelist, buildbucket_host, patchset=None):
     """Fetches tryjobs from buildbucket.
 
-  Returns list of buildbucket.v2.Build with the try jobs for the changelist.
-  """
+    Returns list of buildbucket.v2.Build with the try jobs for the changelist.
+    """
     fields = ['id', 'builder', 'status', 'createTime', 'tags']
     request = {
         'predicate': {
@@ -488,17 +488,17 @@ def _fetch_tryjobs(changelist, buildbucket_host, patchset=None):
 
 def _fetch_latest_builds(changelist, buildbucket_host, latest_patchset=None):
     """Fetches builds from the latest patchset that has builds (within
-  the last few patchsets).
-
-  Args:
-    changelist (Changelist): The CL to fetch builds for
-    buildbucket_host (str): Buildbucket host, e.g. "cr-buildbucket.appspot.com"
-    lastest_patchset(int|NoneType): the patchset to start fetching builds from.
-      If None (default), starts with the latest available patchset.
-  Returns:
-    A tuple (builds, patchset) where builds is a list of buildbucket.v2.Build,
-    and patchset is the patchset number where those builds came from.
-  """
+    the last few patchsets).
+
+    Args:
+        changelist (Changelist): The CL to fetch builds for
+        buildbucket_host (str): Buildbucket host, e.g. "cr-buildbucket.appspot.com"
+        latest_patchset (int|NoneType): The patchset to start fetching builds from.
+            If None (default), starts with the latest available patchset.
+    Returns:
+        A tuple (builds, patchset) where builds is a list of buildbucket.v2.Build,
+        and patchset is the patchset number where those builds came from.
+    """
     assert buildbucket_host
     assert changelist.GetIssue(), 'CL must be uploaded first'
     assert changelist.GetCodereviewServer(), 'CL must be uploaded first'
@@ -521,15 +521,15 @@ def _fetch_latest_builds(changelist, buildbucket_host, latest_patchset=None):
 def _filter_failed_for_retry(all_builds):
     """Returns a list of buckets/builders that are worth retrying.
 
-  Args:
-    all_builds (list): Builds, in the format returned by _fetch_tryjobs,
-      i.e. a list of buildbucket.v2.Builds which includes status and builder
-      info.
+    Args:
+        all_builds (list): Builds, in the format returned by _fetch_tryjobs,
+            i.e. a list of buildbucket.v2.Builds which includes status and builder
+            info.
 
-  Returns:
-    A dict {(proj, bucket): [builders]}. This is the same format accepted by
-    _trigger_tryjobs.
-  """
+    Returns:
+        A dict {(proj, bucket): [builders]}. This is the same format accepted by
+        _trigger_tryjobs.
+    """
     grouped = {}
     for build in all_builds:
         builder = build['builder']
@@ -634,10 +634,10 @@ def _print_tryjobs(options, builds):
 def _ComputeFormatDiffLineRanges(files, upstream_commit):
     """Gets the changed line ranges for each file since upstream_commit.
 
-  Parses a git diff on provided files and returns a dict that maps a file name
-  to an ordered list of range tuples in the form (start_line, count).
-  Ranges are in the same format as a git diff.
-  """
+    Parses a git diff on provided files and returns a dict that maps a file name
+    to an ordered list of range tuples in the form (start_line, count).
+    Ranges are in the same format as a git diff.
+    """
     # If files is empty then diff_output will be a full diff.
     if len(files) == 0:
         return {}
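The documented return shape, as a hypothetical example:

    ranges = _ComputeFormatDiffLineRanges(['foo.cc'], upstream_commit)
    # Hypothetical result: foo.cc changed at lines 12-16 and 40-41,
    # expressed as (start_line, count) tuples, as in a git diff header:
    # {'foo.cc': [(12, 5), (40, 2)]}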
@@ -690,10 +690,10 @@ def _ComputeFormatDiffLineRanges(files, upstream_commit):
 def _FindYapfConfigFile(fpath, yapf_config_cache, top_dir=None):
     """Checks if a yapf file is in any parent directory of fpath until top_dir.
 
-  Recursively checks parent directories to find yapf file and if no yapf file
-  is found returns None. Uses yapf_config_cache as a cache for previously found
-  configs.
-  """
+    Recursively checks parent directories to find yapf file and if no yapf file
+    is found returns None. Uses yapf_config_cache as a cache for previously found
+    configs.
+    """
     fpath = os.path.abspath(fpath)
     # Return result if we've already computed it.
     if fpath in yapf_config_cache:
@@ -721,19 +721,19 @@ def _FindYapfConfigFile(fpath, yapf_config_cache, top_dir=None):
 def _GetYapfIgnorePatterns(top_dir):
     """Returns all patterns in the .yapfignore file.
 
-  yapf is supposed to handle the ignoring of files listed in .yapfignore itself,
-  but this functionality appears to break when explicitly passing files to
-  yapf for formatting. According to
-  https://github.com/google/yapf/blob/HEAD/README.rst#excluding-files-from-formatting-yapfignore,
-  the .yapfignore file should be in the directory that yapf is invoked from,
-  which we assume to be the top level directory in this case.
+    yapf is supposed to handle the ignoring of files listed in .yapfignore itself,
+    but this functionality appears to break when explicitly passing files to
+    yapf for formatting. According to
+    https://github.com/google/yapf/blob/HEAD/README.rst#excluding-files-from-formatting-yapfignore,
+    the .yapfignore file should be in the directory that yapf is invoked from,
+    which we assume to be the top level directory in this case.
 
-  Args:
-    top_dir: The top level directory for the repository being formatted.
+    Args:
+        top_dir: The top level directory for the repository being formatted.
 
-  Returns:
-    A set of all fnmatch patterns to be ignored.
-  """
+    Returns:
+        A set of all fnmatch patterns to be ignored.
+    """
     yapfignore_file = os.path.join(top_dir, '.yapfignore')
     ignore_patterns = set()
     if not os.path.exists(yapfignore_file):
@@ -751,14 +751,14 @@ def _GetYapfIgnorePatterns(top_dir):
 def _FilterYapfIgnoredFiles(filepaths, patterns):
     """Filters out any filepaths that match any of the given patterns.
 
-  Args:
-    filepaths: An iterable of strings containing filepaths to filter.
-    patterns: An iterable of strings containing fnmatch patterns to filter on.
+    Args:
+        filepaths: An iterable of strings containing filepaths to filter.
+        patterns: An iterable of strings containing fnmatch patterns to filter on.
 
-  Returns:
-    A list of strings containing all the elements of |filepaths| that did not
-    match any of the patterns in |patterns|.
-  """
+    Returns:
+        A list of strings containing all the elements of |filepaths| that did not
+        match any of the patterns in |patterns|.
+    """
     # Not inlined so that tests can use the same implementation.
     return [
         f for f in filepaths
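The comprehension above is plain fnmatch filtering; a self-contained sketch of the same behavior:

    import fnmatch

    def filter_ignored_sketch(filepaths, patterns):
        # Keep only paths that match none of the .yapfignore patterns.
        return [f for f in filepaths
                if not any(fnmatch.fnmatch(f, p) for p in patterns)]

    assert filter_ignored_sketch(['a.py', 'third_party/b.py'],
                                 {'third_party/*'}) == ['a.py']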
@@ -770,8 +770,8 @@ def _GetCommitCountSummary(begin_commit: str,
                            end_commit: str = "HEAD") -> Optional[str]:
     """Generate a summary of the number of commits in (begin_commit, end_commit).
 
-  Returns a string containing the summary, or None if the range is empty.
-  """
+    Returns a string containing the summary, or None if the range is empty.
+    """
     count = int(
         RunGitSilent(['rev-list', '--count', f'{begin_commit}..{end_commit}']))
 
@@ -876,8 +876,8 @@ class Settings(object):
     def GetSquashGerritUploadsOverride(self):
         """Return True or False if codereview.settings should be overridden.
 
-    Returns None if no override has been defined.
-    """
+        Returns None if no override has been defined.
+        """
         # See also http://crbug.com/611892#c23
         result = self._GetConfig('gerrit.override-squash-uploads').lower()
         if result == 'true':
@@ -894,7 +894,7 @@ class Settings(object):
 
     def GetGerritSkipEnsureAuthenticated(self):
         """Return True if EnsureAuthenticated should not be done for Gerrit
-    uploads."""
+        uploads."""
         if self.gerrit_skip_ensure_authenticated is None:
             self.gerrit_skip_ensure_authenticated = self._GetConfig(
                 'gerrit.skip-ensure-authenticated').lower() == 'true'
@@ -1121,9 +1121,9 @@ class ChangeDescription(object):
     def update_reviewers(self, reviewers):
         """Rewrites the R= line(s) as a single line each.
 
-    Args:
-      reviewers (list(str)) - list of additional emails to use for reviewers.
-    """
+        Args:
+            reviewers (list(str)) - list of additional emails to use for reviewers.
+        """
         if not reviewers:
             return
 
@@ -1200,10 +1200,10 @@ class ChangeDescription(object):
     def append_footer(self, line):
         """Adds a footer line to the description.
 
-    Differentiates legacy "KEY=xxx" footers (used to be called tags) and
-    Gerrit's footers in the form of "Footer-Key: footer any value" and ensures
-    that Gerrit footers are always at the end.
-    """
+        Differentiates legacy "KEY=xxx" footers (used to be called tags) and
+        Gerrit's footers in the form of "Footer-Key: footer any value" and ensures
+        that Gerrit footers are always at the end.
+        """
         parsed_footer_line = git_footers.parse_footer(line)
         if parsed_footer_line:
             # Line is a gerrit footer in the form: Footer-Key: any value.
@@ -1285,19 +1285,19 @@ class ChangeDescription(object):
     def sanitize_hash_tag(cls, tag):
         """Returns a sanitized Gerrit hash tag.
 
-    A sanitized hashtag can be used as a git push refspec parameter value.
-    """
+        A sanitized hashtag can be used as a git push refspec parameter value.
+        """
         return re.sub(cls.BAD_HASH_TAG_CHUNK, '-', tag).strip('-').lower()
 
 
 class Changelist(object):
     """Changelist works with one changelist in local branch.
 
-  Notes:
-    * Not safe for concurrent multi-{thread,process} use.
-    * Caches values from current branch. Therefore, re-use after branch change
-      with great care.
-  """
+    Notes:
+        * Not safe for concurrent multi-{thread,process} use.
+        * Caches values from the current branch. Therefore, re-use after a
+            branch change with great care.
+    """
     def __init__(self,
                  branchref=None,
                  issue=None,
@@ -1305,8 +1305,8 @@ class Changelist(object):
                  commit_date=None):
         """Create a new ChangeList instance.
 
-    **kwargs will be passed directly to Gerrit implementation.
-    """
+        **kwargs will be passed directly to Gerrit implementation.
+        """
         # Poke settings so we get the "configure your server" message if
         # necessary.
         global settings
@@ -1358,9 +1358,9 @@ class Changelist(object):
     def GetCCList(self):
         """Returns the users cc'd on this CL.
 
-    The return value is a string suitable for passing to git cl with the --cc
-    flag.
-    """
+        The return value is a string suitable for passing to git cl with the --cc
+        flag.
+        """
         if self.cc is None:
             base_cc = settings.GetDefaultCCList()
             more_cc = ','.join(self.more_cc)
@@ -1405,8 +1405,8 @@ class Changelist(object):
     @staticmethod
     def FetchUpstreamTuple(branch):
         """Returns a tuple containing remote and remote ref,
-       e.g. 'origin', 'refs/heads/main'
-    """
+        e.g. 'origin', 'refs/heads/main'
+        """
         remote, upstream_branch = scm.GIT.FetchUpstreamTuple(
             settings.GetRoot(), branch)
         if not remote or not upstream_branch:
@@ -1477,8 +1477,8 @@ class Changelist(object):
     def GetRemoteUrl(self) -> Optional[str]:
         """Return the configured remote URL, e.g. 'git://example.org/foo.git/'.
 
-    Returns None if there is no remote.
-    """
+        Returns None if there is no remote.
+        """
         is_cached, value = self._cached_remote_url
         if is_cached:
             return value
@@ -1939,16 +1939,15 @@ class Changelist(object):
                               end_commit: Optional[str] = None) -> _NewUpload:
         """Create a squashed commit to upload.
 
-
-      Args:
-        parent: The commit to use as the parent for the new squashed.
-        orig_parent: The commit that is an actual ancestor of `end_commit`. It
-            is part of the same original tree as end_commit, which does not
-            contain squashed commits. This is used to create the change
-            description for the new squashed commit with:
-            `git log orig_parent..end_commit`.
-        end_commit: The commit to use as the end of the new squashed commit.
-    """
+        Args:
+            parent: The commit to use as the parent for the new squashed commit.
+            orig_parent: The commit that is an actual ancestor of `end_commit`. It
+                is part of the same original tree as end_commit, which does not
+                contain squashed commits. This is used to create the change
+                description for the new squashed commit with:
+                `git log orig_parent..end_commit`.
+            end_commit: The commit to use as the end of the new squashed commit.
+        """
 
         if end_commit is None:
             end_commit = RunGit(['rev-parse', self.branchref]).strip()
@@ -2173,8 +2172,8 @@ class Changelist(object):
     def SetCQState(self, new_state):
         """Updates the CQ state for the latest patchset.
 
-    Issue must have been already uploaded and known.
-    """
+        Issue must have been already uploaded and known.
+        """
         assert new_state in _CQState.ALL_STATES
         assert self.GetIssue()
         try:
@@ -2276,9 +2275,9 @@ class Changelist(object):
     def _GerritChangeIdentifier(self):
         """Handy method for gerrit_util.ChangeIdentifier for a given CL.
 
-    Not to be confused by value of "Change-Id:" footer.
-    If Gerrit project can be determined, this will speed up Gerrit HTTP API RPC.
-    """
+        Not to be confused by value of "Change-Id:" footer.
+        If Gerrit project can be determined, this will speed up Gerrit HTTP API RPC.
+        """
         project = self.GetGerritProject()
         if project:
             return gerrit_util.ChangeIdentifier(project, self.GetIssue())
@@ -2395,18 +2394,18 @@ class Changelist(object):
 
     def GetStatus(self):
         """Applies a rough heuristic to give a simple summary of an issue's review
-    or CQ status, assuming adherence to a common workflow.
-
-    Returns None if no issue for this branch, or one of the following keywords:
-      * 'error'   - error from review tool (including deleted issues)
-      * 'unsent'  - no reviewers added
-      * 'waiting' - waiting for review
-      * 'reply'   - waiting for uploader to reply to review
-      * 'lgtm'    - Code-Review label has been set
-      * 'dry-run' - dry-running in the CQ
-      * 'commit'  - in the CQ
-      * 'closed'  - successfully submitted or abandoned
-    """
+        or CQ status, assuming adherence to a common workflow.
+
+        Returns None if there is no issue for this branch, or one of these keywords:
+            * 'error'   - error from review tool (including deleted issues)
+            * 'unsent'  - no reviewers added
+            * 'waiting' - waiting for review
+            * 'reply'   - waiting for uploader to reply to review
+            * 'lgtm'    - Code-Review label has been set
+            * 'dry-run' - dry-running in the CQ
+            * 'commit'  - in the CQ
+            * 'closed'  - successfully submitted or abandoned
+        """
         if not self.GetIssue():
             return None
 
@@ -2464,7 +2463,7 @@ class Changelist(object):
 
     def _IsPatchsetRangeSignificant(self, lower, upper):
         """Returns True if the inclusive range of patchsets contains any reworks or
-    rebases."""
+        rebases."""
         if not self.GetIssue():
             return False
 
@@ -2481,8 +2480,8 @@ class Changelist(object):
 
     def GetMostRecentDryRunPatchset(self):
         """Get patchsets equivalent to the most recent patchset and return
-    the patchset with the latest dry run. If none have been dry run, return
-    the latest patchset."""
+        the patchset with the latest dry run. If none have been dry run, return
+        the latest patchset."""
         if not self.GetIssue():
             return None
 
@@ -3010,7 +3009,7 @@ class Changelist(object):
     def CMDUploadChange(self, options, git_diff_args, custom_cl_base,
                         change_desc):
         """Upload the current branch to Gerrit, retry if new remote HEAD is
-    found. options and change_desc may be mutated."""
+        found. options and change_desc may be mutated."""
         remote, remote_branch = self.GetRemoteBranch()
         branch = GetTargetRef(remote, remote_branch, options.target_branch)
 
@@ -3226,8 +3225,8 @@ class Changelist(object):
                        change_desc):
         """Computes parent of the generated commit to be uploaded to Gerrit.
 
-    Returns revision or a ref name.
-    """
+        Returns revision or a ref name.
+        """
         if custom_cl_base:
             # Try to avoid creating additional unintended CLs when uploading,
             # unless user wants to take this risk.
@@ -3291,8 +3290,8 @@ class Changelist(object):
     def _UpdateWithExternalChanges(self):
         """Updates workspace with external changes.
 
-    Returns the commit hash that should be used as the merge base on upload.
-    """
+        Returns the commit hash that should be used as the merge base on upload.
+        """
         local_ps = self.GetPatchset()
         if local_ps is None:
             return
@@ -3427,8 +3426,8 @@ class Changelist(object):
 
     def _AddChangeIdToCommitMessage(self, log_desc, args):
         """Re-commits using the current message, assumes the commit hook is in
-    place.
-    """
+        place.
+        """
         RunGit(['commit', '--amend', '-m', log_desc])
         new_log_desc = _create_description_from_log(args)
         if git_footers.get_footer_change_id(new_log_desc):
@@ -3479,18 +3478,18 @@ class Changelist(object):
 
 def _get_bug_line_values(default_project_prefix, bugs):
     """Given default_project_prefix and comma separated list of bugs, yields bug
-  line values.
+    line values.
 
-  Each bug can be either:
-    * a number, which is combined with default_project_prefix
-    * string, which is left as is.
+    Each bug can be either:
+        * a number, which is combined with default_project_prefix
+        * string, which is left as is.
 
-  This function may produce more than one line, because bugdroid expects one
-  project per line.
+    This function may produce more than one line, because bugdroid expects one
+    project per line.
 
-  >>> list(_get_bug_line_values('v8:', '123,chromium:789'))
-      ['v8:123', 'chromium:789']
-  """
+    >>> list(_get_bug_line_values('v8:', '123,chromium:789'))
+        ['v8:123', 'chromium:789']
+    """
     default_bugs = []
     others = []
     for bug in bugs.split(','):
@@ -3518,9 +3517,9 @@ def _get_bug_line_values(default_project_prefix, bugs):
 def FindCodereviewSettingsFile(filename='codereview.settings'):
     """Finds the given file starting in the cwd and going up.
 
-  Only looks up to the top of the repository unless an
-  'inherit-review-settings-ok' file exists in the root of the repository.
-  """
+    Only looks up to the top of the repository unless an
+    'inherit-review-settings-ok' file exists in the root of the repository.
+    """
     inherit_ok_file = 'inherit-review-settings-ok'
     cwd = os.getcwd()
     root = settings.GetRoot()
@@ -3595,8 +3594,8 @@ def LoadCodereviewSettingsFromFile(fileobj):
 def urlretrieve(source, destination):
     """Downloads a network object to a local file, like urllib.urlretrieve.
 
-  This is necessary because urllib is broken for SSL connections via a proxy.
-  """
+    This is necessary because urllib is broken for SSL connections via a proxy.
+    """
     with open(destination, 'wb') as f:
         f.write(urllib.request.urlopen(source).read())
 
@@ -3610,9 +3609,9 @@ def hasSheBang(fname):
 def DownloadGerritHook(force):
     """Downloads and installs a Gerrit commit-msg hook.
 
-  Args:
-    force: True to update hooks. False to install hooks if not present.
-  """
+    Args:
+        force: True to update hooks. False to install hooks if not present.
+    """
     src = 'https://gerrit-review.googlesource.com/tools/hooks/commit-msg'
     dst = os.path.join(settings.GetRoot(), '.git', 'hooks', 'commit-msg')
     if not os.access(dst, os.X_OK):
@@ -3645,7 +3644,7 @@ class _GitCookiesChecker(object):
 
     def ensure_configured_gitcookies(self):
         """Runs checks and suggests fixes to make git use .gitcookies from default
-    path."""
+        path."""
         default = gerrit_util.CookiesAuthenticator.get_gitcookies_path()
         configured_path = RunGitSilent(
             ['config', '--global', 'http.cookiefile']).strip()
@@ -3746,8 +3745,8 @@ class _GitCookiesChecker(object):
     def has_generic_host(self):
         """Returns whether generic .googlesource.com has been configured.
 
-    Chrome Infra recommends to use explicit ${host}.googlesource.com instead.
-    """
+        Chrome Infra recommends using an explicit ${host}.googlesource.com instead.
+        """
         for host, _, _ in self.get_hosts_with_creds(include_netrc=False):
             if host == '.' + _GOOGLESOURCE:
                 return True
@@ -3756,8 +3755,8 @@ class _GitCookiesChecker(object):
     def _get_git_gerrit_identity_pairs(self):
         """Returns map from canonic host to pair of identities (Git, Gerrit).
 
-    One of identities might be None, meaning not configured.
-    """
+        One of identities might be None, meaning not configured.
+        """
         host_to_identity_pairs = {}
         for host, identity, _ in self.get_hosts_with_creds():
             canonical = _canonical_git_googlesource_host(host)
@@ -3924,15 +3923,15 @@ def color_for_status(status):
 def get_cl_statuses(changes, fine_grained, max_processes=None):
     """Returns a blocking iterable of (cl, status) for given branches.
 
-  If fine_grained is true, this will fetch CL statuses from the server.
-  Otherwise, simply indicate if there's a matching url for the given branches.
+    If fine_grained is true, this will fetch CL statuses from the server.
+    Otherwise, simply indicate if there's a matching url for the given branches.
 
-  If max_processes is specified, it is used as the maximum number of processes
-  to spawn to fetch CL status from the server. Otherwise 1 process per branch is
-  spawned.
+    If max_processes is specified, it is used as the maximum number of processes
+    to spawn to fetch CL status from the server. Otherwise 1 process per branch is
+    spawned.
 
-  See GetStatus() for a list of possible statuses.
-  """
+    See GetStatus() for a list of possible statuses.
+    """
     if not changes:
         return
 
@@ -3986,23 +3985,23 @@ def get_cl_statuses(changes, fine_grained, max_processes=None):
 def upload_branch_deps(cl, args, force=False):
     """Uploads CLs of local branches that are dependents of the current branch.
 
-  If the local branch dependency tree looks like:
+    If the local branch dependency tree looks like:
 
-    test1 -> test2.1 -> test3.1
-                     -> test3.2
-          -> test2.2 -> test3.3
+        test1 -> test2.1 -> test3.1
+                         -> test3.2
+              -> test2.2 -> test3.3
 
-  and you run "git cl upload --dependencies" from test1 then "git cl upload" is
-  run on the dependent branches in this order:
-  test2.1, test3.1, test3.2, test2.2, test3.3
+    and you run "git cl upload --dependencies" from test1, then "git cl upload" is
+    run on the dependent branches in this order:
+    test2.1, test3.1, test3.2, test2.2, test3.3
 
-  Note: This function does not rebase your local dependent branches. Use it
+    Note: This function does not rebase your local dependent branches. Use it
         when you make a change to the parent branch that will not conflict
         with its dependent branches, and you would like their dependencies
         updated in Gerrit.
         If the new stacked change flow is used, and ancestor diverged, upload
         will fail. To recover, `git rebase-update [-n]` must be executed.
-  """
+    """
     if git_common.is_dirty_git_tree('upload-branch-deps'):
         return 1
 
@@ -4086,8 +4085,8 @@ def upload_branch_deps(cl, args, force=False):
 
 def GetArchiveTagForBranch(issue_num, branch_name, existing_tags, pattern):
     """Given a proposed tag name, returns a tag name that is guaranteed to be
-  unique. If 'foo' is proposed but already exists, then 'foo-2' is used,
-  or 'foo-3', and so on."""
+    unique. If 'foo' is proposed but already exists, then 'foo-2' is used,
+    or 'foo-3', and so on."""
 
     proposed_tag = pattern.format(**{'issue': issue_num, 'branch': branch_name})
     for suffix_num in itertools.count(1):
@@ -4205,17 +4204,17 @@ def CMDarchive(parser, args):
 def CMDstatus(parser, args):
     """Show status of changelists.
 
-  Colors are used to tell the state of the CL unless --fast is used:
-    - Blue     waiting for review
-    - Yellow   waiting for you to reply to review, or not yet sent
-    - Green    LGTM'ed
-    - Red      'not LGTM'ed
-    - Magenta  in the CQ
-    - Cyan     was committed, branch can be deleted
-    - White    error, or unknown status
-
-  Also see 'git cl comments'.
-  """
+    Colors are used to tell the state of the CL unless --fast is used:
+        - Blue     waiting for review
+        - Yellow   waiting for you to reply to review, or not yet sent
+        - Green    LGTM'ed
+        - Red      'not LGTM'ed
+        - Magenta  in the CQ
+        - Cyan     was committed, branch can be deleted
+        - White    error, or unknown status
+
+    Also see 'git cl comments'.
+    """
     parser.add_option('--no-branch-color',
                       action='store_true',
                       help='Disable colorized branch names')
@@ -4290,7 +4289,7 @@ def CMDstatus(parser, args):
 
     def FormatBranchName(branch, colorize=False):
         """Simulates 'git branch' behavior. Colorizes and prefixes branch name with
-    an asterisk when it is the current branch."""
+        an asterisk when it is the current branch."""
 
         asterisk = ""
         color = Fore.RESET
@@ -4387,8 +4386,8 @@ def write_json(path, contents):
 def CMDissue(parser, args):
     """Sets or displays the current code review issue number.
 
-  Pass issue number 0 to clear the current issue.
-  """
+    Pass issue number 0 to clear the current issue.
+    """
     parser.add_option('-r',
                       '--reverse',
                       action='store_true',
@@ -4730,13 +4729,13 @@ def CMDpresubmit(parser, args):
 def GenerateGerritChangeId(message):
     """Returns the Change ID footer value (Ixxxxxx...xxx).
 
-  Works the same way as
-  https://gerrit-review.googlesource.com/tools/hooks/commit-msg
-  but can be called on demand on all platforms.
+    Works the same way as
+    https://gerrit-review.googlesource.com/tools/hooks/commit-msg
+    but can be called on demand on all platforms.
 
-  The basic idea is to generate git hash of a state of the tree, original
-  commit message, author/committer info and timestamps.
-  """
+    The basic idea is to generate git hash of a state of the tree, original
+    commit message, author/committer info and timestamps.
+    """
     lines = []
     tree_hash = RunGitSilent(['write-tree'])
     lines.append('tree %s' % tree_hash.strip())
@@ -4761,11 +4760,11 @@ def GenerateGerritChangeId(message):
 def GetTargetRef(remote, remote_branch, target_branch):
     """Computes the remote branch ref to use for the CL.
 
-  Args:
-    remote (str): The git remote for the CL.
-    remote_branch (str): The git remote branch for the CL.
-    target_branch (str): The target branch specified by the user.
-  """
+    Args:
+        remote (str): The git remote for the CL.
+        remote_branch (str): The git remote branch for the CL.
+        target_branch (str): The target branch specified by the user.
+    """
     if not (remote and remote_branch):
         return None
 
@@ -4818,9 +4817,9 @@ def GetTargetRef(remote, remote_branch, target_branch):
 def cleanup_list(l):
     """Fixes a list so that comma separated items are put as individual items.
 
-  So that "--reviewers joe@c,john@c --reviewers joa@c" results in
-  options.reviewers == sorted(['joe@c', 'john@c', 'joa@c']).
-  """
+    So that "--reviewers joe@c,john@c --reviewers joa@c" results in
+    options.reviewers == sorted(['joe@c', 'john@c', 'joa@c']).
+    """
     items = sum((i.split(',') for i in l), [])
     stripped_items = (i.strip() for i in items)
     return sorted(filter(None, stripped_items))
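The implementation is fully shown above; a quick check of the documented behavior:

    assert cleanup_list(['joe@c,john@c', 'joa@c']) == ['joa@c', 'joe@c', 'john@c']
    # Empty fragments from stray commas are dropped by filter(None, ...).
    assert cleanup_list(['a@c,,b@c']) == ['a@c', 'b@c']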
@@ -4831,22 +4830,22 @@ def cleanup_list(l):
 def CMDupload(parser, args):
     """Uploads the current changelist to codereview.
 
-  Can skip dependency patchset uploads for a branch by running:
-    git config branch.branch_name.skip-deps-uploads True
-  To unset, run:
-    git config --unset branch.branch_name.skip-deps-uploads
-  Can also set the above globally by using the --global flag.
-
-  If the name of the checked out branch starts with "bug-" or "fix-" followed
-  by a bug number, this bug number is automatically populated in the CL
-  description.
-
-  If subject contains text in square brackets or has "<text>: " prefix, such
-  text(s) is treated as Gerrit hashtags. For example, CLs with subjects:
-    [git-cl] add support for hashtags
-    Foo bar: implement foo
-  will be hashtagged with "git-cl" and "foo-bar" respectively.
-  """
+    Can skip dependency patchset uploads for a branch by running:
+        git config branch.branch_name.skip-deps-uploads True
+    To unset, run:
+        git config --unset branch.branch_name.skip-deps-uploads
+    Can also set the above globally by using the --global flag.
+
+    If the name of the checked out branch starts with "bug-" or "fix-" followed
+    by a bug number, this bug number is automatically populated in the CL
+    description.
+
+    If the subject contains text in square brackets or has a "<text>: " prefix,
+    such text is treated as a Gerrit hashtag. For example, CLs with subjects:
+        [git-cl] add support for hashtags
+        Foo bar: implement foo
+    will be hashtagged with "git-cl" and "foo-bar" respectively.
+    """
     parser.add_option('--bypass-hooks',
                       action='store_true',
                       dest='bypass_hooks',
@@ -5249,10 +5248,10 @@ def _UploadAllPrecheck(options, orig_args):
     # bool]
     """Checks the state of the tree and gives the user uploading options
 
-  Returns: A tuple of the ordered list of changes that have new commits
-      since their last upload and a boolean of whether the user wants to
-      cherry-pick and upload the current branch instead of uploading all cls.
-  """
+    Returns: A tuple of the ordered list of changes that have new commits
+        since their last upload and a boolean of whether the user wants to
+        cherry-pick and upload the current branch instead of uploading all cls.
+    """
     cl = Changelist()
     if cl.GetBranch() is None:
         DieWithError('Can\'t upload from detached HEAD state. Get on a branch!')
@@ -5378,11 +5377,11 @@ def _UploadAllPrecheck(options, orig_args):
 def CMDsplit(parser, args):
     """Splits a branch into smaller branches and uploads CLs.
 
-  Creates a branch and uploads a CL for each group of files modified in the
-  current branch that share a common OWNERS file. In the CL description and
-  comment, the string '$directory', is replaced with the directory containing
-  the shared OWNERS file.
-  """
+    Creates a branch and uploads a CL for each group of files modified in the
+    current branch that share a common OWNERS file. In the CL description and
+    comment, the string '$directory' is replaced with the directory containing
+    the shared OWNERS file.
+    """
     parser.add_option('-d',
                       '--description',
                       dest='description_file',
@@ -5466,9 +5465,9 @@ def CMDdcommit(parser, args):
 def CMDland(parser, args):
     """Commits the current changelist via git.
 
-  In case of Gerrit, uses Gerrit REST api to "submit" the issue, which pushes
-  upstream and closes the issue automatically and atomically.
-  """
+    In case of Gerrit, uses Gerrit REST api to "submit" the issue, which pushes
+    upstream and closes the issue automatically and atomically.
+    """
     parser.add_option('--bypass-hooks',
                       action='store_true',
                       dest='bypass_hooks',
@@ -5591,7 +5590,7 @@ def CMDpatch(parser, args):
 
 def GetTreeStatus(url=None):
     """Fetches the tree status and returns either 'open', 'closed',
-  'unknown' or 'unset'."""
+    'unknown' or 'unset'."""
     url = url or settings.GetTreeStatusUrl(error_ok=True)
     if url:
         status = str(urllib.request.urlopen(url).read().lower())
@@ -5607,7 +5606,7 @@ def GetTreeStatus(url=None):
 
 def GetTreeStatusReason():
     """Fetches the tree status from a json url and returns the message
-  with the reason for the tree to be opened or closed."""
+    with the reason for the tree to be opened or closed."""
     url = settings.GetTreeStatusUrl()
     json_url = urllib.parse.urljoin(url, '/current?format=json')
     connection = urllib.request.urlopen(json_url)
@@ -6198,7 +6197,7 @@ def _RunGoogleJavaFormat(opts, paths, top_dir, upstream_commit):
 
 def _RunRustFmt(opts, rust_diff_files, top_dir, upstream_commit):
     """Runs rustfmt.  Just like _RunClangFormatDiff returns 2 to indicate that
-  presubmit checks have failed (and returns 0 otherwise)."""
+    presubmit checks have failed (and returns 0 otherwise)."""
     # Locate the rustfmt binary.
     try:
         rustfmt_tool = rustfmt.FindRustfmtToolInChromiumTree()
@@ -6221,7 +6220,7 @@ def _RunRustFmt(opts, rust_diff_files, top_dir, upstream_commit):
 
 def _RunSwiftFormat(opts, swift_diff_files, top_dir, upstream_commit):
     """Runs swift-format.  Just like _RunClangFormatDiff returns 2 to indicate
-  that presubmit checks have failed (and returns 0 otherwise)."""
+    that presubmit checks have failed (and returns 0 otherwise)."""
     if sys.platform != 'darwin':
         DieWithError('swift-format is only supported on macOS.')
     # Locate the swift-format binary.

+ 142 - 139
git_common.py

@@ -137,19 +137,21 @@ class BadCommitRefException(Exception):
 def memoize_one(**kwargs):
     """Memoizes a single-argument pure function.
 
-  Values of None are not cached.
-
-  Kwargs:
-    threadsafe (bool) - REQUIRED. Specifies whether to use locking around
-      cache manipulation functions. This is a kwarg so that users of memoize_one
-      are forced to explicitly and verbosely pick True or False.
-
-  Adds three methods to the decorated function:
-    * get(key, default=None) - Gets the value for this key from the cache.
-    * set(key, value) - Sets the value for this key from the cache.
-    * clear() - Drops the entire contents of the cache.  Useful for unittests.
-    * update(other) - Updates the contents of the cache from another dict.
-  """
+    Values of None are not cached.
+
+    Kwargs:
+        threadsafe (bool) - REQUIRED. Specifies whether to use locking around
+            cache manipulation functions. This is a kwarg so that users of
+            memoize_one are forced to explicitly and verbosely pick True or
+            False.
+
+    Adds four methods to the decorated function:
+        * get(key, default=None) - Gets the value for this key from the cache.
+        * set(key, value) - Sets the value for this key in the cache.
+        * clear() - Drops the entire contents of the cache.  Useful for
+            unittests.
+        * update(other) - Updates the contents of the cache from another dict.
+    """
     assert 'threadsafe' in kwargs, 'Must specify threadsafe={True,False}'
     threadsafe = kwargs['threadsafe']
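
A minimal sketch of the contract this docstring describes, assuming
git_common is importable and the decorated function is pure, takes a single
argument, and never returns None for values worth caching:

    import git_common as git

    @git.memoize_one(threadsafe=True)
    def first_parent(ref):
        # Computed once per distinct ref, then served from the cache.
        return git.run('rev-parse', '%s~1' % ref)

    first_parent('HEAD')   # runs git
    first_parent('HEAD')   # cache hit
    first_parent.clear()   # drop the cache, e.g. between unittests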
 
@@ -203,8 +205,8 @@ def memoize_one(**kwargs):
 def _ScopedPool_initer(orig, orig_args):  # pragma: no cover
     """Initializer method for ScopedPool's subprocesses.
 
-  This helps ScopedPool handle Ctrl-C's correctly.
-  """
+    This helps ScopedPool handle Ctrl-C's correctly.
+    """
     signal.signal(signal.SIGINT, signal.SIG_IGN)
     if orig:
         orig(*orig_args)
@@ -213,14 +215,14 @@ def _ScopedPool_initer(orig, orig_args):  # pragma: no cover
 @contextlib.contextmanager
 def ScopedPool(*args, **kwargs):
     """Context Manager which returns a multiprocessing.pool instance which
-  correctly deals with thrown exceptions.
+    correctly deals with thrown exceptions.
 
-  *args - Arguments to multiprocessing.pool
+    *args - Arguments to multiprocessing.pool
 
-  Kwargs:
-    kind ('threads', 'procs') - The type of underlying coprocess to use.
-    **etc - Arguments to multiprocessing.pool
-  """
+    Kwargs:
+        kind ('threads', 'procs') - The type of underlying coprocess to use.
+        **etc - Arguments to multiprocessing.pool
+    """
     if kwargs.pop('kind', None) == 'threads':
         pool = multiprocessing.pool.ThreadPool(*args, **kwargs)
     else:
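
A short usage sketch; nothing is assumed beyond the docstring above (the
yielded object behaves like a multiprocessing pool):

    from git_common import ScopedPool

    with ScopedPool(kind='threads') as pool:
        # imap_unordered yields results as workers finish; the pool is
        # cleaned up even if the body raises.
        for n in pool.imap_unordered(len, ['a', 'bc', 'def']):
            print(n)
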
@@ -244,23 +246,23 @@ class ProgressPrinter(object):
     def __init__(self, fmt, enabled=None, fout=sys.stderr, period=0.5):
         """Create a ProgressPrinter.
 
-    Use it as a context manager which produces a simple 'increment' method:
-
-      with ProgressPrinter('(%%(count)d/%d)' % 1000) as inc:
-        for i in xrange(1000):
-          # do stuff
-          if i % 10 == 0:
-            inc(10)
-
-    Args:
-      fmt - String format with a single '%(count)d' where the counter value
-        should go.
-      enabled (bool) - If this is None, will default to True if
-        logging.getLogger() is set to INFO or more verbose.
-      fout (file-like) - The stream to print status messages to.
-      period (float) - The time in seconds for the printer thread to wait
-        between printing.
-    """
+        Use it as a context manager producing a simple 'increment' method:
+
+        with ProgressPrinter('(%%(count)d/%d)' % 1000) as inc:
+            for i in range(1000):
+                # do stuff
+                if i % 10 == 0:
+                    inc(10)
+
+        Args:
+            fmt - String format with a single '%(count)d' where the counter
+                value should go.
+            enabled (bool) - If this is None, will default to True if
+                logging.getLogger() is set to INFO or more verbose.
+            fout (file-like) - The stream to print status messages to.
+            period (float) - The time in seconds for the printer thread to
+                wait between printing.
+        """
         self.fmt = fmt
         if enabled is None:  # pragma: no cover
             self.enabled = logging.getLogger().isEnabledFor(logging.INFO)
@@ -303,7 +305,7 @@ class ProgressPrinter(object):
 
 def once(function):
     """@Decorates |function| so that it only performs its action once, no matter
-  how many times the decorated |function| is called."""
+    how many times the decorated |function| is called."""
     has_run = [False]
 
     def _wrapper(*args, **kwargs):
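
A hypothetical illustration of the decorated behavior:

    from git_common import once

    @once
    def warn_deprecated():
        print('this tool is deprecated')

    warn_deprecated()  # prints
    warn_deprecated()  # silently does nothing
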
@@ -556,9 +558,9 @@ def freeze():
 def get_branch_tree(use_limit=False):
     """Get the dictionary of {branch: parent}, compatible with topo_iter.
 
-  Returns a tuple of (skipped, <branch_tree dict>) where skipped is a set of
-  branches without upstream branches defined.
-  """
+    Returns a tuple of (skipped, <branch_tree dict>) where skipped is a set of
+    branches without upstream branches defined.
+    """
     skipped = set()
     branch_tree = {}
 
@@ -575,8 +577,8 @@ def get_branch_tree(use_limit=False):
 def get_or_create_merge_base(branch, parent=None):
     """Finds the configured merge base for branch.
 
-  If parent is supplied, it's used instead of calling upstream(branch).
-  """
+    If parent is supplied, it's used instead of calling upstream(branch).
+    """
     base = branch_config(branch, 'base')
     base_upstream = branch_config(branch, 'base-upstream')
     parent = parent or upstream(branch)
@@ -630,12 +632,12 @@ def in_rebase():
 def intern_f(f, kind='blob'):
     """Interns a file object into the git object store.
 
-  Args:
-    f (file-like object) - The file-like object to intern
-    kind (git object type) - One of 'blob', 'commit', 'tree', 'tag'.
+    Args:
+        f (file-like object) - The file-like object to intern
+        kind (git object type) - One of 'blob', 'commit', 'tree', 'tag'.
 
-  Returns the git hash of the interned object (hex encoded).
-  """
+    Returns the git hash of the interned object (hex encoded).
+    """
     ret = run('hash-object', '-t', kind, '-w', '--stdin', stdin=f)
     f.close()
     return ret
@@ -660,11 +662,11 @@ def manual_merge_base(branch, base, parent):
 def mktree(treedict):
     """Makes a git tree object and returns its hash.
 
-  See |tree()| for the values of mode, type, and ref.
+    See |tree()| for the values of mode, type, and ref.
 
-  Args:
-    treedict - { name: (mode, type, ref) }
-  """
+    Args:
+        treedict - { name: (mode, type, ref) }
+    """
     with tempfile.TemporaryFile() as f:
         for name, (mode, typ, ref) in treedict.items():
             f.write(('%s %s %s\t%s\0' % (mode, typ, ref, name)).encode('utf-8'))
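
A sketch combining intern_f and mktree; the '100644' mode string follows the
entry format documented for tree() further down:

    import tempfile
    from git_common import intern_f, mktree

    with tempfile.TemporaryFile() as f:
        f.write(b'hello\n')
        f.seek(0)
        blob = intern_f(f)  # hex hash of the interned blob

    # A tree with one regular, non-executable file named hello.txt.
    tree_hash = mktree({'hello.txt': ('100644', 'blob', blob)})
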
@@ -675,11 +677,11 @@ def mktree(treedict):
 def parse_commitrefs(*commitrefs):
     """Returns binary encoded commit hashes for one or more commitrefs.
 
-  A commitref is anything which can resolve to a commit. Popular examples:
-    * 'HEAD'
-    * 'origin/main'
-    * 'cool_branch~2'
-  """
+    A commitref is anything which can resolve to a commit. Popular examples:
+        * 'HEAD'
+        * 'origin/main'
+        * 'cool_branch~2'
+    """
     try:
         return [binascii.unhexlify(h) for h in hash_multi(*commitrefs)]
     except subprocess2.CalledProcessError:
@@ -692,26 +694,26 @@ RebaseRet = collections.namedtuple('RebaseRet', 'success stdout stderr')
 def rebase(parent, start, branch, abort=False, allow_gc=False):
     """Rebases |start|..|branch| onto the branch |parent|.
 
-  Sets 'gc.auto=0' for the duration of this call to prevent the rebase from
-  running a potentially slow garbage collection cycle.
-
-  Args:
-    parent - The new parent ref for the rebased commits.
-    start  - The commit to start from
-    branch - The branch to rebase
-    abort  - If True, will call git-rebase --abort in the event that the rebase
-             doesn't complete successfully.
-    allow_gc - If True, sets "-c gc.auto=1" on the rebase call, rather than
-               "-c gc.auto=0". Usually if you're doing a series of rebases,
-               you'll only want to run a single gc pass at the end of all the
-               rebase activity.
-
-  Returns a namedtuple with fields:
-    success - a boolean indicating that the rebase command completed
-              successfully.
-    message - if the rebase failed, this contains the stdout of the failed
-              rebase.
-  """
+    Sets 'gc.auto=0' for the duration of this call to prevent the rebase from
+    running a potentially slow garbage collection cycle.
+
+    Args:
+        parent - The new parent ref for the rebased commits.
+        start  - The commit to start from
+        branch - The branch to rebase
+        abort  - If True, will call git-rebase --abort in the event that the
+            rebase doesn't complete successfully.
+        allow_gc - If True, sets "-c gc.auto=1" on the rebase call, rather than
+            "-c gc.auto=0". Usually if you're doing a series of rebases,
+            you'll only want to run a single gc pass at the end of all the
+            rebase activity.
+
+    Returns a namedtuple with fields:
+        success - a boolean indicating that the rebase command completed
+            successfully.
+        message - if the rebase failed, this contains the stdout of the failed
+            rebase.
+    """
     try:
         args = [
             '-c',
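
A hedged example of driving rebase() and inspecting the returned RebaseRet
(whose actual fields are success/stdout/stderr); the branch names are made
up:

    from git_common import get_or_create_merge_base, rebase

    base = get_or_create_merge_base('my-feature')
    ret = rebase('origin/main', base, 'my-feature', abort=True)
    if not ret.success:
        # abort=True already restored the branch; report why it stopped.
        print(ret.stdout)
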
@@ -770,11 +772,11 @@ def root():
 def less():  # pragma: no cover
     """Runs 'less' as context manager yielding its stdin as a PIPE.
 
-  Automatically checks if sys.stdout is a non-TTY stream. If so, it avoids
-  running less and just yields sys.stdout.
+    Automatically checks if sys.stdout is a non-TTY stream. If so, it avoids
+    running less and just yields sys.stdout.
 
-  The returned PIPE is opened on binary mode.
-  """
+    The returned PIPE is opened on binary mode.
+    """
     if not setup_color.IS_TTY:
         # On Python 3, sys.stdout doesn't accept bytes, and sys.stdout.buffer
         # must be used.
@@ -815,9 +817,9 @@ def run_with_retcode(*cmd, **kwargs):
 def run_stream(*cmd, **kwargs):
     """Runs a git command. Returns stdout as a PIPE (file-like object).
 
-  stderr is dropped to avoid races if the process outputs to both stdout and
-  stderr.
-  """
+    stderr is dropped to avoid races if the process outputs to both stdout and
+    stderr.
+    """
     kwargs.setdefault('stderr', subprocess2.DEVNULL)
     kwargs.setdefault('stdout', subprocess2.PIPE)
     kwargs.setdefault('shell', False)
@@ -830,11 +832,11 @@ def run_stream(*cmd, **kwargs):
 def run_stream_with_retcode(*cmd, **kwargs):
     """Runs a git command as context manager yielding stdout as a PIPE.
 
-  stderr is dropped to avoid races if the process outputs to both stdout and
-  stderr.
+    stderr is dropped to avoid races if the process outputs to both stdout and
+    stderr.
 
-  Raises subprocess2.CalledProcessError on nonzero return code.
-  """
+    Raises subprocess2.CalledProcessError on nonzero return code.
+    """
     kwargs.setdefault('stderr', subprocess2.DEVNULL)
     kwargs.setdefault('stdout', subprocess2.PIPE)
     kwargs.setdefault('shell', False)
@@ -852,12 +854,12 @@ def run_stream_with_retcode(*cmd, **kwargs):
 def run_with_stderr(*cmd, **kwargs):
     """Runs a git command.
 
-  Returns (stdout, stderr) as a pair of strings.
+    Returns (stdout, stderr) as a pair of strings.
 
-  kwargs
-    autostrip (bool) - Strip the output. Defaults to True.
-    indata (str) - Specifies stdin data for the process.
-  """
+    kwargs
+        autostrip (bool) - Strip the output. Defaults to True.
+        indata (str) - Specifies stdin data for the process.
+    """
     kwargs.setdefault('stdin', subprocess2.PIPE)
     kwargs.setdefault('stdout', subprocess2.PIPE)
     kwargs.setdefault('stderr', subprocess2.PIPE)
@@ -920,17 +922,17 @@ def is_dirty_git_tree(cmd):
 def status(ignore_submodules=None):
     """Returns a parsed version of git-status.
 
-  Args:
-   ignore_submodules (str|None): "all", "none", or None.
-                                 None is equivalent to "none".
-
-  Returns a generator of (current_name, (lstat, rstat, src)) pairs where:
-    * current_name is the name of the file
-    * lstat is the left status code letter from git-status
-    * rstat is the right status code letter from git-status
-    * src is the current name of the file, or the original name of the file
-      if lstat == 'R'
-  """
+    Args:
+        ignore_submodules (str|None): "all", "none", or None.
+            None is equivalent to "none".
+
+    Returns a generator of (current_name, (lstat, rstat, src)) pairs where:
+        * current_name is the name of the file
+        * lstat is the left status code letter from git-status
+        * rstat is the right status code letter from git-status
+        * src is the current name of the file, or the original name of the file
+            if lstat == 'R'
+    """
 
     ignore_submodules = ignore_submodules or 'none'
     assert ignore_submodules in (
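
Consuming the generator is straightforward; a sketch that reports renames,
per the lstat == 'R' convention above:

    from git_common import status

    for current_name, (lstat, rstat, src) in status():
        if lstat == 'R':
            print('%s was renamed from %s' % (current_name, src))
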
@@ -1024,27 +1026,27 @@ def thaw():
 def topo_iter(branch_tree, top_down=True):
     """Generates (branch, parent) in topographical order for a branch tree.
 
-  Given a tree:
+    Given a tree:
 
-            A1
-        B1      B2
-      C1  C2    C3
-                D1
+                A1
+            B1      B2
+        C1  C2      C3
+                    D1
 
-  branch_tree would look like: {
-    'D1': 'C3',
-    'C3': 'B2',
-    'B2': 'A1',
-    'C1': 'B1',
-    'C2': 'B1',
-    'B1': 'A1',
-  }
+    branch_tree would look like: {
+        'D1': 'C3',
+        'C3': 'B2',
+        'B2': 'A1',
+        'C1': 'B1',
+        'C2': 'B1',
+        'B1': 'A1',
+    }
 
-  It is OK to have multiple 'root' nodes in your graph.
+    It is OK to have multiple 'root' nodes in your graph.
 
-  if top_down is True, items are yielded from A->D. Otherwise they're yielded
-  from D->A. Within a layer the branches will be yielded in sorted order.
-  """
+    If top_down is True, items are yielded from A->D. Otherwise they're yielded
+    from D->A. Within a layer the branches will be yielded in sorted order.
+    """
     branch_tree = branch_tree.copy()
 
     # TODO(iannucci): There is probably a more efficient way to do these.
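
Feeding topo_iter the branch_tree from its own docstring makes the ordering
concrete:

    from git_common import topo_iter

    branch_tree = {
        'D1': 'C3', 'C3': 'B2', 'B2': 'A1',
        'C1': 'B1', 'C2': 'B1', 'B1': 'A1',
    }
    for branch, parent in topo_iter(branch_tree):
        print(branch, '->', parent)
    # Top-down, the B layer precedes the C layer, which precedes D1;
    # pass top_down=False to walk from the leaves upward.
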
@@ -1074,26 +1076,27 @@ def topo_iter(branch_tree, top_down=True):
 def tree(treeref, recurse=False):
     """Returns a dict representation of a git tree object.
 
-  Args:
-    treeref (str) - a git ref which resolves to a tree (commits count as trees).
-    recurse (bool) - include all of the tree's descendants too. File names will
-      take the form of 'some/path/to/file'.
+    Args:
+        treeref (str) - a git ref which resolves to a tree (commits count as
+            trees).
+        recurse (bool) - include all of the tree's descendants too. File names
+            will take the form of 'some/path/to/file'.
 
-  Return format:
-    { 'file_name': (mode, type, ref) }
+    Return format:
+        { 'file_name': (mode, type, ref) }
 
-    mode is an integer where:
-      * 0040000 - Directory
-      * 0100644 - Regular non-executable file
-      * 0100664 - Regular non-executable group-writeable file
-      * 0100755 - Regular executable file
-      * 0120000 - Symbolic link
-      * 0160000 - Gitlink
+        mode is an integer where:
+        * 0040000 - Directory
+        * 0100644 - Regular non-executable file
+        * 0100664 - Regular non-executable group-writeable file
+        * 0100755 - Regular executable file
+        * 0120000 - Symbolic link
+        * 0160000 - Gitlink
 
-    type is a string where it's one of 'blob', 'commit', 'tree', 'tag'.
+        type is a string where it's one of 'blob', 'commit', 'tree', 'tag'.
 
-    ref is the hex encoded hash of the entry.
-  """
+        ref is the hex encoded hash of the entry.
+    """
     ret = {}
     opts = ['ls-tree', '--full-tree']
     if recurse:
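
A sketch that lists every blob reachable from HEAD, relying only on the
return format documented above:

    from git_common import tree

    for path, (mode, typ, ref) in tree('HEAD', recurse=True).items():
        if typ == 'blob':
            print(path, ref)
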
@@ -1125,7 +1128,7 @@ def upstream(branch):
 
 def get_git_version():
     """Returns a tuple that contains the numeric components of the current git
-  version."""
+    version."""
     version_string = run('--version')
     return _extract_git_tuple(version_string)
 

+ 8 - 8
git_dates.py

@@ -9,16 +9,16 @@ import datetime
 def timestamp_offset_to_datetime(timestamp, offset):
     """Converts a timestamp + offset into a datetime.datetime.
 
-  Useful for dealing with the output of porcelain commands, which provide times
-  as timestamp and offset strings.
+    Useful for dealing with the output of porcelain commands, which provide
+    times as timestamp and offset strings.
 
-  Args:
-    timestamp: An int UTC timestamp, or a string containing decimal digits.
-    offset: A str timezone offset. e.g., '-0800'.
+    Args:
+        timestamp: An int UTC timestamp, or a string containing decimal digits.
+        offset: A str timezone offset. e.g., '-0800'.
 
-  Returns:
-    A tz-aware datetime.datetime for this timestamp.
-  """
+    Returns:
+        A tz-aware datetime.datetime for this timestamp.
+    """
     timestamp = int(timestamp)
     tz = FixedOffsetTZ.from_offset_string(offset)
     return datetime.datetime.fromtimestamp(timestamp, tz)
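
A quick illustration with made-up porcelain values:

    import git_dates

    dt = git_dates.timestamp_offset_to_datetime('1700000000', '-0800')
    print(dt.isoformat())  # tz-aware, rendered in the -08:00 zone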

+ 32 - 30
git_footers.py

@@ -44,8 +44,8 @@ def parse_footers(message):
 def matches_footer_key(line, key):
     """Returns whether line is a valid footer whose key matches a given one.
 
-  Keys are compared in normalized form.
-  """
+    Keys are compared in normalized form.
+    """
     r = parse_footer(line)
     if r is None:
         return False
@@ -55,13 +55,14 @@ def matches_footer_key(line, key):
 def split_footers(message):
     """Returns (non_footer_lines, footer_lines, parsed footers).
 
-  Guarantees that:
-    (non_footer_lines + footer_lines) ~= message.splitlines(), with at
-      most one new newline, if the last paragraph is text followed by footers.
-    parsed_footers is parse_footer applied on each line of footer_lines.
-      There could be fewer parsed_footers than footer lines if some lines in
-      last paragraph are malformed.
-  """
+    Guarantees that:
+        (non_footer_lines + footer_lines) ~= message.splitlines(), with at
+            most one added newline, if the last paragraph is text followed by
+            footers.
+        parsed_footers is parse_footer applied on each line of footer_lines.
+            There could be fewer parsed_footers than footer lines if some lines
+            in the last paragraph are malformed.
+    """
     message_lines = list(message.rstrip().splitlines())
     footer_lines = []
     maybe_footer_lines = []
@@ -102,10 +103,10 @@ def get_footer_change_id(message):
 def add_footer_change_id(message, change_id):
     """Returns message with Change-ID footer in it.
 
-  Assumes that Change-Id is not yet in footers, which is then inserted at
-  earliest footer line which is after all of these footers:
-    Bug|Issue|Test|Feature.
-  """
+    Assumes that Change-Id is not yet in footers; it is then inserted at the
+    earliest footer line which is after all of these footers:
+        Bug|Issue|Test|Feature.
+    """
     assert 'Change-Id' not in parse_footers(message)
     return add_footer(message,
                       'Change-Id',
@@ -116,18 +117,19 @@ def add_footer_change_id(message, change_id):
 def add_footer(message, key, value, after_keys=None, before_keys=None):
     """Returns a message with given footer appended.
 
-  If after_keys and before_keys are both None (default), appends footer last.
-  If after_keys is provided and matches footers already present, inserts footer
-  as *early* as possible while still appearing after all provided keys, even
-  if doing so conflicts with before_keys.
-  If before_keys is provided, inserts footer as late as possible while still
-  appearing before all provided keys.
-
-  For example, given
-      message='Header.\n\nAdded: 2016\nBug: 123\nVerified-By: CQ'
-      after_keys=['Bug', 'Issue']
-  the new footer will be inserted between Bug and Verified-By existing footers.
-  """
+    If after_keys and before_keys are both None (default), appends footer last.
+    If after_keys is provided and matches footers already present, inserts
+    footer as *early* as possible while still appearing after all provided
+    keys, even if doing so conflicts with before_keys.
+    If before_keys is provided, inserts footer as late as possible while still
+    appearing before all provided keys.
+
+    For example, given
+        message='Header.\n\nAdded: 2016\nBug: 123\nVerified-By: CQ'
+        after_keys=['Bug', 'Issue']
+    the new footer will be inserted between Bug and Verified-By existing
+    footers.
+    """
     assert key == normalize_name(key), 'Use normalized key'
     new_footer = '%s: %s' % (key, value)
     if not FOOTER_PATTERN.match(new_footer):
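
Replaying the docstring's own example, assuming git_footers is importable:

    import git_footers

    message = 'Header.\n\nAdded: 2016\nBug: 123\nVerified-By: CQ'
    new = git_footers.add_footer(message, 'Change-Id', 'Iabc123',
                                 after_keys=['Bug', 'Issue'])
    # 'Change-Id: Iabc123' now sits between Bug and Verified-By.
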
@@ -192,13 +194,13 @@ def get_unique(footers, key):
 def get_position(footers):
     """Get the commit position from the footers multimap using a heuristic.
 
-  Returns:
-    A tuple of the branch and the position on that branch. For example,
+    Returns:
+        A tuple of the branch and the position on that branch. For example,
 
-    Cr-Commit-Position: refs/heads/main@{#292272}
+        Cr-Commit-Position: refs/heads/main@{#292272}
 
-    would give the return value ('refs/heads/main', 292272).
-  """
+        would give the return value ('refs/heads/main', 292272).
+    """
 
     position = get_unique(footers, 'Cr-Commit-Position')
     if position:

+ 19 - 18
git_hyper_blame.py

@@ -91,10 +91,10 @@ def parse_blame(blameoutput):
 def print_table(outbuf, table, align):
     """Print a 2D rectangular array, aligning columns with spaces.
 
-  Args:
-    align: string of 'l' and 'r', designating whether each column is left- or
-           right-aligned.
-  """
+    Args:
+        align: string of 'l' and 'r', designating whether each column is
+            left- or right-aligned.
+    """
     if len(table) == 0:
         return
 
@@ -189,20 +189,21 @@ def approx_lineno_across_revs(filename, newfilename, revision, newrevision,
                               lineno):
     """Computes the approximate movement of a line number between two revisions.
 
-  Consider line |lineno| in |filename| at |revision|. This function computes the
-  line number of that line in |newfilename| at |newrevision|. This is
-  necessarily approximate.
-
-  Args:
-    filename: The file (within the repo) at |revision|.
-    newfilename: The name of the same file at |newrevision|.
-    revision: A git revision.
-    newrevision: Another git revision. Note: Can be ahead or behind |revision|.
-    lineno: Line number within |filename| at |revision|.
-
-  Returns:
-    Line number within |newfilename| at |newrevision|.
-  """
+    Consider line |lineno| in |filename| at |revision|. This function computes
+    the line number of that line in |newfilename| at |newrevision|. This is
+    necessarily approximate.
+
+    Args:
+        filename: The file (within the repo) at |revision|.
+        newfilename: The name of the same file at |newrevision|.
+        revision: A git revision.
+        newrevision: Another git revision. Note: Can be ahead or behind
+            |revision|.
+        lineno: Line number within |filename| at |revision|.
+
+    Returns:
+        Line number within |newfilename| at |newrevision|.
+    """
     # This doesn't work that well if there are a lot of line changes within the
     # hunk (demonstrated by
     # GitHyperBlameLineMotionTest.testIntraHunkLineMotion). A fuzzy heuristic

+ 8 - 7
git_map_branches.py

@@ -77,7 +77,7 @@ class OutputManager(object):
 class OutputLine(object):
     """A single line of data.
 
-  This consists of an equal number of columns, colors and separators."""
+    This consists of an equal number of columns, colors and separators."""
     def __init__(self):
         self.columns = []
         self.separators = []
@@ -90,7 +90,7 @@ class OutputLine(object):
 
     def as_padded_string(self, max_column_lengths):
         """"Returns the data as a string with each column padded to
-    |max_column_lengths|."""
+        |max_column_lengths|."""
         output_string = ''
         for i, (color, data, separator) in enumerate(
                 zip(self.colors, self.columns, self.separators)):
@@ -106,10 +106,11 @@ class OutputLine(object):
 class BranchMapper(object):
     """A class which constructs output representing the tree's branch structure.
 
-  Attributes:
-    __branches_info: a map of branches to their BranchesInfo objects which
-      consist of the branch hash, upstream and ahead/behind status.
-    __gone_branches: a set of upstreams which are not fetchable by git"""
+    Attributes:
+        __branches_info: a map of branches to their BranchesInfo objects which
+            consist of the branch hash, upstream and ahead/behind status.
+        __gone_branches: a set of upstreams which are not fetchable by git
+    """
     def __init__(self):
         self.verbosity = 0
         self.maxjobs = 0
@@ -228,7 +229,7 @@ class BranchMapper(object):
 
     def __append_branch(self, branch, output, depth=0):
         """Recurses through the tree structure and appends an OutputLine to the
-    OutputManager for each branch."""
+        OutputManager for each branch."""
         child_output = OutputManager()
         for child in sorted(self.__parent_map.pop(branch, ())):
             self.__append_branch(child, child_output, depth=depth + 1)

+ 28 - 28
git_number.py

@@ -51,24 +51,24 @@ POOL_KIND = 'procs'
 
 def pathlify(hash_prefix):
     """Converts a binary object hash prefix into a posix path, one folder per
-  byte.
+    byte.
 
-  >>> pathlify('\xDE\xAD')
-  'de/ad'
-  """
+    >>> pathlify(b'\xDE\xAD')
+    'de/ad'
+    """
     return '/'.join('%02x' % b for b in hash_prefix)
 
 
 @git.memoize_one(threadsafe=False)
 def get_number_tree(prefix_bytes):
     """Returns a dictionary of the git-number registry specified by
-  |prefix_bytes|.
+    |prefix_bytes|.
 
-  This is in the form of {<full binary ref>: <gen num> ...}
+    This is in the form of {<full binary ref>: <gen num> ...}
 
-  >>> get_number_tree('\x83\xb4')
-  {'\x83\xb4\xe3\xe4W\xf9J*\x8f/c\x16\xecD\xd1\x04\x8b\xa9qz': 169, ...}
-  """
+    >>> get_number_tree('\x83\xb4')
+    {'\x83\xb4\xe3\xe4W\xf9J*\x8f/c\x16\xecD\xd1\x04\x8b\xa9qz': 169, ...}
+    """
     ref = '%s:%s' % (REF, pathlify(prefix_bytes))
 
     try:
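
In Python 3, iterating over bytes yields ints, so pathlify expects a bytes
prefix. A sketch of the surrounding flow; hash_one is assumed from
git_common, as it is used elsewhere in depot_tools:

    import binascii
    import git_common as git
    import git_number

    target = binascii.unhexlify(git.hash_one('HEAD'))
    git_number.load_generation_numbers([target])
    git_number.finalize([target])   # persist newly computed numbers
    print(git_number.get_num(target))
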
@@ -84,9 +84,9 @@ def get_number_tree(prefix_bytes):
 def get_num(commit_hash):
     """Returns the generation number for a commit.
 
-  Returns None if the generation number for this commit hasn't been calculated
-  yet (see load_generation_numbers()).
-  """
+    Returns None if the generation number for this commit hasn't been calculated
+    yet (see load_generation_numbers()).
+    """
     return get_number_tree(commit_hash[:PREFIX_LEN]).get(commit_hash)
 
 
@@ -100,14 +100,14 @@ def clear_caches(on_disk=False):
 
 def intern_number_tree(tree):
     """Transforms a number tree (in the form returned by |get_number_tree|) into
-  a git blob.
+    a git blob.
 
-  Returns the git blob id as hex-encoded string.
+    Returns the git blob id as hex-encoded string.
 
-  >>> d = {'\x83\xb4\xe3\xe4W\xf9J*\x8f/c\x16\xecD\xd1\x04\x8b\xa9qz': 169}
-  >>> intern_number_tree(d)
-  'c552317aa95ca8c3f6aae3357a4be299fbcb25ce'
-  """
+    >>> d = {'\x83\xb4\xe3\xe4W\xf9J*\x8f/c\x16\xecD\xd1\x04\x8b\xa9qz': 169}
+    >>> intern_number_tree(d)
+    'c552317aa95ca8c3f6aae3357a4be299fbcb25ce'
+    """
     with tempfile.TemporaryFile() as f:
         for k, v in sorted(tree.items()):
             f.write(struct.pack(CHUNK_FMT, k, v))
@@ -124,11 +124,11 @@ def leaf_map_fn(pre_tree):
 def finalize(targets):
     """Saves all cache data to the git repository.
 
-  After calculating the generation number for |targets|, call finalize() to
-  save all the work to the git repository.
+    After calculating the generation number for |targets|, call finalize() to
+    save all the work to the git repository.
 
-  This in particular saves the trees referred to by DIRTY_TREES.
-  """
+    This in particular saves the trees referred to by DIRTY_TREES.
+    """
     if not DIRTY_TREES:
         return
 
@@ -197,14 +197,14 @@ def all_prefixes(depth=PREFIX_LEN):
 
 def load_generation_numbers(targets):
     """Populates the caches of get_num and get_number_tree so they contain
-  the results for |targets|.
+    the results for |targets|.
 
-  Loads cached numbers from disk, and calculates missing numbers if one or
-  more of |targets| is newer than the cached calculations.
+    Loads cached numbers from disk, and calculates missing numbers if one or
+    more of |targets| is newer than the cached calculations.
 
-  Args:
-    targets - An iterable of binary-encoded full git commit hashes.
-  """
+    Args:
+        targets - An iterable of binary-encoded full git commit hashes.
+    """
     # In case they pass us a generator, listify targets.
     targets = list(targets)
 

+ 4 - 4
git_rebase_update.py

@@ -24,11 +24,11 @@ STARTING_WORKDIR_KEY = 'depot-tools.rebase-update.starting-workdir'
 
 def find_return_branch_workdir():
     """Finds the branch and working directory which we should return to after
-  rebase-update completes.
+    rebase-update completes.
 
-  These values may persist across multiple invocations of rebase-update, if
-  rebase-update runs into a conflict mid-way.
-  """
+    These values may persist across multiple invocations of rebase-update, if
+    rebase-update runs into a conflict mid-way.
+    """
     return_branch = git.get_config(STARTING_BRANCH_KEY)
     workdir = git.get_config(STARTING_WORKDIR_KEY)
     if not return_branch:

+ 5 - 5
git_retry.py

@@ -77,12 +77,12 @@ class GitRetry(object):
     def computeDelay(self, iteration):
         """Returns: the delay (in seconds) for a given iteration
 
-    The first iteration has a delay of '0'.
+        The first iteration has a delay of '0'.
 
-    Args:
-      iteration: (int) The iteration index (starting with zero as the first
-          iteration)
-    """
+        Args:
+            iteration: (int) The iteration index (starting with zero as the
+                first iteration)
+        """
         if (not self.delay) or (iteration == 0):
             return 0
         if self.delay_factor == 0:

+ 7 - 7
lockfile.py

@@ -80,7 +80,7 @@ def _try_lock(lockfile):
 def _lock(path, timeout=0):
     """_lock returns function to release the lock if locking was successful.
 
-  _lock also implements simple retry logic."""
+    _lock also implements simple retry logic."""
     elapsed = 0
     while True:
         try:
@@ -101,13 +101,13 @@ def _lock(path, timeout=0):
 def lock(path, timeout=0):
     """Get exclusive lock to path.
 
-  Usage:
-    import lockfile
-    with lockfile.lock(path, timeout):
-      # Do something
-      pass
+    Usage:
+        import lockfile
+        with lockfile.lock(path, timeout):
+            # Do something
+            pass
 
-   """
+    """
     release_fn = _lock(path, timeout)
     try:
         yield

+ 9 - 8
metrics.py

@@ -267,9 +267,9 @@ class MetricsCollector(object):
     def collect_metrics(self, command_name):
         """A decorator used to collect metrics over the life of a function.
 
-    This decorator executes the function and collects metrics about the system
-    environment and the function performance.
-    """
+        This decorator executes the function and collects metrics about the
+        system environment and the function performance.
+        """
         def _decorator(func):
             if not self.config.should_collect_metrics:
                 return func
@@ -287,11 +287,12 @@ class MetricsCollector(object):
     def print_notice_and_exit(self):
         """A context manager used to print the notice and terminate execution.
 
-    This decorator executes the function and prints the monitoring notice if
-    necessary. If an exception is raised, we will catch it, and print it before
-    printing the metrics collection notice.
-    This will call sys.exit() with an appropriate exit code to ensure the notice
-    is the last thing printed."""
+        This context manager executes the enclosed code and prints the
+        monitoring notice if necessary. If an exception is raised, we will
+        catch it and print it before printing the metrics collection notice.
+        This will call sys.exit() with an appropriate exit code to ensure the
+        notice is the last thing printed.
+        """
         # Needed to preserve the __name__ and __doc__ attributes of func.
         try:
             yield

+ 14 - 14
metrics_utils.py

@@ -211,20 +211,20 @@ def extract_known_subcommand_args(args):
 def extract_http_metrics(request_uri, method, status, response_time):
     """Extract metrics from the request URI.
 
-  Extracts the host, path, and arguments from the request URI, and returns them
-  along with the method, status and response time.
+    Extracts the host, path, and arguments from the request URI, and returns
+    them along with the method, status and response time.
 
-  The host, method, path and arguments must be in the KNOWN_HTTP_* constants
-  defined above.
+    The host, method, path and arguments must be in the KNOWN_HTTP_* constants
+    defined above.
 
-  Arguments are the values of the o= url parameter. In Gerrit, additional fields
-  can be obtained by adding o parameters, each option requires more database
-  lookups and slows down the query response time to the client, so we make an
-  effort to collect them.
+    Arguments are the values of the o= url parameter. In Gerrit, additional
+    fields can be obtained by adding o parameters; each option requires more
+    database lookups and slows down the query response time to the client, so
+    we make an effort to collect them.
 
-  The regex defined in KNOWN_HTTP_PATH_RES are checked against the path, and
-  those that match will be returned.
-  """
+    The regex defined in KNOWN_HTTP_PATH_RES are checked against the path, and
+    those that match will be returned.
+    """
     http_metrics = {
         'status': status,
         'response_time': response_time,
@@ -256,9 +256,9 @@ def extract_http_metrics(request_uri, method, status, response_time):
 def get_repo_timestamp(path_to_repo):
     """Get an approximate timestamp for the upstream of |path_to_repo|.
 
-  Returns the top two bits of the timestamp of the HEAD for the upstream of the
-  branch path_to_repo is checked out at.
-  """
+    Returns the top two bits of the timestamp of the HEAD for the upstream of
+    the branch path_to_repo is checked out at.
+    """
     # Get the upstream for the current branch. If we're not in a branch,
     # fallback to HEAD.
     try:

+ 15 - 13
my_activity.py

@@ -320,19 +320,21 @@ class MyActivity(object):
     def filter_modified_monorail_issue(self, issue):
         """Precisely checks if an issue has been modified in the time range.
 
-    This fetches all issue comments to check if the issue has been modified in
-    the time range specified by user. This is needed because monorail only
-    allows filtering by last updated and published dates, which is not
-    sufficient to tell whether a given issue has been modified at some specific
-    time range. Any update to the issue is a reported as comment on Monorail.
-
-    Args:
-      issue: Issue dict as returned by monorail_query_issues method. In
-          particular, must have a key 'uid' formatted as 'project:issue_id'.
-
-    Returns:
-      Passed issue if modified, None otherwise.
-    """
+        This fetches all issue comments to check if the issue has been modified
+        in the time range specified by the user. This is needed because
+        monorail only allows filtering by last updated and published dates,
+        which is not sufficient to tell whether a given issue has been modified
+        in some specific time range. Any update to the issue is reported as a
+        comment on Monorail.
+
+        Args:
+            issue: Issue dict as returned by monorail_query_issues method. In
+                particular, must have a key 'uid' formatted as
+                'project:issue_id'.
+
+        Returns:
+            Passed issue if modified, None otherwise.
+        """
         http = self.monorail_get_auth_http()
         project, issue_id = issue['uid'].split(':')
         url = ('https://monorail-prod.appspot.com/_ah/api/monorail/v1/projects'

+ 21 - 21
owners_client.py

@@ -12,17 +12,17 @@ import git_common
 class OwnersClient(object):
     """Interact with OWNERS files in a repository.
 
-  This class allows you to interact with OWNERS files in a repository both the
-  Gerrit Code-Owners plugin REST API, and the owners database implemented by
-  Depot Tools in owners.py:
+    This class allows you to interact with OWNERS files in a repository via
+    both the Gerrit Code-Owners plugin REST API and the owners database
+    implemented by Depot Tools in owners.py:
 
-   - List all the owners for a group of files.
-   - Check if files have been approved.
-   - Suggest owners for a group of files.
+        - List all the owners for a group of files.
+        - Check if files have been approved.
+        - Suggest owners for a group of files.
 
-  All code should use this class to interact with OWNERS files instead of the
-  owners database in owners.py
-  """
+    All code should use this class to interact with OWNERS files instead of the
+    owners database in owners.py.
+    """
     # '*' means that everyone can approve.
     EVERYONE = '*'
 
@@ -39,15 +39,15 @@ class OwnersClient(object):
     def ListOwners(self, path):
         """List all owners for a file.
 
-    The returned list is sorted so that better owners appear first.
-    """
+        The returned list is sorted so that better owners appear first.
+        """
         raise Exception('Not implemented')
 
     def BatchListOwners(self, paths):
         """List all owners for a group of files.
 
-    Returns a dictionary {path: [owners]}.
-    """
+        Returns a dictionary {path: [owners]}.
+        """
         with git_common.ScopedPool(kind='threads') as pool:
             return dict(
                 pool.imap_unordered(lambda p: (p, self.ListOwners(p)), paths))
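
A hedged end-to-end sketch; host, project, and paths are illustrative, and
the Gerrit host must have the code-owners plugin enabled:

    import owners_client

    client = owners_client.GetCodeOwnersClient(
        'chromium-review.googlesource.com', 'chromium/src', 'main')
    for path, owners in client.BatchListOwners(['DEPS', 'OWNERS']).items():
        print(path, owners[:3])  # better owners sort first
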
@@ -55,11 +55,11 @@ class OwnersClient(object):
     def GetFilesApprovalStatus(self, paths, approvers, reviewers):
         """Check the approval status for the given paths.
 
-    Utility method to check for approval status when a change has not yet been
-    created, given reviewers and approvers.
+        Utility method to check for approval status when a change has not yet
+        been created, given reviewers and approvers.
 
-    See GetChangeApprovalStatus for description of the returned value.
-    """
+        See GetChangeApprovalStatus for description of the returned value.
+        """
         approvers = set(approvers)
         if approvers:
             approvers.add(self.EVERYONE)
@@ -167,8 +167,8 @@ class GerritClient(OwnersClient):
     def BatchListBestOwners(self, paths):
         """List only the higest-scoring owners for a group of files.
 
-    Returns a dictionary {path: [owners]}.
-    """
+        Returns a dictionary {path: [owners]}.
+        """
         with git_common.ScopedPool(kind='threads') as pool:
             return dict(
                 pool.imap_unordered(lambda p: (p, self.ListBestOwners(p)),
@@ -178,8 +178,8 @@ class GerritClient(OwnersClient):
 def GetCodeOwnersClient(host, project, branch):
     """Get a new OwnersClient.
 
-  Uses GerritClient and raises an exception if code-owners plugin is not
-  available."""
+    Uses GerritClient and raises an exception if code-owners plugin is not
+    available."""
     if gerrit_util.IsCodeOwnersEnabledOnHost(host):
         return GerritClient(host, project, branch)
     raise Exception(

+ 189 - 190
presubmit_canned_checks.py

@@ -73,7 +73,7 @@ _CORP_LINK_KEYWORD = '.corp.google'
 
 def CheckChangeHasBugFieldFromChange(change, output_api, show_suggestions=True):
     """Requires that the changelist have a Bug: field. If show_suggestions is
-  False then only report on incorrect tags, not missing tags."""
+    False then only report on incorrect tags, not missing tags."""
     bugs = change.BugsFromDescription()
     results = []
     if bugs:
@@ -122,7 +122,7 @@ def CheckChangeHasNoUnwantedTags(input_api, output_api):
 
 def CheckDoNotSubmitInDescription(input_api, output_api):
     """Checks that the user didn't add 'DO NOT ''SUBMIT' to the CL description.
-  """
+    """
     # Keyword is concatenated to avoid presubmit check rejecting the CL.
     keyword = 'DO NOT ' + 'SUBMIT'
     if keyword in input_api.change.DescriptionText():
@@ -171,11 +171,11 @@ def CheckChangeWasUploaded(input_api, output_api):
 
 def CheckDescriptionUsesColonInsteadOfEquals(input_api, output_api):
     """Checks that the CL description uses a colon after 'Bug' and 'Fixed' tags
-  instead of equals.
+    instead of equals.
 
-  crbug.com only interprets the lines "Bug: xyz" and "Fixed: xyz" but not
-  "Bug=xyz" or "Fixed=xyz".
-  """
+    crbug.com only interprets the lines "Bug: xyz" and "Fixed: xyz" but not
+    "Bug=xyz" or "Fixed=xyz".
+    """
     text = input_api.change.DescriptionText()
     if input_api.re.search(r'^(Bug|Fixed)=',
                            text,
@@ -192,8 +192,8 @@ def CheckDescriptionUsesColonInsteadOfEquals(input_api, output_api):
 
 def CheckAuthorizedAuthor(input_api, output_api, bot_allowlist=None):
     """For non-googler/chromites committers, verify the author's email address is
-  in AUTHORS.
-  """
+    in AUTHORS.
+    """
     if input_api.is_committing or input_api.no_diffs:
         error_type = output_api.PresubmitError
     else:
@@ -376,8 +376,8 @@ def CheckChangeHasNoCrAndHasOnlyOneEol(input_api,
                                        source_file_filter=None):
     """Runs both CheckChangeHasNoCR and CheckChangeHasOnlyOneEOL in one pass.
 
-  It is faster because it is reading the file only once.
-  """
+    It is faster because it is reading the file only once.
+    """
     cr_files = []
     eof_files = []
     for f in input_api.AffectedSourceFiles(source_file_filter):
@@ -403,8 +403,8 @@ def CheckChangeHasNoCrAndHasOnlyOneEol(input_api,
 
 def CheckGenderNeutral(input_api, output_api, source_file_filter=None):
     """Checks that there are no gendered pronouns in any of the text files to be
-  submitted.
-  """
+    submitted.
+    """
     if input_api.no_diffs:
         return []
 
@@ -434,16 +434,16 @@ def _ReportErrorFileAndLine(filename, line_num, dummy_line):
 def _GenerateAffectedFileExtList(input_api, source_file_filter):
     """Generate a list of (file, extension) tuples from affected files.
 
-  The result can be fed to _FindNewViolationsOfRule() directly, or
-  could be filtered before doing that.
+    The result can be fed to _FindNewViolationsOfRule() directly, or
+    could be filtered before doing that.
 
-  Args:
-    input_api: object to enumerate the affected files.
-    source_file_filter: a filter to be passed to the input api.
-  Yields:
-    A list of (file, extension) tuples, where |file| is an affected
-      file, and |extension| its file path extension.
-  """
+    Args:
+        input_api: object to enumerate the affected files.
+        source_file_filter: a filter to be passed to the input api.
+    Yields:
+        A list of (file, extension) tuples, where |file| is an affected
+            file, and |extension| its file path extension.
+    """
     for f in input_api.AffectedFiles(include_deletes=False,
                                      file_filter=source_file_filter):
         extension = str(f.LocalPath()).rsplit('.', 1)[-1]
@@ -456,19 +456,19 @@ def _FindNewViolationsOfRuleForList(callable_rule,
     """Find all newly introduced violations of a per-line rule (a callable).
 
-  Prefer calling _FindNewViolationsOfRule() instead of this function, unless
-  the list of affected files need to be filtered in a special way.
-
-  Arguments:
-    callable_rule: a callable taking a file extension and line of input and
-      returning True if the rule is satisfied and False if there was a problem.
-    file_ext_list: a list of input (file, extension) tuples, as returned by
-      _GenerateAffectedFileExtList().
-    error_formatter: a callable taking (filename, line_number, line) and
-      returning a formatted error string.
-
-  Returns:
-    A list of the newly-introduced violations reported by the rule.
-  """
+    Prefer calling _FindNewViolationsOfRule() instead of this function, unless
+    the list of affected files needs to be filtered in a special way.
+
+    Arguments:
+        callable_rule: a callable taking a file extension and line of input and
+            returning True if the rule is satisfied and False if there was a
+            problem.
+        file_ext_list: a list of input (file, extension) tuples, as returned by
+            _GenerateAffectedFileExtList().
+        error_formatter: a callable taking (filename, line_number, line) and
+            returning a formatted error string.
+
+    Returns:
+        A list of the newly-introduced violations reported by the rule.
+    """
     errors = []
     for f, extension in file_ext_list:
         # For speed, we do two passes, checking first the full file.  Shelling
@@ -491,17 +491,17 @@ def _FindNewViolationsOfRule(callable_rule,
                              error_formatter=_ReportErrorFileAndLine):
     """Find all newly introduced violations of a per-line rule (a callable).
 
-  Arguments:
-    callable_rule: a callable taking a file extension and line of input and
-      returning True if the rule is satisfied and False if there was a problem.
-    input_api: object to enumerate the affected files.
-    source_file_filter: a filter to be passed to the input api.
-    error_formatter: a callable taking (filename, line_number, line) and
-      returning a formatted error string.
-
-  Returns:
-    A list of the newly-introduced violations reported by the rule.
-  """
+    Arguments:
+        callable_rule: a callable taking a file extension and line of input and
+            returning True if the rule is satisfied and False if there was a
+            problem.
+        input_api: object to enumerate the affected files.
+        source_file_filter: a filter to be passed to the input api.
+        error_formatter: a callable taking (filename, line_number, line) and
+            returning a formatted error string.
+
+    Returns:
+        A list of the newly-introduced violations reported by the rule.
+    """
     if input_api.no_diffs:
         return []
     return _FindNewViolationsOfRuleForList(
@@ -512,8 +512,8 @@ def _FindNewViolationsOfRule(callable_rule,
 
 def CheckChangeHasNoTabs(input_api, output_api, source_file_filter=None):
     """Checks that there are no tab characters in any of the text files to be
-  submitted.
-  """
+    submitted.
+    """
     # In addition to the filter, make sure that makefiles are skipped.
     if not source_file_filter:
         # It's the default filter.
@@ -569,8 +569,8 @@ def CheckChangeHasNoStrayWhitespace(input_api,
 
 def CheckLongLines(input_api, output_api, maxlen, source_file_filter=None):
     """Checks that there aren't any lines longer than maxlen characters in any of
-  the text files to be submitted.
-  """
+    the text files to be submitted.
+    """
     if input_api.no_diffs:
         return []
     maxlens = {
@@ -726,8 +726,7 @@ def CheckLicense(input_api,
                  project_name=None,
                  source_file_filter=None,
                  accept_empty_files=True):
-    """Verifies the license header.
-  """
+    """Verifies the license header."""
 
     # Early-out if the license_re is guaranteed to match everything.
     if license_re_param and license_re_param == '.*':
@@ -851,11 +850,11 @@ def CheckLicense(input_api,
 
 def CheckChromiumDependencyMetadata(input_api, output_api, file_filter=None):
     """Check files for Chromium third party dependency metadata have sufficient
-  information, and are correctly formatted.
+    information, and are correctly formatted.
 
-  See the README.chromium.template at
-  https://chromium.googlesource.com/chromium/src/+/main/third_party/README.chromium.template
-  """
+    See the README.chromium.template at
+    https://chromium.googlesource.com/chromium/src/+/main/third_party/README.chromium.template
+    """
     # If the file filter is unspecified, filter to known Chromium metadata
     # files.
     if file_filter is None:
@@ -901,17 +900,17 @@ def CheckTreeIsOpen(input_api,
                     json_url=None):
     """Check whether to allow commit without prompt.
 
-  Supports two styles:
-    1. Checks that an url's content doesn't match a regexp that would mean that
-       the tree is closed. (old)
-    2. Check the json_url to decide whether to allow commit without prompt.
-  Args:
-    input_api: input related apis.
-    output_api: output related apis.
-    url: url to use for regex based tree status.
-    closed: regex to match for closed status.
-    json_url: url to download json style status.
-  """
+    Supports two styles:
+        1. Checks that a url's content doesn't match a regexp that would mean
+            that the tree is closed. (old)
+        2. Checks the json_url to decide whether to allow commit without
+            prompt.
+    Args:
+        input_api: input related apis.
+        output_api: output related apis.
+        url: url to use for regex based tree status.
+        closed: regex to match for closed status.
+        json_url: url to download json style status.
+    """
     if not input_api.is_committing or \
         'PRESUBMIT_SKIP_NETWORK' in _os.environ:
         return []
@@ -963,11 +962,11 @@ def GetUnitTestsInDirectory(input_api,
                             blocklist=None):
     """Lists all files in a directory and runs them. Doesn't recurse.
 
-  It's mainly a wrapper for RunUnitTests. Use allowlist and blocklist to filter
-  tests accordingly. run_on_python2, run_on_python3, and skip_shebang_check are
-  no longer used but have to be retained because of the many callers in other
-  repos that pass them in.
-  """
+    It's mainly a wrapper for RunUnitTests. Use allowlist and blocklist to
+    filter tests accordingly. run_on_python2, run_on_python3, and
+    skip_shebang_check are no longer used but have to be retained because of
+    the many callers in other repos that pass them in.
+    """
     del run_on_python2
     del run_on_python3
     del skip_shebang_check
@@ -1011,11 +1010,11 @@ def GetUnitTests(input_api,
                  skip_shebang_check=True):
     """Runs all unit tests in a directory.
 
-  On Windows, sys.executable is used for unit tests ending with ".py".
-  run_on_python2, run_on_python3, and skip_shebang_check are no longer used but
-  have to be retained because of the many callers in other repos that pass them
-  in.
-  """
+    On Windows, sys.executable is used for unit tests ending with ".py".
+    run_on_python2, run_on_python3, and skip_shebang_check are no longer used
+    but have to be retained because of the many callers in other repos that
+    pass them in.
+    """
     del run_on_python2
     del run_on_python3
     del skip_shebang_check
@@ -1060,11 +1059,11 @@ def GetUnitTestsRecursively(input_api,
                             skip_shebang_check=True):
     """Gets all files in the directory tree (git repo) that match files_to_check.
 
-  Restricts itself to only find files within the Change's source repo, not
-  dependencies. run_on_python2, run_on_python3, and skip_shebang_check are no
-  longer used but have to be retained because of the many callers in other repos
-  that pass them in.
-  """
+    Restricts itself to only find files within the Change's source repo, not
+    dependencies. run_on_python2, run_on_python3, and skip_shebang_check are
+    no longer used but have to be retained because of the many callers in
+    other repos that pass them in.
+    """
     del run_on_python2
     del run_on_python3
     del skip_shebang_check
@@ -1094,10 +1093,10 @@ def GetUnitTestsRecursively(input_api,
 
 def GetPythonUnitTests(input_api, output_api, unit_tests, python3=False):
     """Run the unit tests out of process, capture the output and use the result
-  code to determine success.
+    code to determine success.
 
-  DEPRECATED.
-  """
+    DEPRECATED.
+    """
     # We don't want to hinder users from uploading incomplete patches.
     if input_api.is_committing or input_api.no_diffs:
         message_type = output_api.PresubmitError
@@ -1144,9 +1143,9 @@ def GetPythonUnitTests(input_api, output_api, unit_tests, python3=False):
 def RunUnitTestsInDirectory(input_api, *args, **kwargs):
     """Run tests in a directory serially.
 
-  For better performance, use GetUnitTestsInDirectory and then
-  pass to input_api.RunTests.
-  """
+    For better performance, use GetUnitTestsInDirectory and then
+    pass to input_api.RunTests.
+    """
     return input_api.RunTests(
         GetUnitTestsInDirectory(input_api, *args, **kwargs), False)
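
The recommended split, sketched for a hypothetical PRESUBMIT.py (the
directory name and filter regex are assumptions, not fixed API):

    def CheckChangeOnUpload(input_api, output_api):
        tests = input_api.canned_checks.GetUnitTestsInDirectory(
            input_api, output_api, 'tests',
            files_to_check=[r'.+_test\.py$'])
        return input_api.RunTests(tests)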
 
@@ -1154,17 +1153,17 @@ def RunUnitTestsInDirectory(input_api, *args, **kwargs):
 def RunUnitTests(input_api, *args, **kwargs):
     """Run tests serially.
 
-  For better performance, use GetUnitTests and then pass to
-  input_api.RunTests.
-  """
+    For better performance, use GetUnitTests and then pass to
+    input_api.RunTests.
+    """
     return input_api.RunTests(GetUnitTests(input_api, *args, **kwargs), False)
 
 
 def RunPythonUnitTests(input_api, *args, **kwargs):
     """Run python tests in a directory serially.
 
-  DEPRECATED
-  """
+    DEPRECATED
+    """
     return input_api.RunTests(GetPythonUnitTests(input_api, *args, **kwargs),
                               False)
 
@@ -1212,10 +1211,10 @@ def GetPylint(input_api,
               version='2.7'):
     """Run pylint on python files.
 
-  The default files_to_check enforces looking only at *.py files.
+    The default files_to_check enforces looking only at *.py files.
 
-  Currently only pylint version '2.6' and '2.7' are supported.
-  """
+    Currently only pylint version '2.6' and '2.7' are supported.
+    """
 
     files_to_check = tuple(files_to_check or (r'.*\.py$', ))
     files_to_skip = tuple(files_to_skip or input_api.DEFAULT_FILES_TO_SKIP)
@@ -1344,9 +1343,9 @@ def GetPylint(input_api,
 def RunPylint(input_api, *args, **kwargs):
     """Legacy presubmit function.
 
-  For better performance, get all tests and then pass to
-  input_api.RunTests.
-  """
+    For better performance, get all tests and then pass to
+    input_api.RunTests.
+    """
     return input_api.RunTests(GetPylint(input_api, *args, **kwargs), False)
 
 
@@ -1426,8 +1425,8 @@ def CheckNoNewMetadataInOwners(input_api, output_api):
 
 def CheckOwnersDirMetadataExclusive(input_api, output_api):
     """Check that metadata in OWNERS files and DIR_METADATA files are mutually
-  exclusive.
-  """
+    exclusive.
+    """
     _METADATA_LINE_RE = input_api.re.compile(
         r'^#\s*(TEAM|COMPONENT|OS|WPT-NOTIFY)+\s*:\s*\S+$',
         input_api.re.MULTILINE)
@@ -1498,9 +1497,9 @@ def GetCodereviewOwnerAndReviewers(input_api,
                                    approval_needed=True):
     """Return the owner and reviewers of a change, if any.
 
-  If approval_needed is True, only reviewers who have approved the change
-  will be returned.
-  """
+    If approval_needed is True, only reviewers who have approved the change
+    will be returned.
+    """
     # Recognizes 'X@Y' email addresses. Very simplistic.
     EMAIL_REGEXP = input_api.re.compile(r'^[\w\-\+\%\.]+\@[\w\-\+\%\.]+$')
     issue = input_api.change.issue
@@ -1550,26 +1549,26 @@ def PanProjectChecks(input_api,
                      global_checks=True):
     """Checks that ALL chromium orbit projects should use.
 
-  These are checks to be run on all Chromium orbit project, including:
-    Chromium
-    Native Client
-    V8
-  When you update this function, please take this broad scope into account.
-  Args:
-    input_api: Bag of input related interfaces.
-    output_api: Bag of output related interfaces.
-    excluded_paths: Don't include these paths in common checks.
-    text_files: Which file are to be treated as documentation text files.
-    license_header: What license header should be on files.
-    project_name: What is the name of the project as it appears in the license.
-    global_checks: If True run checks that are unaffected by other options or by
-      the PRESUBMIT script's location, such as CheckChangeHasDescription.
-      global_checks should be passed as False when this function is called from
-      locations other than the project's root PRESUBMIT.py, to avoid redundant
-      checking.
-  Returns:
-    A list of warning or error objects.
-  """
+    These are checks to be run on all Chromium orbit projects, including:
+        Chromium
+        Native Client
+        V8
+    When you update this function, please take this broad scope into account.
+    Args:
+        input_api: Bag of input related interfaces.
+        output_api: Bag of output related interfaces.
+        excluded_paths: Don't include these paths in common checks.
+        text_files: Which files are to be treated as documentation text files.
+        license_header: What license header should be on files.
+        project_name: What is the name of the project as it appears in the license.
+        global_checks: If True run checks that are unaffected by other options or by
+            the PRESUBMIT script's location, such as CheckChangeHasDescription.
+            global_checks should be passed as False when this function is called from
+            locations other than the project's root PRESUBMIT.py, to avoid redundant
+            checking.
+    Returns:
+        A list of warning or error objects.
+    """
     excluded_paths = tuple(excluded_paths or [])
     text_files = tuple(text_files or (
         r'.+\.txt$',
@@ -1764,13 +1763,13 @@ def CheckGNFormatted(input_api, output_api):
 def CheckCIPDManifest(input_api, output_api, path=None, content=None):
     """Verifies that a CIPD ensure file manifest is valid against all platforms.
 
-  Exactly one of "path" or "content" must be provided. An assertion will occur
-  if neither or both are provided.
+    Exactly one of "path" or "content" must be provided. An assertion will occur
+    if neither or both are provided.
 
-  Args:
-    path (str): If provided, the filesystem path to the manifest to verify.
-    content (str): If provided, the raw content of the manifest to veirfy.
-  """
+    Args:
+        path (str): If provided, the filesystem path to the manifest to verify.
+        content (str): If provided, the raw content of the manifest to verify.
+    """
     cipd_bin = 'cipd' if not input_api.is_windows else 'cipd.bat'
     cmd = [cipd_bin, 'ensure-file-verify']
     kwargs = {}
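
A hedged usage sketch from a caller's PRESUBMIT.py, assuming a manifest file named cipd_manifest.txt next to the script (exactly one of path/content may be given):

    def CheckCipdManifest(input_api, output_api):
        path = input_api.os_path.join(input_api.PresubmitLocalPath(),
                                      'cipd_manifest.txt')  # assumed name
        return input_api.canned_checks.CheckCIPDManifest(
            input_api, output_api, path=path)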
@@ -1804,12 +1803,12 @@ def CheckCIPDManifest(input_api, output_api, path=None, content=None):
 
 def CheckCIPDPackages(input_api, output_api, platforms, packages):
     """Verifies that all named CIPD packages can be resolved against all supplied
-  platforms.
+    platforms.
 
-  Args:
-    platforms (list): List of CIPD platforms to verify.
-    packages (dict): Mapping of package name to version.
-  """
+    Args:
+        platforms (list): List of CIPD platforms to verify.
+        packages (dict): Mapping of package name to version.
+    """
     manifest = []
     for p in platforms:
         manifest.append('$VerifiedPlatform %s' % (p, ))
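
And a matching sketch for the packages variant; the platform and package names are invented:

    def CheckCipdPackages(input_api, output_api):
        return input_api.canned_checks.CheckCIPDPackages(
            input_api,
            output_api,
            platforms=['linux-amd64', 'windows-amd64'],
            # Hypothetical pin; any package-name -> version mapping fits here.
            packages={'infra/tools/example': 'git_revision:' + 'a' * 40})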
@@ -1821,13 +1820,13 @@ def CheckCIPDPackages(input_api, output_api, platforms, packages):
 def CheckCIPDClientDigests(input_api, output_api, client_version_file):
     """Verifies that *.digests file was correctly regenerated.
 
-  <client_version_file>.digests file contains pinned hashes of the CIPD client.
-  It is consulted during CIPD client bootstrap and self-update. It should be
-  regenerated each time CIPD client version file changes.
+    <client_version_file>.digests file contains pinned hashes of the CIPD client.
+    It is consulted during CIPD client bootstrap and self-update. It should be
+    regenerated each time the CIPD client version file changes.
 
-  Args:
-    client_version_file (str): Path to a text file with CIPD client version.
-  """
+    Args:
+        client_version_file (str): Path to a text file with CIPD client version.
+    """
     cmd = [
         'cipd' if not input_api.is_windows else 'cipd.bat',
         'selfupdate-roll',
@@ -1847,17 +1846,17 @@ def CheckCIPDClientDigests(input_api, output_api, client_version_file):
 def CheckForCommitObjects(input_api, output_api):
     """Validates that commit objects match DEPS.
 
-  Commit objects are put into the git tree typically by submodule tooling.
-  Because we use gclient to handle external repository references instead,
-  we want to ensure DEPS content and Git are in sync when desired.
+    Commit objects are put into the git tree typically by submodule tooling.
+    Because we use gclient to handle external repository references instead,
+    we want to ensure DEPS content and Git are in sync when desired.
 
-  Args:
-    input_api: Bag of input related interfaces.
-    output_api: Bag of output related interfaces.
+    Args:
+        input_api: Bag of input related interfaces.
+        output_api: Bag of output related interfaces.
 
-  Returns:
-    A presubmit error if a commit object is not expected.
-  """
+    Returns:
+        A presubmit error if a commit object is not expected.
+    """
     # Get DEPS file.
     deps_file = input_api.os_path.join(input_api.PresubmitLocalPath(), 'DEPS')
     if not input_api.os_path.isfile(deps_file):
@@ -1880,12 +1879,12 @@ def CheckForCommitObjects(input_api, output_api):
     def parse_tree_entry(ent):
         """Splits a tree entry into components
 
-    Args:
-      ent: a tree entry in the form "filemode type hash\tname"
+        Args:
+            ent: a tree entry in the form "filemode type hash\tname"
 
-    Returns:
-      The tree entry split into component parts
-    """
+        Returns:
+            The tree entry split into component parts
+        """
         tabparts = ent.split('\t', 1)
         spaceparts = tabparts[0].split(' ', 2)
         return (spaceparts[0], spaceparts[1], spaceparts[2], tabparts[1])
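
To make the entry format concrete, the same splitting logic rerun stand-alone on a fabricated git ls-tree line (hash and path are invented; a submodule shows up with type 'commit'):

    entry = '160000 commit ' + 'a1b2c3d4' * 5 + '\tthird_party/foo'
    tabparts = entry.split('\t', 1)
    spaceparts = tabparts[0].split(' ', 2)
    # -> ('160000', 'commit', '<40-char hash>', 'third_party/foo')
    print(spaceparts[0], spaceparts[1], tabparts[1])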
@@ -2002,19 +2001,19 @@ def _ParseDeps(contents):
 
 def CheckVPythonSpec(input_api, output_api, file_filter=None):
     """Validates any changed .vpython and .vpython3 files with vpython
-  verification tool.
-
-  Args:
-    input_api: Bag of input related interfaces.
-    output_api: Bag of output related interfaces.
-    file_filter: Custom function that takes a path (relative to client root) and
-      returns boolean, which is used to filter files for which to apply the
-      verification to. Defaults to any path ending with .vpython, which captures
-      both global .vpython and <script>.vpython files.
-
-  Returns:
-    A list of input_api.Command objects containing verification commands.
-  """
+    verification tool.
+
+    Args:
+        input_api: Bag of input related interfaces.
+        output_api: Bag of output related interfaces.
+        file_filter: Custom function that takes a path (relative to client
+            root) and returns a boolean, used to filter the files the
+            verification applies to. Defaults to any path ending with
+            .vpython, which captures both global .vpython and
+            <script>.vpython files.
+
+    Returns:
+        A list of input_api.Command objects containing verification commands.
+    """
     file_filter = file_filter or (lambda f: f.LocalPath().endswith('.vpython')
                                   or f.LocalPath().endswith('.vpython3'))
     affected_files = input_api.AffectedTestableFiles(file_filter=file_filter)
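
A short usage sketch with a custom filter; pinning validation to the repository's top-level spec is an assumption of this example:

    def CheckVpython(input_api, output_api):
        return input_api.canned_checks.CheckVPythonSpec(
            input_api, output_api,
            file_filter=lambda f: f.LocalPath() == '.vpython3')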
@@ -2035,14 +2034,14 @@ def CheckVPythonSpec(input_api, output_api, file_filter=None):
 def CheckChangedLUCIConfigs(input_api, output_api):
     """Validates the changed config file against LUCI Config.
 
-  Only return the warning and/or error for files in input_api.AffectedFiles().
+    Only returns warnings and/or errors for files in input_api.AffectedFiles().
 
-  Assumes `lucicfg` binary is in PATH and the user is logged in.
+    Assumes `lucicfg` binary is in PATH and the user is logged in.
 
-  Returns:
-    A list presubmit errors and/or warnings from the validation result of files
-    in input_api.AffectedFiles()
-  """
+    Returns:
+        A list of presubmit errors and/or warnings from the validation result
+        of files in input_api.AffectedFiles().
+    """
     import json
     import logging
 
@@ -2116,7 +2115,7 @@ def CheckChangedLUCIConfigs(input_api, output_api):
         return json.loads(res.read().decode('utf-8')[len(")]}'"):].strip())
 
     def format_config_set(cs):
-        """convert luci-config v2 config_set object to v1 format"""
+        """convert luci-config v2 config_set object to v1 format."""
         rev = cs.get('revision', {})
         return {
             'config_set': cs.get('name'),
@@ -2245,20 +2244,20 @@ def CheckChangedLUCIConfigs(input_api, output_api):
 def CheckLucicfgGenOutput(input_api, output_api, entry_script):
     """Verifies configs produced by `lucicfg` are up-to-date and pass validation.
 
-  Runs the check unconditionally, regardless of what files are modified. Examine
-  input_api.AffectedFiles() yourself before using CheckLucicfgGenOutput if this
-  is a concern.
+    Runs the check unconditionally, regardless of what files are modified. Examine
+    input_api.AffectedFiles() yourself before using CheckLucicfgGenOutput if this
+    is a concern.
 
-  Assumes `lucicfg` binary is in PATH and the user is logged in.
+    Assumes `lucicfg` binary is in PATH and the user is logged in.
 
-  Args:
-    entry_script: path to the entry-point *.star script responsible for
-        generating a single config set. Either absolute or relative to the
-        currently running PRESUBMIT.py script.
+    Args:
+        entry_script: path to the entry-point *.star script responsible for
+            generating a single config set. Either absolute or relative to the
+            currently running PRESUBMIT.py script.
 
-  Returns:
-    A list of input_api.Command objects containing verification commands.
-  """
+    Returns:
+        A list of input_api.Command objects containing verification commands.
+    """
     return [
         input_api.Command(
             'lucicfg validate "%s"' % entry_script,
@@ -2280,7 +2279,7 @@ def CheckLucicfgGenOutput(input_api, output_api, entry_script):
 
 def CheckJsonParses(input_api, output_api, file_filter=None):
     """Verifies that all JSON files at least parse as valid JSON. By default,
-  file_filter will look for all files that end with .json"""
+    file_filter will look for all files that end with .json."""
     import json
     if file_filter is None:
         file_filter = lambda x: x.LocalPath().endswith('.json')
@@ -2325,10 +2324,10 @@ def _GetMessageForMatchingTerm(input_api, affected_file, line_number, line,
                                term, message):
     """Helper method for CheckInclusiveLanguage.
 
-  Returns an string composed of the name of the file, the line number where the
-  match has been found and the additional text passed as |message| in case the
-  target type name matches the text inside the line passed as parameter.
-  """
+    Returns a string composed of the name of the file, the line number where
+    the match was found, and the additional text passed as |message|, in case
+    the target term matches the text inside the line passed as parameter.
+    """
     result = []
 
     # A // nocheck comment will bypass this error.

+ 230 - 230
presubmit_support.py

@@ -218,9 +218,9 @@ class ThreadPool(object):
     def CallCommand(self, test, show_callstack=None):
         """Runs an external program.
 
-    This function converts invocation of .py files and invocations of 'python'
-    to vpython invocations.
-    """
+        This function converts invocation of .py files and invocations of 'python'
+        to vpython invocations.
+        """
         cmd = self._GetCommand(test)
         try:
             start = time_time()
@@ -286,9 +286,9 @@ class ThreadPool(object):
 
 
 def normpath(path):
-    '''Version of os.path.normpath that also changes backward slashes to
-  forward slashes when not running on Windows.
-  '''
+    """Version of os.path.normpath that also changes backward slashes to
+    forward slashes when not running on Windows.
+    """
     # This is safe to always do because the Windows version of os.path.normpath
     # will replace forward slashes with backward slashes.
     path = path.replace(os.sep, '/')
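
A portable illustration of the same behavior, using ntpath to simulate the Windows case on any platform:

    import ntpath

    # ntpath.normpath collapses '..' and emits backslashes, just as
    # os.path.normpath would on Windows; the replace flips them forward.
    path = ntpath.normpath(r'src\tools\..\depot_tools')
    print(path.replace('\\', '/'))  # src/depot_tools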
@@ -319,10 +319,10 @@ class _PresubmitResult(object):
 
     def __init__(self, message, items=None, long_text='', show_callstack=None):
         """
-    message: A short one-line message to indicate errors.
-    items: A list of short strings to indicate where errors occurred.
-    long_text: multi-line text output, e.g. from another tool
-    """
+        message: A short one-line message to indicate errors.
+        items: A list of short strings to indicate where errors occurred.
+        long_text: multi-line text output, e.g. from another tool
+        """
         self._message = _PresubmitResult._ensure_str(message)
         self._items = items or []
         self._long_text = _PresubmitResult._ensure_str(long_text.rstrip())
@@ -335,10 +335,10 @@ class _PresubmitResult(object):
     @staticmethod
     def _ensure_str(val):
         """
-    val: A "stringish" value. Can be any of str or bytes.
-    returns: A str after applying encoding/decoding as needed.
-    Assumes/uses UTF-8 for relevant inputs/outputs.
-    """
+        val: A "stringish" value. Can be any of str or bytes.
+        returns: A str after applying encoding/decoding as needed.
+        Assumes/uses UTF-8 for relevant inputs/outputs.
+        """
         if isinstance(val, str):
             return val
         if isinstance(val, bytes):
@@ -400,8 +400,8 @@ class _MailTextResult(_PresubmitResult):
 class GerritAccessor(object):
     """Limited Gerrit functionality for canned presubmit checks to work.
 
-  To avoid excessive Gerrit calls, caches the results.
-  """
+    To avoid excessive Gerrit calls, caches the results.
+    """
     def __init__(self, url=None, project=None, branch=None):
         self.host = urlparse.urlparse(url).netloc if url else None
         self.project = project
@@ -424,11 +424,11 @@ class GerritAccessor(object):
     def GetChangeInfo(self, issue):
         """Returns labels and all revisions (patchsets) for this issue.
 
-    The result is a dictionary according to Gerrit REST Api.
-    https://gerrit-review.googlesource.com/Documentation/rest-api.html
+        The result is a dictionary according to the Gerrit REST API.
+        https://gerrit-review.googlesource.com/Documentation/rest-api.html
 
-    However, API isn't very clear what's inside, so see tests for example.
-    """
+        However, the API isn't very clear about what's inside, so see tests
+        for an example.
+        """
         assert issue
         cache_key = int(issue)
         if cache_key not in self.cache:
@@ -507,8 +507,8 @@ class GerritAccessor(object):
 
 class OutputApi(object):
     """An instance of OutputApi gets passed to presubmit scripts so that they
-  can output various types of results.
-  """
+    can output various types of results.
+    """
     PresubmitResult = _PresubmitResult
     PresubmitError = _PresubmitError
     PresubmitPromptWarning = _PresubmitPromptWarning
@@ -532,8 +532,8 @@ class OutputApi(object):
 
 class InputApi(object):
     """An instance of this object is passed to presubmit scripts so they can
-  know stuff about the change they're looking at.
-  """
+    know stuff about the change they're looking at.
+    """
     # Method could be a function
     # pylint: disable=no-self-use
 
@@ -610,17 +610,17 @@ class InputApi(object):
                  no_diffs=False):
         """Builds an InputApi object.
 
-    Args:
-      change: A presubmit.Change object.
-      presubmit_path: The path to the presubmit script being processed.
-      is_committing: True if the change is about to be committed.
-      gerrit_obj: provides basic Gerrit codereview functionality.
-      dry_run: if true, some Checks will be skipped.
-      parallel: if true, all tests reported via input_api.RunTests for all
+        Args:
+            change: A presubmit.Change object.
+            presubmit_path: The path to the presubmit script being processed.
+            is_committing: True if the change is about to be committed.
+            gerrit_obj: provides basic Gerrit codereview functionality.
+            dry_run: if true, some Checks will be skipped.
+            parallel: if true, all tests reported via input_api.RunTests for all
                 PRESUBMIT files will be run in parallel.
-      no_diffs: if true, implies that --files or --all was specified so some
+            no_diffs: if true, implies that --files or --all was specified so some
                 checks can be skipped, and some errors will be messages.
-    """
+        """
         # Version number of the presubmit_support script.
         self.version = [int(x) for x in __version__.split('.')]
         self.change = change
@@ -714,19 +714,19 @@ class InputApi(object):
     def PresubmitLocalPath(self):
         """Returns the local path of the presubmit script currently being run.
 
-    This is useful if you don't want to hard-code absolute paths in the
-    presubmit script.  For example, It can be used to find another file
-    relative to the PRESUBMIT.py script, so the whole tree can be branched and
-    the presubmit script still works, without editing its content.
-    """
+        This is useful if you don't want to hard-code absolute paths in the
+        presubmit script.  For example, it can be used to find another file
+        relative to the PRESUBMIT.py script, so the whole tree can be branched and
+        the presubmit script still works, without editing its content.
+        """
         return self._current_presubmit_path
 
     def AffectedFiles(self, include_deletes=True, file_filter=None):
         """Same as input_api.change.AffectedFiles() except only lists files
-    (and optionally directories) in the same directory as the current presubmit
-    script, or subdirectories thereof. Note that files are listed using the OS
-    path separator, so backslashes are used as separators on Windows.
-    """
+        (and optionally directories) in the same directory as the current presubmit
+        script, or subdirectories thereof. Note that files are listed using the OS
+        path separator, so backslashes are used as separators on Windows.
+        """
         dir_with_slash = normpath(self.PresubmitLocalPath())
         # normpath strips trailing path separators, so the trailing separator
         # has to be added after the normpath call.
@@ -751,9 +751,9 @@ class InputApi(object):
 
     def AffectedTestableFiles(self, include_deletes=None, **kwargs):
         """Same as input_api.change.AffectedTestableFiles() except only lists files
-    in the same directory as the current presubmit script, or subdirectories
-    thereof.
-    """
+        in the same directory as the current presubmit script, or subdirectories
+        thereof.
+        """
         if include_deletes is not None:
             warn('AffectedTestableFiles(include_deletes=%s)'
                  ' is deprecated and ignored' % str(include_deletes),
@@ -777,16 +777,16 @@ class InputApi(object):
                          block_list=None):
         """Filters out files that aren't considered 'source file'.
 
-    If files_to_check or files_to_skip is None, InputApi.DEFAULT_FILES_TO_CHECK
-    and InputApi.DEFAULT_FILES_TO_SKIP is used respectively.
+        If files_to_check or files_to_skip is None, InputApi.DEFAULT_FILES_TO_CHECK
+        and InputApi.DEFAULT_FILES_TO_SKIP is used respectively.
 
-    affected_file.LocalPath() needs to re.match an entry in the files_to_check
-    list and not re.match any entries in the files_to_skip list.
-    '/' path separators should be used in the regular expressions and will work
-    on Windows as well as other platforms.
+        affected_file.LocalPath() needs to re.match an entry in the files_to_check
+        list and not re.match any entries in the files_to_skip list.
+        '/' path separators should be used in the regular expressions and will work
+        on Windows as well as other platforms.
 
-    Note: Copy-paste this function to suit your needs or use a lambda function.
-    """
+        Note: Copy-paste this function to suit your needs or use a lambda function.
+        """
         if files_to_check is None:
             files_to_check = self.DEFAULT_FILES_TO_CHECK
         if files_to_skip is None:
@@ -810,8 +810,8 @@ class InputApi(object):
     def AffectedSourceFiles(self, source_file):
         """Filter the list of AffectedTestableFiles by the function source_file.
 
-    If source_file is None, InputApi.FilterSourceFile() is used.
-    """
+        If source_file is None, InputApi.FilterSourceFile() is used.
+        """
         if not source_file:
             source_file = self.FilterSourceFile
         return list(filter(source_file, self.AffectedTestableFiles()))
@@ -819,28 +819,28 @@ class InputApi(object):
     def RightHandSideLines(self, source_file_filter=None):
         """An iterator over all text lines in 'new' version of changed files.
 
-    Only lists lines from new or modified text files in the change that are
-    contained by the directory of the currently executing presubmit script.
+        Only lists lines from new or modified text files in the change that are
+        contained by the directory of the currently executing presubmit script.
 
-    This is useful for doing line-by-line regex checks, like checking for
-    trailing whitespace.
+        This is useful for doing line-by-line regex checks, like checking for
+        trailing whitespace.
 
-    Yields:
-      a 3 tuple:
-        the AffectedFile instance of the current file;
-        integer line number (1-based); and
-        the contents of the line as a string.
+        Yields:
+            a 3 tuple:
+                the AffectedFile instance of the current file;
+                integer line number (1-based); and
+                the contents of the line as a string.
 
-    Note: The carriage return (LF or CR) is stripped off.
-    """
+        Note: The line ending (LF or CR) is stripped off.
+        """
         files = self.AffectedSourceFiles(source_file_filter)
         return _RightHandSideLinesImpl(files)
 
     def ReadFile(self, file_item, mode='r'):
         """Reads an arbitrary file.
 
-    Deny reading anything outside the repository.
-    """
+        Deny reading anything outside the repository.
+        """
         if isinstance(file_item, AffectedFile):
             file_item = file_item.AbsoluteLocalPath()
         if not file_item.startswith(self.change.RepositoryRoot()):
@@ -849,25 +849,25 @@ class InputApi(object):
 
     def CreateTemporaryFile(self, **kwargs):
         """Returns a named temporary file that must be removed with a call to
-    RemoveTemporaryFiles().
+        RemoveTemporaryFiles().
 
-    All keyword arguments are forwarded to tempfile.NamedTemporaryFile(),
-    except for |delete|, which is always set to False.
+        All keyword arguments are forwarded to tempfile.NamedTemporaryFile(),
+        except for |delete|, which is always set to False.
 
-    Presubmit checks that need to create a temporary file and pass it for
-    reading should use this function instead of NamedTemporaryFile(), as
-    Windows fails to open a file that is already open for writing.
+        Presubmit checks that need to create a temporary file and pass it for
+        reading should use this function instead of NamedTemporaryFile(), as
+        Windows fails to open a file that is already open for writing.
 
-      with input_api.CreateTemporaryFile() as f:
-        f.write('xyz')
-        input_api.subprocess.check_output(['script-that', '--reads-from',
-                                           f.name])
+        with input_api.CreateTemporaryFile() as f:
+            f.write('xyz')
+            input_api.subprocess.check_output(['script-that', '--reads-from',
+                                           f.name])
 
 
-    Note that callers of CreateTemporaryFile() should not worry about removing
-    any temporary file; this is done transparently by the presubmit handling
-    code.
-    """
+        Note that callers of CreateTemporaryFile() should not worry about removing
+        any temporary file; this is done transparently by the presubmit handling
+        code.
+        """
         if 'delete' in kwargs:
             # Prevent users from passing |delete|; we take care of file deletion
             # ourselves and this prevents unintuitive error messages when we
@@ -998,15 +998,14 @@ class AffectedFile(object):
     def LocalPath(self):
         """Returns the path of this file on the local disk relative to client root.
 
-    This should be used for error messages but not for accessing files,
-    because presubmit checks are run with CWD=PresubmitLocalPath() (which is
-    often != client root).
-    """
+        This should be used for error messages but not for accessing files,
+        because presubmit checks are run with CWD=PresubmitLocalPath() (which is
+        often != client root).
+        """
         return normpath(self._path)
 
     def AbsoluteLocalPath(self):
-        """Returns the absolute path of this file on the local disk.
-    """
+        """Returns the absolute path of this file on the local disk."""
         return os.path.abspath(os.path.join(self._local_root, self.LocalPath()))
 
     def Action(self):
@@ -1016,7 +1015,7 @@ class AffectedFile(object):
     def IsTestableFile(self):
         """Returns True if the file is a text file and not a binary file.
 
-    Deleted files are not text file."""
+        Deleted files are not text files."""
         raise NotImplementedError()  # Implement when needed
 
     def IsTextFile(self):
@@ -1026,24 +1025,24 @@ class AffectedFile(object):
     def OldContents(self):
         """Returns an iterator over the lines in the old version of file.
 
-    The old version is the file before any modifications in the user's
-    workspace, i.e. the 'left hand side'.
+        The old version is the file before any modifications in the user's
+        workspace, i.e. the 'left hand side'.
 
-    Contents will be empty if the file is a directory or does not exist.
-    Note: The carriage returns (LF or CR) are stripped off.
-    """
+        Contents will be empty if the file is a directory or does not exist.
+        Note: The line endings (LF or CR) are stripped off.
+        """
         return self._diff_cache.GetOldContents(self.LocalPath(),
                                                self._local_root).splitlines()
 
     def NewContents(self):
         """Returns an iterator over the lines in the new version of file.
 
-    The new version is the file in the user's workspace, i.e. the 'right hand
-    side'.
+        The new version is the file in the user's workspace, i.e. the 'right hand
+        side'.
 
-    Contents will be empty if the file is a directory or does not exist.
-    Note: The carriage returns (LF or CR) are stripped off.
-    """
+        Contents will be empty if the file is a directory or does not exist.
+        Note: The line endings (LF or CR) are stripped off.
+        """
         if self._cached_new_contents is None:
             self._cached_new_contents = []
             try:
@@ -1062,11 +1061,11 @@ class AffectedFile(object):
     def ChangedContents(self, keeplinebreaks=False):
         """Returns a list of tuples (line number, line text) of all new lines.
 
-     This relies on the scm diff output describing each changed code section
-     with a line of the form
+        This relies on the scm diff output describing each changed code section
+        with a line of the form
 
-     ^@@ <old line num>,<old size> <new line num>,<new size> @@$
-    """
+        ^@@ <old line num>,<old size> <new line num>,<new size> @@$
+        """
         # Don't return cached results when line breaks are requested.
         if not keeplinebreaks and self._cached_changed_contents is not None:
             return self._cached_changed_contents[:]
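
For reference, a minimal parse of the hunk-header form the docstring describes; the pattern below is written from that description, not lifted from the scm code:

    import re

    header = '@@ -10,3 +12,4 @@'
    m = re.match(r'^@@ -(\d+),(\d+) \+(\d+),(\d+) @@', header)
    # Groups 3 and 4 carry the new-file start line and size: 12 and 4.
    print(m.group(3), m.group(4))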
@@ -1123,13 +1122,13 @@ class GitAffectedFile(AffectedFile):
 class Change(object):
     """Describe a change.
 
-  Used directly by the presubmit scripts to query the current change being
-  tested.
+    Used directly by the presubmit scripts to query the current change being
+    tested.
 
-  Instance members:
-    tags: Dictionary of KEY=VALUE pairs found in the change description.
-    self.KEY: equivalent to tags['KEY']
-  """
+    Instance members:
+        tags: Dictionary of KEY=VALUE pairs found in the change description.
+        self.KEY: equivalent to tags['KEY']
+    """
 
     _AFFECTED_FILES = AffectedFile
 
@@ -1182,10 +1181,10 @@ class Change(object):
     def DescriptionText(self):
         """Returns the user-entered changelist description, minus tags.
 
-    Any line in the user-provided description starting with e.g. 'FOO='
-    (whitespace permitted before and around) is considered a tag line.  Such
-    lines are stripped out of the description this function returns.
-    """
+        Any line in the user-provided description starting with e.g. 'FOO='
+        (whitespace permitted before and around) is considered a tag line.  Such
+        lines are stripped out of the description this function returns.
+        """
         return self._description_without_tags
 
     def FullDescriptionText(self):
@@ -1195,7 +1194,7 @@ class Change(object):
     def SetDescriptionText(self, description):
         """Sets the full description text (including tags) to |description|.
 
-    Also updates the list of tags."""
+        Also updates the list of tags."""
         self._full_description = description
 
         # From the description text, build up a dictionary of key/value pairs
@@ -1216,12 +1215,12 @@ class Change(object):
     def AddDescriptionFooter(self, key, value):
         """Adds the given footer to the change description.
 
-    Args:
-      key: A string with the key for the git footer. It must conform to
-        the git footers format (i.e. 'List-Of-Tokens') and will be case
-        normalized so that each token is title-cased.
-      value: A string with the value for the git footer.
-    """
+        Args:
+            key: A string with the key for the git footer. It must conform to
+                the git footers format (i.e. 'List-Of-Tokens') and will be case
+                normalized so that each token is title-cased.
+            value: A string with the value for the git footer.
+        """
         description = git_footers.add_footer(self.FullDescriptionText(),
                                              git_footers.normalize_name(key),
                                              value)
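
The title-casing mentioned here can be seen directly with git_footers (assuming depot_tools is on the import path):

    import git_footers

    # Each dash-separated token of the footer key is title-cased.
    print(git_footers.normalize_name('reviewed-by'))  # Reviewed-By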
@@ -1229,8 +1228,8 @@ class Change(object):
 
     def RepositoryRoot(self):
         """Returns the repository (checkout) root directory for this change,
-    as an absolute path.
-    """
+        as an absolute path.
+        """
         return self._local_root
 
     def __getattr__(self, attr):
@@ -1242,10 +1241,10 @@ class Change(object):
     def GitFootersFromDescription(self):
         """Return the git footers present in the description.
 
-    Returns:
-      footers: A dict of {footer: [values]} containing a multimap of the footers
-        in the change description.
-    """
+        Returns:
+            footers: A dict of {footer: [values]} containing a multimap of the footers
+                in the change description.
+        """
         return git_footers.parse_footers(self.FullDescriptionText())
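
A small illustration of the multimap shape, using a fabricated description:

    import git_footers

    desc = 'Fix crash\n\nBug: 1514505\nReviewed-by: someone@example.com\n'
    # parse_footers returns {normalized key: [values]}; here both the 'Bug'
    # and 'Reviewed-By' entries carry one-element lists.
    print(git_footers.parse_footers(desc))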
 
     def BugsFromDescription(self):
@@ -1303,13 +1302,13 @@ class Change(object):
     def AffectedFiles(self, include_deletes=True, file_filter=None):
         """Returns a list of AffectedFile instances for all files in the change.
 
-    Args:
-      include_deletes: If false, deleted files will be filtered out.
-      file_filter: An additional filter to apply.
+        Args:
+            include_deletes: If false, deleted files will be filtered out.
+            file_filter: An additional filter to apply.
 
-    Returns:
-      [AffectedFile(path, action), AffectedFile(path, action)]
-    """
+        Returns:
+            [AffectedFile(path, action), AffectedFile(path, action)]
+        """
         affected = list(filter(file_filter, self._affected_files))
 
         if include_deletes:
@@ -1342,17 +1341,17 @@ class Change(object):
     def RightHandSideLines(self):
         """An iterator over all text lines in 'new' version of changed files.
 
-    Lists lines from new or modified text files in the change.
+        Lists lines from new or modified text files in the change.
 
-    This is useful for doing line-by-line regex checks, like checking for
-    trailing whitespace.
+        This is useful for doing line-by-line regex checks, like checking for
+        trailing whitespace.
 
-    Yields:
-      a 3 tuple:
-        the AffectedFile instance of the current file;
-        integer line number (1-based); and
-        the contents of the line as a string.
-    """
+        Yields:
+            a 3 tuple:
+                the AffectedFile instance of the current file;
+                integer line number (1-based); and
+                the contents of the line as a string.
+        """
         return _RightHandSideLinesImpl(
             x for x in self.AffectedFiles(include_deletes=False)
             if x.IsTestableFile())
@@ -1381,16 +1380,16 @@ class GitChange(Change):
 def ListRelevantPresubmitFiles(files, root):
     """Finds all presubmit files that apply to a given set of source files.
 
-  If inherit-review-settings-ok is present right under root, looks for
-  PRESUBMIT.py in directories enclosing root.
+    If inherit-review-settings-ok is present right under root, looks for
+    PRESUBMIT.py in directories enclosing root.
 
-  Args:
-    files: An iterable container containing file paths.
-    root: Path where to stop searching.
+    Args:
+        files: An iterable container containing file paths.
+        root: Path where to stop searching.
 
-  Return:
-    List of absolute paths of the existing PRESUBMIT.py scripts.
-  """
+    Return:
+        List of absolute paths of the existing PRESUBMIT.py scripts.
+    """
     files = [normpath(os.path.join(root, f)) for f in files]
 
     # List all the individual directories containing files.
@@ -1435,25 +1434,25 @@ def ListRelevantPresubmitFiles(files, root):
 class GetPostUploadExecuter(object):
     def __init__(self, change, gerrit_obj):
         """
-    Args:
-      change: The Change object.
-      gerrit_obj: provides basic Gerrit codereview functionality.
-    """
+        Args:
+            change: The Change object.
+            gerrit_obj: provides basic Gerrit codereview functionality.
+            """
         self.change = change
         self.gerrit = gerrit_obj
 
     def ExecPresubmitScript(self, script_text, presubmit_path):
         """Executes PostUploadHook() from a single presubmit script.
-    Caller is responsible for validating whether the hook should be executed
-    and should only call this function if it should be.
+        Caller is responsible for validating whether the hook should be executed
+        and should only call this function if it should be.
 
-    Args:
-      script_text: The text of the presubmit script.
-      presubmit_path: Project script to run.
+        Args:
+            script_text: The text of the presubmit script.
+            presubmit_path: Project script to run.
 
-    Return:
-      A list of results objects.
-    """
+        Return:
+            A list of results objects.
+        """
         # Change to the presubmit file's directory to support local imports.
         presubmit_dir = os.path.dirname(presubmit_path)
         main_path = os.getcwd()
@@ -1500,11 +1499,11 @@ def _MergeMasters(masters1, masters2):
 def DoPostUploadExecuter(change, gerrit_obj, verbose):
     """Execute the post upload hook.
 
-  Args:
-    change: The Change object.
-    gerrit_obj: The GerritAccessor object.
-    verbose: Prints debug info.
-  """
+    Args:
+        change: The Change object.
+        gerrit_obj: The GerritAccessor object.
+        verbose: Prints debug info.
+    """
     python_version = 'Python %s' % sys.version_info.major
     sys.stdout.write('Running %s post upload checks ...\n' % python_version)
     presubmit_files = ListRelevantPresubmitFiles(change.LocalPaths(),
@@ -1555,16 +1554,17 @@ class PresubmitExecuter(object):
                  parallel=False,
                  no_diffs=False):
         """
-    Args:
-      change: The Change object.
-      committing: True if 'git cl land' is running, False if 'git cl upload' is.
-      gerrit_obj: provides basic Gerrit codereview functionality.
-      dry_run: if true, some Checks will be skipped.
-      parallel: if true, all tests reported via input_api.RunTests for all
+        Args:
+            change: The Change object.
+            committing: True if 'git cl land' is running, False if
+                'git cl upload' is.
+            gerrit_obj: provides basic Gerrit codereview functionality.
+            dry_run: if true, some Checks will be skipped.
+            parallel: if true, all tests reported via input_api.RunTests for all
                 PRESUBMIT files will be run in parallel.
-      no_diffs: if true, implies that --files or --all was specified so some
+            no_diffs: if true, implies that --files or --all was specified so some
                 checks can be skipped, and some errors will be messages.
-    """
+        """
         self.change = change
         self.committing = committing
         self.gerrit = gerrit_obj
@@ -1577,17 +1577,17 @@ class PresubmitExecuter(object):
 
     def ExecPresubmitScript(self, script_text, presubmit_path):
         """Executes a single presubmit script.
-    Caller is responsible for validating whether the hook should be executed
-    and should only call this function if it should be.
+        Caller is responsible for validating whether the hook should be executed
+        and should only call this function if it should be.
 
-    Args:
-      script_text: The text of the presubmit script.
-      presubmit_path: The path to the presubmit file (this will be reported via
-        input_api.PresubmitLocalPath()).
+        Args:
+            script_text: The text of the presubmit script.
+            presubmit_path: The path to the presubmit file (this will be
+                reported via input_api.PresubmitLocalPath()).
 
-    Return:
-      A list of result objects, empty if no problems.
-    """
+        Return:
+            A list of result objects, empty if no problems.
+        """
         # Change to the presubmit file's directory to support local imports.
         presubmit_dir = os.path.dirname(presubmit_path)
         main_path = os.getcwd()
@@ -1702,16 +1702,16 @@ class PresubmitExecuter(object):
     def _run_check_function(self, function_name, context, sink, presubmit_path):
         """Evaluates and returns the result of a given presubmit function.
 
-    If sink is given, the result of the presubmit function will be reported
-    to the ResultSink.
+        If sink is given, the result of the presubmit function will be reported
+        to the ResultSink.
 
-    Args:
-      function_name: the name of the presubmit function to evaluate
-      context: a context dictionary in which the function will be evaluated
-      sink: an instance of ResultSink. None, by default.
-    Returns:
-      the result of the presubmit function call.
-    """
+        Args:
+            function_name: the name of the presubmit function to evaluate
+            context: a context dictionary in which the function will be evaluated
+            sink: an instance of ResultSink. Defaults to None.
+        Returns:
+            the result of the presubmit function call.
+        """
         start_time = time_time()
         try:
             result = eval(function_name + '(*__args)', context)
@@ -1736,7 +1736,7 @@ class PresubmitExecuter(object):
 
     def _check_result_type(self, result):
         """Helper function which ensures result is a list, and all elements are
-    instances of OutputApi.PresubmitResult"""
+        instances of OutputApi.PresubmitResult."""
         if not isinstance(result, (tuple, list)):
             raise PresubmitFailure(
                 'Presubmit functions must return a tuple or list')
@@ -1776,29 +1776,29 @@ def DoPresubmitChecks(change,
                       no_diffs=False):
     """Runs all presubmit checks that apply to the files in the change.
 
-  This finds all PRESUBMIT.py files in directories enclosing the files in the
-  change (up to the repository root) and calls the relevant entrypoint function
-  depending on whether the change is being committed or uploaded.
-
-  Prints errors, warnings and notifications.  Prompts the user for warnings
-  when needed.
-
-  Args:
-    change: The Change object.
-    committing: True if 'git cl land' is running, False if 'git cl upload' is.
-    verbose: Prints debug info.
-    default_presubmit: A default presubmit script to execute in any case.
-    may_prompt: Enable (y/n) questions on warning or error. If False,
-                any questions are answered with yes by default.
-    gerrit_obj: provides basic Gerrit codereview functionality.
-    dry_run: if true, some Checks will be skipped.
-    parallel: if true, all tests specified by input_api.RunTests in all
-              PRESUBMIT files will be run in parallel.
-    no_diffs: if true, implies that --files or --all was specified so some
-              checks can be skipped, and some errors will be messages.
-  Return:
-    1 if presubmit checks failed or 0 otherwise.
-  """
+    This finds all PRESUBMIT.py files in directories enclosing the files in the
+    change (up to the repository root) and calls the relevant entrypoint function
+    depending on whether the change is being committed or uploaded.
+
+    Prints errors, warnings and notifications.  Prompts the user for warnings
+    when needed.
+
+    Args:
+        change: The Change object.
+        committing: True if 'git cl land' is running, False if 'git cl upload' is.
+        verbose: Prints debug info.
+        default_presubmit: A default presubmit script to execute in any case.
+        may_prompt: Enable (y/n) questions on warning or error. If False,
+            any questions are answered with yes by default.
+        gerrit_obj: provides basic Gerrit codereview functionality.
+        dry_run: if true, some Checks will be skipped.
+        parallel: if true, all tests specified by input_api.RunTests in all
+            PRESUBMIT files will be run in parallel.
+        no_diffs: if true, implies that --files or --all was specified so some
+            checks can be skipped, and some errors will be messages.
+    Return:
+        1 if presubmit checks failed or 0 otherwise.
+    """
     with setup_environ({'PYTHONDONTWRITEBYTECODE': '1'}):
         python_version = 'Python %s' % sys.version_info.major
         if committing:
@@ -1935,12 +1935,12 @@ def _parse_files(args, recursive):
 def _parse_change(parser, options):
     """Process change options.
 
-  Args:
-    parser: The parser used to parse the arguments from command line.
-    options: The arguments parsed from command line.
-  Returns:
-    A GitChange if the change root is a git repository, or a Change otherwise.
-  """
+    Args:
+        parser: The parser used to parse the arguments from command line.
+        options: The arguments parsed from command line.
+    Returns:
+        A GitChange if the change root is a git repository, or a Change otherwise.
+    """
     if options.files and options.all_files:
         parser.error('<files> cannot be specified when --all-files is set.')
 
@@ -1982,15 +1982,15 @@ def _parse_change(parser, options):
 def _parse_gerrit_options(parser, options):
     """Process gerrit options.
 
-  SIDE EFFECTS: Modifies options.author and options.description from Gerrit if
-  options.gerrit_fetch is set.
+    SIDE EFFECTS: Modifies options.author and options.description from Gerrit if
+    options.gerrit_fetch is set.
 
-  Args:
-    parser: The parser used to parse the arguments from command line.
-    options: The arguments parsed from command line.
-  Returns:
-    A GerritAccessor object if options.gerrit_url is set, or None otherwise.
-  """
+    Args:
+        parser: The parser used to parse the arguments from command line.
+        options: The arguments parsed from command line.
+    Returns:
+        A GerritAccessor object if options.gerrit_url is set, or None otherwise.
+    """
     gerrit_obj = None
     if options.gerrit_url:
         gerrit_obj = GerritAccessor(url=options.gerrit_url,

+ 19 - 18
rdb_wrapper.py

@@ -31,12 +31,12 @@ class ResultSink(object):
     def report(self, function_name, status, elapsed_time, failure_reason=None):
         """Reports the result and elapsed time of a presubmit function call.
 
-    Args:
-      function_name (str): The name of the presubmit function
-      status: the status to report the function call with
-      elapsed_time: the time taken to invoke the presubmit function
-      failure_reason (str or None): if set, the failure reason
-    """
+        Args:
+            function_name (str): The name of the presubmit function
+            status: the status to report the function call with
+            elapsed_time: the time taken to invoke the presubmit function
+            failure_reason (str or None): if set, the failure reason
+        """
         tr = {
             'testId': self._prefix + function_name,
             'status': status,
@@ -56,19 +56,20 @@ class ResultSink(object):
 def client(prefix):
     """Returns a client for ResultSink.
 
-  This is a context manager that returns a client for ResultSink,
-  if LUCI_CONTEXT with a section of result_sink is present. When the context
-  is closed, all the connetions to the SinkServer are closed.
+    This is a context manager that returns a client for ResultSink,
+    if LUCI_CONTEXT with a section of result_sink is present. When the context
+    is closed, all the connections to the SinkServer are closed.
 
-  Args:
-    prefix: A prefix to be added to the test ID of reported function names.
-      The format for this is
-          presubmit:gerrit_host/folder/to/repo:path/to/file/
-      for example,
-          presubmit:chromium-review.googlesource.com/chromium/src/:services/viz/
-  Returns:
-    An instance of ResultSink() if the luci context is present. None, otherwise.
-  """
+    Args:
+        prefix: A prefix to be added to the test ID of reported function
+            names. The format for this is
+                presubmit:gerrit_host/folder/to/repo:path/to/file/
+            for example,
+                presubmit:chromium-review.googlesource.com/chromium/src/:services/viz/
+    Returns:
+        An instance of ResultSink() if the luci context is present. None,
+        otherwise.
+    """
     luci_ctx = os.environ.get('LUCI_CONTEXT')
     if not luci_ctx:
         yield None

+ 38 - 38
reclient_helper.py

@@ -87,13 +87,13 @@ def find_ninja_out_dir(args):
 def find_cache_dir(tmp_dir):
     """Helper to find the correct cache directory for a build.
 
-  tmp_dir should be a build specific temp directory within the out directory.
+    tmp_dir should be a build specific temp directory within the out directory.
 
-  If this is called from within a gclient checkout, the cache dir will be:
-  <gclient_root>/.reproxy_cache/md5(tmp_dir)/
-  If this is not called from within a gclient checkout, the cache dir will be:
-  tmp_dir/cache
-  """
+    If this is called from within a gclient checkout, the cache dir will be:
+    <gclient_root>/.reproxy_cache/md5(tmp_dir)/
+    If this is not called from within a gclient checkout, the cache dir will be:
+    tmp_dir/cache
+    """
     gclient_root = gclient_paths.FindGclientRoot(os.getcwd())
     if gclient_root:
         return os.path.join(gclient_root, '.reproxy_cache',
@@ -134,13 +134,13 @@ def get_hostname():
 def set_reproxy_metrics_flags(tool):
     """Helper to setup metrics collection flags for reproxy.
 
-  The following env vars are set if not already set:
-    RBE_metrics_project=chromium-reclient-metrics
-    RBE_invocation_id=$AUTONINJA_BUILD_ID
-    RBE_metrics_table=rbe_metrics.builds
-    RBE_metrics_labels=source=developer,tool={tool}
-    RBE_metrics_prefix=go.chromium.org
-  """
+    The following env vars are set if not already set:
+        RBE_metrics_project=chromium-reclient-metrics
+        RBE_invocation_id=$AUTONINJA_BUILD_ID
+        RBE_metrics_table=rbe_metrics.builds
+        RBE_metrics_labels=source=developer,tool={tool}
+        RBE_metrics_prefix=go.chromium.org
+    """
     autoninja_id = os.environ.get("AUTONINJA_BUILD_ID")
     if autoninja_id is not None:
         os.environ.setdefault("RBE_invocation_id",
@@ -169,31 +169,31 @@ def datetime_now():
 def set_reproxy_path_flags(out_dir, make_dirs=True):
     """Helper to setup the logs and cache directories for reclient.
 
-  Creates the following directory structure if make_dirs is true:
-  If in a gclient checkout
-  out_dir/
-    .reproxy_tmp/
-      logs/
-  <gclient_root>
-    .reproxy_cache/
-      md5(out_dir/.reproxy_tmp)/
-
-  If not in a gclient checkout
-  out_dir/
-    .reproxy_tmp/
-      logs/
-      cache/
-
-  The following env vars are set if not already set:
-    RBE_output_dir=out_dir/.reproxy_tmp/logs
-    RBE_proxy_log_dir=out_dir/.reproxy_tmp/logs
-    RBE_log_dir=out_dir/.reproxy_tmp/logs
-    RBE_cache_dir=out_dir/.reproxy_tmp/cache
-  *Nix Only:
-    RBE_server_address=unix://out_dir/.reproxy_tmp/reproxy.sock
-  Windows Only:
-    RBE_server_address=pipe://md5(out_dir/.reproxy_tmp)/reproxy.pipe
-  """
+    Creates the following directory structure if make_dirs is true:
+    If in a gclient checkout
+    out_dir/
+        .reproxy_tmp/
+            logs/
+    <gclient_root>
+        .reproxy_cache/
+            md5(out_dir/.reproxy_tmp)/
+
+    If not in a gclient checkout
+    out_dir/
+        .reproxy_tmp/
+            logs/
+            cache/
+
+    The following env vars are set if not already set:
+        RBE_output_dir=out_dir/.reproxy_tmp/logs
+        RBE_proxy_log_dir=out_dir/.reproxy_tmp/logs
+        RBE_log_dir=out_dir/.reproxy_tmp/logs
+        RBE_cache_dir=out_dir/.reproxy_tmp/cache
+    *Nix Only:
+        RBE_server_address=unix://out_dir/.reproxy_tmp/reproxy.sock
+    Windows Only:
+        RBE_server_address=pipe://md5(out_dir/.reproxy_tmp)/reproxy.pipe
+    """
     os.environ.setdefault("AUTONINJA_BUILD_ID", str(uuid.uuid4()))
     tmp_dir = os.path.abspath(os.path.join(out_dir, '.reproxy_tmp'))
     log_dir = os.path.join(tmp_dir, 'logs')
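
To make the md5(...) naming above concrete, a sketch of deriving the per-build cache directory component from the tmp dir path (the out directory is invented):

    import hashlib
    import os

    tmp_dir = os.path.abspath(os.path.join('out', 'Default', '.reproxy_tmp'))
    # Directory component used under .reproxy_cache/ in the layout above.
    print(hashlib.md5(tmp_dir.encode()).hexdigest())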

+ 1 - 1
reclient_metrics.py

@@ -106,7 +106,7 @@ def is_googler(config=None):
 def check_status(ninja_out):
     """Checks metrics collections status and shows notice to user if needed.
 
-  Returns True if metrics should be collected."""
+    Returns True if metrics should be collected."""
     config = load_config()
     if not is_googler(config):
         return False

+ 3 - 3
roll_dep.py

@@ -61,7 +61,7 @@ def check_call(*args, **kwargs):
 
 def return_code(*args, **kwargs):
     """subprocess2.call() passing shell=True on Windows for git and
-  subprocess2.DEVNULL for stdout and stderr."""
+    subprocess2.DEVNULL for stdout and stderr."""
     kwargs.setdefault('shell', NEED_SHELL)
     kwargs.setdefault('stdout', subprocess2.DEVNULL)
     kwargs.setdefault('stderr', subprocess2.DEVNULL)
@@ -166,8 +166,8 @@ def get_submodule_rev(submodule):
 
 def calculate_roll(full_dir, dependency, roll_to):
     """Calculates the roll for a dependency by processing gclient_dict, and
-  fetching the dependency via git.
-  """
+    fetching the dependency via git.
+    """
     # if the super-project uses submodules, get rev directly using git.
     if is_submoduled():
         head = get_submodule_rev(dependency)

+ 15 - 15
scm.py

@@ -72,8 +72,8 @@ def GenFakeDiff(filename):
 def determine_scm(root):
     """Similar to upload.py's version but much simpler.
 
-  Returns 'git' or None.
-  """
+    Returns 'git' or None.
+    """
     if os.path.isdir(os.path.join(root, '.git')):
         return 'git'
 
@@ -131,7 +131,7 @@ class GIT(object):
         # type: (str, str, Optional[str]) -> Sequence[Tuple[str, str]]
         """Returns git status.
 
-    Returns an array of (status, file) tuples."""
+        Returns an array of (status, file) tuples."""
         if end_commit is None:
             end_commit = ''
         if upstream_branch is None:
@@ -210,7 +210,7 @@ class GIT(object):
     @staticmethod
     def GetRemoteHeadRef(cwd, url, remote):
         """Returns the full default remote branch reference, e.g.
-    'refs/remotes/origin/main'."""
+        'refs/remotes/origin/main'."""
         if os.path.exists(cwd):
             try:
                 # Try using local git copy first
@@ -253,8 +253,8 @@ class GIT(object):
     @staticmethod
     def FetchUpstreamTuple(cwd, branch=None):
         """Returns a tuple containing remote and remote ref,
-       e.g. 'origin', 'refs/heads/main'
-    """
+        e.g. 'origin', 'refs/heads/main'
+        """
         try:
             branch = branch or GIT.GetBranch(cwd)
         except subprocess2.CalledProcessError:
@@ -286,10 +286,10 @@ class GIT(object):
     def RefToRemoteRef(ref, remote):
         """Convert a checkout ref to the equivalent remote ref.
 
-    Returns:
-      A tuple of the remote ref's (common prefix, unique suffix), or None if it
-      doesn't appear to refer to a remote ref (e.g. it's a commit hash).
-    """
+        Returns:
+            A tuple of the remote ref's (common prefix, unique suffix), or None if it
+            doesn't appear to refer to a remote ref (e.g. it's a commit hash).
+        """
         # TODO(mmoss): This is just a brute-force mapping based of the expected
         # git config. It's a bit better than the even more brute-force
         # replace('heads', ...), but could still be smarter (like maybe actually
@@ -360,8 +360,8 @@ class GIT(object):
                      files=None):
         """Diffs against the upstream branch or optionally another branch.
 
-    full_move means that move or copy operations should completely recreate the
-    files, usually in the prospect to apply the patch for a try job."""
+        full_move means that move or copy operations should completely
+        recreate the files, usually so that the patch can be applied for a
+        try job."""
         if not branch:
             branch = GIT.GetUpstreamBranch(cwd)
         command = [
@@ -429,7 +429,7 @@ class GIT(object):
     @staticmethod
     def GetCheckoutRoot(cwd):
         """Returns the top level directory of a git checkout as an absolute path.
-    """
+        """
         root = GIT.Capture(['rev-parse', '--show-cdup'], cwd=cwd)
         return os.path.abspath(os.path.join(cwd, root))
 
@@ -488,8 +488,8 @@ class GIT(object):
     def IsValidRevision(cwd, rev, sha_only=False):
         """Verifies the revision is a proper git revision.
 
-    sha_only: Fail unless rev is a sha hash.
-    """
+        sha_only: Fail unless rev is a sha hash.
+        """
         try:
             sha = GIT.ResolveCommit(cwd, rev)
         except subprocess2.CalledProcessError:

+ 2 - 2
setup_color.py

@@ -16,8 +16,8 @@ OUT_TYPE = 'unknown'
 def enable_native_ansi():
     """Enables native ANSI sequences in console. Windows 10 only.
 
-  Returns whether successful.
-  """
+    Returns whether successful.
+    """
     kernel32 = ctypes.windll.kernel32
     ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x04
 

+ 64 - 64
split_cl.py

@@ -39,9 +39,9 @@ def EnsureInGitRepository():
 def CreateBranchForDirectories(prefix, directories, upstream):
     """Creates a branch named |prefix| + "_" + |directories[0]| + "_split".
 
-  Return false if the branch already exists. |upstream| is used as upstream for
-  the created branch.
-  """
+    Return false if the branch already exists. |upstream| is used as upstream
+    for the created branch.
+    """
     existing_branches = set(git.branches(use_limit=False))
     branch_name = prefix + '_' + directories[0] + '_split'
     if branch_name in existing_branches:
@@ -53,7 +53,7 @@ def CreateBranchForDirectories(prefix, directories, upstream):
 def FormatDirectoriesForPrinting(directories, prefix=None):
     """Formats directory list for printing
 
-  Uses dedicated format for single-item list."""
+    Uses dedicated format for single-item list."""
 
     prefixed = directories
     if prefix:
@@ -71,9 +71,9 @@ def FormatDescriptionOrComment(txt, directories):
 def AddUploadedByGitClSplitToDescription(description):
     """Adds a 'This CL was uploaded by git cl split.' line to |description|.
 
-  The line is added before footers, or at the end of |description| if it has no
-  footers.
-  """
+    The line is added before footers, or at the end of |description| if it has
+    no footers.
+    """
     split_footers = git_footers.split_footers(description)
     lines = split_footers[0]
     if lines[-1] and not lines[-1].isspace():
@@ -89,21 +89,21 @@ def UploadCl(refactor_branch, refactor_branch_upstream, directories, files,
              cq_dry_run, enable_auto_submit, topic, repository_root):
     """Uploads a CL with all changes to |files| in |refactor_branch|.
 
-  Args:
-    refactor_branch: Name of the branch that contains the changes to upload.
-    refactor_branch_upstream: Name of the upstream of |refactor_branch|.
-    directories: Paths to the directories that contain the OWNERS files for
-        which to upload a CL.
-    files: List of AffectedFile instances to include in the uploaded CL.
-    description: Description of the uploaded CL.
-    comment: Comment to post on the uploaded CL.
-    reviewers: A set of reviewers for the CL.
-    changelist: The Changelist class.
-    cmd_upload: The function associated with the git cl upload command.
-    cq_dry_run: If CL uploads should also do a cq dry run.
-    enable_auto_submit: If CL uploads should also enable auto submit.
-    topic: Topic to associate with uploaded CLs.
-  """
+    Args:
+        refactor_branch: Name of the branch that contains the changes to upload.
+        refactor_branch_upstream: Name of the upstream of |refactor_branch|.
+        directories: Paths to the directories that contain the OWNERS files for
+            which to upload a CL.
+        files: List of AffectedFile instances to include in the uploaded CL.
+        description: Description of the uploaded CL.
+        comment: Comment to post on the uploaded CL.
+        reviewers: A set of reviewers for the CL.
+        changelist: The Changelist class.
+        cmd_upload: The function associated with the git cl upload command.
+        cq_dry_run: If CL uploads should also do a cq dry run.
+        enable_auto_submit: If CL uploads should also enable auto submit.
+        topic: Topic to associate with uploaded CLs.
+    """
     # Create a branch.
     if not CreateBranchForDirectories(refactor_branch, directories,
                                       refactor_branch_upstream):
@@ -165,10 +165,10 @@ def UploadCl(refactor_branch, refactor_branch_upstream, directories, files,
 def GetFilesSplitByOwners(files, max_depth):
     """Returns a map of files split by OWNERS file.
 
-  Returns:
-    A map where keys are paths to directories containing an OWNERS file and
-    values are lists of files sharing an OWNERS file.
-  """
+    Returns:
+        A map where keys are paths to directories containing an OWNERS file and
+        values are lists of files sharing an OWNERS file.
+    """
     files_split_by_owners = {}
     for action, path in files:
         # normpath() is important to normalize separators here, in preparation
@@ -191,17 +191,17 @@ def PrintClInfo(cl_index, num_cls, directories, file_paths, description,
                 reviewers, enable_auto_submit, topic):
     """Prints info about a CL.
 
-  Args:
-    cl_index: The index of this CL in the list of CLs to upload.
-    num_cls: The total number of CLs that will be uploaded.
-    directories: Paths to directories that contains the OWNERS files for which
-        to upload a CL.
-    file_paths: A list of files in this CL.
-    description: The CL description.
-    reviewers: A set of reviewers for this CL.
-    enable_auto_submit: If the CL should also have auto submit enabled.
-    topic: Topic to set for this CL.
-  """
+    Args:
+        cl_index: The index of this CL in the list of CLs to upload.
+        num_cls: The total number of CLs that will be uploaded.
+        directories: Paths to directories that contain the OWNERS files for
+            which to upload a CL.
+        file_paths: A list of files in this CL.
+        description: The CL description.
+        reviewers: A set of reviewers for this CL.
+        enable_auto_submit: If the CL should also have auto submit enabled.
+        topic: Topic to set for this CL.
+    """
     description_lines = FormatDescriptionOrComment(description,
                                                    directories).splitlines()
     indented_description = '\n'.join(['    ' + l for l in description_lines])
@@ -220,21 +220,21 @@ def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
             cq_dry_run, enable_auto_submit, max_depth, topic, repository_root):
     """"Splits a branch into smaller branches and uploads CLs.
 
-  Args:
-    description_file: File containing the description of uploaded CLs.
-    comment_file: File containing the comment of uploaded CLs.
-    changelist: The Changelist class.
-    cmd_upload: The function associated with the git cl upload command.
-    dry_run: Whether this is a dry run (no branches or CLs created).
-    cq_dry_run: If CL uploads should also do a cq dry run.
-    enable_auto_submit: If CL uploads should also enable auto submit.
-    max_depth: The maximum directory depth to search for OWNERS files. A value
-               less than 1 means no limit.
-    topic: Topic to associate with split CLs.
-
-  Returns:
-    0 in case of success. 1 in case of error.
-  """
+    Args:
+        description_file: File containing the description of uploaded CLs.
+        comment_file: File containing the comment of uploaded CLs.
+        changelist: The Changelist class.
+        cmd_upload: The function associated with the git cl upload command.
+        dry_run: Whether this is a dry run (no branches or CLs created).
+        cq_dry_run: If CL uploads should also do a cq dry run.
+        enable_auto_submit: If CL uploads should also enable auto submit.
+        max_depth: The maximum directory depth to search for OWNERS files. A
+            value less than 1 means no limit.
+        topic: Topic to associate with split CLs.
+
+    Returns:
+        0 in case of success. 1 in case of error.
+    """
     description = AddUploadedByGitClSplitToDescription(
         gclient_utils.FileRead(description_file))
     comment = gclient_utils.FileRead(comment_file) if comment_file else None
@@ -320,12 +320,12 @@ def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
 def CheckDescriptionBugLink(description):
     """Verifies that the description contains a bug link.
 
-  Examples:
-      Bug: 123
-      Bug: chromium:456
+    Examples:
+        Bug: 123
+        Bug: chromium:456
 
-  Prompts user if the description does not contain a bug link.
-  """
+    Prompts user if the description does not contain a bug link.
+    """
     bug_pattern = re.compile(r"^Bug:\s*(?:[a-zA-Z]+:)?[0-9]+", re.MULTILINE)
     matches = re.findall(bug_pattern, description)
     answer = 'y'
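
The bug_pattern shown in this hunk can be exercised on its own; a quick check of what it accepts:

import re

bug_pattern = re.compile(r"^Bug:\s*(?:[a-zA-Z]+:)?[0-9]+", re.MULTILINE)
assert bug_pattern.search('Fix widget\n\nBug: 123\n')
assert bug_pattern.search('Fix widget\n\nBug: chromium:456\n')
assert not bug_pattern.search('Fix widget\n\nNo bug link here\n')
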
@@ -338,13 +338,13 @@ def CheckDescriptionBugLink(description):
 def SelectReviewersForFiles(cl, author, files, max_depth):
     """Selects reviewers for passed-in files
 
-  Args:
-    cl: Changelist class instance
-    author: Email of person running 'git cl split'
-    files: List of files
-    max_depth: The maximum directory depth to search for OWNERS files. A value
-               less than 1 means no limit.
-  """
+    Args:
+        cl: Changelist class instance
+        author: Email of person running 'git cl split'
+        files: List of files
+        max_depth: The maximum directory depth to search for OWNERS files.
+            A value less than 1 means no limit.
+    """
     info_split_by_owners = GetFilesSplitByOwners(files, max_depth)
 
     info_split_by_reviewers = {}
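
A rough standalone model of the grouping GetFilesSplitByOwners() describes: each file lands under its nearest ancestor directory holding an OWNERS file. Here |owners_dirs| is a hypothetical stand-in for the real OWNERS lookup, POSIX paths are assumed, and the max_depth truncation is omitted:

import os

def files_split_by_owners(files, owners_dirs):
    # files: iterable of (action, path); owners_dirs: dirs with an OWNERS file.
    split = {}
    for _action, path in files:
        directory = os.path.dirname(os.path.normpath(path))
        while directory and directory not in owners_dirs:
            directory = os.path.dirname(directory)
        split.setdefault(directory or '.', []).append(path)
    return split

files = [('M', 'ui/views/button.cc'), ('A', 'ui/README.md')]
print(files_split_by_owners(files, {'ui/views', 'ui'}))
# {'ui/views': ['ui/views/button.cc'], 'ui': ['ui/README.md']}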

+ 25 - 24
subcommand.py

@@ -56,8 +56,8 @@ def usage(more):
 def epilog(text):
     """Adds an 'epilog' property to a CMD function.
 
-  It will be shown in the epilog. Usually useful for examples.
-  """
+    It will be shown in the epilog. Usually useful for examples.
+    """
     def hook(fn):
         fn.epilog = text
         return fn
@@ -79,8 +79,8 @@ def CMDhelp(parser, args):
 def _get_color_module():
     """Returns the colorama module if available.
 
-  If so, assumes colors are supported and return the module handle.
-  """
+    If so, assumes colors are supported and return the module handle.
+    """
     return sys.modules.get('colorama') or sys.modules.get(
         'third_party.colorama')
 
@@ -95,29 +95,30 @@ class CommandDispatcher(object):
         """module is the name of the main python module where to look for
         commands.
 
-    The python builtin variable __name__ MUST be used for |module|. If the
-    script is executed in the form 'python script.py', __name__ == '__main__'
-    and sys.modules['script'] doesn't exist. On the other hand if it is unit
-    tested, __main__ will be the unit test's module so it has to reference to
-    itself with 'script'. __name__ always match the right value.
-    """
+        The python builtin variable __name__ MUST be used for |module|. If the
+        script is executed in the form 'python script.py',
+        __name__ == '__main__' and sys.modules['script'] doesn't exist. On the
+        other hand if it is unit tested, __main__ will be the unit test's
+        module so it has to reference itself with 'script'. __name__ always
+        matches the right value.
+        """
         self.module = sys.modules[module]
 
     def enumerate_commands(self):
         """Returns a dict of command and their handling function.
 
-    The commands must be in the '__main__' modules. To import a command from a
-    submodule, use:
-      from mysubcommand import CMDfoo
+        The commands must be in the '__main__' modules. To import a command
+        from a submodule, use:
+            from mysubcommand import CMDfoo
 
-    Automatically adds 'help' if not already defined.
+        Automatically adds 'help' if not already defined.
 
-    Normalizes '_' in the commands to '-'.
+        Normalizes '_' in the commands to '-'.
 
-    A command can be effectively disabled by defining a global variable to None,
-    e.g.:
-      CMDhelp = None
-    """
+        A command can be effectively disabled by defining a global variable to
+        None, e.g.:
+            CMDhelp = None
+        """
         cmds = dict((_function_to_name(name), getattr(self.module, name))
                     for name in dir(self.module) if name.startswith('CMD'))
         cmds.setdefault('help', CMDhelp)
@@ -126,9 +127,9 @@ class CommandDispatcher(object):
     def find_nearest_command(self, name_asked):
         """Retrieves the function to handle a command as supplied by the user.
 
-    It automatically tries to guess the _intended command_ by handling typos
-    and/or incomplete names.
-    """
+        It automatically tries to guess the _intended command_ by handling typos
+        and/or incomplete names.
+        """
         commands = self.enumerate_commands()
         name_to_dash = name_asked.replace('_', '-')
         if name_to_dash in commands:
@@ -225,8 +226,8 @@ class CommandDispatcher(object):
     def execute(self, parser, args):
         """Dispatches execution to the right command.
 
-    Fallbacks to 'help' if not disabled.
-    """
+        Fallbacks to 'help' if not disabled.
+        """
         # Unconditionally disable format_description() and format_epilog().
         # Technically, a formatter should be used but it's not worth (yet) the
         # trouble.
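
The discovery rule enumerate_commands() documents (CMD-prefixed globals, '_' normalized to '-') fits in a few lines; a self-contained sketch, not the real dispatcher:

import sys

def CMDhello_world(args):
    print('hello', *args)
    return 0

def enumerate_commands(module_name):
    # Collect CMD* functions from the module, normalizing '_' to '-'.
    module = sys.modules[module_name]
    return {name[3:].replace('_', '-'): getattr(module, name)
            for name in dir(module) if name.startswith('CMD')}

if __name__ == '__main__':
    commands = enumerate_commands(__name__)  # {'hello-world': CMDhello_world}
    name = sys.argv[1] if len(sys.argv) > 1 else 'hello-world'
    sys.exit(commands[name](sys.argv[2:]))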

+ 35 - 35
subprocess2.py

@@ -7,7 +7,6 @@
 In theory you shouldn't need anything else in subprocess, or this module failed.
 """
 
-import codecs
 import errno
 import logging
 import os
@@ -62,10 +61,10 @@ def kill_pid(pid):
 def get_english_env(env):
     """Forces LANG and/or LANGUAGE to be English.
 
-  Forces encoding to utf-8 for subprocesses.
+    Forces encoding to utf-8 for subprocesses.
 
-  Returns None if it is unnecessary.
-  """
+    Returns None if it is unnecessary.
+    """
     if sys.platform == 'win32':
         return None
     env = env or os.environ
@@ -91,16 +90,17 @@ def get_english_env(env):
 class Popen(subprocess.Popen):
     """Wraps subprocess.Popen() with various workarounds.
 
-  - Forces English output since it's easier to parse the stdout if it is always
-    in English.
-  - Sets shell=True on windows by default. You can override this by forcing
-    shell parameter to a value.
-  - Adds support for DEVNULL to not buffer when not needed.
-  - Adds self.start property.
-
-  Note: Popen() can throw OSError when cwd or args[0] doesn't exist. Translate
-  exceptions generated by cygwin when it fails trying to emulate fork().
-  """
+    - Forces English output since it's easier to parse the stdout if it is
+        always in English.
+    - Sets shell=True on windows by default. You can override this by forcing
+        shell parameter to a value.
+    - Adds support for DEVNULL to not buffer when not needed.
+    - Adds self.start property.
+
+    Note: Popen() can throw OSError when cwd or args[0] doesn't exist.
+    Translate exceptions generated by cygwin when it fails trying to emulate
+    fork().
+    """
     # subprocess.Popen.__init__() is not threadsafe; there is a race between
     # creating the exec-error pipe for the child and setting it to CLOEXEC
     # during which another thread can fork and cause the pipe to be inherited by
@@ -164,12 +164,12 @@ class Popen(subprocess.Popen):
 def communicate(args, **kwargs):
     """Wraps subprocess.Popen().communicate().
 
-  Returns ((stdout, stderr), returncode).
+    Returns ((stdout, stderr), returncode).
 
-  - If the subprocess runs for |nag_timer| seconds without producing terminal
-    output, print a warning to stderr.
-  - Automatically passes stdin content as input so do not specify stdin=PIPE.
-  """
+    - If the subprocess runs for |nag_timer| seconds without producing terminal
+        output, print a warning to stderr.
+    - Automatically passes stdin content as input so do not specify stdin=PIPE.
+    """
     stdin = None
     # When stdin is passed as an argument, use it as the actual input data and
     # set the Popen() parameter accordingly.
@@ -184,12 +184,12 @@ def communicate(args, **kwargs):
 def call(args, **kwargs):
     """Emulates subprocess.call().
 
-  Automatically convert stdout=PIPE or stderr=PIPE to DEVNULL.
-  In no case they can be returned since no code path raises
-  subprocess2.CalledProcessError.
+    Automatically converts stdout=PIPE or stderr=PIPE to DEVNULL.
+    In no case can they be returned, since no code path raises
+    subprocess2.CalledProcessError.
 
-  Returns exit code.
-  """
+    Returns exit code.
+    """
     if kwargs.get('stdout') == PIPE:
         kwargs['stdout'] = DEVNULL
     if kwargs.get('stderr') == PIPE:
@@ -200,8 +200,8 @@ def call(args, **kwargs):
 def check_call_out(args, **kwargs):
     """Improved version of subprocess.check_call().
 
-  Returns (stdout, stderr), unlike subprocess.check_call().
-  """
+    Returns (stdout, stderr), unlike subprocess.check_call().
+    """
     out, returncode = communicate(args, **kwargs)
     if returncode:
         raise CalledProcessError(returncode, args, kwargs.get('cwd'), out[0],
@@ -218,11 +218,11 @@ def check_call(args, **kwargs):
 def capture(args, **kwargs):
     """Captures stdout of a process call and returns it.
 
-  Returns stdout.
+    Returns stdout.
 
-  - Discards returncode.
-  - Blocks stdin by default if not specified since no output will be visible.
-  """
+    - Discards returncode.
+    - Blocks stdin by default if not specified since no output will be visible.
+    """
     kwargs.setdefault('stdin', DEVNULL)
 
     # Like check_output, deny the caller from using stdout arg.
@@ -232,12 +232,12 @@ def capture(args, **kwargs):
 def check_output(args, **kwargs):
     """Emulates subprocess.check_output().
 
-  Captures stdout of a process call and returns stdout only.
+    Captures stdout of a process call and returns stdout only.
 
-  - Throws if return code is not 0.
-  - Blocks stdin by default if not specified since no output will be visible.
-  - As per doc, "The stdout argument is not allowed as it is used internally."
-  """
+    - Throws if return code is not 0.
+    - Blocks stdin by default if not specified since no output will be visible.
+    - As per doc, "The stdout argument is not allowed as it is used internally."
+    """
     kwargs.setdefault('stdin', DEVNULL)
     if 'stdout' in kwargs:
         raise ValueError('stdout argument not allowed, it would be overridden.')
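
The capture()/check_output() contracts above map onto plain subprocess like so. A sketch of the semantics only, not the real implementation:

import subprocess

def capture(args, **kwargs):
    # Block stdin so the child cannot hang waiting for input; ignore the
    # return code, per the docstring above.
    kwargs.setdefault('stdin', subprocess.DEVNULL)
    return subprocess.run(args, stdout=subprocess.PIPE, **kwargs).stdout

def check_output(args, **kwargs):
    # Same, but raise CalledProcessError on a non-zero return code.
    kwargs.setdefault('stdin', subprocess.DEVNULL)
    return subprocess.run(args, stdout=subprocess.PIPE, check=True,
                          **kwargs).stdout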

+ 10 - 10
testing_support/coverage_utils.py

@@ -28,17 +28,17 @@ def covered_main(includes,
                  required_percentage=100.0,
                  disable_coverage=True):
     """Equivalent of unittest.main(), except that it gathers coverage data, and
-  asserts if the test is not at 100% coverage.
+    asserts if the test is not at 100% coverage.
 
-  Args:
-    includes (list(str) or str) - List of paths to include in coverage report.
-      May also be a single path instead of a list.
-    require_native (str) - If non-None, will require that
-      at least |require_native| version of coverage is installed on the
-      system with CTracer.
-    disable_coverage (bool) - If True, just run unittest.main() without any
-      coverage tracking. Bug: crbug.com/662277
-  """
+    Args:
+        includes (list(str) or str) - List of paths to include in coverage
+            report. May also be a single path instead of a list.
+        require_native (str) - If non-None, will require that
+            at least |require_native| version of coverage is installed on the
+            system with CTracer.
+        disable_coverage (bool) - If True, just run unittest.main() without any
+            coverage tracking. Bug: crbug.com/662277
+    """
     if disable_coverage:
         unittest.main()
         return

+ 6 - 6
testing_support/fake_repos.py

@@ -80,13 +80,13 @@ def commit_git(repo):
 class FakeReposBase(object):
     """Generate git repositories to test gclient functionality.
 
-  Many DEPS functionalities need to be tested: Var, deps_os, hooks,
-  use_relative_paths.
+    Many DEPS functionalities need to be tested: Var, deps_os, hooks,
+    use_relative_paths.
 
-  And types of dependencies: Relative urls, Full urls, git.
+    And types of dependencies: Relative urls, Full urls, git.
 
-  populateGit() needs to be implemented by the subclass.
-  """
+    populateGit() needs to be implemented by the subclass.
+    """
     # Hostname
     NB_GIT_REPOS = 1
     USERS = [
@@ -1087,7 +1087,7 @@ class FakeReposTestBase(trial_dir.TestCase):
 
     def mangle_git_tree(self, *args):
         """Creates a 'virtual directory snapshot' to compare with the actual
-    result on disk."""
+        result on disk."""
         result = {}
         for item, new_root in args:
             repo, rev = item.split('@', 1)

+ 4 - 4
testing_support/filesystem_mock.py

@@ -16,10 +16,10 @@ def _RaiseNotFound(path):
 class MockFileSystem(object):
     """Stripped-down version of WebKit's webkitpy.common.system.filesystem_mock
 
-  Implements a filesystem-like interface on top of a dict of filenames ->
-  file contents. A file content value of None indicates that the file should
-  not exist (IOError will be raised if it is opened;
-  reading from a missing key raises a KeyError, not an IOError."""
+    Implements a filesystem-like interface on top of a dict of filenames ->
+    file contents. A file content value of None indicates that the file should
+    not exist (IOError will be raised if it is opened); reading from a missing
+    key raises a KeyError, not an IOError."""
     def __init__(self, files=None):
         self.files = files or {}
         self.written_files = {}
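
The IOError/KeyError split that docstring describes is easy to model; a minimal sketch with a hypothetical class, not the mock itself:

class TinyMockFileSystem:
    def __init__(self, files=None):
        self.files = files or {}

    def read_text(self, path):
        contents = self.files[path]  # missing key -> KeyError, as documented
        if contents is None:         # None entry -> "file must not exist"
            raise IOError('%s: marked as nonexistent' % path)
        return contents

fs = TinyMockFileSystem({'/a.txt': 'hello', '/gone.txt': None})
assert fs.read_text('/a.txt') == 'hello'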

+ 99 - 97
testing_support/git_test_utils.py

@@ -23,8 +23,8 @@ DEFAULT_BRANCH = 'main'
 def git_hash_data(data, typ='blob'):
     """Calculate the git-style SHA1 for some data.
 
-  Only supports 'blob' type data at the moment.
-  """
+    Only supports 'blob' type data at the moment.
+    """
     assert typ == 'blob', 'Only support blobs for now'
     return hashlib.sha1(b'blob %d\0%s' % (len(data), data)).hexdigest()
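
The formula in git_hash_data() is git's blob object id: SHA-1 over a "blob <len>\0" header plus the contents. It can be checked against `git hash-object`:

import hashlib

def blob_sha1(data: bytes) -> str:
    return hashlib.sha1(b'blob %d\0%s' % (len(data), data)).hexdigest()

# Same value `git hash-object` prints for a file containing "hello\n":
assert blob_sha1(b'hello\n') == 'ce013625030ba8dba906f756967f9e9ca394464a'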
 
@@ -101,8 +101,8 @@ class OrderedSet(collections.abc.MutableSet):
 class UTC(datetime.tzinfo):
     """UTC time zone.
 
-  from https://docs.python.org/2/library/datetime.html#tzinfo-objects
-  """
+    from https://docs.python.org/2/library/datetime.html#tzinfo-objects
+    """
     def utcoffset(self, dt):
         return datetime.timedelta(0)
 
@@ -119,41 +119,41 @@ UTC = UTC()
 class GitRepoSchema(object):
     """A declarative git testing repo.
 
-  Pass a schema to __init__ in the form of:
-     A B C D
-       B E D
+    Pass a schema to __init__ in the form of:
+        A B C D
+        B E D
 
-  This is the repo
+    This is the repo
 
-     A - B -  C - D
-           \\ E /
+        A - B -  C - D
+            \\ E /
 
-  Whitespace doesn't matter. Each line is a declaration of which commits come
-  before which other commits.
+    Whitespace doesn't matter. Each line is a declaration of which commits come
+    before which other commits.
 
-  Every commit gets a tag 'tag_%(commit)s'
-  Every unique terminal commit gets a branch 'branch_%(commit)s'
-  Last commit in First line is the branch 'main'
-  Root commits get a ref 'root_%(commit)s'
+    Every commit gets a tag 'tag_%(commit)s'
+    Every unique terminal commit gets a branch 'branch_%(commit)s'
+    Last commit in First line is the branch 'main'
+    Root commits get a ref 'root_%(commit)s'
 
-  Timestamps are in topo order, earlier commits (as indicated by their presence
-  in the schema) get earlier timestamps. Stamps start at the Unix Epoch, and
-  increment by 1 day each.
-  """
+    Timestamps are in topo order, earlier commits (as indicated by their
+    presence in the schema) get earlier timestamps. Stamps start at the Unix
+    Epoch, and increment by 1 day each.
+    """
     COMMIT = collections.namedtuple('COMMIT', 'name parents is_branch is_root')
 
     def __init__(self, repo_schema='', content_fn=lambda v: {v: {'data': v}}):
         """Builds a new GitRepoSchema.
 
-    Args:
-      repo_schema (str) - Initial schema for this repo. See class docstring for
-        info on the schema format.
-      content_fn ((commit_name) -> commit_data) - A function which will be
-        lazily called to obtain data for each commit. The results of this
-        function are cached (i.e. it will never be called twice for the same
-        commit_name). See the docstring on the GitRepo class for the format of
-        the data returned by this function.
-    """
+        Args:
+            repo_schema (str) - Initial schema for this repo. See class
+                docstring for info on the schema format.
+            content_fn ((commit_name) -> commit_data) - A function which will
+                be lazily called to obtain data for each commit. The results of
+                this function are cached (i.e. it will never be called twice
+                for the same commit_name). See the docstring on the GitRepo
+                class for the format of the data returned by this function.
+        """
         self.main = None
         self.par_map = {}
         self.data_cache = {}
@@ -163,10 +163,10 @@ class GitRepoSchema(object):
     def walk(self):
         """(Generator) Walks the repo schema from roots to tips.
 
-    Generates GitRepoSchema.COMMIT objects for each commit.
+        Generates GitRepoSchema.COMMIT objects for each commit.
 
-    Throws an AssertionError if it detects a cycle.
-    """
+        Throws an AssertionError if it detects a cycle.
+        """
         is_root = True
         par_map = copy.deepcopy(self.par_map)
         while par_map:
@@ -191,11 +191,11 @@ class GitRepoSchema(object):
     def add_commits(self, schema):
         """Adds more commits from a schema into the existing Schema.
 
-    Args:
-      schema (str) - See class docstring for info on schema format.
+        Args:
+            schema (str) - See class docstring for info on schema format.
 
-    Throws an AssertionError if it detects a cycle.
-    """
+        Throws an AssertionError if it detects a cycle.
+        """
         for commits in (l.split() for l in schema.splitlines() if l.strip()):
             parent = None
             for commit in commits:
@@ -213,10 +213,11 @@ class GitRepoSchema(object):
     def data_for(self, commit):
         """Obtains the data for |commit|.
 
-    See the docstring on the GitRepo class for the format of the returned data.
+        See the docstring on the GitRepo class for the format of the returned
+        data.
 
-    Caches the result on this GitRepoSchema instance.
-    """
+        Caches the result on this GitRepoSchema instance.
+        """
         if commit not in self.data_cache:
             self.data_cache[commit] = self.content_fn(commit)
         return self.data_cache[commit]
@@ -224,10 +225,10 @@ class GitRepoSchema(object):
     def simple_graph(self):
         """Returns a dictionary of {commit_subject: {parent commit_subjects}}
 
-    This allows you to get a very simple connection graph over the whole repo
-    for comparison purposes. Only commit subjects (not ids, not content/data)
-    are considered
-    """
+        This allows you to get a very simple connection graph over the whole
+        repo for comparison purposes. Only commit subjects (not ids, not
+        content/data) are considered.
+        """
         ret = {}
         for commit in self.walk():
             ret.setdefault(commit.name, set()).update(commit.parents)
@@ -237,27 +238,28 @@ class GitRepoSchema(object):
 class GitRepo(object):
     """Creates a real git repo for a GitRepoSchema.
 
-  Obtains schema and content information from the GitRepoSchema.
-
-  The format for the commit data supplied by GitRepoSchema.data_for is:
-    {
-      SPECIAL_KEY: special_value,
-      ...
-      "path/to/some/file": { 'data': "some data content for this file",
-                              'mode': 0o755 },
-      ...
-    }
-
-  The SPECIAL_KEYs are the following attributes of the GitRepo class:
-    * AUTHOR_NAME
-    * AUTHOR_EMAIL
-    * AUTHOR_DATE - must be a datetime.datetime instance
-    * COMMITTER_NAME
-    * COMMITTER_EMAIL
-    * COMMITTER_DATE - must be a datetime.datetime instance
-
-  For file content, if 'data' is None, then this commit will `git rm` that file.
-  """
+    Obtains schema and content information from the GitRepoSchema.
+
+    The format for the commit data supplied by GitRepoSchema.data_for is:
+        {
+            SPECIAL_KEY: special_value,
+            ...
+            "path/to/some/file": { 'data': "some data content for this file",
+                                   'mode': 0o755 },
+            ...
+        }
+
+    The SPECIAL_KEYs are the following attributes of the GitRepo class:
+        * AUTHOR_NAME
+        * AUTHOR_EMAIL
+        * AUTHOR_DATE - must be a datetime.datetime instance
+        * COMMITTER_NAME
+        * COMMITTER_EMAIL
+        * COMMITTER_DATE - must be a datetime.datetime instance
+
+    For file content, if 'data' is None, then this commit will `git rm` that
+    file.
+    """
     BASE_TEMP_DIR = tempfile.mkdtemp(suffix='base', prefix='git_repo')
     atexit.register(gclient_utils.rmtree, BASE_TEMP_DIR)
 
@@ -279,14 +281,14 @@ class GitRepo(object):
     def __init__(self, schema):
         """Makes new GitRepo.
 
-    Automatically creates a temp folder under GitRepo.BASE_TEMP_DIR. It's
-    recommended that you clean this repo up by calling nuke() on it, but if not,
-    GitRepo will automatically clean up all allocated repos at the exit of the
-    program (assuming a normal exit like with sys.exit)
+        Automatically creates a temp folder under GitRepo.BASE_TEMP_DIR. It's
+        recommended that you clean this repo up by calling nuke() on it, but if
+        not, GitRepo will automatically clean up all allocated repos at the
+        exit of the program (assuming a normal exit like with sys.exit).
 
-    Args:
-      schema - An instance of GitRepoSchema
-    """
+        Args:
+            schema - An instance of GitRepoSchema
+        """
         self.repo_path = os.path.realpath(
             tempfile.mkdtemp(dir=self.BASE_TEMP_DIR))
         self.commit_map = {}
@@ -306,10 +308,10 @@ class GitRepo(object):
     def __getitem__(self, commit_name):
         """Gets the hash of a commit by its schema name.
 
-    >>> r = GitRepo(GitRepoSchema('A B C'))
-    >>> r['B']
-    '7381febe1da03b09da47f009963ab7998a974935'
-    """
+        >>> r = GitRepo(GitRepoSchema('A B C'))
+        >>> r['B']
+        '7381febe1da03b09da47f009963ab7998a974935'
+        """
         return self.commit_map[commit_name]
 
     def _add_schema_commit(self, commit, commit_data):
@@ -407,14 +409,14 @@ class GitRepo(object):
     def nuke(self):
         """Obliterates the git repo on disk.
 
-    Causes this GitRepo to be unusable.
-    """
+        Causes this GitRepo to be unusable.
+        """
         gclient_utils.rmtree(self.repo_path)
         self.repo_path = None
 
     def run(self, fn, *args, **kwargs):
         """Run a python function with the given args and kwargs with the cwd
-    set to the git repo."""
+        set to the git repo."""
         assert self.repo_path is not None
         curdir = os.getcwd()
         try:
@@ -425,11 +427,11 @@ class GitRepo(object):
 
     def capture_stdio(self, fn, *args, **kwargs):
         """Run a python function with the given args and kwargs with the cwd set
-    to the git repo.
+        to the git repo.
 
-    Returns the (stdout, stderr) of whatever ran, instead of the what |fn|
-    returned.
-    """
+        Returns the (stdout, stderr) of whatever ran, instead of what |fn|
+        returned.
+        """
         stdout = sys.stdout
         stderr = sys.stderr
         try:
@@ -480,15 +482,15 @@ class GitRepo(object):
 class GitRepoSchemaTestBase(unittest.TestCase):
     """A TestCase with a built-in GitRepoSchema.
 
-  Expects a class variable REPO_SCHEMA to be a GitRepoSchema string in the form
-  described by that class.
+    Expects a class variable REPO_SCHEMA to be a GitRepoSchema string in the
+    form described by that class.
 
-  You may also set class variables in the form COMMIT_%(commit_name)s, which
-  provide the content for the given commit_name commits.
+    You may also set class variables in the form COMMIT_%(commit_name)s, which
+    provide the content for the given commit_name commits.
 
-  You probably will end up using either GitRepoReadOnlyTestBase or
-  GitRepoReadWriteTestBase for real tests.
-  """
+    You probably will end up using either GitRepoReadOnlyTestBase or
+    GitRepoReadWriteTestBase for real tests.
+    """
     REPO_SCHEMA = None
 
     @classmethod
@@ -505,11 +507,11 @@ class GitRepoSchemaTestBase(unittest.TestCase):
 
 class GitRepoReadOnlyTestBase(GitRepoSchemaTestBase):
     """Injects a GitRepo object given the schema and content from
-  GitRepoSchemaTestBase into TestCase classes which subclass this.
+    GitRepoSchemaTestBase into TestCase classes which subclass this.
 
-  This GitRepo will appear as self.repo, and will be deleted and recreated once
-  for the duration of all the tests in the subclass.
-  """
+    This GitRepo will appear as self.repo, and will be deleted and recreated
+    once for the duration of all the tests in the subclass.
+    """
     REPO_SCHEMA = None
 
     @classmethod
@@ -529,11 +531,11 @@ class GitRepoReadOnlyTestBase(GitRepoSchemaTestBase):
 
 class GitRepoReadWriteTestBase(GitRepoSchemaTestBase):
     """Injects a GitRepo object given the schema and content from
-  GitRepoSchemaTestBase into TestCase classes which subclass this.
+    GitRepoSchemaTestBase into TestCase classes which subclass this.
 
-  This GitRepo will appear as self.repo, and will be deleted and recreated for
-  each test function in the subclass.
-  """
+    This GitRepo will appear as self.repo, and will be deleted and recreated for
+    each test function in the subclass.
+    """
     REPO_SCHEMA = None
 
     def setUp(self):
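
Tying the GitRepoSchema docstring together, a hypothetical usage, assuming a depot_tools checkout on sys.path:

from testing_support import git_test_utils

schema = git_test_utils.GitRepoSchema('A B C D\n  B E D')
# simple_graph(): {commit_subject: {parent subjects}}, per its docstring.
print(schema.simple_graph())
# {'A': set(), 'B': {'A'}, 'C': {'B'}, 'D': {'C', 'E'}, 'E': {'B'}}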

+ 24 - 24
testing_support/presubmit_canned_checks_test_mocks.py

@@ -22,18 +22,18 @@ class MockCannedChecks(object):
                                  error_formatter=_ReportErrorFileAndLine):
         """Find all newly introduced violations of a per-line rule (a callable).
 
-    Arguments:
-      callable_rule: a callable taking a file extension and line of input and
-        returning True if the rule is satisfied and False if there was a
-        problem.
-      input_api: object to enumerate the affected files.
-      source_file_filter: a filter to be passed to the input api.
-      error_formatter: a callable taking (filename, line_number, line) and
-        returning a formatted error string.
-
-    Returns:
-      A list of the newly-introduced violations reported by the rule.
-    """
+        Arguments:
+            callable_rule: a callable taking a file extension and line of input
+                and returning True if the rule is satisfied and False if there
+                was a problem.
+            input_api: object to enumerate the affected files.
+            source_file_filter: a filter to be passed to the input api.
+            error_formatter: a callable taking (filename, line_number, line)
+                and returning a formatted error string.
+
+        Returns:
+            A list of the newly-introduced violations reported by the rule.
+        """
         errors = []
         for f in input_api.AffectedFiles(include_deletes=False,
                                          file_filter=source_file_filter):
@@ -57,9 +57,9 @@ class MockCannedChecks(object):
 class MockInputApi(object):
     """Mock class for the InputApi class.
 
-  This class can be used for unittests for presubmit by initializing the files
-  attribute as the list of changed files.
-  """
+    This class can be used for unittests for presubmit by initializing the files
+    attribute as the list of changed files.
+    """
 
     DEFAULT_FILES_TO_SKIP = ()
 
@@ -140,9 +140,9 @@ class MockInputApi(object):
 class MockOutputApi(object):
     """Mock class for the OutputApi class.
 
-  An instance of this class can be passed to presubmit unittests for outputing
-  various types of results.
-  """
+    An instance of this class can be passed to presubmit unittests for
+    outputting various types of results.
+    """
     class PresubmitResult(object):
         def __init__(self, message, items=None, long_text=''):
             self.message = message
@@ -186,9 +186,9 @@ class MockOutputApi(object):
 class MockFile(object):
     """Mock class for the File class.
 
-  This class can be used to form the mock list of changed files in
-  MockInputApi for presubmit unittests.
-  """
+    This class can be used to form the mock list of changed files in
+    MockInputApi for presubmit unittests.
+    """
     def __init__(self,
                  local_path,
                  new_contents,
@@ -255,9 +255,9 @@ class MockAffectedFile(MockFile):
 class MockChange(object):
     """Mock class for Change class.
 
-  This class can be used in presubmit unittests to mock the query of the
-  current change.
-  """
+    This class can be used in presubmit unittests to mock the query of the
+    current change.
+    """
     def __init__(self, changed_files, description=''):
         self._changed_files = changed_files
         self.footers = defaultdict(list)

+ 1 - 1
testing_support/test_case_utils.py

@@ -10,7 +10,7 @@ import string
 
 class TestCaseUtils(object):
     """Base class with some additional functionalities. People will usually want
-  to use SuperMoxTestBase instead."""
+    to use SuperMoxTestBase instead."""
     # Backup the separator in case it gets mocked
     _OS_SEP = os.sep
     _RANDOM_CHOICE = random.choice

+ 4 - 4
testing_support/trial_dir.py

@@ -15,10 +15,10 @@ import gclient_utils
 class TrialDir(object):
     """Manages a temporary directory.
 
-  On first object creation, TrialDir.TRIAL_ROOT will be set to a new temporary
-  directory created in /tmp or the equivalent. It will be deleted on process
-  exit unless TrialDir.SHOULD_LEAK is set to True.
-  """
+    On first object creation, TrialDir.TRIAL_ROOT will be set to a new temporary
+    directory created in /tmp or the equivalent. It will be deleted on process
+    exit unless TrialDir.SHOULD_LEAK is set to True.
+    """
     # When SHOULD_LEAK is set to True, temporary directories created while the
     # tests are running aren't deleted at the end of the tests. Expect failures
     # when running more than one test due to inter-test side-effects. Helps with

+ 15 - 15
tests/bot_update_coverage_test.py

@@ -22,8 +22,8 @@ import bot_update
 class MockedPopen(object):
     """A fake instance of a called subprocess.
 
-  This is meant to be used in conjunction with MockedCall.
-  """
+    This is meant to be used in conjunction with MockedCall.
+    """
     def __init__(self, args=None, kwargs=None):
         self.args = args or []
         self.kwargs = kwargs or {}
@@ -33,17 +33,17 @@ class MockedPopen(object):
     def returns(self, rv):
         """Set the return value when this popen is called.
 
-    rv can be a string, or a callable (eg function).
-    """
+        rv can be a string, or a callable (eg function).
+        """
         self.return_value = rv
         return self
 
     def check(self, args, kwargs):
         """Check to see if the given args/kwargs call match this instance.
 
-    This does a partial match, so that a call to "git clone foo" will match
-    this instance if this instance was recorded as "git clone"
-    """
+        This does a partial match, so that a call to "git clone foo" will match
+        this instance if this instance was recorded as "git clone"
+        """
         if any(input_arg != expected_arg
                for (input_arg, expected_arg) in zip(args, self.args)):
             return False
@@ -59,14 +59,14 @@ class MockedPopen(object):
 class MockedCall(object):
     """A fake instance of bot_update.call().
 
-  This object is pre-seeded with "answers" in self.expectations.  The type
-  is a MockedPopen object, or any object with a __call__() and check() method.
-  The check() method is used to check to see if the correct popen object is
-  chosen (can be a partial match, eg a "git clone" popen module would match
-  a "git clone foo" call).
-  By default, if no answers have been pre-seeded, the call() returns successful
-  with an empty string.
-  """
+    This object is pre-seeded with "answers" in self.expectations.  The type
+    is a MockedPopen object, or any object with a __call__() and check() method.
+    The check() method is used to check to see if the correct popen object is
+    chosen (can be a partial match, eg a "git clone" popen module would match
+    a "git clone foo" call).
+    By default, if no answers have been pre-seeded, the call() returns
+    successfully with an empty string.
+    """
     def __init__(self, fake_filesystem):
         self.expectations = []
         self.records = []

+ 9 - 9
tests/cipd_bootstrap_test.py

@@ -41,11 +41,11 @@ windows-amd64   sha256  3e21561b45acb2845c309a04cbedb2ce1e0567b7b24bf89857e76736
 
 class CipdBootstrapTest(unittest.TestCase):
     """Tests that CIPD client can bootstrap from scratch and self-update from some
-  old version to a most recent one.
+    old version to the most recent one.
 
-  WARNING: This integration test touches real network and real CIPD backend and
-  downloads several megabytes of stuff.
-  """
+    WARNING: This integration test touches real network and real CIPD backend and
+    downloads several megabytes of stuff.
+    """
     def setUp(self):
         self.tempdir = tempfile.mkdtemp('depot_tools_cipd')
 
@@ -55,9 +55,9 @@ class CipdBootstrapTest(unittest.TestCase):
     def stage_files(self, cipd_version=None, digests=None):
         """Copies files needed for cipd bootstrap into the temp dir.
 
-    Args:
-      cipd_version: if not None, a value to put into cipd_client_version file.
-    """
+        Args:
+            cipd_version: if not None, a value to put into cipd_client_version file.
+        """
         names = (
             '.cipd_impl.ps1',
             'cipd',
@@ -80,8 +80,8 @@ class CipdBootstrapTest(unittest.TestCase):
     def call_cipd_help(self):
         """Calls 'cipd help' bootstrapping the client in tempdir.
 
-    Returns (exit code, merged stdout and stderr).
-    """
+        Returns (exit code, merged stdout and stderr).
+        """
         exe = 'cipd.bat' if sys.platform == 'win32' else 'cipd'
         p = subprocess.Popen([os.path.join(self.tempdir, exe), 'help'],
                              stdout=subprocess.PIPE,
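
The call_cipd_help() helper boils down to running the platform wrapper with merged output; a sketch assuming a staged |tempdir| as in the test:

import os
import subprocess
import sys

def call_cipd_help(tempdir):
    exe = 'cipd.bat' if sys.platform == 'win32' else 'cipd'
    # Merge stderr into stdout so bootstrap noise shows up in one stream.
    proc = subprocess.run([os.path.join(tempdir, exe), 'help'],
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    return proc.returncode, proc.stdout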

+ 7 - 8
tests/gclient_scm_test.py

@@ -1181,7 +1181,7 @@ class GerritChangesTest(fake_repos.FakeReposTestBase):
 
     def assertCommits(self, commits):
         """Check that all, and only |commits| are present in the current checkout.
-    """
+        """
         for i in commits:
             name = os.path.join(self.root_dir, 'commit ' + str(i))
             self.assertTrue(os.path.exists(name), 'Commit not found: %s' % name)
@@ -1264,10 +1264,10 @@ class GerritChangesTest(fake_repos.FakeReposTestBase):
     def testCheckoutOlderThanPatchBase(self):
         """Test applying a patch on an old checkout.
 
-    We first checkout commit 1, and try to patch refs/changes/35/1235/1, which
-    contains commits 5 and 6, and is based on top of commit 3.
-    The final result should contain commits 1, 5 and 6, but not commits 2 or 3.
-    """
+        We first checkout commit 1, and try to patch refs/changes/35/1235/1, which
+        contains commits 5 and 6, and is based on top of commit 3.
+        The final result should contain commits 1, 5 and 6, but not commits 2 or 3.
+        """
         scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
         file_list = []
 
@@ -1349,8 +1349,7 @@ class GerritChangesTest(fake_repos.FakeReposTestBase):
                          self.gitrevparse(self.root_dir))
 
     def testDoesntRebasePatchMaster(self):
-        """Tests that we can apply a patch without rebasing it.
-    """
+        """Tests that we can apply a patch without rebasing it."""
         scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
         file_list = []
 
@@ -1369,7 +1368,7 @@ class GerritChangesTest(fake_repos.FakeReposTestBase):
 
     def testDoesntRebasePatchOldCheckout(self):
         """Tests that we can apply a patch without rebasing it on an old checkout.
-    """
+        """
         scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
         file_list = []
 

+ 6 - 6
tests/gclient_smoketest_base.py

@@ -83,13 +83,13 @@ class GClientSmokeBase(fake_repos.FakeReposTestBase):
 
     def splitBlock(self, stdout):
         """Split gclient's output into logical execution blocks.
-    ___ running 'foo' at '/bar'
-    (...)
-    ___ running 'baz' at '/bar'
-    (...)
+        ___ running 'foo' at '/bar'
+        (...)
+        ___ running 'baz' at '/bar'
+        (...)
 
-    will result in 2 items of len((...).splitlines()) each.
-    """
+        will result in 2 items of len((...).splitlines()) each.
+        """
         results = []
         for line in stdout.splitlines(False):
             # Intentionally skips empty lines.
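
The block splitting documented above ("___ running ..." banners delimit blocks, empty lines skipped) can be modelled directly; a small sketch:

def split_blocks(stdout):
    blocks = []
    for line in stdout.splitlines():
        if not line:
            continue                 # intentionally skip empty lines
        if line.startswith('___'):
            blocks.append([])        # a banner starts a new block
        elif blocks:
            blocks[-1].append(line)  # everything else joins the last block
    return blocks

out = "___ running 'foo' at '/bar'\nok\n\n___ running 'baz' at '/bar'\ndone\n"
assert split_blocks(out) == [['ok'], ['done']]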

+ 43 - 43
tests/gclient_test.py

@@ -113,12 +113,12 @@ class GclientTest(trial_dir.TestCase):
     def _dependencies(self, jobs):
         """Verifies that dependencies are processed in the right order.
 
-    e.g. if there is a dependency 'src' and another 'src/third_party/bar', that
-    bar isn't fetched until 'src' is done.
+        e.g. if there is a dependency 'src' and another 'src/third_party/bar', that
+        bar isn't fetched until 'src' is done.
 
-    Args:
-      |jobs| is the number of parallel jobs simulated.
-    """
+        Args:
+            |jobs| is the number of parallel jobs simulated.
+        """
         parser = gclient.OptionParser()
         options, args = parser.parse_args(['--jobs', jobs])
         write(
@@ -450,10 +450,10 @@ class GclientTest(trial_dir.TestCase):
     def testTargetOS(self):
         """Verifies that specifying a target_os pulls in all relevant dependencies.
 
-    The target_os variable allows specifying the name of an additional OS which
-    should be considered when selecting dependencies from a DEPS' deps_os. The
-    value will be appended to the _enforced_os tuple.
-    """
+        The target_os variable allows specifying the name of an additional OS which
+        should be considered when selecting dependencies from a DEPS' deps_os. The
+        value will be appended to the _enforced_os tuple.
+        """
 
         write(
             '.gclient', 'solutions = [\n'
@@ -479,13 +479,13 @@ class GclientTest(trial_dir.TestCase):
 
     def testTargetOsWithTargetOsOnly(self):
         """Verifies that specifying a target_os and target_os_only pulls in only
-    the relevant dependencies.
+        the relevant dependencies.
 
-    The target_os variable allows specifying the name of an additional OS which
-    should be considered when selecting dependencies from a DEPS' deps_os. With
-    target_os_only also set, the _enforced_os tuple will be set to only the
-    target_os value.
-    """
+        The target_os variable allows specifying the name of an additional OS which
+        should be considered when selecting dependencies from a DEPS' deps_os. With
+        target_os_only also set, the _enforced_os tuple will be set to only the
+        target_os value.
+        """
 
         write(
             '.gclient', 'solutions = [\n'
@@ -512,8 +512,8 @@ class GclientTest(trial_dir.TestCase):
 
     def testTargetOsOnlyWithoutTargetOs(self):
         """Verifies that specifying a target_os_only without target_os_only raises
-    an exception.
-    """
+        an exception.
+        """
 
         write(
             '.gclient', 'solutions = [\n'
@@ -542,12 +542,12 @@ class GclientTest(trial_dir.TestCase):
 
     def testTargetOsInDepsFile(self):
         """Verifies that specifying a target_os value in a DEPS file pulls in all
-    relevant dependencies.
+        relevant dependencies.
 
-    The target_os variable in a DEPS file allows specifying the name of an
-    additional OS which should be considered when selecting dependencies from a
-    DEPS' deps_os. The value will be appended to the _enforced_os tuple.
-    """
+        The target_os variable in a DEPS file allows specifying the name of an
+        additional OS which should be considered when selecting dependencies from a
+        DEPS' deps_os. The value will be appended to the _enforced_os tuple.
+        """
 
         write(
             '.gclient', 'solutions = [\n'
@@ -575,8 +575,8 @@ class GclientTest(trial_dir.TestCase):
 
     def testTargetOsForHooksInDepsFile(self):
         """Verifies that specifying a target_os value in a DEPS file runs the right
-    entries in hooks_os.
-    """
+        entries in hooks_os.
+        """
 
         write(
             'DEPS', 'hooks = [\n'
@@ -703,8 +703,8 @@ class GclientTest(trial_dir.TestCase):
 
     def testDepsOsOverrideDepsInDepsFile(self):
         """Verifies that a 'deps_os' path cannot override a 'deps' path. Also
-    see testUpdateWithOsDeps above.
-    """
+        see testUpdateWithOsDeps above.
+        """
 
         write(
             '.gclient', 'solutions = [\n'
@@ -741,14 +741,14 @@ class GclientTest(trial_dir.TestCase):
     def testRecursedepsOverride(self):
         """Verifies gclient respects the |recursedeps| var syntax.
 
-    This is what we mean to check here:
-    - |recursedeps| = [...] on 2 levels means we pull exactly 3 deps
-      (up to /fizz, but not /fuzz)
-    - pulling foo/bar with no recursion (in .gclient) is overridden by
-      a later pull of foo/bar with recursion (in the dep tree)
-    - pulling foo/tar with no recursion (in .gclient) is no recursively
-      pulled (taz is left out)
-    """
+        This is what we mean to check here:
+        - |recursedeps| = [...] on 2 levels means we pull exactly 3 deps
+        (up to /fizz, but not /fuzz)
+        - pulling foo/bar with no recursion (in .gclient) is overridden by
+        a later pull of foo/bar with recursion (in the dep tree)
+        - pulling foo/tar with no recursion (in .gclient) is not recursively
+        pulled (taz is left out)
+        """
         write(
             '.gclient', 'solutions = [\n'
             '  { "name": "foo", "url": "svn://example.com/foo" },\n'
@@ -908,11 +908,11 @@ class GclientTest(trial_dir.TestCase):
 
     def testRecursedepsAltfile(self):
         """Verifies gclient respects the |recursedeps| var syntax with overridden
-    target DEPS file.
+        target DEPS file.
 
-    This is what we mean to check here:
-    - Naming an alternate DEPS file in recursedeps pulls from that one.
-    """
+        This is what we mean to check here:
+        - Naming an alternate DEPS file in recursedeps pulls from that one.
+        """
         write(
             '.gclient', 'solutions = [\n'
             '  { "name": "foo", "url": "svn://example.com/foo" },\n'
@@ -939,10 +939,10 @@ class GclientTest(trial_dir.TestCase):
     def testGitDeps(self):
         """Verifies gclient respects a .DEPS.git deps file.
 
-    Along the way, we also test that if both DEPS and .DEPS.git are present,
-    that gclient does not read the DEPS file.  This will reliably catch bugs
-    where gclient is always hitting the wrong file (DEPS).
-    """
+        Along the way, we also test that if both DEPS and .DEPS.git are present,
+        that gclient does not read the DEPS file.  This will reliably catch bugs
+        where gclient is always hitting the wrong file (DEPS).
+        """
         write(
             '.gclient', 'solutions = [\n'
             '  { "name": "foo", "url": "svn://example.com/foo",\n'
@@ -1329,7 +1329,7 @@ class GclientTest(trial_dir.TestCase):
         self.assertEqual(sol.ParseGitSubmodules(), {})
 
     def testParseGitSubmodules_ParsesSubmodules(self):
-        """ParseGitSubmodules returns submodules when present. """
+        """ParseGitSubmodules returns submodules when present."""
         solutions = [{
             'name': 'foobar',
             'url': 'https://example.com/foobar',

+ 5 - 5
tests/gclient_transitions_smoketest.py

@@ -27,7 +27,7 @@ from testing_support import fake_repos
 
 class SkiaDEPSTransitionSmokeTest(gclient_smoketest_base.GClientSmokeBase):
     """Simulate the behavior of bisect bots as they transition across the Skia
-  DEPS change."""
+    DEPS change."""
 
     FAKE_REPOS_CLASS = fake_repos.FakeRepoSkiaDEPS
 
@@ -128,7 +128,7 @@ class SkiaDEPSTransitionSmokeTest(gclient_smoketest_base.GClientSmokeBase):
 
 class BlinkDEPSTransitionSmokeTest(gclient_smoketest_base.GClientSmokeBase):
     """Simulate the behavior of bisect bots as they transition across the Blink
-  DEPS change."""
+    DEPS change."""
 
     FAKE_REPOS_CLASS = fake_repos.FakeRepoBlinkDEPS
 
@@ -183,7 +183,7 @@ class BlinkDEPSTransitionSmokeTest(gclient_smoketest_base.GClientSmokeBase):
     @unittest.skip('flaky')
     def testBlinkDEPSChangeUsingGclient(self):
         """Checks that {src,blink} repos are consistent when syncing going back and
-    forth using gclient sync src@revision."""
+        forth using gclient sync src@revision."""
         self.gclient([
             'config', '--spec', 'solutions=['
             '{"name": "src",'
@@ -210,7 +210,7 @@ class BlinkDEPSTransitionSmokeTest(gclient_smoketest_base.GClientSmokeBase):
     @unittest.skip('flaky')
     def testBlinkDEPSChangeUsingGit(self):
         """Like testBlinkDEPSChangeUsingGclient, but move the main project using
-    directly git and not gclient sync."""
+        directly git and not gclient sync."""
         self.gclient([
             'config', '--spec', 'solutions=['
             '{"name": "src",'
@@ -242,7 +242,7 @@ class BlinkDEPSTransitionSmokeTest(gclient_smoketest_base.GClientSmokeBase):
     @unittest.skip('flaky')
     def testBlinkLocalBranchesArePreserved(self):
         """Checks that the state of local git branches are effectively preserved
-    when going back and forth."""
+        when going back and forth."""
         self.gclient([
             'config', '--spec', 'solutions=['
             '{"name": "src",'

+ 9 - 9
tests/git_cl_test.py

@@ -146,15 +146,15 @@ class AuthenticatorMock(object):
 def CookiesAuthenticatorMockFactory(hosts_with_creds=None, same_auth=False):
     """Use to mock Gerrit/Git credentials from ~/.netrc or ~/.gitcookies.
 
-  Usage:
-    >>> self.mock(git_cl.gerrit_util, "CookiesAuthenticator",
-                  CookiesAuthenticatorMockFactory({'host': ('user', _, 'pass')})
-
-  OR
-    >>> self.mock(git_cl.gerrit_util, "CookiesAuthenticator",
-                  CookiesAuthenticatorMockFactory(
-                      same_auth=('user', '', 'pass'))
-  """
+    Usage:
+        >>> self.mock(git_cl.gerrit_util, "CookiesAuthenticator",
+                    CookiesAuthenticatorMockFactory({'host': ('user', _, 'pass')}))
+
+    OR
+        >>> self.mock(git_cl.gerrit_util, "CookiesAuthenticator",
+                    CookiesAuthenticatorMockFactory(
+                        same_auth=('user', '', 'pass')))
+    """
     class CookiesAuthenticatorMock(git_cl.gerrit_util.CookiesAuthenticator):
         def __init__(self):  # pylint: disable=super-init-not-called
             # Intentionally not calling super() because it reads actual cookie

+ 2 - 2
tests/git_common_test.py

@@ -96,8 +96,8 @@ class Support(GitCommonTestBase):
 def slow_square(i):
     """Helper for ScopedPoolTest.
 
-  Must be global because non top-level functions aren't pickleable.
-  """
+    Must be global because non top-level functions aren't pickleable.
+    """
     return i**2
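
Why slow_square must be top-level: multiprocessing pickles functions by qualified name, so nested functions and lambdas cannot be sent to workers. A quick demonstration:

import multiprocessing

def slow_square(i):
    return i ** 2  # module-level, hence picklable by qualified name

if __name__ == '__main__':
    with multiprocessing.Pool(2) as pool:
        print(pool.map(slow_square, range(5)))  # [0, 1, 4, 9, 16]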
 
 

+ 13 - 13
tests/git_hyper_blame_test.py

@@ -47,12 +47,12 @@ class GitHyperBlameTestBase(git_test_utils.GitRepoReadOnlyTestBase):
     def blame_line(self, commit_name, rest, author=None, filename=None):
         """Generate a blame line from a commit.
 
-    Args:
-      commit_name: The commit's schema name.
-      rest: The blame line after the timestamp. e.g., '2) file2 - merged'.
-      author: The author's name. If omitted, reads the name out of the commit.
-      filename: The filename. If omitted, not shown in the blame line.
-    """
+        Args:
+            commit_name: The commit's schema name.
+            rest: The blame line after the timestamp. e.g., '2) file2 - merged'.
+            author: The author's name. If omitted, reads the name out of the commit.
+            filename: The filename. If omitted, not shown in the blame line.
+        """
         short = self.repo[commit_name][:8]
         start = '%s %s' % (short, filename) if filename else short
         if author is None:
@@ -647,10 +647,10 @@ class GitHyperBlameLineNumberTest(GitHyperBlameTestBase):
     def testTwoChangesWithAddedLines(self):
         """Regression test for https://crbug.com/709831.
 
-    Tests a line with multiple ignored edits, and a line number change in
-    between (such that the line number in the current revision is bigger than
-    the file's line count at the older ignored revision).
-    """
+        Tests a line with multiple ignored edits, and a line number change in
+        between (such that the line number in the current revision is bigger than
+        the file's line count at the older ignored revision).
+        """
         expected_output = [
             self.blame_line('C', ' 1) '),
             self.blame_line('C', ' 2) '),
@@ -698,9 +698,9 @@ class GitHyperBlameUnicodeTest(GitHyperBlameTestBase):
     def testNonUTF8Data(self):
         """Ensures correct behaviour even if author or file data is not UTF-8.
 
-    There is no guarantee that a file will be UTF-8-encoded, so this is
-    realistic.
-    """
+        There is no guarantee that a file will be UTF-8-encoded, so this is
+        realistic.
+        """
         expected_output = [
             self.blame_line('A', '1) red', author='ASCII Author  '),
             # The Author has been re-encoded as UTF-8. The file data is
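
For readers unfamiliar with the expected-output format these tests assert, the prefix logic visible at the start of blame_line() reduces to the sketch below; blame_prefix is a hypothetical helper, and the author/timestamp tail is omitted because the diff does not show how it is built.

    def blame_prefix(commit_sha, filename=None):
        short = commit_sha[:8]  # abbreviated hash, as git blame prints it
        return '%s %s' % (short, filename) if filename else short

    assert blame_prefix('0123456789abcdef') == '01234567'
    assert blame_prefix('0123456789abcdef', 'file2') == '01234567 file2'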

+ 2 - 2
tests/presubmit_canned_checks_test.py

@@ -344,8 +344,8 @@ class DescriptionChecksTest(unittest.TestCase):
 class ChromiumDependencyMetadataCheckTest(unittest.TestCase):
     def testDefaultFileFilter(self):
         """Checks the default file filter limits the scope to Chromium dependency
-    metadata files.
-    """
+        metadata files.
+        """
         input_api = MockInputApi()
         input_api.change.RepositoryRoot = lambda: ''
         input_api.files = [

+ 21 - 19
tests/presubmit_unittest.py

@@ -555,8 +555,9 @@ class PresubmitUnittest(PresubmitTestsBase):
                          [mock.call('baz'), mock.call('quux')])
 
     def testExecPresubmitScriptInSourceDirectory(self):
-        """ Tests that the presubmits are executed with the current working
-    directory (CWD) set to the directory of the source presubmit script. """
+        """Tests that the presubmits are executed with the current working
+        directory (CWD) set to the directory of the source presubmit script.
+        """
         orig_dir = os.getcwd()
 
         fake_presubmit_dir = os.path.join(self.fake_root_dir, 'fake_dir')
@@ -576,8 +577,9 @@ class PresubmitUnittest(PresubmitTestsBase):
         ])
 
     def testExecPostUploadHookSourceDirectory(self):
-        """ Tests that the post upload hooks are executed with the current working
-    directory (CWD) set to the directory of the source presubmit script. """
+        """Tests that the post upload hooks are executed with the current working
+        directory (CWD) set to the directory of the source presubmit script.
+        s"""
         orig_dir = os.getcwd()
 
         fake_presubmit_dir = os.path.join(self.fake_root_dir, 'fake_dir')
@@ -1880,14 +1882,14 @@ class CannedChecksUnittest(PresubmitTestsBase):
                     content2_path, error_type):
         """Runs a test of a content-checking rule.
 
-      Args:
-        check: the check to run.
-        content1: content which is expected to pass the check.
-        content1_path: file path for content1.
-        content2: content which is expected to fail the check.
-        content2_path: file path for content2.
-        error_type: the type of the error expected for content2.
-    """
+        Args:
+            check: the check to run.
+            content1: content which is expected to pass the check.
+            content1_path: file path for content1.
+            content2: content which is expected to fail the check.
+            content2_path: file path for content2.
+            error_type: the type of the error expected for content2.
+        """
         change1 = presubmit.Change('foo1', 'foo1\n', self.fake_root_dir, None,
                                    0, 0, None)
         input_api1 = self.MockInputApi(change1, False)
@@ -1927,14 +1929,14 @@ class CannedChecksUnittest(PresubmitTestsBase):
     def PythonLongLineTest(self, maxlen, content, should_pass):
         """Runs a test of Python long-line checking rule.
 
-    Because ContentTest() cannot be used here due to the different code path
-    that the implementation of CheckLongLines() uses for Python files.
+        Because ContentTest() cannot be used here due to the different code path
+        that the implementation of CheckLongLines() uses for Python files.
 
-    Args:
-      maxlen: Maximum line length for content.
-      content: Python source which is expected to pass or fail the test.
-      should_pass: True iff the test should pass, False otherwise.
-    """
+        Args:
+            maxlen: Maximum line length for content.
+            content: Python source which is expected to pass or fail the test.
+            should_pass: True iff the test should pass, False otherwise.
+        """
         change = presubmit.Change('foo1', 'foo1\n', self.fake_root_dir, None, 0,
                                   0, None)
         input_api = self.MockInputApi(change, False)
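
The two CWD tests above assert the same contract: presubmit and post-upload hooks run with the working directory temporarily switched to the script's own directory, then restored. A hedged sketch of that contract (cwd_of is a hypothetical helper, not presubmit's API):

    import contextlib
    import os

    @contextlib.contextmanager
    def cwd_of(script_path):
        orig = os.getcwd()
        os.chdir(os.path.dirname(os.path.abspath(script_path)))
        try:
            yield
        finally:
            os.chdir(orig)  # restore even if the hook raises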

+ 4 - 4
tests/subprocess2_test.py

@@ -97,10 +97,10 @@ class DefaultsTest(unittest.TestCase):
 
 def _run_test(with_subprocess=True):
     """Runs a tests in 12 combinations:
-  - With universal_newlines=True and False.
-  - With LF, CR, and CRLF output.
-  - With subprocess and subprocess2.
-  """
+    - With universal_newlines=True and False.
+    - With LF, CR, and CRLF output.
+    - With subprocess and subprocess2.
+    """
     subps = (subprocess2, subprocess) if with_subprocess else (subprocess2, )
     no_op = lambda s: s
     to_bytes = lambda s: s.encode()
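
The "12 combinations" in this docstring are the cross product of the three axes it lists; written out explicitly for clarity (illustrative only, the test builds them inline):

    import itertools

    modes = [True, False]             # universal_newlines
    endings = ['\n', '\r', '\r\n']    # LF, CR, CRLF
    modules = ['subprocess2', 'subprocess']
    combos = list(itertools.product(modes, endings, modules))
    assert len(combos) == 2 * 3 * 2 == 12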

+ 12 - 12
watchlists.py

@@ -26,13 +26,13 @@ import sys
 class Watchlists(object):
     """Manage Watchlists.
 
-  This class provides mechanism to load watchlists for a repo and identify
-  watchers.
-  Usage:
-    wl = Watchlists("/path/to/repo/root")
-    watchers = wl.GetWatchersForPaths(["/path/to/file1",
-                                       "/path/to/file2",])
-  """
+    This class provides mechanism to load watchlists for a repo and identify
+    watchers.
+    Usage:
+        wl = Watchlists("/path/to/repo/root")
+        watchers = wl.GetWatchersForPaths(["/path/to/file1",
+                                        "/path/to/file2",])
+    """
 
     _RULES = "WATCHLISTS"
     _RULES_FILENAME = _RULES
@@ -108,12 +108,12 @@ class Watchlists(object):
     def GetWatchersForPaths(self, paths):
         """Fetch the list of watchers for |paths|
 
-    Args:
-      paths: [path1, path2, ...]
+        Args:
+            paths: [path1, path2, ...]
 
-    Returns:
-      [u1@chromium.org, u2@gmail.com, ...]
-    """
+        Returns:
+            [u1@chromium.org, u2@gmail.com, ...]
+        """
         watchers = set()  # A set, to avoid duplicates
         for path in paths:
             path = path.replace(os.sep, '/')
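
The loop shown here normalizes Windows path separators before matching and accumulates into a set to deduplicate watchers. A minimal end-to-end sketch, under the assumption that rules map a path prefix to a list of emails (the real WATCHLISTS rule format is richer and is not shown in this diff):

    import os

    def watchers_for_paths(paths, rules):
        watchers = set()  # a set, to avoid duplicates
        for path in paths:
            path = path.replace(os.sep, '/')  # normalize separators to '/'
            for prefix, emails in rules.items():
                if path.startswith(prefix):
                    watchers.update(emails)
        return sorted(watchers)

    print(watchers_for_paths(
        [os.sep.join(['ui', 'views', 'button.cc'])],
        {'ui/': ['u1@chromium.org', 'u2@gmail.com']}))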

+ 15 - 15
win_toolchain/get_toolchain_if_necessary.py

@@ -112,11 +112,11 @@ def MakeTimestampsFileName(root, sha1):
 
 def CalculateHash(root, expected_hash):
     """Calculates the sha1 of the paths to all files in the given |root| and the
-  contents of those files, and returns as a hex string.
+    contents of those files, and returns as a hex string.
 
-  |expected_hash| is the expected hash value for this toolchain if it has
-  already been installed.
-  """
+    |expected_hash| is the expected hash value for this toolchain if it has
+    already been installed.
+    """
     if expected_hash:
         full_root_path = os.path.join(root, expected_hash)
     else:
@@ -197,7 +197,7 @@ def CalculateHash(root, expected_hash):
 
 def CalculateToolchainHashes(root, remove_corrupt_toolchains):
     """Calculate the hash of the different toolchains installed in the |root|
-  directory."""
+    directory."""
     hashes = []
     dir_list = [
         d for d in os.listdir(root) if os.path.isdir(os.path.join(root, d))
@@ -219,7 +219,7 @@ def CalculateToolchainHashes(root, remove_corrupt_toolchains):
 
 def SaveTimestampsAndHash(root, sha1):
     """Saves timestamps and the final hash to be able to early-out more quickly
-  next time."""
+    next time."""
     file_list = GetFileList(os.path.join(root, sha1))
     timestamps_data = {
         'files': [[f, os.path.getmtime(f)] for f in file_list],
@@ -249,7 +249,7 @@ def HaveSrcInternalAccess():
 
 def LooksLikeGoogler():
     """Checks for a USERDOMAIN environment variable of 'GOOGLE', which
-  probably implies the current user is a Googler."""
+    probably implies the current user is a Googler."""
     return os.environ.get('USERDOMAIN', '').upper() == 'GOOGLE'
 
 
@@ -283,9 +283,9 @@ def UsesToolchainFromHttp():
 
 def RequestGsAuthentication():
     """Requests that the user authenticate to be able to access gs:// as a
-  Googler. This allows much faster downloads, and pulling (old) toolchains
-  that match src/ revisions.
-  """
+    Googler. This allows much faster downloads, and pulling (old) toolchains
+    that match src/ revisions.
+    """
     print('Access to gs://chrome-wintoolchain/ not configured.')
     print('-----------------------------------------------------------------')
     print()
@@ -362,8 +362,8 @@ def RmDir(path):
 
 def DoTreeMirror(target_dir, tree_sha1):
     """In order to save temporary space on bots that do not have enough space to
-  download ISOs, unpack them, and copy to the target location, the whole tree
-  is uploaded as a zip to internal storage, and then mirrored here."""
+    download ISOs, unpack them, and copy to the target location, the whole tree
+    is uploaded as a zip to internal storage, and then mirrored here."""
     if UsesToolchainFromFile():
         temp_dir = None
         local_zip = os.path.join(ToolchainBaseURL(), tree_sha1 + '.zip')
@@ -444,9 +444,9 @@ def RemoveUnusedToolchains(root):
 
 def EnableCrashDumpCollection():
     """Tell Windows Error Reporting to record crash dumps so that we can diagnose
-  linker crashes and other toolchain failures. Documented at:
-  https://msdn.microsoft.com/en-us/library/windows/desktop/bb787181.aspx
-  """
+    linker crashes and other toolchain failures. Documented at:
+    https://msdn.microsoft.com/en-us/library/windows/desktop/bb787181.aspx
+    """
     if sys.platform == 'win32' and os.environ.get('CHROME_HEADLESS') == '1':
         key_name = r'SOFTWARE\Microsoft\Windows\Windows Error Reporting'
         try:
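
CalculateHash() above is described as hashing both the paths and the contents of every file under |root| into one hex string. The walk below sketches that idea under stated assumptions (no timestamp short-circuit, sorted traversal for determinism); it is not the tool's actual implementation:

    import hashlib
    import os

    def tree_digest(root):
        digest = hashlib.sha1()
        for dirpath, dirnames, filenames in os.walk(root):
            dirnames.sort()  # deterministic traversal order
            for name in sorted(filenames):
                full = os.path.join(dirpath, name)
                rel = os.path.relpath(full, root)
                digest.update(rel.encode())    # the path contributes...
                with open(full, 'rb') as f:
                    digest.update(f.read())    # ...and so do the contents
        return digest.hexdigest()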

+ 5 - 5
win_toolchain/package_from_installed.py

@@ -309,10 +309,10 @@ def BuildFileList(override_dir, include_arm, vs_path):
 
 def GenerateSetEnvCmd(target_dir):
     """Generate a batch file that gyp expects to exist to set up the compiler
-  environment.
+    environment.
 
-  This is normally generated by a full install of the SDK, but we
-  do it here manually since we do not do a full install."""
+    This is normally generated by a full install of the SDK, but we
+    do it here manually since we do not do a full install."""
     vc_tools_parts = _vc_tools.split('/')
 
     # All these paths are relative to the root of the toolchain package.
@@ -452,7 +452,7 @@ def GenerateSetEnvCmd(target_dir):
 
 def AddEnvSetup(files, include_arm):
     """We need to generate this file in the same way that the "from pieces"
-  script does, so pull that in here."""
+    script does, so pull that in here."""
     tempdir = tempfile.mkdtemp()
     os.makedirs(os.path.join(tempdir, 'Windows Kits', '10', 'bin'))
     GenerateSetEnvCmd(tempdir)
@@ -477,7 +477,7 @@ def AddEnvSetup(files, include_arm):
 
 def RenameToSha1(output):
     """Determine the hash in the same way that the unzipper does to rename the
-  # .zip file."""
+    # .zip file."""
     print('Extracting to determine hash...')
     tempdir = tempfile.mkdtemp()
     old_dir = os.getcwd()
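
RenameToSha1() extracts the zip so the digest matches what the unzipper computes, then renames the archive after that digest. The sketch below compresses that flow; hashing the raw zip bytes instead of the extracted tree is a simplifying assumption, so treat it as illustrative only:

    import hashlib
    import os

    def rename_to_digest(zip_path):
        with open(zip_path, 'rb') as f:
            sha1 = hashlib.sha1(f.read()).hexdigest()
        target = os.path.join(os.path.dirname(zip_path), sha1 + '.zip')
        os.rename(zip_path, target)  # e.g. toolchain.zip -> <sha1>.zip
        return target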

Some files were not shown because too many files changed in this diff